Diffstat (limited to 'ansible_collections')
-rw-r--r--  ansible_collections/amazon/aws/CHANGELOG.rst | 15
-rw-r--r--  ansible_collections/amazon/aws/FILES.json | 16
-rw-r--r--  ansible_collections/amazon/aws/MANIFEST.json | 4
-rw-r--r--  ansible_collections/amazon/aws/changelogs/changelog.yaml | 16
-rw-r--r--  ansible_collections/amazon/aws/docs/docsite/rst/CHANGELOG.rst | 15
-rw-r--r--  ansible_collections/amazon/aws/plugins/module_utils/common.py | 2
-rw-r--r--  ansible_collections/amazon/aws/plugins/modules/backup_plan_info.py | 23
-rw-r--r--  ansible_collections/amazon/aws/plugins/modules/ec2_instance.py | 3
-rw-r--r--  ansible_collections/amazon/aws/plugins/modules/s3_object.py | 3
-rw-r--r--  ansible_collections/amazon/aws/tests/integration/targets/backup_plan/tasks/main.yml | 31
-rw-r--r--  ansible_collections/ansible/netcommon/CHANGELOG.rst | 7
-rw-r--r--  ansible_collections/ansible/netcommon/FILES.json | 8
-rw-r--r--  ansible_collections/ansible/netcommon/MANIFEST.json | 4
-rw-r--r--  ansible_collections/ansible/netcommon/changelogs/changelog.yaml | 8
-rw-r--r--  ansible_collections/ansible/netcommon/plugins/module_utils/network/common/rm_base/resource_module.py | 4
-rw-r--r--  ansible_collections/ansible/netcommon/plugins/plugin_utils/cliconf_base.py | 3
-rw-r--r--  ansible_collections/ansible/windows/.azure-pipelines/azure-pipelines.yml | 14
-rw-r--r--  ansible_collections/ansible/windows/CHANGELOG.rst | 20
-rw-r--r--  ansible_collections/ansible/windows/FILES.json | 106
-rw-r--r--  ansible_collections/ansible/windows/MANIFEST.json | 4
-rw-r--r--  ansible_collections/ansible/windows/changelogs/changelog.yaml | 20
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/async_status.ps1 | 2
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/async_status.yml | 18
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/setup.ps1 | 42
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_command.py | 6
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_copy.py | 2
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_dns_client.py | 8
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_domain.py | 2
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_domain_membership.py | 42
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_dsc.py | 32
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_feature.py | 4
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_find.py | 10
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_get_url.py | 4
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_optional_feature.py | 4
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_package.py | 20
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_path.py | 4
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_powershell.ps1 | 37
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_powershell.py | 71
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_regedit.py | 2
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_service.py | 32
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_share.py | 4
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_updates.py | 16
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_user.ps1 | 67
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_user.py | 27
-rw-r--r--  ansible_collections/ansible/windows/plugins/modules/win_user_right.py | 16
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/async_status/aliases | 2
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/async_status/tasks/main.yml | 29
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/failure.yml | 51
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/main.yml | 3
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/tests.yml | 1852
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/win_setup/tasks/main.yml | 1
-rw-r--r--  ansible_collections/ansible/windows/tests/integration/targets/win_user/tasks/tests.yml | 170
-rw-r--r--  ansible_collections/ansible/windows/tests/sanity/ignore-2.18.txt | 1
-rwxr-xr-x  ansible_collections/ansible/windows/tests/utils/shippable/lint.sh | 2
-rw-r--r--  ansible_collections/ansible_community.py | 2
-rw-r--r--  ansible_collections/ansible_release.py | 2
-rw-r--r--  ansible_collections/awx/awx/FILES.json | 919
-rw-r--r--  ansible_collections/awx/awx/MANIFEST.json | 4
-rw-r--r--  ansible_collections/awx/awx/plugins/module_utils/controller_api.py | 29
-rw-r--r--  ansible_collections/awx/awx/plugins/modules/ad_hoc_command.py | 2
-rw-r--r--  ansible_collections/awx/awx/plugins/modules/application.py | 3
-rw-r--r--  ansible_collections/awx/awx/plugins/modules/import.py | 2
-rw-r--r--  ansible_collections/awx/awx/plugins/modules/notification_template.py | 3
-rw-r--r--  ansible_collections/awx/awx/test/awx/conftest.py | 2
-rw-r--r--  ansible_collections/awx/awx/tests/sanity/ignore-2.17.txt | 1
-rw-r--r--  ansible_collections/azure/azcollection/CHANGELOG.md | 65
-rw-r--r--  ansible_collections/azure/azcollection/CONTRIBUTING.md | 6
-rw-r--r--  ansible_collections/azure/azcollection/FILES.json | 493
-rw-r--r--  ansible_collections/azure/azcollection/MANIFEST.json | 4
-rw-r--r--  ansible_collections/azure/azcollection/README.md | 2
-rw-r--r--  ansible_collections/azure/azcollection/meta/execution-environment.yml | 2
-rw-r--r--  ansible_collections/azure/azcollection/meta/runtime.yml | 21
-rw-r--r--  ansible_collections/azure/azcollection/plugins/doc_fragments/azure.py | 2
-rw-r--r--  ansible_collections/azure/azcollection/plugins/inventory/azure_rm.py | 3
-rw-r--r--  ansible_collections/azure/azcollection/plugins/lookup/azure_keyvault_secret.py | 24
-rw-r--r--  ansible_collections/azure/azcollection/plugins/module_utils/azure_rm_common.py | 31
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_accesstoken_info.py | 2
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication.py | 326
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication_info.py | 44
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup.py | 92
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup_info.py | 46
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser.py | 78
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser_info.py | 33
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_aks.py | 103
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool.py | 656
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool_info.py | 131
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_backupazurevm.py | 10
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_cognitivesearch.py | 18
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore.py | 807
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore_info.py | 468
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_loadbalancer.py | 208
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway.py | 427
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway_info.py | 267
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_networkinterface.py | 64
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexiblefirewallrule.py | 2
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver.py | 102
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver_info.py | 29
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_rediscache_info.py | 4
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase.py | 392
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase_info.py | 305
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy.py | 277
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy_info.py | 228
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy.py | 244
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy_info.py | 206
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy.py | 672
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy_info.py | 245
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachine.py | 89
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance.py | 43
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance_info.py | 52
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule.py | 354
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule_info.py | 236
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_vmsku_info.py | 311
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp.py | 309
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp_info.py | 25
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction.py | 141
-rw-r--r--  ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction_info.py | 52
-rw-r--r--  ansible_collections/azure/azcollection/pr-pipelines.yml | 7
-rw-r--r--  ansible_collections/azure/azcollection/requirements.txt (renamed from ansible_collections/azure/azcollection/requirements-azure.txt) | 7
-rw-r--r--  ansible_collections/azure/azcollection/sanity-requirements.txt (renamed from ansible_collections/azure/azcollection/sanity-requirements-azure.txt) | 0
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/requirements.txt | 51
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_adapplication/tasks/main.yml | 12
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aduser/tasks/main.yml | 84
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/main.yml | 65
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/minimal-cluster.yml | 142
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aksagentpool/tasks/main.yml | 111
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cognitivesearch/tasks/main.yml | 4
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cosmosdbaccount/tasks/main.yml | 28
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml | 199
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_eventhub/tasks/main.yml | 4
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_expressroute/tasks/main.yml | 18
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_image/tasks/main.yml | 5
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_loadbalancer/tasks/main.yml | 138
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/aliases | 3
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/meta/main.yml (renamed from ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/meta/main.yml) | 0
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/tasks/main.yml | 114
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mariadbserver/aliases | 1
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mysqlserver/aliases | 1
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_networkinterface/tasks/main.yml | 46
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_postgresqlflexibleserver/tasks/main.yml | 156
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_privatelinkservice/tasks/main.yml | 1
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/aliases (renamed from ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/aliases) | 2
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/meta/main.yml | 2
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/tasks/main.yml | 264
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/aliases | 3
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/meta/main.yml | 2
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/tasks/main.yml | 242
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageblob/tasks/main.yml | 19
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/inventory.yml | 4
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_public_ip.yml | 9
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_skus.yml | 136
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachineextension/tasks/main.yml | 5
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachinescaleset/tasks/main.yml | 12
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/aliases | 3
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/meta/main.yml | 2
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/tasks/main.yml | 131
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webapp/tasks/main.yml | 68
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webappaccessrestriction/tasks/main.yml | 44
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/setup.yml | 21
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/teardown.yml | 7
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/test_inventory_filter.yml | 21
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/vars.yml | 1
-rwxr-xr-x  ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/runme.sh | 5
-rw-r--r--  ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/templates/filter.yml | 14
-rwxr-xr-x  ansible_collections/azure/azcollection/tests/utils/ado/ado.sh | 4
-rw-r--r--  ansible_collections/cisco/dnac/.github/workflows/docs.yml | 10
-rw-r--r--  ansible_collections/cisco/dnac/FILES.json | 2246
-rw-r--r--  ansible_collections/cisco/dnac/MANIFEST.json | 4
-rw-r--r--  ansible_collections/cisco/dnac/Pipfile | 12
-rw-r--r--  ansible_collections/cisco/dnac/README.md | 12
-rw-r--r--  ansible_collections/cisco/dnac/changelogs/changelog.yaml | 66
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/configs_backup.yml | 39
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/credentials.template | 2
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml | 21
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml | 32
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/dnac.log (renamed from ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/__init__.py) | 0
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/events_and_notifications_workflow_manager.yml | 88
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/input_events_and_notification.yml | 40
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/ise_radius_integration_workflow_manager.yml | 109
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/network_compliance_workflow_manager.yml | 151
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml | 60
-rw-r--r--  ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml | 1
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/application_policy_application_set.py | 234
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_count_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/application_sets.py | 10
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/application_sets_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/applications.py | 14
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/applications_count_v2_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/applications_health_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/applications_info.py | 6
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/applications_v2.py | 275
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/applications_v2_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/auth_token_create.py | 84
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/authentication_policy_servers.py | 338
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/buildings_planned_access_points_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/cli_credential.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/client_detail_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/client_health_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/client_proximity_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/compliance_device_by_id_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/compliance_device_details_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/compliance_device_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/configuration_archive_details_info.py | 99
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy_v2.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/configuration_template_project.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/device_configurations_export.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/device_details_info.py | 6
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/device_health_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/device_reboot_apreboot.py | 181
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/discovery_summary_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/dna_event_snmp_config_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/eox_status_device_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/eox_status_summary_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_artifact_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_parent_records_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_summary_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_series_count_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_series_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_snmp_config.py | 239
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_details_email_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_details_rest_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_details_syslog_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_email_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_rest_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_subscription_syslog_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_syslog_config.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_syslog_config_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_webhook_create.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_webhook_read_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/event_webhook_update.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/flexible_report_content_info.py | 100
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/flexible_report_execute.py | 86
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/flexible_report_executions_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule.py | 195
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule_info.py | 98
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/flexible_report_schedules_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/global_pool_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/http_read_credential.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/http_write_credential.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/integration_settings_instances_itsm.py | 1
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/integration_settings_itsm_instances_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/integration_settings_status_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/ise_integration_status_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/issues_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_log_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_sessions_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_status_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_update.py | 88
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_update_device.py | 92
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_update_v2.py | 88
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/lan_automation_v2.py | 86
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/license_device_license_summary_info.py | 6
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/maps_export.py | 86
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/maps_import.py | 85
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/maps_import_perform.py | 86
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/maps_import_start.py | 84
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/maps_import_status_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/maps_supported_access_points_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/netconf_credential.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_config_task_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_config_write_memory.py | 86
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_count_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_equipment_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_export.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_insight_device_link_info.py | 99
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_ip_address_info.py | 98
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_management_address_update.py | 88
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field.py | 1
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_delete.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_update.py | 88
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/network_v2.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/path_trace.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/path_trace_info.py | 16
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/planned_access_points.py | 286
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/planned_access_points_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_device.py | 47
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_device_authorize.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_device_count_info.py | 6
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_device_info.py | 6
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_server_profile_update.py | 26
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/pnp_workflow.py | 1
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/reports.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool.py | 10
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_info.py | 10
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_update.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/roles.py | 262
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways.py | 284
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_count_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_info.py | 103
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles.py | 189
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles_info.py | 91
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies.py | 263
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_count_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_info.py | 93
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_authentication_profile.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_border_device.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices.py | 279
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_count_info.py | 93
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs.py | 261
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_count_info.py | 91
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits.py | 279
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py | 91
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits.py | 264
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py | 91
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites.py | 263
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_count_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones.py | 262
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_count_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_info.py | 95
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_port_assignments.py | 291
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_count_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_info.py | 101
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_provision_devices.py | 270
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_count_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_site_member_member_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sda_virtual_network_ip_pool.py | 3
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/security_advisories_ids_per_device_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sensor.py | 40
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/sensor_test_template_edit.py | 54
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/site_assign_device.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/site_count_v2_info.py | 89
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/site_health_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/site_membership_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/site_v2_info.py | 97
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/swim_import_local.py | 10
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/system_health_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/system_performance_historical_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/system_performance_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/tag_count_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/tag_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/tag_member.py | 19
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/tag_member_count_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/tag_member_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/tag_membership.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/topology_network_health_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/transit_peer_network.py | 36
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/user.py | 69
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/user_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/users_external_authentication.py | 179
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/users_external_authentication_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute.py | 201
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute_info.py | 87
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_accespoint_configuration.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_accesspoint_configuration_create.py | 140
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_dynamic_interface.py | 25
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_enterprise_ssid.py | 65
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_profile.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_psk_override.py | 10
-rw-r--r--  ansible_collections/cisco/dnac/plugins/action/wireless_sensor_test_results_info.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/doc_fragments/module.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/doc_fragments/module_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/module_utils/dnac.py | 137
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/accesspoint_configuration_details_by_task_id_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/app_policy_default_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/app_policy_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/app_policy_intent_create.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set.py | 117
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_count_info.py | 67
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_info.py | 101
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/application_sets.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/application_sets_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/application_sets_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications.py | 50
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications_count_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications_count_v2_info.py | 67
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications_health_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications_info.py | 35
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications_v2.py | 342
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/applications_v2_info.py | 152
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/assign_device_to_site.py | 16
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/associate_site_to_network_profile.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/auth_token_create.py | 58
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate_p12.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers.py | 255
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers_info.py | 17
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/buildings_planned_access_points_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool.py | 31
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/business_sda_virtual_network_summary_info.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_create.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_delete.py | 4
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/cli_credential.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/client_detail_info.py | 205
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/client_enrichment_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/client_health_info.py | 100
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/client_proximity_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/command_runner_run_command.py | 16
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/compliance_check_run.py | 15
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/compliance_device_by_id_info.py | 98
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_count_info.py | 14
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_info.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/compliance_device_info.py | 30
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/compliance_device_status_count_info.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_archive_details_info.py | 135
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_clone.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_create.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_status_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_v2.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_project.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_template.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_project.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_template.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_project.py | 818
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_project_info.py | 214
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_create.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/credential_to_site_by_siteid_create_v2.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_configs_backup_workflow_manager.py | 623
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_configurations_export.py | 26
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_credential_create.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_credential_delete.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_credential_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_credential_update.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py | 335
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_details_info.py | 129
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_enrichment_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_family_identifiers_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_health_info.py | 100
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_interface_by_ip_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_interface_count_info.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_interface_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_interface_isis_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_interface_ospf_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_replacement.py | 34
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_replacement_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_replacement_deploy.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/device_replacement_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/disassociate_site_to_network_profile.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_operationstatus_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_status_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery.py | 127
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_device_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_device_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_device_range_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_job_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_range_delete.py | 10
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_range_info.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/discovery_summary_info.py | 37
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/dna_command_runner_keywords_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/dna_event_snmp_config_info.py | 103
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/dnac_packages_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/dnacaap_management_execution_status_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/eox_status_device_info.py | 40
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/eox_status_summary_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_api_status_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_artifact_count_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_artifact_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_config_connector_types_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_email_config.py | 17
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_email_config_create.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_email_config_info.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_email_config_update.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_parent_records_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_summary_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_series_count_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_series_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_snmp_config.py | 144
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_snmp_config_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_email_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_rest_info.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_syslog_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_email.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_email_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_syslog_config.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_syslog_config_info.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_webhook_create.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_webhook_read_info.py | 112
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/event_webhook_update.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/events_and_notifications_workflow_manager.py | 5162
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/execute_suggested_actions_commands.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/file_import.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/file_info.py | 8
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/file_namespace_files_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/file_namespaces_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/flexible_report_content_info.py | 72
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/flexible_report_execute.py | 70
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/flexible_report_executions_info.py | 84
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule.py | 65
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule_info.py | 64
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedules_info.py | 69
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_credential_delete.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_credential_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_credential_update.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_credential_v2.py | 81
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_credential_v2_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_pool.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/global_pool_info.py | 37
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/golden_image_create.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_delete.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/http_read_credential.py | 26
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/http_write_credential.py | 26
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/integration_settings_itsm_instances_info.py | 78
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/integration_settings_status_info.py | 74
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/interface_info.py | 73
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/interface_network_device_detail_info.py | 31
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/interface_network_device_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/interface_network_device_range_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/interface_operation_create.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/interface_update.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py | 93
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py | 93
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/ise_integration_status_info.py | 74
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/ise_radius_integration_workflow_manager.py | 1578
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/issues_enrichment_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/issues_info.py | 31
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/itsm_cmdb_sync_status_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_failed_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_retry.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_create.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_delete.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_by_serial_number_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_sessions_info.py | 68
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_status_info.py | 18
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_update.py | 78
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_device.py | 122
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_v2.py | 84
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/lan_automation_v2.py | 166
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_device_count_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_device_deregistration.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_device_license_details_info.py | 116
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_device_license_summary_info.py | 114
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_device_registration.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_smart_account_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_term_details_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_usage_details_info.py | 31
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_change.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_details_info.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/maps_export.py | 66
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/maps_import.py | 61
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/maps_import_perform.py | 63
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/maps_import_start.py | 56
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/maps_import_status_info.py | 117
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/maps_supported_access_points_info.py | 72
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/netconf_credential.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_compliance_workflow_manager.py | 1454
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_create.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device.py | 78
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_by_ip_info.py | 19
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_by_serial_number_info.py | 21
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_chassis_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_config_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_config_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_config_task_info.py | 82
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_config_write_memory.py | 66
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_count_info.py | 35
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt.py | 13
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt_info.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_equipment_info.py | 20
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_export.py | 17
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_functional_capability_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_global_polling_interval_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_info.py | 19
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_insight_device_link_info.py | 126
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_interface_neighbor_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_interface_poe_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_inventory_insight_link_mismatch_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_ip_address_info.py | 119
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_lexicographically_sorted_info.py | 14
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_linecard_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_management_address_update.py | 69
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_meraki_organization_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_module_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_module_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_poe_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_polling_interval_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_range_info.py | 19
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_register_for_wsa_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_stack_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_summary_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_supervisor_card_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_sync.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_update_role.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_delete.py | 70
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_update.py | 81
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_vlan_info.py | 12
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_wireless_lan_info.py | 36
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_device_with_snmp_v3_des_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py | 2016
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_update.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_v2.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/network_v2_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/nfv_profile.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/nfv_profile_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/nfv_provision.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/nfv_provision_detail_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/nfv_provision_details.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/path_trace.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/path_trace_info.py | 42
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/planned_access_points.py | 313
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/planned_access_points_info.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/platform_nodes_configuration_summary_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/platform_release_summary_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device.py | 1258
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_authorize.py | 11
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim_to_site.py | 29
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_config_preview.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_count_info.py | 23
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_history_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_import.py | 1003
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_info.py | 27
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_reset.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_device_unclaim.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings.py | 171
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings_info.py | 6
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_server_profile_update.py | 86
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_smart_account_domains_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_add.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_deregister.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_devices_sync.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_sync_result_info.py | 7
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_accounts_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_workflow.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/profiling_rules_count_info.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/profiling_rules_in_bulk_create.py | 2
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/projects_details_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py | 603
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/qos_device_interface.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info_count_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/reports.py | 9
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/reports_executions_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/reports_info.py | 5
-rw-r--r--  ansible_collections/cisco/dnac/plugins/modules/reports_view_group_info.py | 5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/reports_view_group_view_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool.py23
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_create.py23
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_delete.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_info.py36
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_update.py32
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/roles.py127
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/roles_info.py11
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways.py203
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_count_info.py89
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_info.py122
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles.py96
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles_info.py84
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_count_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_device_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_device_role_info.py18
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies.py140
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_count_info.py64
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_info.py89
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile_info.py31
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices.py215
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_count_info.py79
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_info.py108
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs.py134
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_count_info.py74
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_info.py91
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits.py209
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py74
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_info.py98
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits.py157
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py74
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_info.py92
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites.py123
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_count_info.py64
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_info.py89
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones.py120
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_count_info.py64
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_info.py88
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_multicast.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_multicast_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments.py188
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_count_info.py89
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_info.py110
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_provision_device.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_provision_device_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices.py141
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_count_info.py69
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_info.py93
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_site_member_member_info.py141
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_info.py14
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/security_advisories_devices_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/security_advisories_ids_per_device_info.py34
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/security_advisories_info.py34
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/security_advisories_per_device_info.py34
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/security_advisories_summary_info.py21
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sensor.py970
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sensor_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sensor_test_run.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_duplicate.py272
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_edit.py1141
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/service_provider_create.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/service_provider_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/service_provider_profile_delete.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/service_provider_update.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/service_provider_v2.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/service_provider_v2_info.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_assign_credential.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_assign_device.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_count_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_count_v2_info.py67
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_create.py17
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_delete.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_design_floormap.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_design_floormap_info.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_health_info.py116
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_info.py17
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_intent.py112
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_membership_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_update.py51
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_v2_info.py112
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py115
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/snmp_properties.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/snmp_properties_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/snmpv2_read_community_credential.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/snmpv2_write_community_credential.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/snmpv3_credential.py29
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/sp_profile_delete_v2.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_image_details_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_import_local.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_import_via_url.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_trigger_activation.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/swim_trigger_distribution.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/syslog_config_create.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/syslog_config_update.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/system_health_count_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/system_health_info.py9
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/system_performance_historical_info.py18
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/system_performance_info.py14
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag.py41
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_count_info.py10
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_info.py13
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_member.py16
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_member_count_info.py10
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_member_info.py13
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_member_type_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/tag_membership.py20
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/task_count_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/task_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/task_operation_info.py11
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/task_tree_info.py11
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/template_preview.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py748
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/templates_details_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/threat_detail.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/threat_detail_count.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/threat_summary.py2
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/topology_layer_2_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/topology_layer_3_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/topology_network_health_info.py30
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/topology_physical_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/topology_site_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/topology_vlan_details_info.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/transit_peer_network.py17
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/transit_peer_network_info.py11
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/user.py35
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/user_enrichment_details_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/user_info.py22
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/users_external_authentication.py61
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/users_external_authentication_info.py65
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute.py84
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute_info.py65
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py13
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_accespoint_configuration.py81
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_create.py328
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_summary_info.py7
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface.py36
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid.py110
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid_info.py16
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_profile.py16
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_profile_info.py5
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_provision_access_point.py10
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_create.py28
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_update.py32
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_create_provision.py32
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_delete_reprovision.py11
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_psk_override.py43
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile.py27
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile_info.py82
-rw-r--r--ansible_collections/cisco/dnac/plugins/modules/wireless_sensor_test_results_info.py134
-rw-r--r--ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py2
-rw-r--r--ansible_collections/cisco/dnac/requirements.txt2
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt4
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt4
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt4
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt2
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt2
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt2
-rw-r--r--ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt4
-rw-r--r--ansible_collections/cisco/ise/FILES.json22
-rw-r--r--ansible_collections/cisco/ise/MANIFEST.json4
-rw-r--r--ansible_collections/cisco/ise/README.md2
-rw-r--r--ansible_collections/cisco/ise/changelogs/changelog.yaml9
-rw-r--r--ansible_collections/cisco/ise/meta/runtime.yml2
-rw-r--r--ansible_collections/cisco/ise/roles/aws_deployment/meta/main.yml52
-rw-r--r--ansible_collections/cisco/ise/roles/personas_deployment/meta/main.yml2
-rw-r--r--ansible_collections/cisco/nxos/.pre-commit-config.yaml4
-rw-r--r--ansible_collections/cisco/nxos/CHANGELOG.rst17
-rw-r--r--ansible_collections/cisco/nxos/FILES.json10310
-rw-r--r--ansible_collections/cisco/nxos/MANIFEST.json4
-rw-r--r--ansible_collections/cisco/nxos/README.md1
-rw-r--r--ansible_collections/cisco/nxos/changelogs/changelog.yaml14
-rw-r--r--ansible_collections/cisco/nxos/plugins/cliconf/nxos.py65
-rw-r--r--ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/acls/acls.py12
-rw-r--r--ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py4
-rw-r--r--ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/route_maps/route_maps.py1
-rw-r--r--ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/facts/static_routes/static_routes.py30
-rw-r--r--ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/rm_templates/static_routes.py14
-rw-r--r--ansible_collections/cisco/nxos/plugins/modules/nxos_file_copy.py6
-rw-r--r--ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_route_maps.py89
-rw-r--r--ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_static_routes.py78
-rw-r--r--ansible_collections/community/dns/.github/workflows/ansible-test.yml1
-rw-r--r--ansible_collections/community/dns/CHANGELOG.md356
-rw-r--r--ansible_collections/community/dns/CHANGELOG.rst17
-rw-r--r--ansible_collections/community/dns/FILES.json16
-rw-r--r--ansible_collections/community/dns/MANIFEST.json4
-rw-r--r--ansible_collections/community/dns/changelogs/changelog.yaml19
-rw-r--r--ansible_collections/community/dns/changelogs/config.yaml1
-rw-r--r--ansible_collections/community/dns/plugins/public_suffix_list.dat674
-rw-r--r--ansible_collections/community/dns/tests/integration/targets/lookup_lookup/tasks/main.yml2
-rw-r--r--ansible_collections/community/dns/tests/integration/targets/lookup_lookup_as_dict/tasks/main.yml2
-rw-r--r--ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml2
-rw-r--r--ansible_collections/community/docker/.github/workflows/ansible-test.yml12
-rw-r--r--ansible_collections/community/docker/.github/workflows/ee.yml8
-rw-r--r--ansible_collections/community/docker/CHANGELOG.md433
-rw-r--r--ansible_collections/community/docker/CHANGELOG.rst14
-rw-r--r--ansible_collections/community/docker/FILES.json28
-rw-r--r--ansible_collections/community/docker/MANIFEST.json4
-rw-r--r--ansible_collections/community/docker/changelogs/changelog.yaml14
-rw-r--r--ansible_collections/community/docker/plugins/doc_fragments/compose_v2.py4
-rw-r--r--ansible_collections/community/docker/plugins/module_utils/compose_v2.py3
-rw-r--r--ansible_collections/community/docker/plugins/modules/docker_compose.py3
-rw-r--r--ansible_collections/community/docker/plugins/modules/docker_compose_v2.py2
-rwxr-xr-xansible_collections/community/docker/tests/integration/targets/connection_docker/runme.sh8
-rwxr-xr-xansible_collections/community/docker/tests/integration/targets/connection_docker_api/runme.sh8
-rw-r--r--ansible_collections/community/docker/tests/integration/targets/docker_compose/tasks/main.yml6
-rw-r--r--ansible_collections/community/docker/tests/integration/targets/setup_docker_compose_v1/vars/Archlinux.yml2
-rw-r--r--ansible_collections/community/general/.azure-pipelines/azure-pipelines.yml4
-rw-r--r--ansible_collections/community/general/.github/BOTMETA.yml22
-rw-r--r--ansible_collections/community/general/CHANGELOG.md110
-rw-r--r--ansible_collections/community/general/CHANGELOG.rst67
-rw-r--r--ansible_collections/community/general/FILES.json460
-rw-r--r--ansible_collections/community/general/MANIFEST.json4
-rw-r--r--ansible_collections/community/general/changelogs/changelog.yaml108
-rw-r--r--ansible_collections/community/general/docs/docsite/extra-docs.yml4
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-common.yml16
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml16
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml16
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008.yml2
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml9
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/list3.yml3
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009.yml14
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/default-common.yml12
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/list3.yml6
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples.yml56
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples_all.rst.j24
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/extra-vars.yml7
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j244
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/list3.out.j22
-rw-r--r--ansible_collections/community/general/docs/docsite/helper/lists_mergeby/playbook.yml10
-rw-r--r--ansible_collections/community/general/docs/docsite/rst/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst248
-rw-r--r--ansible_collections/community/general/docs/docsite/rst/guide_deps.rst74
-rw-r--r--ansible_collections/community/general/docs/docsite/rst/guide_vardict.rst176
-rw-r--r--ansible_collections/community/general/meta/runtime.yml2
-rw-r--r--ansible_collections/community/general/plugins/callback/opentelemetry.py18
-rw-r--r--ansible_collections/community/general/plugins/doc_fragments/django.py9
-rw-r--r--ansible_collections/community/general/plugins/doc_fragments/proxmox.py7
-rw-r--r--ansible_collections/community/general/plugins/filter/keep_keys.py138
-rw-r--r--ansible_collections/community/general/plugins/filter/lists_mergeby.py212
-rw-r--r--ansible_collections/community/general/plugins/filter/remove_keys.py138
-rw-r--r--ansible_collections/community/general/plugins/filter/replace_keys.py180
-rw-r--r--ansible_collections/community/general/plugins/inventory/opennebula.py3
-rw-r--r--ansible_collections/community/general/plugins/module_utils/cmd_runner.py53
-rw-r--r--ansible_collections/community/general/plugins/module_utils/consul.py67
-rw-r--r--ansible_collections/community/general/plugins/module_utils/django.py39
-rw-r--r--ansible_collections/community/general/plugins/module_utils/proxmox.py8
-rw-r--r--ansible_collections/community/general/plugins/module_utils/redfish_utils.py56
-rw-r--r--ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py27
-rw-r--r--ansible_collections/community/general/plugins/modules/cargo.py64
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_agent_check.py254
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_agent_service.py289
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_auth_method.py2
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_binding_rule.py2
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_policy.py2
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_role.py2
-rw-r--r--ansible_collections/community/general/plugins/modules/consul_token.py4
-rw-r--r--ansible_collections/community/general/plugins/modules/django_check.py113
-rw-r--r--ansible_collections/community/general/plugins/modules/django_createcachetable.py67
-rw-r--r--ansible_collections/community/general/plugins/modules/git_config.py41
-rw-r--r--ansible_collections/community/general/plugins/modules/homectl.py25
-rw-r--r--ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py27
-rw-r--r--ansible_collections/community/general/plugins/modules/keycloak_client.py111
-rw-r--r--ansible_collections/community/general/plugins/modules/keycloak_clientscope.py38
-rw-r--r--ansible_collections/community/general/plugins/modules/launchd.py3
-rw-r--r--ansible_collections/community/general/plugins/modules/openbsd_pkg.py26
-rw-r--r--ansible_collections/community/general/plugins/modules/pacman.py5
-rw-r--r--ansible_collections/community/general/plugins/modules/proxmox_kvm.py1
-rw-r--r--ansible_collections/community/general/plugins/modules/proxmox_vm_info.py33
-rw-r--r--ansible_collections/community/general/plugins/modules/redfish_command.py30
-rw-r--r--ansible_collections/community/general/plugins/modules/redfish_info.py22
-rw-r--r--ansible_collections/community/general/plugins/modules/redis_info.py48
-rw-r--r--ansible_collections/community/general/plugins/modules/udm_user.py26
-rw-r--r--ansible_collections/community/general/plugins/plugin_utils/keys_filter.py141
-rw-r--r--ansible_collections/community/general/tests/integration/targets/ansible_galaxy_install/tasks/main.yml55
-rw-r--r--ansible_collections/community/general/tests/integration/targets/cargo/tasks/main.yml1
-rw-r--r--ansible_collections/community/general/tests/integration/targets/cargo/tasks/test_directory.yml122
-rw-r--r--ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_check.yml114
-rw-r--r--ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_service.yml89
-rw-r--r--ansible_collections/community/general/tests/integration/targets/consul/tasks/main.yml2
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_keep_keys/aliases5
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/keep_keys.yml79
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/main.yml7
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_keep_keys/vars/main.yml33
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_remove_keys/aliases5
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/main.yml7
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/remove_keys.yml79
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_remove_keys/vars/main.yml33
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_replace_keys/aliases5
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/fn-test-replace_keys.yml21
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/main.yml7
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/replace_keys.yml56
-rw-r--r--ansible_collections/community/general/tests/integration/targets/filter_replace_keys/vars/main.yml58
-rw-r--r--ansible_collections/community/general/tests/integration/targets/git_config/tasks/unset_value.yml24
-rw-r--r--ansible_collections/community/general/tests/integration/targets/keycloak_client/tasks/main.yml128
-rw-r--r--ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py5
-rw-r--r--ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.py13
-rw-r--r--ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.yaml27
-rw-r--r--ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.py13
-rw-r--r--ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.yaml15
-rw-r--r--ansible_collections/community/hrobot/.github/patchback.yml9
-rw-r--r--ansible_collections/community/hrobot/.github/workflows/ansible-test.yml2
-rw-r--r--ansible_collections/community/hrobot/CHANGELOG.md132
-rw-r--r--ansible_collections/community/hrobot/CHANGELOG.rst17
-rw-r--r--ansible_collections/community/hrobot/FILES.json37
-rw-r--r--ansible_collections/community/hrobot/MANIFEST.json4
-rw-r--r--ansible_collections/community/hrobot/README.md4
-rw-r--r--ansible_collections/community/hrobot/changelogs/changelog.yaml20
-rw-r--r--ansible_collections/community/hrobot/changelogs/config.yaml1
-rw-r--r--ansible_collections/community/hrobot/plugins/modules/boot.py6
-rw-r--r--ansible_collections/community/hrobot/tests/unit/plugins/modules/test_boot.py8
-rw-r--r--ansible_collections/community/hrobot/tests/unit/plugins/plugin_utils/test_unsafe.py133
-rw-r--r--ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml108
-rw-r--r--ansible_collections/community/network/CHANGELOG.rst14
-rw-r--r--ansible_collections/community/network/FILES.json21
-rw-r--r--ansible_collections/community/network/MANIFEST.json4
-rw-r--r--ansible_collections/community/network/README.md4
-rw-r--r--ansible_collections/community/network/changelogs/changelog.yaml11
-rw-r--r--ansible_collections/community/network/plugins/terminal/exos.py1
-rw-r--r--ansible_collections/community/network/tests/sanity/ignore-2.17.txt24
-rw-r--r--ansible_collections/community/network/tests/sanity/ignore-2.18.txt920
-rwxr-xr-xansible_collections/community/network/tests/utils/shippable/shippable.sh1
-rw-r--r--ansible_collections/community/routeros/.github/workflows/ansible-test.yml6
-rw-r--r--ansible_collections/community/routeros/.github/workflows/ee.yml8
-rw-r--r--ansible_collections/community/routeros/CHANGELOG.md209
-rw-r--r--ansible_collections/community/routeros/CHANGELOG.rst32
-rw-r--r--ansible_collections/community/routeros/FILES.json20
-rw-r--r--ansible_collections/community/routeros/MANIFEST.json4
-rw-r--r--ansible_collections/community/routeros/README.md4
-rw-r--r--ansible_collections/community/routeros/changelogs/changelog.yaml11
-rw-r--r--ansible_collections/community/routeros/changelogs/config.yaml3
-rw-r--r--ansible_collections/community/routeros/plugins/module_utils/_api_data.py31
-rw-r--r--ansible_collections/community/routeros/plugins/modules/api_info.py1
-rw-r--r--ansible_collections/community/routeros/plugins/modules/api_modify.py1
-rw-r--r--ansible_collections/community/zabbix/.github/workflows/agent.yml3
-rw-r--r--ansible_collections/community/zabbix/.github/workflows/javagateway.yml3
-rw-r--r--ansible_collections/community/zabbix/.github/workflows/proxy.yml19
-rw-r--r--ansible_collections/community/zabbix/.github/workflows/server.yml6
-rw-r--r--ansible_collections/community/zabbix/.github/workflows/web.yml4
-rw-r--r--ansible_collections/community/zabbix/CHANGELOG.rst32
-rw-r--r--ansible_collections/community/zabbix/FILES.json606
-rw-r--r--ansible_collections/community/zabbix/MANIFEST.json4
-rw-r--r--ansible_collections/community/zabbix/README.md2
-rw-r--r--ansible_collections/community/zabbix/changelogs/.plugin-cache.yaml32
-rw-r--r--ansible_collections/community/zabbix/changelogs/changelog.yaml2059
-rw-r--r--ansible_collections/community/zabbix/docs/ZABBIX_AGENT_ROLE.md291
-rw-r--r--ansible_collections/community/zabbix/docs/ZABBIX_JAVAGATEWAY_ROLE.md1
-rw-r--r--ansible_collections/community/zabbix/docs/ZABBIX_PROXY_ROLE.md1
-rw-r--r--ansible_collections/community/zabbix/docs/ZABBIX_SERVER_ROLE.md1
-rw-r--r--ansible_collections/community/zabbix/docs/ZABBIX_WEB_ROLE.md1
-rw-r--r--ansible_collections/community/zabbix/molecule/requirements.txt4
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_agent_tests/common/molecule.yml2
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_agent_tests/molecule/agent2/molecule.yml3
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.debian.j23
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.redhat.j23
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_proxy/molecule.yml11
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_proxy/prepare.yml16
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_server/molecule.yml9
-rw-r--r--ansible_collections/community/zabbix/molecule/zabbix_web/prepare.yml11
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_discoveryrule.py460
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_host_events_update.py244
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_item.py579
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_itemprototype.py609
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_script.py66
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_trigger.py444
-rw-r--r--ansible_collections/community/zabbix/plugins/modules/zabbix_triggerprototype.py448
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/README.md291
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/defaults/main.yml147
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Debian.yml45
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Linux.yml84
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows.yml58
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows_conf.yml5
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/api.yml45
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/main.yml98
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_identity.yml84
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_secret.yml112
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/selinux.yml2
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto.yml14
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2.yml14
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_common.yml53
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_linux.yml80
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_windows.yml66
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_common.yml52
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_linux.yml80
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_windows.yml67
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/templates/agent.conf.j2142
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agent2.conf.j2140
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agentd.conf.j2149
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/vars/Debian.yml7
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/vars/RedHat.yml3
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent2_vars.yml9
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent_vars.yml10
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_javagateway/README.md1
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_javagateway/tasks/Debian.yml5
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_javagateway/vars/Debian.yml3
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/README.md1
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/defaults/main.yml2
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/Debian.yml106
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/RedHat.yml108
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-mysql.yml144
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-pgsql.yml73
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-sqlite3.yml62
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/main.yml68
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/mysql.yml172
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/postgresql.yml96
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/sqlite3.yml63
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/templates/zabbix_proxy.conf.j22
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/vars/Debian.yml31
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/vars/RedHat.yml34
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_proxy/vars/main.yml7
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/README.md1
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/defaults/main.yml12
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/tasks/RedHat.yml3
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-mysql.yml5
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-pgsql.yml16
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/tasks/selinux.yml116
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_server/vars/Debian.yml3
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_web/README.md1
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_web/templates/php-fpm.conf.j22
-rw-r--r--ansible_collections/community/zabbix/roles/zabbix_web/vars/Debian.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/meta/main.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/main.yml29
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_setup.yml25
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_teardown.yml9
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_tests.yml147
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/meta/main.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/main.yml13
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_setup.yml61
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_teardown.yml9
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_tests.yml59
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/meta/main.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/main.yml28
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_setup.yml25
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_teardown.yml9
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_tests.yml246
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/meta/main.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/main.yml30
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_setup.yml36
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_teardown.yml9
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_tests.yml248
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/meta/main.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/main.yml28
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_setup.yml47
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_teardown.yml21
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_tests.yml159
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/meta/main.yml3
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/main.yml44
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_setup.yml48
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_teardown.yml15
-rw-r--r--ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_tests.yml180
-rw-r--r--ansible_collections/containers/podman/CHANGELOG.rst87
-rw-r--r--ansible_collections/containers/podman/FILES.json1210
-rw-r--r--ansible_collections/containers/podman/MANIFEST.json4
-rw-r--r--ansible_collections/containers/podman/changelogs/changelog.yaml49
-rw-r--r--ansible_collections/containers/podman/docs/buildah_connection.html3
-rw-r--r--ansible_collections/containers/podman/docs/environment_variables.html1
-rw-r--r--ansible_collections/containers/podman/docs/genindex.html1
-rw-r--r--ansible_collections/containers/podman/docs/index.html4
-rw-r--r--ansible_collections/containers/podman/docs/objects.invbin7128 -> 7894 bytes
-rw-r--r--ansible_collections/containers/podman/docs/podman_connection.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_container_exec_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_container_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_container_module.html463
-rw-r--r--ansible_collections/containers/podman/docs/podman_containers_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_export_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_generate_systemd_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_image_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_image_module.html90
-rw-r--r--ansible_collections/containers/podman/docs/podman_import_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_load_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_login_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_login_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_logout_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_network_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_network_module.html74
-rw-r--r--ansible_collections/containers/podman/docs/podman_play_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_pod_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_pod_module.html214
-rw-r--r--ansible_collections/containers/podman/docs/podman_prune_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_runlabel_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_save_module.html7
-rw-r--r--ansible_collections/containers/podman/docs/podman_search_module.html273
-rw-r--r--ansible_collections/containers/podman/docs/podman_secret_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_secret_module.html31
-rw-r--r--ansible_collections/containers/podman/docs/podman_tag_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_unshare_become.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_volume_info_module.html3
-rw-r--r--ansible_collections/containers/podman/docs/podman_volume_module.html5
-rw-r--r--ansible_collections/containers/podman/docs/search.html1
-rw-r--r--ansible_collections/containers/podman/plugins/module_utils/podman/common.py85
-rw-r--r--ansible_collections/containers/podman/plugins/module_utils/podman/podman_container_lib.py998
-rw-r--r--ansible_collections/containers/podman/plugins/module_utils/podman/podman_pod_lib.py406
-rw-r--r--ansible_collections/containers/podman/plugins/module_utils/podman/quadlet.py111
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_container.py196
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_image.py187
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_network.py190
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_pod.py128
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_search.py131
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_secret.py84
-rw-r--r--ansible_collections/containers/podman/plugins/modules/podman_volume.py54
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml9
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_bool_list_dict.yml468
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml22
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml21
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml6
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml100
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml7
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/main.yml1
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml4
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml4
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/idem_push.yml534
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/main.yml97
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_network/tasks/main.yml360
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/idempotency.yml363
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/main.yml10
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/resource-limit.yml2
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/root-pod.yml2
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_search/tasks/main.yml50
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_secret/tasks/main.yml158
-rw-r--r--ansible_collections/containers/podman/tests/integration/targets/podman_volume/tasks/main.yml73
-rw-r--r--ansible_collections/containers/podman/tests/unit/plugins/modules/test_container_lib.py72
-rw-r--r--ansible_collections/cyberark/conjur/.github/workflows/ansible-test.yml2
-rw-r--r--ansible_collections/cyberark/conjur/.gitignore1
-rw-r--r--ansible_collections/cyberark/conjur/CHANGELOG.md6
-rw-r--r--ansible_collections/cyberark/conjur/CONTRIBUTING.md28
-rw-r--r--ansible_collections/cyberark/conjur/FILES.json595
-rw-r--r--ansible_collections/cyberark/conjur/MANIFEST.json4
-rw-r--r--ansible_collections/cyberark/conjur/plugins/lookup/conjur_variable.py164
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/cleanup-conjur-identity2
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/configure-conjur-identity2
-rwxr-xr-xansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test.sh4
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml2
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml4
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml2
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml2
-rw-r--r--ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-cert-path2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-certs2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-disable-verify-certs2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-into-file2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-no-cert-provided2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert2
-rw-r--r--ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-spaces-secret2
-rwxr-xr-xansible_collections/cyberark/conjur/tests/conjur_variable/test.sh3
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.10.txt6
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.11.txt6
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.12.txt6
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.13.txt8
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.14.txt8
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.15.txt8
-rw-r--r--ansible_collections/cyberark/conjur/tests/sanity/ignore-2.9.txt4
-rw-r--r--ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/test_conjur_variable.py79
-rw-r--r--ansible_collections/dellemc/openmanage/.github/CODEOWNERS6
-rw-r--r--ansible_collections/dellemc/openmanage/CHANGELOG.rst44
-rw-r--r--ansible_collections/dellemc/openmanage/FILES.json6147
-rw-r--r--ansible_collections/dellemc/openmanage/MANIFEST.json4
-rw-r--r--ansible_collections/dellemc/openmanage/README.md68
-rw-r--r--ansible_collections/dellemc/openmanage/changelogs/changelog.yaml55
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_attributes.rst42
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_bios.rst82
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_boot.rst90
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_certificates.rst22
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst20
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware.rst54
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware_info.rst22
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_license.rst20
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_job_status_info.rst24
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_jobs.rst22
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_logs.rst26
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_status_info.rst22
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_network.rst26
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_network_attributes.rst110
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_os_deployment.rst24
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_redfish_storage_controller.rst128
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst18
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_server_config_profile.rst142
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst46
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst22
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_syslog.rst26
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_system_info.rst22
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_timezone_ntp.rst26
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_user.rst62
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_user_info.rst34
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/idrac_virtual_media.rst46
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_active_directory.rst60
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies.rst26
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_actions_info.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_category_info.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_info.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_message_id_info.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_smtp.rst36
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_syslog.rst32
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_certificate.rst48
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst20
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_address.rst80
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_proxy.rst44
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_settings.rst42
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_time.rst40
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_webserver.rst32
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_application_security_settings.rst48
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_chassis_slots.rst46
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_baseline.rst68
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_info.rst36
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_group.rst52
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_info.rst46
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst20
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_location.rst38
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_mgmt_network.rst100
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_network_services.rst40
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_power_settings.rst50
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst16
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst20
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_diagnostics.rst80
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_discovery.rst82
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_domain_user_groups.rst46
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_firmware.rst64
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline.rst56
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_compliance_info.rst44
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_info.rst32
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_catalog.rst76
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_groups.rst58
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_identity_pool.rst52
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_job_info.rst36
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_network_port_breakout.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan.rst44
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan_info.rst36
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_powerstate.rst34
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_profile.rst106
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_profile_info.rst42
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profile_info.rst34
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profiles.rst60
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_session.rst190
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric.rst50
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_info.rst34
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink.rst52
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink_info.rst42
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_template.rst98
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_template_identity_pool.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_template_info.rst30
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan.rst48
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan_info.rst36
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_user.rst44
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/ome_user_info.rst32
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/redfish_event_subscription.rst48
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware.rst40
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware_rollback.rst40
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/redfish_powerstate.rst56
-rw-r--r--ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst20
-rw-r--r--ansible_collections/dellemc/openmanage/playbooks/ome/ome_session.yml52
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_auth_options.py4
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_x_auth_options.py69
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/doc_fragments/ome_auth_options.py20
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/doc_fragments/omem_auth_options.py20
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/doc_fragments/oment_auth_options.py20
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/doc_fragments/redfish_auth_options.py20
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py57
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py49
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py48
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py75
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_attributes.py15
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_boot.py18
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_certificates.py21
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_license.py21
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_network_attributes.py74
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_redfish_storage_controller.py10
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py57
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py32
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_user.py15
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_user_info.py15
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/idrac_virtual_media.py16
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_active_directory.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_actions_info.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_category_info.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_info.py15
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_message_id_info.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_smtp.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_syslog.py12
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_certificate.py31
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py19
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_address.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_proxy.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_settings.py12
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_time.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_webserver.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_application_security_settings.py12
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_chassis_slots.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_baseline.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_group.py21
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_info.py14
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py9
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_location.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_mgmt_network.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_network_services.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_power_settings.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py9
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_diagnostics.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_discovery.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_domain_user_groups.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_compliance_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_catalog.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_groups.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_identity_pool.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_job_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_network_port_breakout.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_powerstate.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_profile.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_profile_info.py16
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profile_info.py18
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profiles.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_session.py408
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_info.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink_info.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_template.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_template_identity_pool.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_template_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan_info.py14
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_user.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/ome_user_info.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/redfish_event_subscription.py12
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware.py14
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware_rollback.py11
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/redfish_powerstate.py13
-rw-r--r--ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_attributes/molecule/default/converge.yml35
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml4
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml4
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml4
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/molecule.yml12
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml16
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml51
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml37
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml50
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml48
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml56
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/passensor_assert.yml39
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/powersupply_assert.yml42
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml54
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml6
-rw-r--r--ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml89
-rw-r--r--ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/molecule.yml10
-rw-r--r--ansible_collections/dellemc/openmanage/tests/README.md2
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py4
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py11
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py6
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py25
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py6
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py4
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py25
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py6
-rw-r--r--ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_session.py448
-rw-r--r--ansible_collections/dellemc/powerflex/.github/CODEOWNERS2
-rw-r--r--ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml14
-rw-r--r--ansible_collections/dellemc/powerflex/CHANGELOG.rst10
-rw-r--r--ansible_collections/dellemc/powerflex/FILES.json112
-rw-r--r--ansible_collections/dellemc/powerflex/MANIFEST.json10
-rw-r--r--ansible_collections/dellemc/powerflex/README.md95
-rw-r--r--ansible_collections/dellemc/powerflex/changelogs/changelog.yaml7
-rw-r--r--ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md8
-rw-r--r--ansible_collections/dellemc/powerflex/docs/INSTALLATION.md4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md6
-rw-r--r--ansible_collections/dellemc/powerflex/docs/MAINTAINERS.md3
-rw-r--r--ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/Release Notes.md14
-rw-r--r--ansible_collections/dellemc/powerflex/docs/SECURITY.md2
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/device.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/info.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/sdc.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/sds.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst252
-rw-r--r--ansible_collections/dellemc/powerflex/docs/modules/volume.rst4
-rw-r--r--ansible_collections/dellemc/powerflex/meta/execution-environment.yml2
-rw-r--r--ansible_collections/dellemc/powerflex/meta/runtime.yml2
-rw-r--r--ansible_collections/dellemc/powerflex/playbooks/modules/storagepool.yml53
-rw-r--r--ansible_collections/dellemc/powerflex/plugins/doc_fragments/powerflex.py4
-rw-r--r--ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py4
-rw-r--r--ansible_collections/dellemc/powerflex/plugins/modules/storagepool.py1144
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml8
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_gateway/tasks/install_keepalived.yml2
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_lia/tasks/install_lia.yml24
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_mdm/tasks/install_powerflex4x_mdm.yml7
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdc/defaults/main.yml1
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml2
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/register_esxi_sdc.yml8
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml10
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/remove_sdr.yml43
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml8
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/connect_mdm_cluster.yml131
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb4x.yml7
-rw-r--r--ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/set_tb_ips.yml18
-rw-r--r--ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.14.txt45
-rw-r--r--ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.15.txt2
-rw-r--r--ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.16.txt2
-rw-r--r--ansible_collections/dellemc/powerflex/tests/unit/plugins/module_utils/mock_storagepool_api.py87
-rw-r--r--ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_mdm_cluster.py144
-rw-r--r--ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_storagepool.py635
-rw-r--r--ansible_collections/ieisystem/inmanage/.github/settings.yml5
-rw-r--r--ansible_collections/ieisystem/inmanage/.github/workflows/ansible-test.yml54
-rw-r--r--ansible_collections/ieisystem/inmanage/CHANGELOG.rst200
-rw-r--r--ansible_collections/ieisystem/inmanage/CODE_OF_CONDUCT.md3
-rw-r--r--ansible_collections/ieisystem/inmanage/FILES.json1972
-rw-r--r--ansible_collections/ieisystem/inmanage/LICENSE674
-rw-r--r--ansible_collections/ieisystem/inmanage/MANIFEST.json36
-rw-r--r--ansible_collections/ieisystem/inmanage/README.md92
-rw-r--r--ansible_collections/ieisystem/inmanage/changelogs/changelog.yaml453
-rw-r--r--ansible_collections/ieisystem/inmanage/changelogs/config.yaml29
-rw-r--r--ansible_collections/ieisystem/inmanage/changelogs/fragments/.keep0
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ad_group_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ad_group_module.rst431
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ad_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/adapter_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/add_ldisk_module.rst589
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/alert_policy_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/audit_log_info_module.rst358
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/auto_capture_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/backplane_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/backup_module.rst355
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/bios_export_module.rst318
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/bios_import_module.rst318
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/bios_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/bmc_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/boot_image_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/boot_option_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/clear_audit_log_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/clear_event_log_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/clear_system_log_module.rst334
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/collect_blackbox_module.rst319
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/collect_log_module.rst318
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/connect_media_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/cpu_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/del_session_module.rst318
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/dns_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/download_auto_screenshot_module.rst318
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/download_manual_screenshot_module.rst318
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ad_module.rst459
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_alert_policy_module.rst430
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_auto_capture_module.rst322
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_bios_module.rst377
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_boot_image_module.rst327
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_boot_option_module.rst365
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_connect_media_module.rst360
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_dns_module.rst620
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_event_log_policy_module.rst322
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_fan_module.rst359
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_fru_module.rst352
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ipv4_module.rst427
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ipv6_module.rst444
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_kvm_module.rst543
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ldap_module.rst526
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ldisk_module.rst378
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_log_setting_module.rst474
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_m6_log_setting_module.rst490
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_manual_capture_module.rst322
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_media_instance_module.rst502
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ncsi_module.rst389
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_network_bond_module.rst363
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_network_link_module.rst392
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_network_module.rst343
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_ntp_module.rst486
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_pdisk_module.rst482
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_power_budget_module.rst685
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_power_restore_module.rst323
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_power_status_module.rst325
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_preserve_config_module.rst373
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_psu_config_module.rst339
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_psu_peak_module.rst343
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_restore_factory_default_module.rst368
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_service_module.rst428
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_smtp_com_module.rst578
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_smtp_dest_module.rst383
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_smtp_module.rst587
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_snmp_module.rst540
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_snmp_trap_module.rst598
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_threshold_module.rst410
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_uid_module.rst343
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_virtual_media_module.rst511
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/edit_vlan_module.rst381
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/event_log_info_module.rst358
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/event_log_policy_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/fan_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/fru_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/fw_version_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/gpu_info_module.rst303
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/hard_disk_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/hba_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/index.rst149
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/kvm_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ldap_group_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ldap_group_module.rst431
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ldap_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ldisk_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/log_setting_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/media_instance_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/mem_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ncsi_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/network_bond_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/network_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/network_link_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/ntp_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/onboard_disk_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/pcie_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/pdisk_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/power_budget_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/power_consumption_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/power_restore_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/power_status_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/preserve_config_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/psu_config_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/psu_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/psu_peak_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/raid_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/reset_bmc_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/reset_kvm_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/restore_module.rst348
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/self_test_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/sensor_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/server_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/service_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/session_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/smtp_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/snmp_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/snmp_trap_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/support_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/system_log_info_module.rst384
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/temp_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/threshold_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/uid_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/update_cpld_module.rst362
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/update_fw_module.rst426
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/update_psu_module.rst338
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/user_group_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/user_group_module.rst532
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/user_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/user_module.rst454
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/virtual_media_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/docs/volt_info_module.rst302
-rw-r--r--ansible_collections/ieisystem/inmanage/meta/runtime.yml2
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/doc_fragments/inmanage.py59
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/module_utils/inmanage.py54
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ad_group.py158
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ad_group_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ad_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/adapter_info.py119
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/add_ldisk.py221
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/alert_policy_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/audit_log_info.py126
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/auto_capture_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/backplane_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/backup.py120
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/bios_export.py107
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/bios_import.py109
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/bios_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/bmc_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/boot_image_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/boot_option_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/clear_audit_log.py101
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/clear_event_log.py101
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/clear_system_log.py115
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/collect_blackbox.py108
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/collect_log.py107
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/connect_media_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/cpu_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/del_session.py109
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/dns_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/download_auto_screenshot.py107
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/download_manual_screenshot.py107
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ad.py163
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_alert_policy.py156
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_auto_capture.py110
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_bios.py134
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_image.py111
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_option.py123
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_connect_media.py126
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_dns.py231
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_event_log_policy.py110
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_fan.py126
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_fru.py121
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv4.py159
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv6.py166
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_kvm.py191
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldap.py193
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldisk.py131
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_log_setting.py164
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_m6_log_setting.py174
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_manual_capture.py110
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_media_instance.py167
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ncsi.py133
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_network.py118
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_bond.py123
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_link.py140
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_ntp.py173
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_pdisk.py178
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_budget.py260
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_restore.py110
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_status.py110
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_preserve_config.py136
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_config.py116
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_peak.py121
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_restore_factory_default.py131
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_service.py150
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp.py213
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_com.py204
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_dest.py136
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp.py188
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp_trap.py217
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_threshold.py141
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_uid.py121
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_virtual_media.py182
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/edit_vlan.py136
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/event_log_info.py126
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/event_log_policy_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/fan_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/fru_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/fw_version_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/gpu_info.py100
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/hard_disk_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/hba_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/kvm_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group.py158
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ldap_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ldisk_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/log_setting_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/media_instance_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/mem_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ncsi_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/network_bond_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/network_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/network_link_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/ntp_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/onboard_disk_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/pcie_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/pdisk_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/power_budget_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/power_consumption_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/power_restore_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/power_status_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/preserve_config_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/psu_config_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/psu_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/psu_peak_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/raid_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/reset_bmc.py101
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/reset_kvm.py101
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/restore.py117
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/self_test_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/sensor_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/server_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/service_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/session_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/smtp_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/snmp_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/snmp_trap_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/support_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/system_log_info.py134
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/temp_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/threshold_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/uid_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/update_cpld.py130
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/update_fw.py154
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/update_psu.py117
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/user.py165
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/user_group.py204
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/user_group_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/user_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/virtual_media_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/plugins/modules/volt_info.py99
-rw-r--r--ansible_collections/ieisystem/inmanage/requirements.txt1
-rw-r--r--ansible_collections/inspur/ispim/CHANGELOG.rst8
-rw-r--r--ansible_collections/inspur/ispim/FILES.json10
-rw-r--r--ansible_collections/inspur/ispim/MANIFEST.json4
-rw-r--r--ansible_collections/inspur/ispim/README.md180
-rw-r--r--ansible_collections/inspur/ispim/changelogs/.plugin-cache.yaml2
-rw-r--r--ansible_collections/inspur/ispim/changelogs/changelog.yaml7
-rw-r--r--ansible_collections/inspur/ispim/meta/runtime.yml4
-rw-r--r--ansible_collections/kubernetes/core/.ansible-lint-ignore2
-rw-r--r--ansible_collections/kubernetes/core/.github/workflows/integration-tests-kubevirt.yaml127
-rw-r--r--ansible_collections/kubernetes/core/CHANGELOG.rst13
-rw-r--r--ansible_collections/kubernetes/core/FILES.json67
-rw-r--r--ansible_collections/kubernetes/core/MANIFEST.json4
-rw-r--r--ansible_collections/kubernetes/core/Makefile2
-rw-r--r--ansible_collections/kubernetes/core/README.md2
-rw-r--r--ansible_collections/kubernetes/core/changelogs/changelog.yaml11
-rw-r--r--ansible_collections/kubernetes/core/changelogs/fragments/20240530-ansible-core-support-update.yaml3
-rw-r--r--ansible_collections/kubernetes/core/changelogs/fragments/inventory-update_removal_date.yml2
-rw-r--r--ansible_collections/kubernetes/core/docs/kubernetes.core.k8s_inventory.rst4
-rw-r--r--ansible_collections/kubernetes/core/docs/kubernetes.core.kubectl_connection.rst76
-rw-r--r--ansible_collections/kubernetes/core/meta/runtime.yml4
-rw-r--r--ansible_collections/kubernetes/core/plugins/connection/kubectl.py75
-rw-r--r--ansible_collections/kubernetes/core/plugins/inventory/k8s.py8
-rw-r--r--ansible_collections/kubernetes/core/tools/kubevirt_list_targets.py22
-rw-r--r--ansible_collections/lowlydba/sqlserver/.github/FUNDING.yml1
-rw-r--r--ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test-windows.yml4
-rw-r--r--ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test.yml10
-rw-r--r--ansible_collections/lowlydba/sqlserver/.github/workflows/release.yml2
-rw-r--r--ansible_collections/lowlydba/sqlserver/CHANGELOG.rst12
-rw-r--r--ansible_collections/lowlydba/sqlserver/FILES.json2048
-rw-r--r--ansible_collections/lowlydba/sqlserver/MANIFEST.json4
-rw-r--r--ansible_collections/lowlydba/sqlserver/changelogs/changelog.yaml11
-rw-r--r--ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.ps16
-rw-r--r--ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.py19
-rw-r--r--ansible_collections/lowlydba/sqlserver/plugins/modules/spn.py10
-rw-r--r--ansible_collections/lowlydba/sqlserver/tests/integration/targets/win_ag_listener/tasks/main.yml6
-rw-r--r--ansible_collections/microsoft/ad/.ansible-lint2
-rw-r--r--ansible_collections/microsoft/ad/.azure-pipelines/azure-pipelines.yml19
-rw-r--r--ansible_collections/microsoft/ad/.gitignore3
-rw-r--r--ansible_collections/microsoft/ad/CHANGELOG.rst26
-rw-r--r--ansible_collections/microsoft/ad/FILES.json197
-rw-r--r--ansible_collections/microsoft/ad/MANIFEST.json4
-rw-r--r--ansible_collections/microsoft/ad/changelogs/changelog.yaml36
-rw-r--r--ansible_collections/microsoft/ad/docs/docsite/extra-docs.yml1
-rw-r--r--ansible_collections/microsoft/ad/docs/docsite/rst/guide_ad_module_authentication.rst120
-rw-r--r--ansible_collections/microsoft/ad/docs/docsite/rst/guide_attributes.rst74
-rw-r--r--ansible_collections/microsoft/ad/docs/docsite/rst/guide_ldap_connection.rst2
-rw-r--r--ansible_collections/microsoft/ad/docs/docsite/rst/guide_migration.rst24
-rw-r--r--ansible_collections/microsoft/ad/plugins/action/domain.py32
-rw-r--r--ansible_collections/microsoft/ad/plugins/action/domain_child.py8
-rw-r--r--ansible_collections/microsoft/ad/plugins/action/domain_controller.py32
-rw-r--r--ansible_collections/microsoft/ad/plugins/doc_fragments/ad_object.py44
-rw-r--r--ansible_collections/microsoft/ad/plugins/doc_fragments/ldap_connection.py1
-rw-r--r--ansible_collections/microsoft/ad/plugins/inventory/ldap.py7
-rw-r--r--ansible_collections/microsoft/ad/plugins/module_utils/_ADObject.psm1340
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/computer.ps174
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/computer.py59
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/domain.py1
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/domain_child.ps1242
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/domain_child.yml184
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/domain_controller.py5
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/group.ps1135
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/group.py57
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/membership.ps12
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/object.py8
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/object_info.py4
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/ou.ps11
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/ou.py15
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/user.ps1108
-rw-r--r--ansible_collections/microsoft/ad/plugins/modules/user.py78
-rw-r--r--ansible_collections/microsoft/ad/plugins/plugin_utils/_module_with_reboot.py37
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/computer/tasks/tests.yml52
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/README.md36
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/Vagrantfile27
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/aliases2
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/ansible.cfg4
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/inventory.yml28
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/setup.yml71
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/cross_domain.yml596
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_child.yml98
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_tree.yml91
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/domain_child/test.yml146
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/group/tasks/tests.yml11
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/membership/tasks/main.yml269
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/ou/tasks/tests.yml3
-rw-r--r--ansible_collections/microsoft/ad/tests/integration/targets/user/tasks/tests.yml15
-rw-r--r--ansible_collections/microsoft/ad/tests/sanity/ignore-2.14.txt1
-rw-r--r--ansible_collections/microsoft/ad/tests/sanity/ignore-2.15.txt1
-rw-r--r--ansible_collections/microsoft/ad/tests/sanity/ignore-2.16.txt1
-rw-r--r--ansible_collections/microsoft/ad/tests/sanity/ignore-2.17.txt1
-rw-r--r--ansible_collections/microsoft/ad/tests/sanity/ignore-2.18.txt1
-rwxr-xr-xansible_collections/microsoft/ad/tests/utils/shippable/lint.sh2
-rw-r--r--ansible_collections/netbox/netbox/.ansible-lint15
-rw-r--r--ansible_collections/netbox/netbox/.ansible-lint-ignore3
-rw-r--r--ansible_collections/netbox/netbox/.github/workflows/main.yml31
-rw-r--r--ansible_collections/netbox/netbox/.github/workflows/tests.yml50
-rw-r--r--ansible_collections/netbox/netbox/.readthedocs.yml4
-rw-r--r--ansible_collections/netbox/netbox/.yamllint32
-rw-r--r--ansible_collections/netbox/netbox/CHANGELOG.rst37
-rw-r--r--ansible_collections/netbox/netbox/FILES.json3658
-rw-r--r--ansible_collections/netbox/netbox/MANIFEST.json4
-rw-r--r--ansible_collections/netbox/netbox/README.md174
-rw-r--r--ansible_collections/netbox/netbox/changelogs/changelog.yaml616
-rw-r--r--ansible_collections/netbox/netbox/changelogs/fragments/619-v3.0-ci-fixes.yml3
-rw-r--r--ansible_collections/netbox/netbox/changelogs/fragments/allow-jinja2.yml3
-rw-r--r--ansible_collections/netbox/netbox/changelogs/fragments/issue-558.yml3
-rw-r--r--ansible_collections/netbox/netbox/changelogs/fragments/issue-951.yml3
-rw-r--r--ansible_collections/netbox/netbox/changelogs/fragments/serial_and_asset.yml3
-rw-r--r--ansible_collections/netbox/netbox/docs/_extensions/pygments_lexer.py4
-rw-r--r--ansible_collections/netbox/netbox/docs/conf.py2
-rw-r--r--ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/architecture.rst22
-rw-r--r--ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/new_module.rst18
-rw-r--r--ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/update_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/getting_started/how-to-use/advanced.rst34
-rw-r--r--ansible_collections/netbox/netbox/docs/getting_started/how-to-use/modules.rst10
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/environment_variables.rst2
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/index.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/nb_inventory_inventory.rst35
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/nb_lookup_lookup.rst9
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_aggregate_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_asn_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_cable_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_termination_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_type_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_group_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_type_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_config_context_module.rst10
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_config_template_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_template_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_template_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_contact_group_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_contact_module.rst10
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_contact_role_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_choice_set_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_module.rst55
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_custom_link_module.rst57
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_template_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_template_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_role_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_device_type_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_export_template_module.rst53
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_assignment_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_template_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_interface_module.rst4
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_role_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_ip_address_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_ipam_role_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_journal_entry_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_module.rst10
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_termination_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_location_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_manufacturer_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_module_bay_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_module_module.rst12
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_module_type_module.rst12
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_platform_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_power_feed_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_module.rst52
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_template_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_power_panel_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_template_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_prefix_module.rst12
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_provider_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_provider_network_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_rack_group_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_rack_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_rack_role_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_template_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_region_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_rir_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_route_target_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_service_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_service_template_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_site_group_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_site_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_tag_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_group_module.rst7
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_chassis_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_disk_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_machine_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_group_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_vm_interface_module.rst12
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_vrf_module.rst6
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_webhook_module.rst22
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_group_module.rst8
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_module.rst10
-rw-r--r--ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_link_module.rst10
-rw-r--r--ansible_collections/netbox/netbox/meta/runtime.yml4
-rw-r--r--ansible_collections/netbox/netbox/plugins/inventory/nb_inventory.py82
-rw-r--r--ansible_collections/netbox/netbox/plugins/lookup/nb_lookup.py27
-rw-r--r--ansible_collections/netbox/netbox/plugins/module_utils/netbox_utils.py24
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_aggregate.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_asn.py3
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_cable.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_circuit.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_termination.py4
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_type.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_cluster.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_group.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_type.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_config_context.py6
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_config_template.py6
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_console_port.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_console_port_template.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port_template.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_contact.py8
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_contact_group.py3
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_contact_role.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field.py38
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field_choice_set.py10
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_custom_link.py44
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device.py12
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay.py3
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay_template.py3
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface.py8
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface_template.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device_role.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_device_type.py16
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_export_template.py32
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group.py5
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group_assignment.py12
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_front_port.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_front_port_template.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item.py12
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item_role.py8
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_ip_address.py8
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_ipam_role.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_journal_entry.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn.py12
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn_termination.py4
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_location.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_manufacturer.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_module.py14
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_module_bay.py6
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_module_type.py16
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_platform.py4
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_power_feed.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet.py11
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet_template.py4
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_power_panel.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_power_port.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_power_port_template.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_prefix.py8
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_provider.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_provider_network.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_rack.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_rack_group.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_rack_role.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port_template.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_region.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_rir.py4
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_route_target.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_service.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_service_template.py12
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_site.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_site_group.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_tag.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_tenant.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_tenant_group.py3
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_chassis.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_disk.py10
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_machine.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_vlan.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_vlan_group.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_vm_interface.py8
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_vrf.py2
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_webhook.py32
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan.py12
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan_group.py10
-rw-r--r--ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_link.py16
-rw-r--r--ansible_collections/netbox/netbox/poetry.lock1739
-rw-r--r--ansible_collections/netbox/netbox/pyproject.toml8
-rw-r--r--ansible_collections/netbox/netbox/requirements.yml2
-rw-r--r--ansible_collections/netbox/netbox/test-requirements.txt2
-rw-r--r--ansible_collections/netbox/netbox/tests/config.yml3
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/netbox-deploy.py106
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/render_config.sh1
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/compare_inventory_json.py2
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml2
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml14
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory.yml12
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/compare_inventory_json.py2
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml2
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml14
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory.yml12
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/compare_inventory_json.py2
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml2
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml14
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory.yml14
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/.gitignore1
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/aliases1
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/compare_inventory_json.py152
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.json1417
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.yml32
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.json1233
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.yml33
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.json384
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.yml62
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.json449
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.json1447
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.yml28
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.json1372
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.yml41
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.json384
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.yml62
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.json440
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.yml33
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.json1491
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.yml38
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.json1505
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.yml31
-rwxr-xr-xansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme.sh72
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme_config.template6
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.5/tasks/main.yml165
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.6/tasks/main.yml165
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.7/tasks/main.yml165
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/regression-v4.0/tasks/main.yml284
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/main.yml323
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_aggregate.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_asn.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cable.yml36
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_config_context.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact.yml64
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact_role.yml28
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_field.yml48
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_link.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device.yml134
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_role.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_export_template.yml32
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml8
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml50
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ip_address.yml114
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml4
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_location.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_lookup.yml67
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_bay.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_type.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_platform.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_feed.yml21
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_panel.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_prefix.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider_network.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_group.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_role.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_region.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rir.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_route_target.yml78
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site.yml48
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site_group.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tag.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant.yml66
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml80
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml50
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan.yml64
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml110
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml72
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vrf.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_webhook.yml38
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml54
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml52
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml42
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/main.yml332
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_aggregate.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_asn.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cable.yml36
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_context.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_template.yml36
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact.yml64
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact_role.yml28
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field.yml48
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml48
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_link.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device.yml134
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_role.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_export_template.yml32
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml8
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml50
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ip_address.yml114
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml4
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_location.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_lookup.yml67
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_bay.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_type.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_platform.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_feed.yml21
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_panel.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_prefix.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider_network.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_group.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_role.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_region.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rir.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_route_target.yml78
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site.yml48
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site_group.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tag.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant.yml66
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml80
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml50
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan.yml64
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml110
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml72
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vrf.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_webhook.yml38
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml54
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml52
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml42
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/main.yml318
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_aggregate.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_asn.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cable.yml36
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_context.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_template.yml36
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact.yml64
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact_role.yml28
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_field.yml46
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_link.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device.yml134
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_role.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_type.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_export_template.yml32
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml8
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml50
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ip_address.yml108
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml4
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml30
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml10
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_location.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_lookup.yml67
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_bay.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_type.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_platform.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_feed.yml21
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_panel.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port.yml22
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_prefix.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider_network.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_group.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_role.yml18
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml24
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_region.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rir.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_route_target.yml78
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service_template.yml20
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site.yml48
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site_group.yml16
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tag.yml40
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant.yml66
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml80
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml34
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml50
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan.yml64
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml110
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml72
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vrf.yml44
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_webhook.yml38
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml54
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml52
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml42
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/main.yml330
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_aggregate.yml115
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_asn.yml88
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cable.yml193
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit.yml109
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_termination.yml129
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_type.yml96
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster.yml98
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_group.yml96
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_type.yml96
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_context.yml107
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_template.yml82
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port.yml108
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port_template.yml105
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port.yml108
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port_template.yml105
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact.yml100
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact_role.yml94
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_field.yml129
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_link.yml113
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device.yml243
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay.yml87
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay_template.yml81
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface.yml312
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface_template.yml109
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_role.yml101
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_type.yml132
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_export_template.yml118
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group.yml91
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group_assignment.yml92
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port.yml150
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port_template.yml147
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item.yml203
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item_role.yml98
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ip_address.yml352
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ipam_role.yml94
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_journal_entry.yml26
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn.yml99
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn_termination.yml94
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_location.yml85
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_lookup.yml90
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_manufacturer.yml91
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module.yml107
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_bay.yml105
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_type.yml97
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_platform.yml92
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_feed.yml126
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet.yml114
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet_template.yml111
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_panel.yml105
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port.yml125
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port_template.yml212
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_prefix.yml245
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider.yml79
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider_network.yml83
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack.yml226
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_group.yml62
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_role.yml81
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port.yml139
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port_template.yml136
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_region.yml79
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rir.yml79
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_route_target.yml204
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service.yml196
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service_template.yml115
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site.yml165
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site_group.yml81
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tag.yml110
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant.yml106
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant_group.yml129
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_chassis.yml131
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_disk.yml87
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_machine.yml128
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan.yml193
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan_group.yml314
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vm_interface.yml159
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vrf.yml137
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_webhook.yml121
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan.yml103
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan_group.yml97
-rw-r--r--ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_link.yml130
-rw-r--r--ansible_collections/netbox/netbox/tests/netbox-docker/v3.5/docker-compose.override.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/netbox-docker/v3.6/docker-compose.override.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/netbox-docker/v3.7/docker-compose.override.yml12
-rw-r--r--ansible_collections/netbox/netbox/tests/netbox-docker/v4.0/docker-compose.override.yml25
-rw-r--r--ansible_collections/netbox/netbox/tests/sanity/ignore-2.11.txt1
-rw-r--r--ansible_collections/netbox/netbox/tox-ansible.ini11
-rw-r--r--ansible_collections/purestorage/flasharray/.github/workflows/main.yml3
-rw-r--r--ansible_collections/purestorage/flasharray/CHANGELOG.rst10
-rw-r--r--ansible_collections/purestorage/flasharray/FILES.json37
-rw-r--r--ansible_collections/purestorage/flasharray/MANIFEST.json4
-rw-r--r--ansible_collections/purestorage/flasharray/README.md90
-rw-r--r--ansible_collections/purestorage/flasharray/changelogs/.plugin-cache.yaml2
-rw-r--r--ansible_collections/purestorage/flasharray/changelogs/changelog.yaml11
-rw-r--r--ansible_collections/purestorage/flasharray/changelogs/fragments/565_fix_clear_interface.yaml2
-rw-r--r--ansible_collections/purestorage/flasharray/changelogs/fragments/567_newarray_network.yaml2
-rw-r--r--ansible_collections/purestorage/flasharray/changelogs/fragments/569_enhanced_snap_policy.yaml2
-rw-r--r--ansible_collections/purestorage/flasharray/plugins/modules/purefa_info.py2
-rw-r--r--ansible_collections/purestorage/flasharray/plugins/modules/purefa_network.py27
-rw-r--r--ansible_collections/purestorage/flasharray/plugins/modules/purefa_policy.py109
-rw-r--r--ansible_collections/vultr/cloud/.github/workflows/cleanup.yml2
-rw-r--r--ansible_collections/vultr/cloud/.github/workflows/integration.yml4
-rw-r--r--ansible_collections/vultr/cloud/.github/workflows/sanity.yml3
-rw-r--r--ansible_collections/vultr/cloud/.github/workflows/unit.yml4
-rw-r--r--ansible_collections/vultr/cloud/CHANGELOG.rst8
-rw-r--r--ansible_collections/vultr/cloud/FILES.json1460
-rw-r--r--ansible_collections/vultr/cloud/MANIFEST.json4
-rw-r--r--ansible_collections/vultr/cloud/README.md2
-rw-r--r--ansible_collections/vultr/cloud/changelogs/changelog.yaml7
-rw-r--r--ansible_collections/vultr/cloud/plugins/doc_fragments/vultr_v2.py5
-rw-r--r--ansible_collections/vultr/cloud/plugins/module_utils/vultr_v2.py10
-rw-r--r--ansible_collections/vultr/cloud/plugins/modules/bare_metal.py7
-rw-r--r--ansible_collections/vultr/cloud/plugins/modules/instance.py49
-rw-r--r--ansible_collections/vultr/cloud/tests/integration/targets/cleanup/tasks/cleanup_vpc.yml2
-rw-r--r--ansible_collections/vultr/cloud/tests/integration/targets/instance/defaults/main.yml35
-rw-r--r--ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/absent.yml1
-rw-r--r--ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/tests.yml2
2521 files changed, 189322 insertions, 42355 deletions
diff --git a/ansible_collections/amazon/aws/CHANGELOG.rst b/ansible_collections/amazon/aws/CHANGELOG.rst
index f867fc9e4..7a4fa628a 100644
--- a/ansible_collections/amazon/aws/CHANGELOG.rst
+++ b/ansible_collections/amazon/aws/CHANGELOG.rst
@@ -4,6 +4,21 @@ amazon.aws Release Notes
.. contents:: Topics
+v8.0.1
+======
+
+Release Summary
+---------------
+
+This release includes some bug fixes for the `s3_object`, `ec2_instance` and `backup_plan_info` modules.
+
+Bugfixes
+--------
+
+- backup_plan_info - Bugfix to enable getting info of all backup plans (https://github.com/ansible-collections/amazon.aws/pull/2083).
+- ec2_instance - do not ignore IPv6 addresses when a single network interface is specified (https://github.com/ansible-collections/amazon.aws/pull/1979).
+- s3_object - fixed issue which was causing ``MemoryError`` exceptions when downloading large files (https://github.com/ansible-collections/amazon.aws/issues/2107).
+
v8.0.0
======
diff --git a/ansible_collections/amazon/aws/FILES.json b/ansible_collections/amazon/aws/FILES.json
index ad0709691..5ab9c8f81 100644
--- a/ansible_collections/amazon/aws/FILES.json
+++ b/ansible_collections/amazon/aws/FILES.json
@@ -221,7 +221,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d715aea5ddd297081527d05cc6d228ce4d3e181dcc9bb050ccbd6a6d8952d73a",
+ "chksum_sha256": "f6d1f00ee0eececd872d0f458d345624da9358ae68c8ca582a3425fe04d0c9ed",
"format": 1
},
{
@@ -256,7 +256,7 @@
"name": "docs/docsite/rst/CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da8a721530a2d8c3c3e98ddc0f99b457215be51f4dac8b2a89f388a4ed0813c9",
+ "chksum_sha256": "6530e4dba4b633fc2a3f72c6f6f095a1774152817b945b350af39ca71b90ccc7",
"format": 1
},
{
@@ -543,7 +543,7 @@
"name": "plugins/module_utils/common.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4931cbe751c155b1c71aae1b5c36f22301d289c7b2051e821fe51613c8a55a9",
+ "chksum_sha256": "509e4309e0a800b28877fefb7069fa1c631d8ec82700c113b768973de67ae39f",
"format": 1
},
{
@@ -739,7 +739,7 @@
"name": "plugins/modules/backup_plan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9366677f43a8135847f52a1b1096f64efe1d072fe61d31040d61abfaa8971ff0",
+ "chksum_sha256": "21b12b755f7218b9fba1747e7ac8bf13a16784b77661fd13b4458cdbecd1953e",
"format": 1
},
{
@@ -921,7 +921,7 @@
"name": "plugins/modules/ec2_instance.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ffd0c2409add19494978e82ca7451aed82d416dc6fc3702a0dff629cbe6ece6e",
+ "chksum_sha256": "4a83e8de49534d032c7d81e32a1439e04c48803dc09392c3c4638296f73709ef",
"format": 1
},
{
@@ -1467,7 +1467,7 @@
"name": "plugins/modules/s3_object.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e17e5c421f076a0748478720a844df908d2350c46b63c8c03769995352d03da0",
+ "chksum_sha256": "985047a0b4e4e54b33ed8aff159e5511d3ad02ef6e10408faadadabba5d8863e",
"format": 1
},
{
@@ -1852,7 +1852,7 @@
"name": "tests/integration/targets/backup_plan/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa2adbe631ff74e50cd0cbd7fe7e2c36d532e47c75cf95efea01ba3d0e313cdf",
+ "chksum_sha256": "a4ce0be3a740f111d89bb2ad4e55fe00d70ef45a2fb7bf44c87bfe455b15345c",
"format": 1
},
{
@@ -11806,7 +11806,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da8a721530a2d8c3c3e98ddc0f99b457215be51f4dac8b2a89f388a4ed0813c9",
+ "chksum_sha256": "6530e4dba4b633fc2a3f72c6f6f095a1774152817b945b350af39ca71b90ccc7",
"format": 1
},
{
diff --git a/ansible_collections/amazon/aws/MANIFEST.json b/ansible_collections/amazon/aws/MANIFEST.json
index a1dd8b884..056adfaf0 100644
--- a/ansible_collections/amazon/aws/MANIFEST.json
+++ b/ansible_collections/amazon/aws/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "amazon",
"name": "aws",
- "version": "8.0.0",
+ "version": "8.0.1",
"authors": [
"Ansible (https://github.com/ansible)"
],
@@ -25,7 +25,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3973d6f227d778fb935a07725724715ec807dc1b82e133e1f9fd73b95968b735",
+ "chksum_sha256": "a1aac297de79ad7bcb7613aafc27594950c06e3bb9a9f75fd77d5031b8369c6e",
"format": 1
},
"format": 1
diff --git a/ansible_collections/amazon/aws/changelogs/changelog.yaml b/ansible_collections/amazon/aws/changelogs/changelog.yaml
index 546b1fe22..c02fa995c 100644
--- a/ansible_collections/amazon/aws/changelogs/changelog.yaml
+++ b/ansible_collections/amazon/aws/changelogs/changelog.yaml
@@ -3026,3 +3026,19 @@ releases:
- sanity-boto3.yml
- sanity-simple.yml
release_date: '2024-05-16'
+ 8.0.1:
+ changes:
+ bugfixes:
+ - backup_plan_info - Bugfix to enable getting info of all backup plans (https://github.com/ansible-collections/amazon.aws/pull/2083).
+ - ec2_instance - do not ignore IPv6 addresses when a single network interface
+ is specified (https://github.com/ansible-collections/amazon.aws/pull/1979).
+ - s3_object - fixed issue which was causing ``MemoryError`` exceptions when
+ downloading large files (https://github.com/ansible-collections/amazon.aws/issues/2107).
+ release_summary: This release includes some bug fixes for the `s3_object`, `ec2_instance`
+ and `backup_plan_info` modules.
+ fragments:
+ - 1979-do-not-ignore-ipv6-addresses.yaml
+ - 2083-backup_plan_info-bugfix-get-info-for-all-plans.yml
+ - 2107-s3_download.yml
+ - release_summary.yml
+ release_date: '2024-06-05'
diff --git a/ansible_collections/amazon/aws/docs/docsite/rst/CHANGELOG.rst b/ansible_collections/amazon/aws/docs/docsite/rst/CHANGELOG.rst
index f867fc9e4..7a4fa628a 100644
--- a/ansible_collections/amazon/aws/docs/docsite/rst/CHANGELOG.rst
+++ b/ansible_collections/amazon/aws/docs/docsite/rst/CHANGELOG.rst
@@ -4,6 +4,21 @@ amazon.aws Release Notes
.. contents:: Topics
+v8.0.1
+======
+
+Release Summary
+---------------
+
+This release includes some bug fixes for the `s3_object`, `ec2_instance` and `backup_plan_info` modules.
+
+Bugfixes
+--------
+
+- backup_plan_info - Bugfix to enable getting info of all backup plans (https://github.com/ansible-collections/amazon.aws/pull/2083).
+- ec2_instance - do not ignore IPv6 addresses when a single network interface is specified (https://github.com/ansible-collections/amazon.aws/pull/1979).
+- s3_object - fixed issue which was causing ``MemoryError`` exceptions when downloading large files (https://github.com/ansible-collections/amazon.aws/issues/2107).
+
v8.0.0
======
diff --git a/ansible_collections/amazon/aws/plugins/module_utils/common.py b/ansible_collections/amazon/aws/plugins/module_utils/common.py
index e802a8d80..ae768e2e3 100644
--- a/ansible_collections/amazon/aws/plugins/module_utils/common.py
+++ b/ansible_collections/amazon/aws/plugins/module_utils/common.py
@@ -4,7 +4,7 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
AMAZON_AWS_COLLECTION_NAME = "amazon.aws"
-AMAZON_AWS_COLLECTION_VERSION = "8.0.0"
+AMAZON_AWS_COLLECTION_VERSION = "8.0.1"
_collection_info_context = {
diff --git a/ansible_collections/amazon/aws/plugins/modules/backup_plan_info.py b/ansible_collections/amazon/aws/plugins/modules/backup_plan_info.py
index 096857d5b..98f49f6b6 100644
--- a/ansible_collections/amazon/aws/plugins/modules/backup_plan_info.py
+++ b/ansible_collections/amazon/aws/plugins/modules/backup_plan_info.py
@@ -20,7 +20,6 @@ options:
backup_plan_names:
type: list
elements: str
- required: true
description:
- Specifies a list of plan names.
extends_documentation_fragment:
@@ -31,10 +30,11 @@ extends_documentation_fragment:
EXAMPLES = r"""
# Note: These examples do not set authentication details, see the AWS Guide for details.
-# Gather information about all backup plans
-- amazon.aws.backup_plan_info
-# Gather information about a particular backup plan
-- amazon.aws.backup_plan_info:
+- name: Gather information about all backup plans
+ amazon.aws.backup_plan_info:
+
+- name: Gather information about a particular backup plan
+ amazon.aws.backup_plan_info:
backup_plan_names:
- elastic
"""
@@ -110,10 +110,21 @@ from ansible_collections.amazon.aws.plugins.module_utils.modules import AnsibleA
from ansible_collections.amazon.aws.plugins.module_utils.retries import AWSRetry
+def get_all_backup_plans_info(client):
+ paginator = client.get_paginator("list_backup_plans")
+ return paginator.paginate().build_full_result()
+
+
def get_backup_plan_detail(client, module):
backup_plan_list = []
backup_plan_names = module.params.get("backup_plan_names")
+ if backup_plan_names is None:
+ backup_plan_names = []
+ backup_plan_list_info = get_all_backup_plans_info(client)["BackupPlansList"]
+ for backup_plan in backup_plan_list_info:
+ backup_plan_names.append(backup_plan["BackupPlanName"])
+
for name in backup_plan_names:
backup_plan_list.extend(get_plan_details(module, client, name))
@@ -122,7 +133,7 @@ def get_backup_plan_detail(client, module):
def main():
argument_spec = dict(
- backup_plan_names=dict(type="list", elements="str", required=True),
+ backup_plan_names=dict(type="list", elements="str"),
)
module = AnsibleAWSModule(argument_spec=argument_spec, supports_check_mode=True)
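The backup_plan_info change above makes backup_plan_names optional and, when it is omitted, discovers every plan via the list_backup_plans paginator before fetching per-plan details. A minimal standalone sketch of that pagination pattern follows; it assumes a configured boto3 session with AWS Backup access and is illustrative rather than the module's actual code path.

import boto3

def list_all_backup_plan_names():
    # Paginate over ListBackupPlans and collect the names used for the
    # follow-up per-plan detail lookups (mirrors get_all_backup_plans_info above).
    client = boto3.client("backup")
    paginator = client.get_paginator("list_backup_plans")
    result = paginator.paginate().build_full_result()
    return [plan["BackupPlanName"] for plan in result["BackupPlansList"]]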
diff --git a/ansible_collections/amazon/aws/plugins/modules/ec2_instance.py b/ansible_collections/amazon/aws/plugins/modules/ec2_instance.py
index c09cce97b..9f5c3eb65 100644
--- a/ansible_collections/amazon/aws/plugins/modules/ec2_instance.py
+++ b/ansible_collections/amazon/aws/plugins/modules/ec2_instance.py
@@ -1339,6 +1339,9 @@ def build_network_spec(params):
sub = get_default_subnet(default_vpc, availability_zone=module.params.get("availability_zone"))
spec["SubnetId"] = sub["SubnetId"]
+ if network.get("ipv6_addresses"):
+ spec["Ipv6Addresses"] = [{"Ipv6Address": a} for a in network.get("ipv6_addresses", [])]
+
if network.get("private_ip_address"):
spec["PrivateIpAddress"] = network["private_ip_address"]
diff --git a/ansible_collections/amazon/aws/plugins/modules/s3_object.py b/ansible_collections/amazon/aws/plugins/modules/s3_object.py
index 0486d3b9f..c3e45004a 100644
--- a/ansible_collections/amazon/aws/plugins/modules/s3_object.py
+++ b/ansible_collections/amazon/aws/plugins/modules/s3_object.py
@@ -783,9 +783,6 @@ def upload_s3file(
def download_s3file(module, s3, bucket, obj, dest, retries, version=None):
if module.check_mode:
module.exit_json(msg="GET operation skipped - running in check mode", changed=True)
- # retries is the number of loops; range/xrange needs to be one
- # more to get that count of loops.
- _get_object_content(module, s3, bucket, obj, version)
optional_kwargs = {"ExtraArgs": {"VersionId": version}} if version else {}
for x in range(0, retries + 1):
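The s3_object hunk removes a premature full-content fetch ahead of the retry loop, which the changelog above ties to MemoryError exceptions on large downloads. A hedged sketch of a streaming alternative, assuming a boto3 S3 client and illustrative bucket/key/destination values; this is not the module's exact code path.

import boto3

def download_object(bucket, obj, dest, version=None):
    # download_file uses boto3's managed transfer and streams to disk in chunks,
    # so the full object body is never buffered in memory.
    s3 = boto3.client("s3")
    if version:
        s3.download_file(bucket, obj, dest, ExtraArgs={"VersionId": version})
    else:
        s3.download_file(bucket, obj, dest)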
diff --git a/ansible_collections/amazon/aws/tests/integration/targets/backup_plan/tasks/main.yml b/ansible_collections/amazon/aws/tests/integration/targets/backup_plan/tasks/main.yml
index ee8f62ec9..79e6e1479 100644
--- a/ansible_collections/amazon/aws/tests/integration/targets/backup_plan/tasks/main.yml
+++ b/ansible_collections/amazon/aws/tests/integration/targets/backup_plan/tasks/main.yml
@@ -344,12 +344,41 @@
- backup_plan_create_result.exists is true
- backup_plan_create_result.changed is false
+ - name: Create another backup plan
+ amazon.aws.backup_plan:
+ backup_plan_name: "{{ backup_plan_name }}-1"
+ rules:
+ - rule_name: daily
+ target_backup_vault_name: "{{ backup_vault_name }}"
+ tags:
+ Environment: Test
+ register: backup_plan_create_result_1
+
+ - name: Verify backup plan create result
+ ansible.builtin.assert:
+ that:
+ - backup_plan_create_result_1.exists is true
+ - backup_plan_create_result_1.changed is true
+
+  - name: Get info of all backup plans
+ amazon.aws.backup_plan_info:
+ register: backup_plan_info_result
+
+ - name: Assert that info of all backup plans is fetched
+ ansible.builtin.assert:
+ that:
+ - backup_plan_info_result is not failed
+ - backup_plan_info_result.backup_plans | length > 1
+
always:
- name: Delete AWS Backup plan created during this test
amazon.aws.backup_plan:
- backup_plan_name: "{{ backup_plan_name }}"
+ backup_plan_name: "{{ item }}"
state: absent
ignore_errors: true
+ with_items:
+ - "{{ backup_plan_name }}"
+ - "{{ backup_plan_name }}-1"
- name: Delete AWS Backup vault created during this test
amazon.aws.backup_vault:
diff --git a/ansible_collections/ansible/netcommon/CHANGELOG.rst b/ansible_collections/ansible/netcommon/CHANGELOG.rst
index b1bb37ecd..c89b8f4ce 100644
--- a/ansible_collections/ansible/netcommon/CHANGELOG.rst
+++ b/ansible_collections/ansible/netcommon/CHANGELOG.rst
@@ -4,6 +4,13 @@ Ansible Netcommon Collection Release Notes
.. contents:: Topics
+v6.1.3
+======
+
+Bugfixes
+--------
+
+- The v6.1.2 release introduced a change in cliconfbase's edit_config() signature which broke many platform cliconfs. This patch release reverts that change.
v6.1.2
======
diff --git a/ansible_collections/ansible/netcommon/FILES.json b/ansible_collections/ansible/netcommon/FILES.json
index 33f826157..bb597a0e2 100644
--- a/ansible_collections/ansible/netcommon/FILES.json
+++ b/ansible_collections/ansible/netcommon/FILES.json
@@ -25,7 +25,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b40e19a7d49799a39ac5792db41fcd718da6a6a464ac019c1392440715ebe696",
+ "chksum_sha256": "690837a751689de16b90f19af27388a4c0f8fb545e15ebef3cd4bd4ce8733c17",
"format": 1
},
{
@@ -319,7 +319,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "09c82c169ea9a27fe2017f4cebeb33b9d0c725e84c1b522bf26b11a8aece541d",
+ "chksum_sha256": "71100c142c6a0c37deb885158a6a3b7fa1f4f2c9f4254f0e4f5ad15b9968e8a7",
"format": 1
},
{
@@ -641,7 +641,7 @@
"name": "plugins/plugin_utils/cliconf_base.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65f525c5c2e1c913c0610131f3b2e410138a753a6ca74ffe133869745a8a7249",
+ "chksum_sha256": "c89984aaa5de16b37c62171f29bcec5f06feea8f4b940a0cd737da36e896791e",
"format": 1
},
{
@@ -872,7 +872,7 @@
"name": "plugins/module_utils/network/common/rm_base/resource_module.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc597d0e0ca16d8a13eafe0746e91e2c3a1a1e99d85fd3955be1fd26d0763ca5",
+ "chksum_sha256": "5cb6abe87e749395e561358871af00218dde17b81aaeee506cc468b36288229c",
"format": 1
},
{
diff --git a/ansible_collections/ansible/netcommon/MANIFEST.json b/ansible_collections/ansible/netcommon/MANIFEST.json
index 5979fa82b..178605712 100644
--- a/ansible_collections/ansible/netcommon/MANIFEST.json
+++ b/ansible_collections/ansible/netcommon/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "ansible",
"name": "netcommon",
- "version": "6.1.2",
+ "version": "6.1.3",
"authors": [
"Ansible Network Community (ansible-network)"
],
@@ -31,7 +31,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9a263caca7585b5e1422aeebea188ed2351581e7bfabdea302f518e4ebe26755",
+ "chksum_sha256": "e1c37c2702f4d5aa1bfa49cfac4d9ac59e3271cf9b8b93e9c91bd69babc76c49",
"format": 1
},
"format": 1
diff --git a/ansible_collections/ansible/netcommon/changelogs/changelog.yaml b/ansible_collections/ansible/netcommon/changelogs/changelog.yaml
index a72e63459..d071d2b3c 100644
--- a/ansible_collections/ansible/netcommon/changelogs/changelog.yaml
+++ b/ansible_collections/ansible/netcommon/changelogs/changelog.yaml
@@ -766,3 +766,11 @@ releases:
fragments:
- 614-fix-parse_cli_textfsm-doc.yaml
release_date: "2024-05-22"
+ 6.1.3:
+ changes:
+ bugfixes:
+ - The v6.1.2 release introduced a change in cliconfbase's edit_config() signature
+ which broke many platform cliconfs. This patch release reverts that change.
+ fragments:
+ - bug_653.yaml
+ release_date: "2024-05-29"
diff --git a/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/rm_base/resource_module.py b/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/rm_base/resource_module.py
index 1bbefd515..7fd97c132 100644
--- a/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/rm_base/resource_module.py
+++ b/ansible_collections/ansible/netcommon/plugins/module_utils/network/common/rm_base/resource_module.py
@@ -145,9 +145,9 @@ class ResourceModule(RmEngineBase): # pylint: disable=R0902
else:
self.addcmd(have, parser, True)
- def run_commands(self, err_responses=None):
+ def run_commands(self):
"""Send commands to the device"""
if self.commands and self.state in self.ACTION_STATES:
if not self._module.check_mode:
- self._connection.edit_config(candidate=self.commands, err_responses=err_responses)
+ self._connection.edit_config(candidate=self.commands)
self.changed = True
diff --git a/ansible_collections/ansible/netcommon/plugins/plugin_utils/cliconf_base.py b/ansible_collections/ansible/netcommon/plugins/plugin_utils/cliconf_base.py
index b3c1941df..c48873391 100644
--- a/ansible_collections/ansible/netcommon/plugins/plugin_utils/cliconf_base.py
+++ b/ansible_collections/ansible/netcommon/plugins/plugin_utils/cliconf_base.py
@@ -215,7 +215,6 @@ class CliconfBase(CliconfBaseBase):
replace=None,
diff=False,
comment=None,
- err_responses=None,
):
"""Loads the candidate configuration into the network device
@@ -235,8 +234,6 @@ class CliconfBase(CliconfBaseBase):
the file in this case should be present on the remote host in the mentioned path as a
prerequisite.
:param comment: Commit comment provided it is supported by remote host.
- :param err_responses: A list of error regexes that will be used to evaluate the responses received
- from executing the candidate command(s).
:return: Returns a json string with contains configuration applied on remote host, the returned
response on executing configuration commands and platform relevant data.
{
diff --git a/ansible_collections/ansible/windows/.azure-pipelines/azure-pipelines.yml b/ansible_collections/ansible/windows/.azure-pipelines/azure-pipelines.yml
index cbaafaa72..28d01d9db 100644
--- a/ansible_collections/ansible/windows/.azure-pipelines/azure-pipelines.yml
+++ b/ansible_collections/ansible/windows/.azure-pipelines/azure-pipelines.yml
@@ -56,6 +56,19 @@ stages:
test: units
- name: Lint
test: lint
+ - stage: Ansible_2_17
+ displayName: Ansible 2.17
+ dependsOn: []
+ jobs:
+ - template: templates/matrix.yml
+ parameters:
+ nameFormat: '{0}'
+ testFormat: '2.17/{0}'
+ targets:
+ - name: Sanity
+ test: sanity
+ - name: Units
+ test: units
- stage: Ansible_2_16
displayName: Ansible 2.16
dependsOn: []
@@ -115,6 +128,7 @@ stages:
condition: succeededOrFailed()
dependsOn:
- Ansible_devel
+ - Ansible_2_17
- Ansible_2_16
- Ansible_2_15
- Ansible_2_14
diff --git a/ansible_collections/ansible/windows/CHANGELOG.rst b/ansible_collections/ansible/windows/CHANGELOG.rst
index 48a9a9fc9..1d2c514b2 100644
--- a/ansible_collections/ansible/windows/CHANGELOG.rst
+++ b/ansible_collections/ansible/windows/CHANGELOG.rst
@@ -4,6 +4,26 @@ Ansible Windows Release Notes
.. contents:: Topics
+v2.4.0
+======
+
+Release Summary
+---------------
+
+Release summary for v2.4.0
+
+Minor Changes
+-------------
+
+- win_powershell - Added the ``sensitive_parameters`` option that can be used to pass in a SecureString or PSCredential parameter value.
+- win_setup - Added the ``ansible_win_rm_certificate_thumbprint`` fact to display the thumbprint of the certificate in use
+- win_user - Added the ability to set an account expiration date using the ``account_expires`` option - https://github.com/ansible-collections/ansible.windows/issues/610
+
+Bugfixes
+--------
+
+- setup - Provide WMI/CIM fallback for facts that rely on SMBIOS when that is unavailable
+
v2.3.0
======
diff --git a/ansible_collections/ansible/windows/FILES.json b/ansible_collections/ansible/windows/FILES.json
index ee44a2284..3c6f85f02 100644
--- a/ansible_collections/ansible/windows/FILES.json
+++ b/ansible_collections/ansible/windows/FILES.json
@@ -102,7 +102,7 @@
"name": ".azure-pipelines/azure-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4dcac98245286676cbcda2580f918f3ffdc3874d3f83c3eded64f948087bd4f8",
+ "chksum_sha256": "cd282cf96e5caf51b3ea623dd87394abc63d2c02798b164600e81c33277c76e6",
"format": 1
},
{
@@ -172,7 +172,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e96736ab27ac37496f3bd52510226d8debc87e09107d10bd10a369029ce74e2",
+ "chksum_sha256": "a83cc87a6458cb2bad52d6311232c0a2daf92b217fd132e61a4468f98fb7a3f0",
"format": 1
},
{
@@ -382,21 +382,21 @@
"name": "plugins/modules/async_status.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "396c9882e0413c0217b24c6ad3c75bed9db49959fe04b509229b88a7ae41a069",
+ "chksum_sha256": "06020b3e8a2080f55c6bec83062b33e4a127033e900a952863ea8f55f53a2183",
"format": 1
},
{
"name": "plugins/modules/async_status.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bffa3eb6c20f56ba6acb2a119f7551c856ad50b5dce32560ee079b6bc3700eaf",
+ "chksum_sha256": "3194de43e3cc735380e29cc74c87e08a0cffdcd0e1d109ccd54297bd1dc2f56e",
"format": 1
},
{
"name": "plugins/modules/setup.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7f34028f9bbbb0f4202603c4d466988f6104e4ef3c778b006e122cde7a4be141",
+ "chksum_sha256": "e846302970e97bb31b9fe3bee0db0db6144bf0b098cb1daa19d26c9db41e9cd5",
"format": 1
},
{
@@ -473,7 +473,7 @@
"name": "plugins/modules/win_command.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9dbc7f5f207a6762d45d7132e842d03054d0e0e76076da9ff27ee6af79688f73",
+ "chksum_sha256": "00c1d220110b26c84e6e29384fa274ff20bc94bfafa102519931c8a554dc3714",
"format": 1
},
{
@@ -487,7 +487,7 @@
"name": "plugins/modules/win_copy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c726f21cff63d0defce7f373bfc1129b2dff494e973da4577ae24b428da75669",
+ "chksum_sha256": "f3e89e0d9cf5d36e660d7c1f0a4ab181ad4ff09040531d579c3a83454c257e6f",
"format": 1
},
{
@@ -501,7 +501,7 @@
"name": "plugins/modules/win_dns_client.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32af082f986227b0a386b9dbcdac4075c403f8d359a868220770b5b92d8c2c68",
+ "chksum_sha256": "0f5d212ff270d0f24c827681cfc2e691c59c74821a34463f2eccfcceb45245e3",
"format": 1
},
{
@@ -515,7 +515,7 @@
"name": "plugins/modules/win_domain.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64e0f48d5ade09e1312f56242b069ff7657f7ab9f22d4507387c00ff41e816c8",
+ "chksum_sha256": "88fd8b87012c57b5472190c01bc55df338cc7810771998bda6013dac018d1f12",
"format": 1
},
{
@@ -543,7 +543,7 @@
"name": "plugins/modules/win_domain_membership.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3da29eec1655a12057d2e8940ab43972342bdf174f0961f144df2499d7e3047d",
+ "chksum_sha256": "e775a4f0cb563c2831536e9fd9943d3434924ccc1da3a27e195ceacadf82a1ed",
"format": 1
},
{
@@ -557,7 +557,7 @@
"name": "plugins/modules/win_dsc.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8136b4a00aa50dc45807961b134fdb90bda6910e11fd018f4f5ab07122504501",
+ "chksum_sha256": "8075980fc37cd982ca502b7342d00c7e34e0d1fecdebd9a593808aa13f7aa070",
"format": 1
},
{
@@ -585,7 +585,7 @@
"name": "plugins/modules/win_feature.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b88fafe01105abe19f6219f2c01c41e7ba9bbc26daf1a4d7e329cc01c84d4b5c",
+ "chksum_sha256": "eb13a591dd1963271253b12ecbe93a83abe096d39e5c4a66eacaca750c7e4406",
"format": 1
},
{
@@ -613,7 +613,7 @@
"name": "plugins/modules/win_find.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b25c7f4a8acc7d1416cf12c313e74ce8083d9e167f22542b839dbc78f12b0e52",
+ "chksum_sha256": "d7a6d27509b686ddf4dd823fb431777417435e3df501763aca81ca792e1a0ccd",
"format": 1
},
{
@@ -627,7 +627,7 @@
"name": "plugins/modules/win_get_url.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6321e8d873d003f3300812be17ca89e16eb02c40a66ec76115b2378d5f754501",
+ "chksum_sha256": "fe6003db7e4183f12484bcf43a35de468ffdaf830a1fafac16a0fb8c7ebe91b8",
"format": 1
},
{
@@ -683,7 +683,7 @@
"name": "plugins/modules/win_optional_feature.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7aa3e9d3b20fe9cbd781642bb013713b3cd00712c6f9ac84fa54b7cc897314d",
+ "chksum_sha256": "a06d2d6cbbc454ee47e0570b5f8cc36af9fd67924b22a778221cc63eba2a59e2",
"format": 1
},
{
@@ -711,7 +711,7 @@
"name": "plugins/modules/win_package.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5d8e260e703b9517f49989a843008c3b5ca9832b8189e4a1513637cf3a2e4b9",
+ "chksum_sha256": "fae52e508e3b31385893c569c6ac7d8fd79287c760e6dcd4be8e0aa5cbe4fee7",
"format": 1
},
{
@@ -725,7 +725,7 @@
"name": "plugins/modules/win_path.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2176bfeb8b9ce4f2807f98800990fd7c6f452dd5fa1d7c3569df33c4dc810d2f",
+ "chksum_sha256": "48f00e3442b256174222fa4d54df0466d13bdac8b203fdfe95ffba23a3bbe521",
"format": 1
},
{
@@ -746,14 +746,14 @@
"name": "plugins/modules/win_powershell.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49c982d0496c115b9aa7c36bdeb5f8f464dd15d9cd3869cd4cb58cca45123ba7",
+ "chksum_sha256": "e4f8681ab376cae799124ffaabd6762afc8e804990364908da712373202b1e2b",
"format": 1
},
{
"name": "plugins/modules/win_powershell.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd0defe9c53c356d3d802ce66ade7dda6da36acbd41644f40ebe645504369fb9",
+ "chksum_sha256": "ef0434693e978248462d2c8aaaed67cab9fd538258d0d7e3a6e6634df5995a12",
"format": 1
},
{
@@ -788,7 +788,7 @@
"name": "plugins/modules/win_regedit.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44657550c4693529803bb00eb8c8bcfcdc070c9ffa1e5ab17265821dc7c1e3ca",
+ "chksum_sha256": "4acca4d4995f318b0037b865ce2ab7bffe2529e86864f38573541e500ce475b9",
"format": 1
},
{
@@ -802,7 +802,7 @@
"name": "plugins/modules/win_service.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6746dc7ea566add97af6171f58291256722cb57ee9d58d0c15b3ee3b4f5e2216",
+ "chksum_sha256": "f243946d26c4ffdc48719cb3d965316cf7ce8e8d8062bff2b15bc8da03c4490e",
"format": 1
},
{
@@ -830,7 +830,7 @@
"name": "plugins/modules/win_share.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1946aa9d1564d82c759fb7cac382501b7f8153be588bc7264c454f3294e094d5",
+ "chksum_sha256": "2673bf46c720aa991019b2a4d95a8fc4d60f800672a59e82f0219fea7a20b087",
"format": 1
},
{
@@ -893,7 +893,7 @@
"name": "plugins/modules/win_updates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e4ab07dff651e6058d7c9d22c2b6d0da017e1b52021efbe9228c015fe0f33d8",
+ "chksum_sha256": "01229f020a7941a0ddec18b1078d7ac67e3f00f732600317af46695f970e25e2",
"format": 1
},
{
@@ -914,14 +914,14 @@
"name": "plugins/modules/win_user.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1f16bf57effc13db21918a25ed285262f7c86f077863023b3bb2cb84c6cd957",
+ "chksum_sha256": "3a9429387629acd359b5806e35d6f941caa21599290e1b9287fc3e708e91c045",
"format": 1
},
{
"name": "plugins/modules/win_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7db1b3fd6a9c756a18ccd1ed1482b8f9922330609a4e5b47fac50453740ea063",
+ "chksum_sha256": "88f9a3c3fb71430890dbc12a00e7649c3a436a4832df788459640330d06e31e1",
"format": 1
},
{
@@ -935,7 +935,7 @@
"name": "plugins/modules/win_user_right.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f7229aeb955ab046c90fe1d0e8dfc5af5fac7bd131e952c1f8b2331ca4bf04e",
+ "chksum_sha256": "5eb95b634ba9bd5ff5db5dfc216504a8e81c8e105eed1f08db1545642af41f53",
"format": 1
},
{
@@ -1016,6 +1016,34 @@
"format": 1
},
{
+ "name": "tests/integration/targets/async_status",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/async_status/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/async_status/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f341c844fea9c51e24e1b0f8912a9c5adcbabad63c4e4982ee66bb9f336e10f8",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/async_status/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dc703b2aafe0e1f7932183c6e082324f41e33d5c7f52daa9451093099293a682",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/module_utils_Process",
"ftype": "dir",
"chksum_type": null,
@@ -3067,17 +3095,24 @@
"format": 1
},
{
+ "name": "tests/integration/targets/win_powershell/tasks/failure.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8b0bf12292621ac91fa3c166ba79ada8baf3f4ad81d12818659d672afcdb27a7",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/win_powershell/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0be9b21e151f15c2fcbd36433cd6c17b9282955177e82a23c38be492c46274e5",
+ "chksum_sha256": "a17d207122930e08055248d1ba4375786197f1a038d96887d51b7ba5db44753f",
"format": 1
},
{
"name": "tests/integration/targets/win_powershell/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe796a80871c31908105e3973e174cb3043f978a8b2dc1b026e6f5f0ed483f7e",
+ "chksum_sha256": "36da8133d60060261f1c0a014294e1af7276c16eeb4d18fd93d14b9fda99d2f7",
"format": 1
},
{
@@ -3546,7 +3581,7 @@
"name": "tests/integration/targets/win_setup/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1297abeed14a8fd2d05d78ca5d15763c5a51201528f4fbbafea4fcdd69bab57e",
+ "chksum_sha256": "4085e31cf98c8636587eae198f8b94075b3b5138468c07b1f5a8dfb533329959",
"format": 1
},
{
@@ -4036,7 +4071,7 @@
"name": "tests/integration/targets/win_user/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62168c734727085f5ac89c1e3f23034a233b38122f35400676bb7a65e012fa2b",
+ "chksum_sha256": "4a4a1941f121cee2de54bc7057a8795654694abfabe7b175cbc0ef7711b00861",
"format": 1
},
{
@@ -4236,6 +4271,13 @@
"format": 1
},
{
+ "name": "tests/sanity/ignore-2.18.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1a959b8511ecc31322b41d41944d3e12400b7fc2034eec6b972c2c2459687e84",
+ "format": 1
+ },
+ {
"name": "tests/unit",
"ftype": "dir",
"chksum_type": null,
@@ -4442,7 +4484,7 @@
"name": "tests/utils/shippable/lint.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f945142c1487de0996b9bc46bc18c82d5c0b6c9470f0f48b87634c2e4b0eabf5",
+ "chksum_sha256": "c7bc94e43d0337251fa9f22f3ec4516604e109f7cd7fa35c7f1fa2a90f5c2ccc",
"format": 1
},
{
@@ -4505,7 +4547,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4aeb1572cd41ed5600dd26ffb7747d532a0fb1c751097c55459b4497d6aa1043",
+ "chksum_sha256": "a92bd599e373536435d100fe89ac72c259d989753cbeb18be4f39b7c4864da1c",
"format": 1
},
{
diff --git a/ansible_collections/ansible/windows/MANIFEST.json b/ansible_collections/ansible/windows/MANIFEST.json
index 18d623791..ce4a07c08 100644
--- a/ansible_collections/ansible/windows/MANIFEST.json
+++ b/ansible_collections/ansible/windows/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "ansible",
"name": "windows",
- "version": "2.3.0",
+ "version": "2.4.0",
"authors": [
"Jordan Borean @jborean93",
"Matt Davis @nitzmahone"
@@ -24,7 +24,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "364a63bb6c71b9145d657116212ea87cbaba8f3d45e8ea2e6e9d9116c8ed2305",
+ "chksum_sha256": "e7cad49eaeb35b6c7b802710e9161ebc54b0980838d2aa150721ad11ecdac331",
"format": 1
},
"format": 1
diff --git a/ansible_collections/ansible/windows/changelogs/changelog.yaml b/ansible_collections/ansible/windows/changelogs/changelog.yaml
index aaed98b98..fb03bdd08 100644
--- a/ansible_collections/ansible/windows/changelogs/changelog.yaml
+++ b/ansible_collections/ansible/windows/changelogs/changelog.yaml
@@ -712,3 +712,23 @@ releases:
- win_get_url-tls13.yml
- win_powershell-fix-using.yml
release_date: '2024-03-20'
+ 2.4.0:
+ changes:
+ bugfixes:
+ - setup - Provide WMI/CIM fallback for facts that rely on SMBIOS when that is
+ unavailable
+ minor_changes:
+ - win_powershell - Added the ``sensitive_parameters`` option that can be used
+ to pass in a SecureString or PSCredential parameter value.
+ - win_setup - Added the ``ansible_win_rm_certificate_thumbprint`` fact to display
+ the thumbprint of the certificate in use
+ - win_user - Added the ability to set an account expiration date using the ``account_expires``
+ option - https://github.com/ansible-collections/ansible.windows/issues/610
+ release_summary: Release summary for v2.4.0
+ fragments:
+ - release-2.4.0.yml
+ - setup-smbios.yml
+ - win_powershell-securestring.yml
+ - win_setup-fact.yml
+ - win_user-account-expires.yml
+ release_date: '2024-06-10'
diff --git a/ansible_collections/ansible/windows/plugins/modules/async_status.ps1 b/ansible_collections/ansible/windows/plugins/modules/async_status.ps1
index c239acf62..63dd2119a 100644
--- a/ansible_collections/ansible/windows/plugins/modules/async_status.ps1
+++ b/ansible_collections/ansible/windows/plugins/modules/async_status.ps1
@@ -6,7 +6,7 @@
$results = @{ changed = $false }
-$parsed_args = Parse-Args $args
+$parsed_args = Parse-Args $args -supports_check_mode $true
$jid = Get-AnsibleParam $parsed_args "jid" -failifempty $true -resultobj $results
$mode = Get-AnsibleParam $parsed_args "mode" -Default "status" -ValidateSet "status", "cleanup"
diff --git a/ansible_collections/ansible/windows/plugins/modules/async_status.yml b/ansible_collections/ansible/windows/plugins/modules/async_status.yml
index 34b90bfee..df3357f68 100644
--- a/ansible_collections/ansible/windows/plugins/modules/async_status.yml
+++ b/ansible_collections/ansible/windows/plugins/modules/async_status.yml
@@ -21,6 +21,24 @@ DOCUMENTATION:
type: str
choices: [cleanup, status]
default: status
+ extends_documentation_fragment:
+ - action_common_attributes
+ - action_common_attributes.flow
+ attributes:
+ action:
+ support: full
+ async:
+ support: none
+ check_mode:
+ support: full
+ version_added: '2.4.0'
+ diff_mode:
+ support: none
+ bypass_host_loop:
+ support: none
+ platform:
+ support: full
+ platforms: windows
author:
- Ansible Core Team
diff --git a/ansible_collections/ansible/windows/plugins/modules/setup.ps1 b/ansible_collections/ansible/windows/plugins/modules/setup.ps1
index 33b73c5dd..314709460 100644
--- a/ansible_collections/ansible/windows/plugins/modules/setup.ps1
+++ b/ansible_collections/ansible/windows/plugins/modules/setup.ps1
@@ -579,7 +579,21 @@ $factMeta = @(
@{
Subsets = 'bios'
Code = {
- $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ try {
+ $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ }
+ catch [System.ComponentModel.Win32Exception] {
+ # Fallback to CIM which may not work for non-admins but better
+ # than nothing
+ $win32CS = Get-CimInstance -ClassName Win32_ComputerSystem -Property Model
+ $win32Bios = Get-CimInstance -ClassName Win32_Bios -Property ReleaseDate, SMBIOSBIOSVersion, SerialNumber
+ $bios = [PSCustomObject]@{
+ ReleaseDate = $win32Bios.ReleaseDate
+ SMBIOSBIOSVersion = $win32Bios.SMBIOSBIOSVersion
+ Model = $win32CS.Model.Trim()
+ SerialNumber = $win32Bios.SerialNumber
+ }
+ }
$releaseDate = if ($bios.ReleaseDate) {
$bios.ReleaseDate.ToUniversalTime().ToString('MM/dd/yyyy')
@@ -834,7 +848,13 @@ $factMeta = @(
@{
Subsets = 'platform'
Code = {
- $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ try {
+ $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ }
+ catch [System.ComponentModel.Win32Exception] {
+ $bios = Get-CimInstance -ClassName Win32_ComputerSystem -Property Manufacturer
+ }
+
$domainInfo = New-Object -TypeName Ansible.Windows.Setup.DomainInfo
$systemInfo = New-Object -TypeName Ansible.Windows.Setup.SystemInfo
$osVersion = [Environment]::OSVersion
@@ -959,7 +979,15 @@ $factMeta = @(
@{
Subsets = 'processor'
Code = {
- $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ try {
+ $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ }
+ catch [System.ComponentModel.Win32Exception] {
+ $procCount = Get-CimInstance -ClassName Win32_ComputerSystem -Property NumberOfProcessors
+ $bios = [PSCustomObject]@{
+ ProcessorInfo = [object[]]::new($procCount.NumberOfProcessors)
+ }
+ }
$systemInfo = New-Object -TypeName Ansible.Windows.Setup.SystemInfo
$getParams = @{
@@ -1057,13 +1085,19 @@ $factMeta = @(
$certs | Sort-Object -Property NotAfter | Select-Object -First 1 | ForEach-Object -Process {
# this fact was renamed from ansible_winrm_certificate_expires due to collision with ansible_winrm_X connection var pattern
$ansibleFacts.ansible_win_rm_certificate_expires = $_.NotAfter.ToString('yyyy-MM-dd HH:mm:ss')
+ $ansibleFacts.ansible_win_rm_certificate_thumbprint = $_.Thumbprint
}
}
},
@{
Subsets = 'virtual'
Code = {
- $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ try {
+ $bios = New-Object -TypeName Ansible.Windows.Setup.SMBIOSInfo
+ }
+ catch [System.ComponentModel.Win32Exception] {
+ $bios = Get-CimInstance -ClassName Win32_ComputerSystem -Property Model, Manufacturer
+ }
$modelMap = @{
kvm = @('KVM', 'KVM Server', 'Bochs', 'AHV')
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_command.py b/ansible_collections/ansible/windows/plugins/modules/win_command.py
index 51b9147b2..daad641e2 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_command.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_command.py
@@ -113,9 +113,9 @@ EXAMPLES = r'''
ansible.windows.win_command:
# When using argv, each entry is quoted in the module
argv:
- - C:\Program Files\My Application\run.exe
- - argument 1
- - -force
+ - C:\Program Files\My Application\run.exe
+ - argument 1
+ - -force
'''
RETURN = r'''
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_copy.py b/ansible_collections/ansible/windows/plugins/modules/win_copy.py
index cd8cc7cfb..b001cd7ae 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_copy.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_copy.py
@@ -114,7 +114,7 @@ EXAMPLES = r"""
ansible.windows.win_copy:
src: /srv/myfiles/foo.conf
dest: C:\Temp\renamed-foo.conf
- backup: yes
+ backup: true
- name: Copy a single file keeping the filename
ansible.windows.win_copy:
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_dns_client.py b/ansible_collections/ansible/windows/plugins/modules/win_dns_client.py
index c0f8c8ae7..9e7fdbda6 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_dns_client.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_dns_client.py
@@ -43,16 +43,16 @@ EXAMPLES = r'''
ansible.windows.win_dns_client:
adapter_names: '*'
dns_servers:
- - 192.168.34.5
- - 192.168.34.6
+ - 192.168.34.5
+ - 192.168.34.6
log_path: C:\dns_log.txt
- name: Set IPv6 DNS servers on the adapter named Ethernet
ansible.windows.win_dns_client:
adapter_names: Ethernet
dns_servers:
- - '2001:db8::2'
- - '2001:db8::3'
+ - '2001:db8::2'
+ - '2001:db8::3'
- name: Configure all adapters whose names begin with Ethernet to use DHCP-assigned DNS values
ansible.windows.win_dns_client:
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_domain.py b/ansible_collections/ansible/windows/plugins/modules/win_domain.py
index 6dfc2f90e..0d61cadd5 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_domain.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_domain.py
@@ -100,7 +100,7 @@ EXAMPLES = r'''
- name: Create new Windows domain in a new forest with specific parameters
ansible.windows.win_domain:
- create_dns_delegation: no
+ create_dns_delegation: false
database_path: C:\Windows\NTDS
dns_domain_name: ansible.vagrant
domain_mode: Win2012R2
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_domain_membership.py b/ansible_collections/ansible/windows/plugins/modules/win_domain_membership.py
index 8d96cd0c6..25768ad17 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_domain_membership.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_domain_membership.py
@@ -68,37 +68,39 @@ reboot_required:
'''
EXAMPLES = r'''
-
# host should be a member of domain ansible.vagrant; module will ensure the hostname is mydomainclient
# and will use the passed credentials to join domain if necessary.
# Ansible connection should use local credentials if possible.
# If a reboot is required, the second task will trigger one and wait until the host is available.
-- hosts: winclient
+- name: Play to join the hosts to a domain
+ hosts: winclient
gather_facts: false
tasks:
- - ansible.windows.win_domain_membership:
- dns_domain_name: ansible.vagrant
- hostname: mydomainclient
- domain_admin_user: testguy@ansible.vagrant
- domain_admin_password: password123!
- domain_ou_path: "OU=Windows,OU=Servers,DC=ansible,DC=vagrant"
- state: domain
- register: domain_state
-
- - ansible.windows.win_reboot:
- when: domain_state.reboot_required
-
+ - name: Join host to the ansible.vagrant domain
+ ansible.windows.win_domain_membership:
+ dns_domain_name: ansible.vagrant
+ hostname: mydomainclient
+ domain_admin_user: testguy@ansible.vagrant
+ domain_admin_password: password123!
+ domain_ou_path: "OU=Windows,OU=Servers,DC=ansible,DC=vagrant"
+ state: domain
+ register: domain_state
+ - name: Reboot host after domain join
+ ansible.windows.win_reboot:
+ when: domain_state.reboot_required
# Host should be in workgroup mywg- module will use the passed credentials to clean-unjoin domain if possible.
# Ansible connection should use local credentials if possible.
# The domain admin credentials can be sourced from a vault-encrypted variable
-- hosts: winclient
+- name: Play to set the hosts' workgroup
+ hosts: winclient
gather_facts: false
tasks:
- - ansible.windows.win_domain_membership:
- workgroup_name: mywg
- domain_admin_user: '{{ win_domain_admin_user }}'
- domain_admin_password: '{{ win_domain_admin_password }}'
- state: workgroup
+ - name: Set workgroup to mywg
+ ansible.windows.win_domain_membership:
+ workgroup_name: mywg
+ domain_admin_user: '{{ win_domain_admin_user }}'
+ domain_admin_password: '{{ win_domain_admin_password }}'
+ state: workgroup
'''
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_dsc.py b/ansible_collections/ansible/windows/plugins/modules/win_dsc.py
index 7c6a5384b..324d23653 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_dsc.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_dsc.py
@@ -110,8 +110,8 @@ EXAMPLES = r'''
Key: HKEY_CURRENT_USER\ExampleKey
ValueName: TestValue
ValueData: TestData
- PsDscRunAsCredential_username: '{{ansible_user}}'
- PsDscRunAsCredential_password: '{{ansible_password}}'
+ PsDscRunAsCredential_username: '{{ ansible_user }}'
+ PsDscRunAsCredential_password: '{{ ansible_password }}'
no_log: true
- name: Create file with multiple attributes
@@ -119,8 +119,8 @@ EXAMPLES = r'''
resource_name: File
DestinationPath: C:\ansible\dsc
Attributes: # can also be a comma separated string, e.g. 'Hidden, System'
- - Hidden
- - System
+ - Hidden
+ - System
Ensure: Present
Type: Directory
@@ -143,21 +143,21 @@ EXAMPLES = r'''
State: Started
PhysicalPath: C:\inetpub\wwwroot
BindingInfo: # Example of a CimInstance[] DSC parameter (list of dicts)
- - Protocol: https
- Port: 1234
- CertificateStoreName: MY
- CertificateThumbprint: C676A89018C4D5902353545343634F35E6B3A659
- HostName: DSCTest
- IPAddress: '*'
- SSLFlags: '1'
- - Protocol: http
- Port: 4321
- IPAddress: '*'
+ - Protocol: https
+ Port: 1234
+ CertificateStoreName: MY
+ CertificateThumbprint: C676A89018C4D5902353545343634F35E6B3A659
+ HostName: DSCTest
+ IPAddress: '*'
+ SSLFlags: '1'
+ - Protocol: http
+ Port: 4321
+ IPAddress: '*'
AuthenticationInfo: # Example of a CimInstance DSC parameter (dict)
- Anonymous: no
+ Anonymous: false
Basic: true
Digest: false
- Windows: yes
+ Windows: true
'''
RETURN = r'''
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_feature.py b/ansible_collections/ansible/windows/plugins/modules/win_feature.py
index e23778fb1..de3e8e589 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_feature.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_feature.py
@@ -61,8 +61,8 @@ EXAMPLES = r'''
- name: Install IIS (Web-Server and Web-Common-Http)
ansible.windows.win_feature:
name:
- - Web-Server
- - Web-Common-Http
+ - Web-Server
+ - Web-Common-Http
state: present
- name: Install NET-Framework-Core from file
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_find.py b/ansible_collections/ansible/windows/plugins/modules/win_find.py
index 1ed25e0ae..ae4e224f3 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_find.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_find.py
@@ -119,8 +119,8 @@ EXAMPLES = r'''
- name: Find files in multiple paths
ansible.windows.win_find:
paths:
- - C:\Temp
- - D:\Temp
+ - C:\Temp
+ - D:\Temp
- name: Find files in directory while searching recursively
ansible.windows.win_find:
@@ -136,7 +136,7 @@ EXAMPLES = r'''
- name: Find files with .log and .out extension using powershell wildcards
ansible.windows.win_find:
paths: D:\Temp
- patterns: [ '*.log', '*.out' ]
+ patterns: ['*.log', '*.out']
- name: Find files in path based on regex pattern
ansible.windows.win_find:
@@ -192,8 +192,8 @@ EXAMPLES = r'''
- name: Find folders/symlinks in multiple paths
ansible.windows.win_find:
paths:
- - C:\Temp
- - D:\Temp
+ - C:\Temp
+ - D:\Temp
file_type: directory
- name: Find files and return SHA256 checksum of files found
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_get_url.py b/ansible_collections/ansible/windows/plugins/modules/win_get_url.py
index 1f6ef80f6..fc4daa065 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_get_url.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_get_url.py
@@ -116,7 +116,7 @@ EXAMPLES = r'''
dest: C:\temp\earthrise.jpg
checksum_url: http://www.example.com/sha256sum.txt
checksum_algorithm: sha256
- force: True
+ force: true
- name: Download src with sha256 checksum url
ansible.windows.win_get_url:
@@ -124,7 +124,7 @@ EXAMPLES = r'''
dest: C:\temp\earthrise.jpg
checksum: a97e6837f60cec6da4491bab387296bbcd72bdba
checksum_algorithm: sha1
- force: True
+ force: true
'''
RETURN = r'''
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_optional_feature.py b/ansible_collections/ansible/windows/plugins/modules/win_optional_feature.py
index 33d75e963..d64efa267 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_optional_feature.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_optional_feature.py
@@ -69,8 +69,8 @@ EXAMPLES = r'''
- name: Install multiple features in one task
ansible.windows.win_optional_feature:
name:
- - NetFx3
- - Microsoft-Windows-Subsystem-Linux
+ - NetFx3
+ - Microsoft-Windows-Subsystem-Linux
state: present
'''
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_package.py b/ansible_collections/ansible/windows/plugins/modules/win_package.py
index f9ce92eb2..0d293c639 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_package.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_package.py
@@ -206,22 +206,22 @@ EXAMPLES = r'''
path: http://download.microsoft.com/download/1/6/B/16B06F60-3B20-4FF2-B699-5E9B7962F9AE/VSU_4/vcredist_x64.exe
product_id: '{CF2BEA3C-26EA-32F8-AA9B-331F7E34BA97}'
arguments:
- - /install
- - /passive
- - /norestart
+ - /install
+ - /passive
+ - /norestart
- name: Install MSBuild thingy with arguments split to prevent quotes
ansible.windows.win_package:
path: https://download.visualstudio.microsoft.com/download/pr/9665567e-f580-4acd-85f2-bc94a1db745f/vs_BuildTools.exe
product_id: '{D1437F51-786A-4F57-A99C-F8E94FBA1BD8}'
arguments:
- - --norestart
- - --passive
- - --wait
- - --add
- - Microsoft.Net.Component.4.6.1.TargetingPack
- - --add
- - Microsoft.Net.Component.4.6.TargetingPack
+ - --norestart
+ - --passive
+ - --wait
+ - --add
+ - Microsoft.Net.Component.4.6.1.TargetingPack
+ - --add
+ - Microsoft.Net.Component.4.6.TargetingPack
- name: Install Remote Desktop Connection Manager from msi with a permanent log
ansible.windows.win_package:
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_path.py b/ansible_collections/ansible/windows/plugins/modules/win_path.py
index fd9f36f66..c506ea9b0 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_path.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_path.py
@@ -61,8 +61,8 @@ EXAMPLES = r'''
- name: Ensure that system32 and Powershell are present on the global system path, and in the specified order
ansible.windows.win_path:
elements:
- - '%SystemRoot%\system32'
- - '%SystemRoot%\system32\WindowsPowerShell\v1.0'
+ - '%SystemRoot%\system32'
+ - '%SystemRoot%\system32\WindowsPowerShell\v1.0'
- name: Ensure that C:\Program Files\MyJavaThing is not on the current user's CLASSPATH
ansible.windows.win_path:
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_powershell.ps1 b/ansible_collections/ansible/windows/plugins/modules/win_powershell.ps1
index 4e2aee895..1568ea5b8 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_powershell.ps1
+++ b/ansible_collections/ansible/windows/plugins/modules/win_powershell.ps1
@@ -17,6 +17,22 @@ $spec = @{
error_action = @{ type = 'str'; choices = 'silently_continue', 'continue', 'stop'; default = 'continue' }
executable = @{ type = 'str' }
parameters = @{ type = 'dict' }
+ sensitive_parameters = @{
+ type = 'list'
+ elements = 'dict'
+ options = @{
+ name = @{ type = 'str'; required = $true }
+ username = @{ type = 'str' }
+ password = @{ type = 'str'; no_log = $true }
+ value = @{ type = 'str'; no_log = $true }
+ }
+ mutually_exclusive = @(
+ , @('value', 'username')
+ , @('value', 'password')
+ )
+ required_one_of = @(, @('username', 'value'))
+ required_together = @(, @('username', 'password'))
+ }
removes = @{ type = 'str' }
script = @{ type = 'str'; required = $true }
}
@@ -717,6 +733,27 @@ $OutputEncoding = [Console]::InputEncoding = [Console]::OutputEncoding = $utf8No
$parameters[$kvp.Key] = $kvp.Value
}
}
+ if ($module.Params.sensitive_parameters) {
+ foreach ($paramDetails in $module.Params.sensitive_parameters) {
+ $value = if ($paramDetails.username) {
+ $credPass = if ($paramDetails.password) {
+ $paramDetails.password | ConvertTo-SecureString -AsPlainText -Force
+ }
+ else {
+ [securestring]::new()
+ }
+ [pscredential]::new($paramDetails.username, $credPass)
+ }
+ elseif ($paramDetails.value) {
+ $paramDetails.value | ConvertTo-SecureString -AsPlainText -Force
+ }
+ else {
+ [securestring]::new()
+ }
+
+ $parameters[$paramDetails.name] = $value
+ }
+ }
if ($supportsShouldProcess) {
# We do this last to ensure we take precedence over any user inputted settings.
$parameters.Confirm = $false # Ensure we don't block on any confirmation prompts
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_powershell.py b/ansible_collections/ansible/windows/plugins/modules/win_powershell.py
index 6eb5733d4..2bb70d65e 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_powershell.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_powershell.py
@@ -74,6 +74,39 @@ options:
- The PowerShell script to run.
type: str
required: true
+ sensitive_parameters:
+ description:
+ - Parameters to pass into the script as a SecureString or PSCredential.
+    - Each sensitive value will be marked with C(no_log) to ensure it is
+      not exposed in the module invocation args logs.
+ - The I(value) suboption can be used to create a SecureString value while
+ I(username) and I(password) can be used to create a PSCredential value.
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The name of the parameter to pass this value to.
+ required: true
+ type: str
+ value:
+ description:
+ - The string to pass as a SecureString of the parameter specified by
+ I(name).
+ - This is mutually exclusive with I(username) and I(password).
+ type: str
+ username:
+ description:
+ - The C(UserName) for the PSCredential value.
+ - This is mutually exclusive with I(value).
+ - This value is B(NOT) added to the C(no_log) list.
+ type: str
+ password:
+ description:
+ - The C(Password) for the PSCredential value.
+ - This is mutually exclusive with I(value) and must be set when
+ I(username) is provided.
+ type: str
seealso:
- module: ansible.windows.win_command
- module: ansible.windows.win_shell
@@ -149,11 +182,11 @@ EXAMPLES = r'''
$PSVersionTable.PSVersion.Major
executable: pwsh.exe
arguments:
- - -ExecutionPolicy
- - ByPass
+ - -ExecutionPolicy
+ - ByPass
register: pwsh_output
failed_when:
- - pwsh_output.output[0] != 7
+ - pwsh_output.output[0] != 7
- name: Run code in check mode
ansible.windows.win_powershell:
@@ -176,7 +209,7 @@ EXAMPLES = r'''
else {
echo 'also running in check mode'
}
- check_mode: yes
+ check_mode: true
- name: Return a failure back to Ansible
ansible.windows.win_powershell:
@@ -208,6 +241,36 @@ EXAMPLES = r'''
Write-Verbose "Hello World!"
Write-Debug "Hello World!"
+- name: Set sensitive parameter value as SecureString parameter
+ ansible.windows.win_powershell:
+ script: |
+ param(
+ [string]$Uri,
+ [SecureString]$Token
+ )
+
+ Invoke-WebRequest -Uri $Uri -Token $Token
+ parameters:
+ Uri: foo
+ sensitive_parameters:
+ - name: Token
+ value: '{{ sensitive_value }}'
+
+- name: Set credential parameter
+ ansible.windows.win_powershell:
+ script: |
+ param(
+ [string]$Uri,
+ [PSCredential]$Credential
+ )
+
+ Invoke-WebRequest -Uri $Uri -Credential $Credential
+ parameters:
+ Uri: foo
+ sensitive_parameters:
+ - name: Credential
+ username: CredUserName
+ password: '{{ sensitive_value }}'
'''
RETURN = r'''
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_regedit.py b/ansible_collections/ansible/windows/plugins/modules/win_regedit.py
index df4201b40..3266de40e 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_regedit.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_regedit.py
@@ -125,7 +125,7 @@ EXAMPLES = r'''
ansible.windows.win_regedit:
path: HKCU:\Software\MyCompany
name: hello
- data: [0xbe,0xef,0xbe,0xef,0xbe,0xef,0xbe,0xef,0xbe,0xef]
+ data: [0xbe, 0xef, 0xbe, 0xef, 0xbe, 0xef, 0xbe, 0xef, 0xbe, 0xef]
type: binary
- name: Add or update registry path MyCompany, with expand string entry 'hello'
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_service.py b/ansible_collections/ansible/windows/plugins/modules/win_service.py
index 26d5dbcab..ffbb1ce55 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_service.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_service.py
@@ -316,7 +316,7 @@ EXAMPLES = r'''
ansible.windows.win_user_right:
name: SeServiceLogonRight
users:
- - DOMAIN\User
+ - DOMAIN\User
action: add
- name: Set the log on user to a domain account
@@ -371,20 +371,20 @@ EXAMPLES = r'''
- name: Set dependencies to ones only in the list
ansible.windows.win_service:
name: service name
- dependencies: [ service1, service2 ]
+ dependencies: [service1, service2]
- name: Add dependencies to existing dependencies
ansible.windows.win_service:
name: service name
- dependencies: [ service1, service2 ]
+ dependencies: [service1, service2]
dependency_action: add
- name: Remove dependencies from existing dependencies
ansible.windows.win_service:
name: service name
dependencies:
- - service1
- - service2
+ - service1
+ - service2
dependency_action: remove
- name: Set required privileges for a service
@@ -392,8 +392,8 @@ EXAMPLES = r'''
name: service name
username: NT SERVICE\LocalService
required_privileges:
- - SeBackupPrivilege
- - SeRestorePrivilege
+ - SeBackupPrivilege
+ - SeRestorePrivilege
- name: Remove all required privileges for a service
ansible.windows.win_service:
@@ -405,12 +405,12 @@ EXAMPLES = r'''
ansible.windows.win_service:
name: service name
failure_actions:
- - type: restart
- - type: run_command
- delay_ms: 1000
- - type: restart
- delay_ms: 5000
- - type: reboot
+ - type: restart
+ - type: run_command
+ delay_ms: 1000
+ - type: restart
+ delay_ms: 5000
+ - type: reboot
failure_command: C:\Windows\System32\cmd.exe /c mkdir C:\temp
failure_reboot_msg: Restarting host because service name has failed
failure_reset_period_sec: '0xFFFFFFFF'
@@ -419,9 +419,9 @@ EXAMPLES = r'''
ansible.windows.win_service:
name: service name
failure_actions:
- - type: restart
- delay_ms: 5000
- - type: none
+ - type: restart
+ delay_ms: 5000
+ - type: none
- name: Remove failure action information
ansible.windows.win_service:
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_share.py b/ansible_collections/ansible/windows/plugins/modules/win_share.py
index 4e1dc9ec8..6d1cc0410 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_share.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_share.py
@@ -102,7 +102,7 @@ EXAMPLES = r'''
name: company
description: top secret share
path: C:\shares\company
- list: yes
+ list: true
full: Administrators,CEO
read: Global
@@ -112,7 +112,7 @@ EXAMPLES = r'''
scope_name: FCMSSQL01
description: SQL Backups
path: E:\sqlbackup
- list: yes
+ list: true
full: svc.mssql
read: Global
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_updates.py b/ansible_collections/ansible/windows/plugins/modules/win_updates.py
index f73346323..e8f9b5db7 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_updates.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_updates.py
@@ -178,25 +178,25 @@ EXAMPLES = r"""
- name: Install all security updates with automatic reboots
ansible.windows.win_updates:
category_names:
- - SecurityUpdates
+ - SecurityUpdates
reboot: true
- name: Install only particular updates based on the KB numbers
ansible.windows.win_updates:
category_names:
- - SecurityUpdates
+ - SecurityUpdates
accept_list:
- - KB4056892
- - KB4073117
+ - KB4056892
+ - KB4073117
- name: Exclude updates based on the update title
ansible.windows.win_updates:
category_names:
- - SecurityUpdates
- - CriticalUpdates
+ - SecurityUpdates
+ - CriticalUpdates
reject_list:
- - Windows Malicious Software Removal Tool for Windows
- - \d{4}-\d{2} Cumulative Update for Windows Server 2016
+ - Windows Malicious Software Removal Tool for Windows
+ - \d{4}-\d{2} Cumulative Update for Windows Server 2016
# Optionally, you can increase the reboot_timeout to survive long updates during reboot
- name: Ensure we wait long enough for the updates to be applied during reboot
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_user.ps1 b/ansible_collections/ansible/windows/plugins/modules/win_user.ps1
index 0fc6523bf..8bbe457dc 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_user.ps1
+++ b/ansible_collections/ansible/windows/plugins/modules/win_user.ps1
@@ -9,6 +9,7 @@
$spec = @{
options = @{
account_disabled = @{ type = 'bool' }
+ account_expires = @{ type = 'str' }
account_locked = @{ type = 'bool' }
description = @{ type = 'str' }
fullname = @{ type = 'str' }
@@ -31,6 +32,7 @@ $spec = @{
$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
$accountDisabled = $module.Params.account_disabled
+$accountExpiresRaw = $module.Params.account_expires
$accountLocked = $module.Params.account_locked
$description = $module.Params.description
$fullname = $module.Params.fullname
@@ -50,12 +52,45 @@ $userCannotChangePassword = $module.Params.user_cannot_change_password
$module.Diff.before = ""
$module.Diff.after = ""
+$ADS_UF_PASSWD_CANT_CHANGE = 64
+$ADS_UF_DONT_EXPIRE_PASSWD = 65536
+$DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.FFFFFFFK"
+
if ($accountLocked -eq $true) {
$module.FailJson("account_locked must be set to 'no' if provided")
}
-$ADS_UF_PASSWD_CANT_CHANGE = 64
-$ADS_UF_DONT_EXPIRE_PASSWD = 65536
+$accountExpires = if ($accountExpiresRaw) {
+ [DateTime]$dtValue = 0
+
+ # Python strftime doesn't provide a native way to emit a TZ offset in the
+ # ±HH:MM form. We need to manually convert ±HHMM into the format needed by
+ # .NET.
+ if ($accountExpiresRaw -match '[+-](\d{2})(\d{2})$') {
+ $dtLength = $accountExpiresRaw.Length - 4
+ $accountExpiresRaw = "$($accountExpiresRaw.Substring(0, $dtLength))$($Matches[1]):$($Matches[2])"
+ }
+
+ if ($accountExpiresRaw -eq 'never') {
+ $accountExpiresRaw
+ }
+ elseif ([DateTime]::TryParseExact(
+ $accountExpiresRaw,
+ [string[]]@("yyyy-MM-dd", $DATE_FORMAT),
+ [System.Globalization.CultureInfo]::InvariantCulture,
+ [System.Globalization.DateTimeStyles]::AssumeUniversal,
+ [ref]$dtValue)) {
+ $dtValue
+ }
+ else {
+ $msg = -join @(
+ "Failed to parse account_expires as datetime string. "
+ "Expecting datetime in yyyy-MM-dd or yyyy-MM-ddTHH:mm:ss.FFFFFFFK format."
+ )
+ $module.FailJson($msg)
+ }
+}
+
$ADSI = [ADSI]"WinNT://$env:COMPUTERNAME"
Function Get-AnsibleLocalGroup {
@@ -100,6 +135,13 @@ Function Get-AnsibleLocalUser {
$sid = New-Object -TypeName System.Security.Principal.SecurityIdentifier -ArgumentList $_.ObjectSid.Value, 0
$flags = $_.UserFlags.Value
+ $accountExpirationDate = if ($_.PSObject.Properties.Name.Contains('AccountExpirationDate')) {
+ $_.AccountExpirationDate.Value
+ }
+ else {
+ 'never'
+ }
+
[PSCustomObject]@{
Name = $_.Name.Value
FullName = $_.FullName.Value
@@ -107,6 +149,7 @@ Function Get-AnsibleLocalUser {
Description = $_.Description.Value
HomeDirectory = $_.HomeDirectory.Value
LoginScript = $_.LoginScript.Value
+ AccountExpires = $accountExpirationDate
PasswordExpired = [bool]$_.PasswordExpired.Value
PasswordNeverExpires = [bool]($flags -band $ADS_UF_DONT_EXPIRE_PASSWD)
Profile = $_.Profile.Value
@@ -154,8 +197,16 @@ Function Get-UserDiff {
$groups.Add($name)
}
+ $accountExpirationString = if ($User.AccountExpires -and $User.AccountExpires -ne 'never') {
+ $User.AccountExpires.ToString($DATE_FORMAT)
+ }
+ else {
+ 'never'
+ }
+
@{
account_disabled = $User.AccountDisabled
+ account_expires = $accountExpirationString
account_locked = $User.IsAccountLocked
description = $User.Description
fullname = $User.FullName
@@ -270,6 +321,18 @@ if ($state -eq 'present') {
$module.Diff.after.account_disabled = $accountDisabled
}
+ if ($null -ne $accountExpires -and $accountExpires -ne $user.AccountExpires) {
+ if ($accountExpires -eq 'never') {
+ $user.BaseObject.Put("AccountExpirationDate", ([DateTime]::new(1970, 1, 1)))
+ $module.Diff.after.account_expires = 'never'
+ }
+ else {
+ $user.BaseObject.Put("AccountExpirationDate", $accountExpires)
+ $module.Diff.after.account_expires = $accountExpires.ToString($DATE_FORMAT)
+ }
+ $module.Result.changed = $true
+ }
+
if ($null -ne $accountLocked -and $accountLocked -ne $user.IsAccountLocked) {
$user.BaseObject.IsAccountLocked = $accountLocked
$module.Result.changed = $true
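Reviewer note: the ±HHMM branch above exists because Jinja2's %z renders offsets without a colon, so a task sketch, not part of the patch, that exercises that path may help review. The account name and date are placeholders.

- name: set an expiry using the colon-less %z offset form handled by the new regex
  ansible.windows.win_user:
    name: bob                                    # placeholder account
    state: present
    account_expires: '2024-10-27T17:00:00+1000'  # rewritten to +10:00 before TryParseExact

The value is normalised to 2024-10-27T17:00:00+10:00 before TryParseExact runs, so both offset spellings documented in win_user.py parse against the same two format strings.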
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_user.py b/ansible_collections/ansible/windows/plugins/modules/win_user.py
index e9420b676..f4db547d5 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_user.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_user.py
@@ -17,6 +17,15 @@ options:
- C(true) will disable the user account.
- C(false) will clear the disabled flag.
type: bool
+ account_expires:
+ description:
+ - Set the account expiration date for the user.
+ - This value should be in the format C(%Y-%m-%d) or C(%Y-%m-%dT%H:%M:%S%z).
+ The timezone can be omitted in the long format and will default to UTC.
+ The format of C(%z) is C(±HHMM), C(±HH:MM), or C(Z) for UTC.
+ - Set the value to C(never) to remove the account expiration date.
+ type: str
+ version_added: 2.4.0
account_locked:
description:
- Only C(false) can be set and it will unlock the user account if locked.
@@ -127,6 +136,24 @@ EXAMPLES = r'''
ansible.windows.win_user:
name: bob
state: absent
+
+- name: Set an account expiration date to the 27th of October 2024 at 2:30PM UTC
+ ansible.windows.win_user:
+ name: bob
+ state: present
+ account_expires: '2024-10-27T14:30:00Z'
+
+- name: Set an account expiration 30 days in the future
+ ansible.windows.win_user:
+ name: bob
+ state: present
+ account_expires: '{{ "%Y-%m-%dT%H:%M:%S%z" | ansible.builtin.strftime(now().timestamp() + (60 * 60 * 24 * 30)) }}'
+
+- name: Remove account expiration date
+ ansible.windows.win_user:
+ name: bob
+ state: present
+ account_expires: never
'''
RETURN = r'''
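Reviewer note: a sketch, not part of the patch, of how the stored value could be read back for verification through the same WinNT ADSI property the module writes; the guard mirrors Get-AnsibleLocalUser above. The account name bob matches the documentation examples.

- name: read the expiration back through the ADSI property win_user manages
  ansible.windows.win_powershell:
    script: |
      param ([string]$UserName)

      # Bind to the local account the same way win_user.ps1 does.
      $user = [ADSI]"WinNT://$env:COMPUTERNAME/$UserName,user"

      # The property is absent when no expiration has ever been set.
      if ($user.PSObject.Properties.Name -contains 'AccountExpirationDate') {
          $user.AccountExpirationDate.Value
      }
      else {
          'never'
      }
    parameters:
      UserName: bob
  register: expiry_check

- name: show the stored expiration for review
  ansible.builtin.debug:
    var: expiry_check.output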
diff --git a/ansible_collections/ansible/windows/plugins/modules/win_user_right.py b/ansible_collections/ansible/windows/plugins/modules/win_user_right.py
index 9d5f5cc95..7399e389a 100644
--- a/ansible_collections/ansible/windows/plugins/modules/win_user_right.py
+++ b/ansible_collections/ansible/windows/plugins/modules/win_user_right.py
@@ -60,26 +60,26 @@ EXAMPLES = r'''
ansible.windows.win_user_right:
name: SeDenyInteractiveLogonRight
users:
- - Guest
- - Users
+ - Guest
+ - Users
action: set
- name: Add account to Log on as a service
ansible.windows.win_user_right:
name: SeServiceLogonRight
users:
- - .\Administrator
- - '{{ansible_hostname}}\local-user'
+ - .\Administrator
+ - '{{ansible_hostname}}\local-user'
action: add
- name: Remove accounts who can create Symbolic links
ansible.windows.win_user_right:
name: SeCreateSymbolicLinkPrivilege
users:
- - SYSTEM
- - Administrators
- - DOMAIN\User
- - group@DOMAIN.COM
+ - SYSTEM
+ - Administrators
+ - DOMAIN\User
+ - group@DOMAIN.COM
action: remove
- name: Remove all accounts who cannot log on remote interactively
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/async_status/aliases b/ansible_collections/ansible/windows/tests/integration/targets/async_status/aliases
new file mode 100644
index 000000000..ccd8a25e8
--- /dev/null
+++ b/ansible_collections/ansible/windows/tests/integration/targets/async_status/aliases
@@ -0,0 +1,2 @@
+windows
+shippable/windows/group1
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/async_status/tasks/main.yml b/ansible_collections/ansible/windows/tests/integration/targets/async_status/tasks/main.yml
new file mode 100644
index 000000000..95dcc45f0
--- /dev/null
+++ b/ansible_collections/ansible/windows/tests/integration/targets/async_status/tasks/main.yml
@@ -0,0 +1,29 @@
+- name: run async task
+ win_ping:
+ async: 30
+ poll: 2
+ register: async_res
+
+- name: assert run async task
+ assert:
+ that:
+ - async_res is finished
+ - async_res is successful
+ - async_res.ping == 'pong'
+
+- name: check mode support was added in ansible 2.18
+ when: ansible_version.full is version('2.18', '>=')
+ block:
+ - name: run async task - check mode
+ win_ping:
+ async: 30
+ poll: 2
+ register: async_res_check
+ check_mode: true
+
+ - name: assert run async task - check mode
+ assert:
+ that:
+ - async_res_check is finished
+ - async_res_check is successful
+ - async_res_check.ping == 'pong'
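Reviewer note: the new target runs async tasks inline (poll: 2); shown below, not part of the patch, is a sketch of the fire-and-forget pattern the module is normally paired with, assuming the collection's async_status exposes the same jid/finished interface as ansible.builtin.async_status. The command and timings are placeholders.

- name: start a slow command without waiting for it
  ansible.windows.win_command: powershell.exe -Command "Start-Sleep -Seconds 20"
  async: 60
  poll: 0
  register: slow_job

- name: poll the background job until it completes
  ansible.windows.async_status:
    jid: '{{ slow_job.ansible_job_id }}'
  register: job_result
  until: job_result.finished
  retries: 30
  delay: 2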
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/failure.yml b/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/failure.yml
new file mode 100644
index 000000000..ee789fc01
--- /dev/null
+++ b/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/failure.yml
@@ -0,0 +1,51 @@
+- name: expect failure when secure string value or username not provided
+ win_powershell:
+ script: '"test"'
+ sensitive_parameters:
+ - name: Param
+ register: fail_no_ss_value
+ failed_when: >-
+ fail_no_ss_value.msg != 'one of the following is required: username, value found in sensitive_parameters'
+
+- name: expect failure when secure string password is not provided with username
+
+ win_powershell:
+ script: '"test"'
+ sensitive_parameters:
+ - name: Param
+ username: user
+ register: fail_no_ss_password
+ failed_when: >-
+ fail_no_ss_password.msg != 'parameters are required together: username, password found in sensitive_parameters'
+
+- name: expect failure when secure string username is not provided with password
+ win_powershell:
+ script: '"test"'
+ sensitive_parameters:
+ - name: Param
+ password: secret
+ register: fail_no_ss_username
+ failed_when: >-
+ fail_no_ss_username.msg != 'parameters are required together: username, password found in sensitive_parameters'
+
+- name: expect failure when value and username are provided
+ win_powershell:
+ script: '"test"'
+ sensitive_parameters:
+ - name: Param
+ value: secret1
+ username: user
+ register: fail_ss_value_and_username
+ failed_when: >-
+ fail_ss_value_and_username.msg != 'parameters are mutually exclusive: value, username found in sensitive_parameters'
+
+- name: expect failure when value and password are provided
+ win_powershell:
+ script: '"test"'
+ sensitive_parameters:
+ - name: Param
+ value: secret1
+ password: secret2
+ register: fail_ss_value_and_password
+ failed_when: >-
+ fail_ss_value_and_password.msg != 'parameters are mutually exclusive: value, password found in sensitive_parameters'
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/main.yml b/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/main.yml
index 7391af1c5..c989b4aae 100644
--- a/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/main.yml
+++ b/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/main.yml
@@ -25,6 +25,9 @@
register: dt_values
changed_when: False
+- name: run failure tests
+ import_tasks: failure.yml
+
- name: run tests using current interpreter
import_tasks: tests.yml
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/tests.yml b/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/tests.yml
index bc689234d..682de3296 100644
--- a/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/tests.yml
+++ b/ansible_collections/ansible/windows/tests/integration/targets/win_powershell/tasks/tests.yml
@@ -1,914 +1,970 @@
-- name: run script with various output types
+# - name: run script with various output types
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $null
+# 'string'
+# 1
+# [IO.FileAttributes]'Hidden, Archive'
+# [IO.FileAccess]'Read'
+# [object]
+# [string]
+# [char]'a'
+# [Exception]"abc"
+
+# # Date tests
+# $epoch_unspec = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1
+# $epoch_local = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Local)
+# $epoch_utc = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Utc)
+
+# $epoch_unspec
+# $epoch_unspec.ToLocalTime()
+# $epoch_unspec.ToUniversalTime()
+
+# $epoch_local
+# $epoch_local.ToLocalTime()
+# $epoch_local.ToUniversalTime()
+
+# $epoch_utc
+# $epoch_utc.ToLocalTime()
+# $epoch_utc.ToUniversalTime()
+
+# ([DateTimeOffset]$epoch_utc).ToOffset([TimeSpan]::FromHours(2))
+
+# # List tests
+# ,@()
+# ,@(1)
+# ,@($null)
+# ,@(
+# 'entry 1',
+# $null,
+# 1,
+# @(
+# 'level2',
+# @(
+# 'level3',
+# 'value'
+# )
+# ),
+# @(),
+# @(1),
+# @($null),
+# @{
+# key = 'value'
+# exceed = Get-Item $env:SystemRoot
+# }
+# )
+
+# # Dictionary tests
+# @{}
+# @{
+# foo = 'bar'
+# list = @(
+# @{ foo = 'bar' }
+# 'value 2',
+# [string]
+# )
+# empty_list = @()
+# null_list = @($null)
+# list_with_1 = @(1)
+# nested = @{
+# foo = 'bar'
+# exceed = @{
+# foo = 'bar'
+# }
+# }
+# }
+# $hash = @{foo = 'bar'}
+# Add-Member -InputObject $hash -NotePropertyName foo -NotePropertyValue hidden
+# $hash
+
+# # Classes with properties
+# [PSCustomObject]@{
+# Key = 'value'
+# DateTime = $epoch_utc
+# Enum = [IO.FileAccess]::Read
+# List = @(
+# 'value 1', 'value 2'
+# )
+# Nested = [PSCustomObject]@{
+# Exceed = @{
+# foo = 'bar'
+# }
+# Key = 'value'
+# }
+# }
+
+# Get-Item $env:SystemRoot
+
+# register: output_types
+
+# - name: assert script with various output types
+# assert:
+# that:
+# - output_types is changed
+# - output_types.debug == []
+# - output_types.error == []
+# - output_types.host_err == ''
+# - output_types.host_out == ''
+# - output_types.information == []
+# - output_types.output|length == 28
+# - output_types.output[0] == None
+
+# - output_types.output[1] == 'string'
+
+# - output_types.output[2] == 1
+
+# - output_types.output[3]['String'] == 'Hidden, Archive'
+# - output_types.output[3]['Type'] == 'System.IO.FileAttributes'
+# - output_types.output[3]['Value'] == 34
+
+# - output_types.output[4]['String'] == 'Read'
+# - output_types.output[4]['Type'] == 'System.IO.FileAccess'
+# - output_types.output[4]['Value'] == 1
+
+# - output_types.output[5]['AssemblyQualifiedName'].startswith('System.Object, ')
+# - output_types.output[5]['BaseType'] == None
+# - output_types.output[5]['FullName'] == 'System.Object'
+# - output_types.output[5]['Name'] == 'Object'
+
+# - output_types.output[6]['AssemblyQualifiedName'].startswith('System.String, ')
+# - output_types.output[6]['BaseType']['AssemblyQualifiedName'].startswith('System.Object, ')
+# - output_types.output[6]['BaseType']['BaseType'] == None
+# - output_types.output[6]['BaseType']['FullName'] == 'System.Object'
+# - output_types.output[6]['BaseType']['Name'] == 'Object'
+# - output_types.output[6]['FullName'] == 'System.String'
+# - output_types.output[6]['Name'] == 'String'
+
+# - output_types.output[7] == 'a'
+
+# - output_types.output[8]['Data'] == {}
+# - output_types.output[8]['HResult'] == -2146233088
+# - output_types.output[8]['HelpLink'] == None
+# - output_types.output[8]['InnerException'] == None
+# - output_types.output[8]['Message'] == 'abc'
+# - output_types.output[8]['Source'] == None
+# - output_types.output[8]['StackTrace'] == None
+# - output_types.output[8]['TargetSite'] == None
+
+# - output_types.output[9] == dt_values.stdout_lines[0]
+# - output_types.output[10] == dt_values.stdout_lines[1]
+# - output_types.output[11] == dt_values.stdout_lines[2]
+
+# - output_types.output[12] == dt_values.stdout_lines[3]
+# - output_types.output[13] == dt_values.stdout_lines[4]
+# - output_types.output[14] == dt_values.stdout_lines[5]
+
+# - output_types.output[15] == dt_values.stdout_lines[6]
+# - output_types.output[16] == dt_values.stdout_lines[7]
+# - output_types.output[17] == dt_values.stdout_lines[8]
+
+# - output_types.output[18] == dt_values.stdout_lines[9]
+
+# - output_types.output[19] == []
+
+# - output_types.output[20] == [1]
+
+# - output_types.output[21] == [None]
+
+# - output_types.output[22]|length == 8
+# - output_types.output[22][0] == 'entry 1'
+# - output_types.output[22][1] == None
+# - output_types.output[22][2] == 1
+# - output_types.output[22][3] == ['level2', 'level3 value']
+# - output_types.output[22][4] == []
+# - output_types.output[22][5] == [1]
+# - output_types.output[22][6] == [None]
+# - output_types.output[22][7]['key'] == 'value'
+# - output_types.output[22][7]['exceed'] == 'C:\Windows'
+
+# - output_types.output[23] == {}
+
+# - output_types.output[24]['foo'] == 'bar'
+# - output_types.output[24]['list']|length == 3
+# - output_types.output[24]['list'][0] == 'System.Collections.Hashtable'
+# - output_types.output[24]['list'][1] == 'value 2'
+# - output_types.output[24]['list'][2] == 'System.String'
+# - output_types.output[24]['empty_list'] == []
+# - output_types.output[24]['null_list'] == [None]
+# - output_types.output[24]['list_with_1'] == [1]
+# - output_types.output[24]['nested']['exceed'] == 'System.Collections.Hashtable'
+
+# - 'output_types.output[25] == {"foo": "bar"}'
+
+# - output_types.output[26]['Key'] == 'value'
+# - output_types.output[26]['DateTime'] == '1970-01-01T00:00:00.0000000Z'
+# - output_types.output[26]['Enum']['String'] == 'Read'
+# - output_types.output[26]['Enum']['Type'] == 'System.IO.FileAccess'
+# - output_types.output[26]['Enum']['Value'] == 1
+# - output_types.output[26]['List'] == ['value 1', 'value 2']
+# - output_types.output[26]['Nested']['Exceed'] == 'System.Collections.Hashtable'
+# - output_types.output[26]['Nested']['Key'] == 'value'
+
+# - output_types.output[27]['BaseName'] == 'Windows'
+# - output_types.output[27]['Exists'] == True
+# - output_types.output[27]['FullName'] == 'C:\Windows'
+# - output_types.output[27]['PSDrive']['Name'] == 'C'
+# - output_types.output[27]['PSDrive']['Provider'] == 'Microsoft.PowerShell.Core\FileSystem'
+# - output_types.output[27]['PSProvider']['Drives'] == 'C'
+# - output_types.output[27]['PSProvider']['ImplementingType'] == 'Microsoft.PowerShell.Commands.FileSystemProvider'
+# - output_types.output[27]['PSProvider']['Name'] == 'FileSystem'
+
+# - output_types.result == {}
+# - output_types.verbose == []
+# - output_types.warning == []
+
+# - name: output with larger depth
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# depth: 3
+# script: |
+# @(
+# 'normal 0',
+# @(
+# 'normal 1',
+# @(
+# 'normal 2',
+# @(
+# 'normal 3',
+# @(
+# 'squashed',
+# @(
+# 'even more squashed'
+# )
+# )
+
+# )
+# )
+# )
+# )
+# register: higher_depth
+
+# - name: assert output with larger depth without executable
+# assert:
+# that:
+# - higher_depth.output == ['normal 0', ['normal 1', ['normal 2', ['normal 3', 'squashed System.Object[]']]]]
+# when: not pwsh_executable is defined
+
+# - name: assert output with larger depth with executable
+# assert:
+# that:
+# - higher_depth.output == ['normal 0', ['normal 1', ['normal 2', ['normal 3', 'squashed System.Collections.ArrayList']]]]
+# when: pwsh_executable is defined
+
+# - name: set explicit value on Ansible.Result
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $Ansible.Result = @(
+# (New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Utc)),
+# 'string'
+# )
+# register: result_ansible
+
+# - name: assert set explicit value on Ansible.Result
+# assert:
+# that:
+# - result_ansible is changed
+# - result_ansible.output == []
+# - result_ansible.result == ['1970-01-01T00:00:00.0000000Z', 'string']
+
+# - name: get temporary directory
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $tmp = $Ansible.Tmpdir
+# $null = New-Item -Path "$tmp\Directory" -ItemType Directory
+
+# $tmp
+# register: tmpdir
+
+# - name: check that tmpdir doesn't exist anymore
+# win_stat:
+# path: '{{ tmpdir.output[0] }}'
+# register: tmpdir_actual
+
+# - name: assert get temporary directory
+# assert:
+# that:
+# - tmpdir is changed
+# - not tmpdir_actual.stat.exists
+
+# - name: dont fail with error record
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# 'output 1'
+# Write-Error -Message 'error'
+# 'output 2'
+# register: error_record
+
+# - name: assert dont fail with error record
+# assert:
+# that:
+# - error_record is changed
+# - error_record.error|length == 1
+# - error_record.error[0]['category_info']['activity'] == 'Write-Error'
+# - error_record.error[0]['category_info']['category'] == 'NotSpecified'
+# - error_record.error[0]['category_info']['category_id'] == 0
+# - error_record.error[0]['category_info']['reason'] == 'WriteErrorException'
+# - error_record.error[0]['category_info']['target_name'] == ''
+# - error_record.error[0]['category_info']['target_type'] == ''
+# - error_record.error[0]['error_details'] == None
+# - error_record.error[0]['exception']['help_link'] == None
+# - error_record.error[0]['exception']['hresult'] == -2146233087
+# - error_record.error[0]['exception']['inner_exception'] == None
+# - error_record.error[0]['exception']['message'] == 'error'
+# - error_record.error[0]['exception']['source'] == None
+# - error_record.error[0]['exception']['type'] == 'Microsoft.PowerShell.Commands.WriteErrorException'
+# - error_record.error[0]['fully_qualified_error_id'] == 'Microsoft.PowerShell.Commands.WriteErrorException'
+# - error_record.error[0]['output'] is defined
+# - error_record.error[0]['pipeline_iteration_info'] == [0, 0]
+# - "error_record.error[0]['script_stack_trace'] == 'at <ScriptBlock>, <No file>: line 2'"
+# - error_record.error[0]['target_object'] == None
+# - error_record.output == ['output 1', 'output 2']
+
+# - name: fail with error record and ErrorActionPreference Stop
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# error_action: stop
+# script: |
+# 'output 1'
+# Write-Error -Message 'error'
+# 'output 2'
+# register: error_record_stop
+# ignore_errors: yes
+
+# - name: assert fail with error record and ErrorActionPreference Stop
+# assert:
+# that:
+# - error_record_stop is failed
+# - error_record_stop is failed
+# - error_record_stop.error|length == 1
+# - error_record_stop.error[0]['exception']['message'] == 'error'
+# - error_record_stop.output == ['output 1']
+
+# - name: output more complex error record
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# Function Test-Function {
+# [CmdletBinding()]
+# param (
+# [Parameter(Mandatory, ValueFromPipeline)]
+# [Object]
+# $InputObject
+# )
+
+# process {
+# if ($InputObject -eq 2) {
+# $errorParams = @{
+# Exception = ([ComponentModel.Win32Exception]5)
+# Message = 'error message'
+# Category = 'PermissionDenied'
+# ErrorId = 'error id'
+# TargetObject = 'some object'
+# RecommendedAction = 'recommended action'
+# CategoryActivity = 'ran pipeline'
+# CategoryReason = 'touch luck'
+# CategoryTargetName = 'target'
+# CategoryTargetType = 'directory'
+# }
+# Write-Error @errorParams
+# }
+# }
+
+# }
+# 1..3 | Test-Function
+# register: complex_error_record
+
+# - name: assert output more complex error record
+# assert:
+# that:
+# - complex_error_record is changed
+# - complex_error_record.error|length == 1
+# - complex_error_record.error[0]['category_info']['activity'] == 'Write-Error'
+# - complex_error_record.error[0]['category_info']['category'] == 'PermissionDenied'
+# - complex_error_record.error[0]['category_info']['category_id'] == 18
+# - complex_error_record.error[0]['category_info']['reason'] == 'touch luck'
+# - complex_error_record.error[0]['category_info']['target_name'] == 'target'
+# - complex_error_record.error[0]['category_info']['target_type'] == 'directory'
+# - complex_error_record.error[0]['error_details']['message'] == 'error message'
+# - complex_error_record.error[0]['error_details']['recommended_action'] == 'recommended action'
+# - complex_error_record.error[0]['exception']['help_link'] == None
+# - complex_error_record.error[0]['exception']['hresult'] == -2147467259
+# - complex_error_record.error[0]['exception']['inner_exception'] == None
+# - complex_error_record.error[0]['exception']['message'] == 'Access is denied'
+# - complex_error_record.error[0]['exception']['source'] == None
+# - complex_error_record.error[0]['exception']['type'] == 'System.ComponentModel.Win32Exception'
+# - complex_error_record.error[0]['fully_qualified_error_id'] == 'error id,Test-Function'
+# - complex_error_record.error[0]['output'] is defined
+# - complex_error_record.error[0]['pipeline_iteration_info'] == [2, 2]
+# - "complex_error_record.error[0]['script_stack_trace'] == 'at Test-Function<Process>, <No file>: line 23\\r\\nat <ScriptBlock>, <No file>: line 28'"
+# - complex_error_record.error[0]['target_object'] == 'some object'
+
+# - name: failure with terminating exception
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# 'output 1'
+# throw "exception"
+# 'output 2'
+# register: failed_exception
+# ignore_errors: yes
+
+# - name: assert failure with terminating exception
+# assert:
+# that:
+# - failed_exception is failed
+# - failed_exception.error|length == 1
+# - failed_exception.error[0]['exception']['message'] == 'exception'
+# - failed_exception.output == ['output 1']
+
+# - name: failure with Ansible.Failed
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# 'output 1'
+# $Ansible.Failed = $true
+# 'output 2'
+# register: failed_ansible
+# ignore_errors: yes
+
+# - name: assert failure with Ansible.Failed
+# assert:
+# that:
+# - failed_ansible is failed
+# - failed_ansible.error == []
+# - failed_ansible.output == ['output 1', 'output 2']
+
+# - name: Ansible.Failed cannot overwrite terminating exception
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $Ansible.Failed = $false
+# throw "exception"
+# register: term_beats_failed
+# ignore_errors: yes
+
+# - name: assert Ansible.Failed cannot overwrite terminating exception
+# assert:
+# that:
+# - term_beats_failed is failed
+# - term_beats_failed.error|length == 1
+# - term_beats_failed.error[0]['exception']['message'] == 'exception'
+
+# - name: error with interactive prompt
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: $Host.UI.ReadLine()
+# register: error_noninteractive
+# ignore_errors: yes
+
+# - name: assert error with interactive prompt
+# assert:
+# that:
+# - error_noninteractive is changed
+# - not error_noninteractive is failed # This isn't considered a terminating exception in PowerShell so no failure here.
+# - error_noninteractive.error|length == 1
+# - "'PowerShell is in NonInteractive mode. Read and Prompt functionality is not available.' in error_noninteractive.error[0]['exception']['message']"
+# - error_noninteractive.output == []
+
+# - name: run script with parameters
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# [CmdletBinding()]
+# param (
+# [String]
+# $String,
+
+# [Switch]
+# $Switch,
+
+# [Bool]
+# $Bool,
+
+# [int]
+# $Int,
+
+# [Object[]]
+# $List,
+
+# [Hashtable]
+# $Dict
+# )
+
+# @{
+# String = $String
+# Switch = $Switch
+# Bool = $Bool
+# Int = $Int
+# List = $List
+# Dict = $Dict
+# }
+# parameters:
+# String: string
+# Switch: True
+# Bool: False
+# Int: 1
+# List:
+# - abc
+# - 123
+# Dict:
+# Key: Value
+# register: parameters
+
+# - name: assert run script with parameters
+# assert:
+# that:
+# - parameters is changed
+# - parameters.output|length == 1
+# - parameters.output[0]['String'] == 'string'
+# - parameters.output[0]['Switch'] == True
+# - parameters.output[0]['Bool'] == False
+# - parameters.output[0]['Int'] == 1
+# - parameters.output[0]['List'] == ['abc', 123]
+# - "parameters.output[0]['Dict'] == {'Key': 'Value'}"
+
+# - name: write debug/verbose/warning streams
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $DebugPreference = 'Continue'
+# $VerbosePreference = 'Continue'
+# $WarningPreference = 'Continue'
+
+# Write-Debug 'debug'
+# Write-Verbose 'verbose'
+# Write-Warning 'warning'
+
+# register: extra_streams
+
+# - name: assert write debug/verbose/warning streams
+# assert:
+# that:
+# - extra_streams is changed
+# - "extra_streams.host_out == 'DEBUG: debug\\r\\nVERBOSE: verbose\\r\\nWARNING: warning\\r\\n'"
+# - extra_streams.debug == ['debug']
+# - extra_streams.verbose == ['verbose']
+# - extra_streams.warning == ['warning']
+
+# - name: output information record
+# win_powershell:
+# script: |
+# $epoch = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Utc)
+# Write-Information -MessageData $epoch -Tags tag1
+# register: info_record
+# when: use_executable.stdout | trim | bool # Information records were only added in v5
+
+# - name: assert output information record
+# assert:
+# that:
+# - info_record is changed
+# - info_record.information|length == 1
+# - info_record.information[0]['message_data'] == '1970-01-01T00:00:00.0000000Z'
+# - info_record.information[0]['source'] == 'Write-Information'
+# - info_record.information[0]['tags'] == ['tag1']
+# - info_record.information[0]['time_generated'].endswith('Z')
+# - info_record.output == []
+# when: use_executable.stdout | trim | bool
+
+# - name: verify confirmation prompts aren't called
+# win_powershell:
+# script: |
+# [CmdletBinding(SupportsShouldProcess, ConfirmImpact='High')]
+# param ()
+
+# $PSCmdlet.ShouldProcess('action')
+# register: ignore_confirm
+
+# - name: assert verify confirmation prompts aren't called
+# assert:
+# that:
+# - ignore_confirm is changed
+# - ignore_confirm.output == [True]
+
+# - name: try to run an invalid script
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# def my_function():
+# print("abc")
+
+# def main():
+# my_function()
+
+# if __name__ == '__main__':
+# main()
+
+# register: invalid_script
+# ignore_errors: yes
+
+# - name: assert try to run an invalid script
+# assert:
+# that:
+# - invalid_script is failed
+
+# - name: run script with custom location
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: $pwd.Path
+# chdir: '{{ remote_tmp_dir }}'
+# register: filesystem_chdir
+
+# - name: assert run script with custom location
+# assert:
+# that:
+# - filesystem_chdir is changed
+# - filesystem_chdir.output == [remote_tmp_dir]
+
+# - name: run script with non-filesystem location
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: $pwd.Path
+# chdir: Cert:\LocalMachine\My
+# register: cert_chdir
+
+# - name: assert run script with non-filesystem location
+# assert:
+# that:
+# - cert_chdir is changed
+# - cert_chdir.output == ['Cert:\LocalMachine\My']
+
+# - name: skip execution when not in check mode
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: echo "hi"
+# register: check
+# check_mode: yes
+
+# - name: assert skip execution when not in check mode
+# assert:
+# that:
+# - check is changed
+# - check.msg == 'skipped, running in check mode'
+# - check.output == []
+
+# - name: run check mode aware script
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# [CmdletBinding(SupportsShouldProcess)]
+# param ()
+
+# $Ansible.CheckMode
+# register: check_aware
+# check_mode: yes
+
+# - name: assert run check mode aware script
+# assert:
+# that:
+# - check_aware is changed
+# - check_aware.output == [True]
+
+# - name: SupportsShouldProcess with explicit value
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# [CmdletBinding(SupportsShouldProcess=$true)]
+# param ()
+
+# $PSCmdlet.ShouldProcess('resource')
+# register: check_aware_true
+# check_mode: yes
+
+# - name: assert SupportsShouldProcess with explicit value
+# assert:
+# that:
+# - check_aware_true is changed
+# - check_aware_true.output == [False]
+
+# - name: skip check mode with SupportsShouldProcess=$false
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# [CmdletBinding(SupportsShouldProcess=$false)]
+# param ()
+
+# $PSCmdlet.ShouldProcess('resource')
+# register: check_aware_false
+# check_mode: yes
+
+# - name: assert skip check mode with SupportsShouldProcess=$false
+# assert:
+# that:
+# - check_aware_false is changed
+# - check_aware_false.msg == 'skipped, running in check mode'
+# - check_aware_false.output == []
+
+# - name: skip check mode without SupportsShouldProcess
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# [CmdletBinding()]
+# param ()
+
+# $PSCmdlet.ShouldProcess('resource')
+# register: check_unaware
+# check_mode: yes
+
+# - name: assert skip check mode without SupportsShouldProcess
+# assert:
+# that:
+# - check_unaware is changed
+# - check_unaware.msg == 'skipped, running in check mode'
+# - check_unaware.output == []
+
+# - name: do not skip if file does not exist
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: '"output"'
+# creates: missing
+# register: creates_missing
+
+# - name: assert do not skip if file does not exist
+# assert:
+# that:
+# - creates_missing is changed
+# - creates_missing.output == ['output']
+
+# - name: skip if file exists
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: '"output"'
+# creates: '{{ remote_tmp_dir }}'
+# register: creates_exists
+
+# - name: assert skip if file exists
+# assert:
+# that:
+# - not creates_exists is changed
+# - creates_exists.msg == 'skipped, since ' + remote_tmp_dir + ' exists'
+# - creates_exists.output == []
+
+# - name: skip for creates non-filesystem
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: '"output"'
+# creates: cert:\LocalMachine\*
+# register: creates_non_fs
+
+# - name: assert skip for creates non-filesystem
+# assert:
+# that:
+# - not creates_non_fs is changed
+# - creates_non_fs.msg == 'skipped, since cert:\LocalMachine\* exists'
+# - creates_non_fs.output == []
+
+# - name: skip if removes does not exist
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: '"output"'
+# removes: C:\Windows\Missing\file.txt
+# register: removes_missing
+
+# - name: assert skip if removes does not exist
+# assert:
+# that:
+# - not removes_missing is changed
+# - removes_missing.msg == 'skipped, since C:\\Windows\\Missing\\file.txt does not exist'
+# - removes_missing.output == []
+
+# - name: do not skip if removes exists
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: '"output"'
+# removes: '{{ remote_tmp_dir }}'
+# register: removes_exists
+
+# - name: assert do not skip if removes exists
+# assert:
+# that:
+# - removes_exists is changed
+# - removes_exists.output == ['output']
+
+# - name: do not skip if removes exists non-filesystem
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: '"output"'
+# removes: cert:\LocalMachine\*
+# register: removes_exists_non_fs
+
+# - name: assert do not skip if removes exists non-filesystem
+# assert:
+# that:
+# - removes_exists_non_fs is changed
+# - removes_exists_non_fs.output == ['output']
+
+# - name: script changed status
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: $Ansible.Changed = $false
+# register: script_changed
+
+# - name: assert script changed status
+# assert:
+# that:
+# - not script_changed is changed
+
+# - name: capture console output as host output
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $poop = [Char]::ConvertFromUtf32(0x1F4A9)
+# $Host.UI.WriteLine("host café $poop")
+# $Host.UI.WriteErrorLine("error café $poop")
+
+# $subProcessCommand = [Convert]::ToBase64String([Text.Encoding]::Unicode.GetBytes({
+# $p = [Char]::ConvertFromUtf32(0x1F4A9)
+
+# [Console]::Out.WriteLine("sub stdout café $p")
+# [Console]::Error.WriteLine("sub stderr café $p")
+# }.ToString()))
+
+# # Calling a process directly goes to the output/error stream. Calling it with Start-Process with -NoNewWindow
+# # means the sub process will inherit the current console handles and should be captured in the host output.
+# $processParams = @{
+# FilePath = 'powershell.exe'
+# ArgumentList = "-EncodedCommand $subProcessCommand"
+# Wait = $true
+# NoNewWindow = $true
+# }
+# Start-Process @processParams
+
+# [Console]::Out.WriteLine("stdout café $poop")
+# [Console]::Error.WriteLine("stderr café $poop")
+# register: host_output
+
+# - name: assert capture console output as host output
+# assert:
+# that:
+# - host_output is changed
+# - host_output.host_err == 'error café 💩\r\nsub stderr café 💩\r\nstderr café 💩\r\n'
+# - host_output.host_out == 'host café 💩\r\nsub stdout café 💩\r\nstdout café 💩\r\n'
+# - host_output.error == []
+# - host_output.output == []
+
+# # Primitive types should strip out the ETS props to avoid recursive and deep nesting serialization
+# # problems. This replicates the behaviour of ConvertTo-Json in newer pwsh versions.
+# # https://github.com/ansible-collections/ansible.windows/issues/360
+# - name: output primitive types that contains heavily nested ETS properties
+# win_powershell:
+# script: |
+# $noteProp = New-Object -TypeName System.Management.Automation.PSNoteProperty -ArgumentList @(
+# 'ETSProp', [type]
+# )
+# $str = "foo" | Write-Output
+# $str.PSObject.Properties.Add($noteProp)
+# $str
+
+# $int = 1 | Write-Output
+# $int.PSObject.Properties.Add($noteProp)
+# $int
+# register: primitive_with_ets
+
+# - name: assert output primitive types that contain heavily nested ETS properties
+# assert:
+# that:
+# - primitive_with_ets is changed
+# - primitive_with_ets.error == []
+# - primitive_with_ets.output == ["foo", 1]
+
+# # TargetObject on an error record needs to use Depth properly
+# # https://github.com/ansible-collections/ansible.windows/issues/375
+# - name: output error record target object with deeply nested values
+# win_powershell:
+# script: |
+# Write-Error -Message err -TargetObject @{'1'=@{'2'=@{'3'=@{'4'=@{foo='bar'}}}}}
+# register: err_nested_to
+
+# - name: assert output error record target object with deeply nested values
+# assert:
+# that:
+# - err_nested_to is changed
+# - err_nested_to.error | length == 1
+# # Depth is 2 so it will fully enumerate 2 objects deep and on the 3rd stringify the value
+# - "err_nested_to.error[0].target_object == {'1': {'2': {'3': 'System.Collections.Hashtable'}}}"
+
+# - name: run script that sets diff output
+# win_powershell:
+# executable: '{{ pwsh_executable | default(omit) }}'
+# script: |
+# $Ansible.Diff = @{
+# before = @{
+# foo = 'foo'
+# }
+# after = @{
+# foo = 'bar'
+# nested = @{'1'=@{'2'=@{foo='bar'}}}
+# }
+# }
+
+# register: diff
+# diff: true
+
+# - name: assert run script that sets diff output
+# assert:
+# that:
+# - diff is changed
+# - "diff.diff.before == {'foo': 'foo'}"
+# - (diff.diff.after.keys() | sort) == ["foo", "nested"]
+# - diff.diff.after.foo == 'bar'
+# # Depth also controls the diff nesting
+# - "diff.diff.after.nested == {'1': {'2': 'System.Collections.Hashtable'}}"
+
+- name: run script with SecureString value
win_powershell:
executable: '{{ pwsh_executable | default(omit) }}'
script: |
- $null
- 'string'
- 1
- [IO.FileAttributes]'Hidden, Archive'
- [IO.FileAccess]'Read'
- [object]
- [string]
- [char]'a'
- [Exception]"abc"
-
- # Date tests
- $epoch_unspec = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1
- $epoch_local = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Local)
- $epoch_utc = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Utc)
-
- $epoch_unspec
- $epoch_unspec.ToLocalTime()
- $epoch_unspec.ToUniversalTime()
-
- $epoch_local
- $epoch_local.ToLocalTime()
- $epoch_local.ToUniversalTime()
-
- $epoch_utc
- $epoch_utc.ToLocalTime()
- $epoch_utc.ToUniversalTime()
-
- ([DateTimeOffset]$epoch_utc).ToOffset([TimeSpan]::FromHours(2))
-
- # List tests
- ,@()
- ,@(1)
- ,@($null)
- ,@(
- 'entry 1',
- $null,
- 1,
- @(
- 'level2',
- @(
- 'level3',
- 'value'
- )
- ),
- @(),
- @(1),
- @($null),
- @{
- key = 'value'
- exceed = Get-Item $env:SystemRoot
- }
- )
+ param ($SecureString1, $SecureString2)
- # Dictionary tests
- @{}
@{
- foo = 'bar'
- list = @(
- @{ foo = 'bar' }
- 'value 2',
- [string]
- )
- empty_list = @()
- null_list = @($null)
- list_with_1 = @(1)
- nested = @{
- foo = 'bar'
- exceed = @{
- foo = 'bar'
- }
- }
- }
- $hash = @{foo = 'bar'}
- Add-Member -InputObject $hash -NotePropertyName foo -NotePropertyValue hidden
- $hash
-
- # Classes with properties
- [PSCustomObject]@{
- Key = 'value'
- DateTime = $epoch_utc
- Enum = [IO.FileAccess]::Read
- List = @(
- 'value 1', 'value 2'
- )
- Nested = [PSCustomObject]@{
- Exceed = @{
- foo = 'bar'
- }
- Key = 'value'
- }
- }
-
- Get-Item $env:SystemRoot
-
- register: output_types
-
-- name: assert script with various output types
- assert:
- that:
- - output_types is changed
- - output_types.debug == []
- - output_types.error == []
- - output_types.host_err == ''
- - output_types.host_out == ''
- - output_types.information == []
- - output_types.output|length == 28
- - output_types.output[0] == None
-
- - output_types.output[1] == 'string'
-
- - output_types.output[2] == 1
-
- - output_types.output[3]['String'] == 'Hidden, Archive'
- - output_types.output[3]['Type'] == 'System.IO.FileAttributes'
- - output_types.output[3]['Value'] == 34
-
- - output_types.output[4]['String'] == 'Read'
- - output_types.output[4]['Type'] == 'System.IO.FileAccess'
- - output_types.output[4]['Value'] == 1
-
- - output_types.output[5]['AssemblyQualifiedName'].startswith('System.Object, ')
- - output_types.output[5]['BaseType'] == None
- - output_types.output[5]['FullName'] == 'System.Object'
- - output_types.output[5]['Name'] == 'Object'
-
- - output_types.output[6]['AssemblyQualifiedName'].startswith('System.String, ')
- - output_types.output[6]['BaseType']['AssemblyQualifiedName'].startswith('System.Object, ')
- - output_types.output[6]['BaseType']['BaseType'] == None
- - output_types.output[6]['BaseType']['FullName'] == 'System.Object'
- - output_types.output[6]['BaseType']['Name'] == 'Object'
- - output_types.output[6]['FullName'] == 'System.String'
- - output_types.output[6]['Name'] == 'String'
-
- - output_types.output[7] == 'a'
-
- - output_types.output[8]['Data'] == {}
- - output_types.output[8]['HResult'] == -2146233088
- - output_types.output[8]['HelpLink'] == None
- - output_types.output[8]['InnerException'] == None
- - output_types.output[8]['Message'] == 'abc'
- - output_types.output[8]['Source'] == None
- - output_types.output[8]['StackTrace'] == None
- - output_types.output[8]['TargetSite'] == None
-
- - output_types.output[9] == dt_values.stdout_lines[0]
- - output_types.output[10] == dt_values.stdout_lines[1]
- - output_types.output[11] == dt_values.stdout_lines[2]
-
- - output_types.output[12] == dt_values.stdout_lines[3]
- - output_types.output[13] == dt_values.stdout_lines[4]
- - output_types.output[14] == dt_values.stdout_lines[5]
-
- - output_types.output[15] == dt_values.stdout_lines[6]
- - output_types.output[16] == dt_values.stdout_lines[7]
- - output_types.output[17] == dt_values.stdout_lines[8]
-
- - output_types.output[18] == dt_values.stdout_lines[9]
-
- - output_types.output[19] == []
-
- - output_types.output[20] == [1]
-
- - output_types.output[21] == [None]
-
- - output_types.output[22]|length == 8
- - output_types.output[22][0] == 'entry 1'
- - output_types.output[22][1] == None
- - output_types.output[22][2] == 1
- - output_types.output[22][3] == ['level2', 'level3 value']
- - output_types.output[22][4] == []
- - output_types.output[22][5] == [1]
- - output_types.output[22][6] == [None]
- - output_types.output[22][7]['key'] == 'value'
- - output_types.output[22][7]['exceed'] == 'C:\Windows'
-
- - output_types.output[23] == {}
-
- - output_types.output[24]['foo'] == 'bar'
- - output_types.output[24]['list']|length == 3
- - output_types.output[24]['list'][0] == 'System.Collections.Hashtable'
- - output_types.output[24]['list'][1] == 'value 2'
- - output_types.output[24]['list'][2] == 'System.String'
- - output_types.output[24]['empty_list'] == []
- - output_types.output[24]['null_list'] == [None]
- - output_types.output[24]['list_with_1'] == [1]
- - output_types.output[24]['nested']['exceed'] == 'System.Collections.Hashtable'
-
- - 'output_types.output[25] == {"foo": "bar"}'
-
- - output_types.output[26]['Key'] == 'value'
- - output_types.output[26]['DateTime'] == '1970-01-01T00:00:00.0000000Z'
- - output_types.output[26]['Enum']['String'] == 'Read'
- - output_types.output[26]['Enum']['Type'] == 'System.IO.FileAccess'
- - output_types.output[26]['Enum']['Value'] == 1
- - output_types.output[26]['List'] == ['value 1', 'value 2']
- - output_types.output[26]['Nested']['Exceed'] == 'System.Collections.Hashtable'
- - output_types.output[26]['Nested']['Key'] == 'value'
-
- - output_types.output[27]['BaseName'] == 'Windows'
- - output_types.output[27]['Exists'] == True
- - output_types.output[27]['FullName'] == 'C:\Windows'
- - output_types.output[27]['PSDrive']['Name'] == 'C'
- - output_types.output[27]['PSDrive']['Provider'] == 'Microsoft.PowerShell.Core\FileSystem'
- - output_types.output[27]['PSProvider']['Drives'] == 'C'
- - output_types.output[27]['PSProvider']['ImplementingType'] == 'Microsoft.PowerShell.Commands.FileSystemProvider'
- - output_types.output[27]['PSProvider']['Name'] == 'FileSystem'
-
- - output_types.result == {}
- - output_types.verbose == []
- - output_types.warning == []
-
-- name: output with larger depth
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- depth: 3
- script: |
- @(
- 'normal 0',
- @(
- 'normal 1',
- @(
- 'normal 2',
- @(
- 'normal 3',
- @(
- 'squashed',
- @(
- 'even more squashed'
- )
- )
-
- )
- )
- )
- )
- register: higher_depth
-
-- name: assert output with larger depth without executable
- assert:
- that:
- - higher_depth.output == ['normal 0', ['normal 1', ['normal 2', ['normal 3', 'squashed System.Object[]']]]]
- when: not pwsh_executable is defined
-
-- name: assert output with larger depth with executable
- assert:
- that:
- - higher_depth.output == ['normal 0', ['normal 1', ['normal 2', ['normal 3', 'squashed System.Collections.ArrayList']]]]
- when: pwsh_executable is defined
-
-- name: set explicit value on Ansible.Result
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- $Ansible.Result = @(
- (New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Utc)),
- 'string'
- )
- register: result_ansible
-
-- name: assert set explicit value on Ansible.Result
- assert:
- that:
- - result_ansible is changed
- - result_ansible.output == []
- - result_ansible.result == ['1970-01-01T00:00:00.0000000Z', 'string']
-
-- name: get temporary directory
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- $tmp = $Ansible.Tmpdir
- $null = New-Item -Path "$tmp\Directory" -ItemType Directory
-
- $tmp
- register: tmpdir
-
-- name: check that tmpdir doesn't exist anymore
- win_stat:
- path: '{{ tmpdir.output[0] }}'
- register: tmpdir_actual
-
-- name: assert get temporary directory
- assert:
- that:
- - tmpdir is changed
- - not tmpdir_actual.stat.exists
-
-- name: dont fail with error record
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- 'output 1'
- Write-Error -Message 'error'
- 'output 2'
- register: error_record
-
-- name: assert dont fail with error record
- assert:
- that:
- - error_record is changed
- - error_record.error|length == 1
- - error_record.error[0]['category_info']['activity'] == 'Write-Error'
- - error_record.error[0]['category_info']['category'] == 'NotSpecified'
- - error_record.error[0]['category_info']['category_id'] == 0
- - error_record.error[0]['category_info']['reason'] == 'WriteErrorException'
- - error_record.error[0]['category_info']['target_name'] == ''
- - error_record.error[0]['category_info']['target_type'] == ''
- - error_record.error[0]['error_details'] == None
- - error_record.error[0]['exception']['help_link'] == None
- - error_record.error[0]['exception']['hresult'] == -2146233087
- - error_record.error[0]['exception']['inner_exception'] == None
- - error_record.error[0]['exception']['message'] == 'error'
- - error_record.error[0]['exception']['source'] == None
- - error_record.error[0]['exception']['type'] == 'Microsoft.PowerShell.Commands.WriteErrorException'
- - error_record.error[0]['fully_qualified_error_id'] == 'Microsoft.PowerShell.Commands.WriteErrorException'
- - error_record.error[0]['output'] is defined
- - error_record.error[0]['pipeline_iteration_info'] == [0, 0]
- - "error_record.error[0]['script_stack_trace'] == 'at <ScriptBlock>, <No file>: line 2'"
- - error_record.error[0]['target_object'] == None
- - error_record.output == ['output 1', 'output 2']
-
-- name: fail with error record and ErrorActionPreference Stop
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- error_action: stop
- script: |
- 'output 1'
- Write-Error -Message 'error'
- 'output 2'
- register: error_record_stop
- ignore_errors: yes
-
-- name: assert fail with error record and ErrorActionPreference Stop
- assert:
- that:
- - error_record_stop is failed
- - error_record_stop is failed
- - error_record_stop.error|length == 1
- - error_record_stop.error[0]['exception']['message'] == 'error'
- - error_record_stop.output == ['output 1']
-
-- name: output more complex error record
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- Function Test-Function {
- [CmdletBinding()]
- param (
- [Parameter(Mandatory, ValueFromPipeline)]
- [Object]
- $InputObject
- )
-
- process {
- if ($InputObject -eq 2) {
- $errorParams = @{
- Exception = ([ComponentModel.Win32Exception]5)
- Message = 'error message'
- Category = 'PermissionDenied'
- ErrorId = 'error id'
- TargetObject = 'some object'
- RecommendedAction = 'recommended action'
- CategoryActivity = 'ran pipeline'
- CategoryReason = 'touch luck'
- CategoryTargetName = 'target'
- CategoryTargetType = 'directory'
- }
- Write-Error @errorParams
- }
- }
-
+ SecureString1 = [System.Net.NetworkCredential]::new('', $SecureString1).Password
+ SecureString2 = [System.Net.NetworkCredential]::new('', $SecureString2).Password
}
- 1..3 | Test-Function
- register: complex_error_record
+ sensitive_parameters:
+ - name: SecureString1
+ value: ''
+ - name: SecureString2
+ value: secret
+ register: secure_string
-- name: assert output more complex error record
+- name: assert run script with SecureString value
assert:
that:
- - complex_error_record is changed
- - complex_error_record.error|length == 1
- - complex_error_record.error[0]['category_info']['activity'] == 'Write-Error'
- - complex_error_record.error[0]['category_info']['category'] == 'PermissionDenied'
- - complex_error_record.error[0]['category_info']['category_id'] == 18
- - complex_error_record.error[0]['category_info']['reason'] == 'touch luck'
- - complex_error_record.error[0]['category_info']['target_name'] == 'target'
- - complex_error_record.error[0]['category_info']['target_type'] == 'directory'
- - complex_error_record.error[0]['error_details']['message'] == 'error message'
- - complex_error_record.error[0]['error_details']['recommended_action'] == 'recommended action'
- - complex_error_record.error[0]['exception']['help_link'] == None
- - complex_error_record.error[0]['exception']['hresult'] == -2147467259
- - complex_error_record.error[0]['exception']['inner_exception'] == None
- - complex_error_record.error[0]['exception']['message'] == 'Access is denied'
- - complex_error_record.error[0]['exception']['source'] == None
- - complex_error_record.error[0]['exception']['type'] == 'System.ComponentModel.Win32Exception'
- - complex_error_record.error[0]['fully_qualified_error_id'] == 'error id,Test-Function'
- - complex_error_record.error[0]['output'] is defined
- - complex_error_record.error[0]['pipeline_iteration_info'] == [2, 2]
- - "complex_error_record.error[0]['script_stack_trace'] == 'at Test-Function<Process>, <No file>: line 23\\r\\nat <ScriptBlock>, <No file>: line 28'"
- - complex_error_record.error[0]['target_object'] == 'some object'
-
-- name: failure with terminating exception
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- 'output 1'
- throw "exception"
- 'output 2'
- register: failed_exception
- ignore_errors: yes
+ - secure_string.output[0].SecureString1 == ''
+ - secure_string.output[0].SecureString2 == 'secret'
-- name: assert failure with terminating exception
- assert:
- that:
- - failed_exception is failed
- - failed_exception.error|length == 1
- - failed_exception.error[0]['exception']['message'] == 'exception'
- - failed_exception.output == ['output 1']
-
-- name: failure with Ansible.Failed
+- name: run script with PSCredential value
win_powershell:
executable: '{{ pwsh_executable | default(omit) }}'
script: |
- 'output 1'
- $Ansible.Failed = $true
- 'output 2'
- register: failed_ansible
- ignore_errors: yes
-
-- name: assert failure with Ansible.Failed
- assert:
- that:
- - failed_ansible is failed
- - failed_ansible.error == []
- - failed_ansible.output == ['output 1', 'output 2']
-
-- name: Ansible.Failed cannot overwrite terminating exception
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- $Ansible.Failed = $false
- throw "exception"
- register: term_beats_failed
- ignore_errors: yes
-
-- name: assert Ansible.Failed cannot overwrite terminating exception
- assert:
- that:
- - term_beats_failed is failed
- - term_beats_failed.error|length == 1
- - term_beats_failed.error[0]['exception']['message'] == 'exception'
-
-- name: error with interactive prompt
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: $Host.UI.ReadLine()
- register: error_noninteractive
- ignore_errors: yes
-
-- name: assert error with interactive prompt
- assert:
- that:
- - error_noninteractive is changed
- - not error_noninteractive is failed # This isn't considered a terminating exception in PowerShell so no failure here.
- - error_noninteractive.error|length == 1
- - "'PowerShell is in NonInteractive mode. Read and Prompt functionality is not available.' in error_noninteractive.error[0]['exception']['message']"
- - error_noninteractive.output == []
-
-- name: run script with parameters
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- [CmdletBinding()]
- param (
- [String]
- $String,
-
- [Switch]
- $Switch,
-
- [Bool]
- $Bool,
-
- [int]
- $Int,
-
- [Object[]]
- $List,
-
- [Hashtable]
- $Dict
- )
+ param ($Cred1, $Cred2)
@{
- String = $String
- Switch = $Switch
- Bool = $Bool
- Int = $Int
- List = $List
- Dict = $Dict
- }
- parameters:
- String: string
- Switch: True
- Bool: False
- Int: 1
- List:
- - abc
- - 123
- Dict:
- Key: Value
- register: parameters
-
-- name: assert run script with parameters
- assert:
- that:
- - parameters is changed
- - parameters.output|length == 1
- - parameters.output[0]['String'] == 'string'
- - parameters.output[0]['Switch'] == True
- - parameters.output[0]['Bool'] == False
- - parameters.output[0]['Int'] == 1
- - parameters.output[0]['List'] == ['abc', 123]
- - "parameters.output[0]['Dict'] == {'Key': 'Value'}"
-
-- name: write debug/verbose/warning streams
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- $DebugPreference = 'Continue'
- $VerbosePreference = 'Continue'
- $WarningPreference = 'Continue'
-
- Write-Debug 'debug'
- Write-Verbose 'verbose'
- Write-Warning 'warning'
-
- register: extra_streams
-
-- name: assert write debug/verbose/warning streams
- assert:
- that:
- - extra_streams is changed
- - "extra_streams.host_out == 'DEBUG: debug\\r\\nVERBOSE: verbose\\r\\nWARNING: warning\\r\\n'"
- - extra_streams.debug == ['debug']
- - extra_streams.verbose == ['verbose']
- - extra_streams.warning == ['warning']
-
-- name: output information record
- win_powershell:
- script: |
- $epoch = New-Object -TypeName DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, ([DateTimeKind]::Utc)
- Write-Information -MessageData $epoch -Tags tag1
- register: info_record
- when: use_executable.stdout | trim | bool # Information records were only added in v5
-
-- name: assert output information record
- assert:
- that:
- - info_record is changed
- - info_record.information|length == 1
- - info_record.information[0]['message_data'] == '1970-01-01T00:00:00.0000000Z'
- - info_record.information[0]['source'] == 'Write-Information'
- - info_record.information[0]['tags'] == ['tag1']
- - info_record.information[0]['time_generated'].endswith('Z')
- - info_record.output == []
- when: use_executable.stdout | trim | bool
-
-- name: verify confirmation prompts aren't called
- win_powershell:
- script: |
- [CmdletBinding(SupportsShouldProcess, ConfirmImpact='High')]
- param ()
-
- $PSCmdlet.ShouldProcess('action')
- register: ignore_confirm
-
-- name: assert verify confirmation prompts aren't called
- assert:
- that:
- - ignore_confirm is changed
- - ignore_confirm.output == [True]
-
-- name: try to run an invalid script
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- def my_function():
- print("abc")
-
- def main():
- my_function()
-
- if __name__ == '__main__':
- main()
-
- register: invalid_script
- ignore_errors: yes
-
-- name: assert try to run an invalid script
- assert:
- that:
- - invalid_script is failed
-
-- name: run script with custom location
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: $pwd.Path
- chdir: '{{ remote_tmp_dir }}'
- register: filesystem_chdir
-
-- name: assert run script with custom location
- assert:
- that:
- - filesystem_chdir is changed
- - filesystem_chdir.output == [remote_tmp_dir]
-
-- name: run script with non-filesystem location
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: $pwd.Path
- chdir: Cert:\LocalMachine\My
- register: cert_chdir
-
-- name: assert run script with non-filesystem location
- assert:
- that:
- - cert_chdir is changed
- - cert_chdir.output == ['Cert:\LocalMachine\My']
-
-- name: skip execution when not in check mode
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: echo "hi"
- register: check
- check_mode: yes
-
-- name: assert skip execution when not in check mode
- assert:
- that:
- - check is changed
- - check.msg == 'skipped, running in check mode'
- - check.output == []
-
-- name: run check mode aware script
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- [CmdletBinding(SupportsShouldProcess)]
- param ()
-
- $Ansible.CheckMode
- register: check_aware
- check_mode: yes
-
-- name: assert run check mode aware script
- assert:
- that:
- - check_aware is changed
- - check_aware.output == [True]
-
-- name: SupportsShouldProcess with explicit value
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- [CmdletBinding(SupportsShouldProcess=$true)]
- param ()
-
- $PSCmdlet.ShouldProcess('resource')
- register: check_aware_true
- check_mode: yes
-
-- name: assert SupportsShouldProcess with explicit value
- assert:
- that:
- - check_aware_true is changed
- - check_aware_true.output == [False]
-
-- name: skip check mode with SupportsShouldProcess=$false
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- [CmdletBinding(SupportsShouldProcess=$false)]
- param ()
-
- $PSCmdlet.ShouldProcess('resource')
- register: check_aware_false
- check_mode: yes
-
-- name: assert skip check mode with SupportsShouldProcess=$false
- assert:
- that:
- - check_aware_false is changed
- - check_aware_false.msg == 'skipped, running in check mode'
- - check_aware_false.output == []
-
-- name: skip check mode without SupportsShouldProcess
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- [CmdletBinding()]
- param ()
-
- $PSCmdlet.ShouldProcess('resource')
- register: check_unaware
- check_mode: yes
-
-- name: assert skip check mode without SupportsShouldProcess
- assert:
- that:
- - check_unaware is changed
- - check_unaware.msg == 'skipped, running in check mode'
- - check_unaware.output == []
-
-- name: do not skip if file does not exist
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: '"output"'
- creates: missing
- register: creates_missing
-
-- name: assert do not skip if file does not exist
- assert:
- that:
- - creates_missing is changed
- - creates_missing.output == ['output']
-
-- name: skip if file exists
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: '"output"'
- creates: '{{ remote_tmp_dir }}'
- register: creates_exists
-
-- name: assert skip if file exists
- assert:
- that:
- - not creates_exists is changed
- - creates_exists.msg == 'skipped, since ' + remote_tmp_dir + ' exists'
- - creates_exists.output == []
-
-- name: skip for creates non-filesystem
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: '"output"'
- creates: cert:\LocalMachine\*
- register: creates_non_fs
-
-- name: assert skip for creates non-filesystem
- assert:
- that:
- - not creates_non_fs is changed
- - creates_non_fs.msg == 'skipped, since cert:\LocalMachine\* exists'
- - creates_non_fs.output == []
-
-- name: skip if removes does not exist
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: '"output"'
- removes: C:\Windows\Missing\file.txt
- register: removes_missing
-
-- name: assert skip if removes does not exist
- assert:
- that:
- - not removes_missing is changed
- - removes_missing.msg == 'skipped, since C:\\Windows\\Missing\\file.txt does not exist'
- - removes_missing.output == []
-
-- name: do not skip if removes exists
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: '"output"'
- removes: '{{ remote_tmp_dir }}'
- register: removes_exists
-
-- name: assert do not skip if removes exists
- assert:
- that:
- - removes_exists is changed
- - removes_exists.output == ['output']
-
-- name: do not skip if removes exists non-filesystem
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: '"output"'
- removes: cert:\LocalMachine\*
- register: removes_exists_non_fs
-
-- name: assert do not skip if removes exists non-filesystem
- assert:
- that:
- - removes_exists_non_fs is changed
- - removes_exists_non_fs.output == ['output']
-
-- name: script changed status
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: $Ansible.Changed = $false
- register: script_changed
-
-- name: assert script changed status
- assert:
- that:
- - not script_changed is changed
-
-- name: capture console output as host output
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- $poop = [Char]::ConvertFromUtf32(0x1F4A9)
- $Host.UI.WriteLine("host café $poop")
- $Host.UI.WriteErrorLine("error café $poop")
-
- $subProcessCommand = [Convert]::ToBase64String([Text.Encoding]::Unicode.GetBytes({
- $p = [Char]::ConvertFromUtf32(0x1F4A9)
-
- [Console]::Out.WriteLine("sub stdout café $p")
- [Console]::Error.WriteLine("sub stderr café $p")
- }.ToString()))
-
- # Calling a process directly goes to the output/error stream. Calling it with Start-Process with -NoNewWindow
- # means the sub process will inherit the current console handles and should be captured in the host output.
- $processParams = @{
- FilePath = 'powershell.exe'
- ArgumentList = "-EncodedCommand $subProcessCommand"
- Wait = $true
- NoNewWindow = $true
- }
- Start-Process @processParams
-
- [Console]::Out.WriteLine("stdout café $poop")
- [Console]::Error.WriteLine("stderr café $poop")
- register: host_output
-
-- name: assert capture console output as host output
- assert:
- that:
- - host_output is changed
- - host_output.host_err == 'error café 💩\r\nsub stderr café 💩\r\nstderr café 💩\r\n'
- - host_output.host_out == 'host café 💩\r\nsub stdout café 💩\r\nstdout café 💩\r\n'
- - host_output.error == []
- - host_output.output == []
-
-# Primitive types should strip out the ETS props to avoid recursive and deep nesting serializtion
-# problems. This replicates the behaviour of ConvertTo-Json in newer pwsh versions.
-# https://github.com/ansible-collections/ansible.windows/issues/360
-- name: output primitive types that contains heavily nested ETS properties
- win_powershell:
- script: |
- $noteProp = New-Object -TypeName System.Management.Automation.PSNoteProperty -ArgumentList @(
- 'ETSProp', [type]
- )
- $str = "foo" | Write-Output
- $str.PSObject.Properties.Add($noteProp)
- $str
-
- $int = 1 | Write-Output
- $int.PSObject.Properties.Add($noteProp)
- $int
- register: primitive_with_ets
-
-- name: assert output primitive types that contain heavily nested ETS properties
- assert:
- that:
- - primitive_with_ets is changed
- - primitive_with_ets.error == []
- - primitive_with_ets.output == ["foo", 1]
-
-# TargetObject on an error record needs to use Depth properly
-# https://github.com/ansible-collections/ansible.windows/issues/375
-- name: output error record target object with deeply nested values
- win_powershell:
- script: |
- Write-Error -Message err -TargetObject @{'1'=@{'2'=@{'3'=@{'4'=@{foo='bar'}}}}}
- register: err_nested_to
-
-- name: assert output error record target object with deeply nested values
- assert:
- that:
- - err_nested_to is changed
- - err_nested_to.error | length == 1
- # Depth is 2 so it will fully enumerate 2 objects deep and on the 3rd stringify the value
- - "err_nested_to.error[0].target_object == {'1': {'2': {'3': 'System.Collections.Hashtable'}}}"
-
-- name: run script that sets diff output
- win_powershell:
- executable: '{{ pwsh_executable | default(omit) }}'
- script: |
- $Ansible.Diff = @{
- before = @{
- foo = 'foo'
- }
- after = @{
- foo = 'bar'
- nested = @{'1'=@{'2'=@{foo='bar'}}}
- }
+ Cred1 = @{
+ UserName = $Cred1.UserName
+ Password = $Cred1.GetNetworkCredential().Password
+ }
+ Cred2 = @{
+ UserName = $Cred2.UserName
+ Password = $Cred2.GetNetworkCredential().Password
+ }
}
-
- register: diff
- diff: true
-
-- name: assert run script that sets diff output
- assert:
- that:
- - diff is changed
- - "diff.diff.before == {'foo': 'foo'}"
- - (diff.diff.after.keys() | sort) == ["foo", "nested"]
- - diff.diff.after.foo == 'bar'
- # Depth also controls the diff nesting
- - "diff.diff.after.nested == {'1': {'2': 'System.Collections.Hashtable'}}"
+ sensitive_parameters:
+ - name: Cred1
+ username: user1
+ password: ''
+ - name: Cred2
+ username: user2
+ password: secret
+ register: pscredential
+
+- name: assert run script with PSCredential value
+ assert:
+ that:
+ - pscredential.output[0].Cred1.UserName == 'user1'
+ - pscredential.output[0].Cred1.Password == ''
+ - pscredential.output[0].Cred2.UserName == 'user2'
+ - pscredential.output[0].Cred2.Password == 'secret'
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/win_setup/tasks/main.yml b/ansible_collections/ansible/windows/tests/integration/targets/win_setup/tasks/main.yml
index 3c7b30712..35697475a 100644
--- a/ansible_collections/ansible/windows/tests/integration/targets/win_setup/tasks/main.yml
+++ b/ansible_collections/ansible/windows/tests/integration/targets/win_setup/tasks/main.yml
@@ -58,6 +58,7 @@
assert:
that:
- "setup_result.ansible_facts.ansible_win_rm_certificate_expires"
+ - "setup_result.ansible_facts.ansible_win_rm_certificate_thumbprint"
when: ansible_ssh_port|default(5986) != 5985
- name: test gather_subset "!all"
diff --git a/ansible_collections/ansible/windows/tests/integration/targets/win_user/tasks/tests.yml b/ansible_collections/ansible/windows/tests/integration/targets/win_user/tasks/tests.yml
index 99ac39a8a..adec51fe4 100644
--- a/ansible_collections/ansible/windows/tests/integration/targets/win_user/tasks/tests.yml
+++ b/ansible_collections/ansible/windows/tests/integration/targets/win_user/tasks/tests.yml
@@ -424,6 +424,176 @@
- "win_user_invalid_group_result.msg"
- win_user_invalid_group_result.msg is match("group 'Userz' not found")
+- name: get expected dates based on remote timezone
+ win_powershell:
+ script: |
+ [DateTime]::new(2040, 10, 27, 0, 0, 0, [DateTimeKind]::Utc).ToLocalTime()
+ [DateTime]::new(2040, 10, 27, 14, 30, 0, [DateTimeKind]::Utc).ToLocalTime()
+ [DateTime]::new(2040, 10, 27, 13, 30, 0, [DateTimeKind]::Utc).ToLocalTime()
+ [DateTimeOffset]::new(
+ [DateTime]::new(2040, 10, 27, 14, 30, 0, [DateTimeKind]::Unspecified),
+ (New-TimeSpan -Hours 2)).UtcDateTime.ToLocalTime()
+ register: expected_dates
+
+- name: expect failure when account_expires is invalid
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: invalid
+ register: account_expires_invalid
+ failed_when:
+ - >-
+ account_expires_invalid.msg != "Failed to parse account_expires as datetime string. Expecting datetime in yyyy-MM-dd or yyyy-MM-ddTHH:mm:ss.FFFFFFFK format."
+
+- name: set account expiration date - short form
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27'
+ register: account_expires_short_result
+
+- name: get result of set account expiration date - short form
+ win_powershell:
+ script: param ($Name); Get-LocalUser -Name $Name | Select-Object -ExpandProperty AccountExpires
+ parameters:
+ Name: '{{ test_win_user_name }}'
+ register: account_expires_short_result_actual
+
+- name: assert set account expiration date - short form
+ assert:
+ that:
+ - account_expires_short_result is changed
+ - account_expires_short_result_actual.output == [expected_dates.output[0]]
+
+- name: set account expiration date - short form - idempotent
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27'
+ register: account_expires_short_result_again
+
+- name: assert set account expiration date - short form - idempotent
+ assert:
+ that:
+ - not account_expires_short_result_again is changed
+
+- name: set account expiration date - long form
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27T14:30:00'
+ register: account_expires_long_result
+
+- name: get result of set account expiration date - long form
+ win_powershell:
+ script: param ($Name); Get-LocalUser -Name $Name | Select-Object -ExpandProperty AccountExpires
+ parameters:
+ Name: '{{ test_win_user_name }}'
+ register: account_expires_long_result_actual
+
+- name: assert set account expiration date - long form
+ assert:
+ that:
+ - account_expires_long_result is changed
+ - account_expires_long_result_actual.output == [expected_dates.output[1]]
+
+- name: set account expiration date - long form - idempotent
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27T14:30:00'
+ register: account_expires_long_result_again
+
+- name: assert set account expiration date - long form - idempotent
+ assert:
+ that:
+ - not account_expires_long_result_again is changed
+
+- name: set account expiration date - long form with Z
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27T13:30:00Z'
+ register: account_expires_long_z_result
+
+- name: get result of set account expiration date - long form with Z
+ win_powershell:
+ script: param ($Name); Get-LocalUser -Name $Name | Select-Object -ExpandProperty AccountExpires
+ parameters:
+ Name: '{{ test_win_user_name }}'
+ register: account_expires_long_z_result_actual
+
+- name: assert set account expiration date - long form with Z
+ assert:
+ that:
+ - account_expires_long_z_result is changed
+ - account_expires_long_z_result_actual.output == [expected_dates.output[2]]
+
+- name: set account expiration date - long form with Z - idempotent
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27T13:30:00Z'
+ register: account_expires_long_z_result_again
+
+- name: assert set account expiration date - long form with Z - idempotent
+ assert:
+ that:
+      - not account_expires_long_z_result_again is changed
+
+- name: set account expiration date - long form with tz
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27T14:30:00+0200'
+ register: account_expires_long_tz_result
+
+- name: get result of set account expiration date - long form with tz
+ win_powershell:
+ script: param ($Name); Get-LocalUser -Name $Name | Select-Object -ExpandProperty AccountExpires
+ parameters:
+ Name: '{{ test_win_user_name }}'
+ register: account_expires_long_tz_result_actual
+
+- name: assert set account expiration date - long form with tz
+ assert:
+ that:
+ - account_expires_long_tz_result is changed
+ - account_expires_long_tz_result_actual.output == [expected_dates.output[3]]
+
+- name: set account expiration date - long form with tz - idempotent
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: '2040-10-27T14:30:00+02:00'
+ register: account_expires_long_tz_result_again
+
+- name: assert set account expiration date - long form with tz - idempotent
+ assert:
+ that:
+ - not account_expires_long_tz_result_again is changed
+
+- name: remove account expiration date
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: never
+ register: account_expires_remove_result
+
+- name: get result of remove account expiration date
+ win_powershell:
+ script: param ($Name); Get-LocalUser -Name $Name | Select-Object -ExpandProperty AccountExpires
+ parameters:
+ Name: '{{ test_win_user_name }}'
+ register: account_expires_remove_result_actual
+
+- name: assert remove account expiration date
+ assert:
+ that:
+ - account_expires_remove_result is changed
+ - account_expires_remove_result_actual.output == []
+
+- name: remove account expiration date - idempotent
+ win_user:
+ name: '{{ test_win_user_name }}'
+ account_expires: never
+ register: account_expires_remove_result_again
+
+- name: assert remove account expiration date - idempotent
+ assert:
+ that:
+ - not account_expires_remove_result_again is changed
+
- name: remove existing test user if present (check mode)
win_user:
name: '{{ test_win_user_name }}'
diff --git a/ansible_collections/ansible/windows/tests/sanity/ignore-2.18.txt b/ansible_collections/ansible/windows/tests/sanity/ignore-2.18.txt
new file mode 100644
index 000000000..e0f85ea92
--- /dev/null
+++ b/ansible_collections/ansible/windows/tests/sanity/ignore-2.18.txt
@@ -0,0 +1 @@
+tests/integration/targets/win_dsc/files/xTestCompositeDsc/1.0.0/DSCResources/xTestComposite/xTestComposite.schema.psm1 pslint!skip # Pwsh cannot parse DSC to MOF on Linux
diff --git a/ansible_collections/ansible/windows/tests/utils/shippable/lint.sh b/ansible_collections/ansible/windows/tests/utils/shippable/lint.sh
index 12b5b4cd2..9d60edb2a 100755
--- a/ansible_collections/ansible/windows/tests/utils/shippable/lint.sh
+++ b/ansible_collections/ansible/windows/tests/utils/shippable/lint.sh
@@ -5,6 +5,6 @@ set -o pipefail -eux
# This is aligned with the galaxy-importer used by AH
# https://github.com/ansible/galaxy-importer/blob/d4b5e6d12088ba452f129f4824bd049be5543358/setup.cfg#L22C4-L22C33
python -m pip install \
- 'ansible-lint>=6.2.2,<=6.14.3'
+ 'ansible-lint>=6.2.2,<=6.22.1'
ansible-lint
diff --git a/ansible_collections/ansible_community.py b/ansible_collections/ansible_community.py
index 274eb0ec7..3d15428bf 100644
--- a/ansible_collections/ansible_community.py
+++ b/ansible_collections/ansible_community.py
@@ -15,7 +15,7 @@ def main():
parser.add_argument(
'--version',
action='version',
- version='Ansible community version 10.0.1',
+ version='Ansible community version 10.1.0',
help="show the version of the Ansible community package",
)
parser.parse_args()
diff --git a/ansible_collections/ansible_release.py b/ansible_collections/ansible_release.py
index 8717270b0..078d02a9b 100644
--- a/ansible_collections/ansible_release.py
+++ b/ansible_collections/ansible_release.py
@@ -3,4 +3,4 @@
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-ansible_version = '10.0.1'
\ No newline at end of file
+ansible_version = '10.1.0'
\ No newline at end of file
diff --git a/ansible_collections/awx/awx/FILES.json b/ansible_collections/awx/awx/FILES.json
index c006332f0..f3b57203b 100644
--- a/ansible_collections/awx/awx/FILES.json
+++ b/ansible_collections/awx/awx/FILES.json
@@ -8,493 +8,500 @@
"format": 1
},
{
- "name": "COPYING",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7c50cd9b85e2b7eebaea2b5618b402862b01d5a66befff8e41401ef3f14e471a",
+ "name": "images",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "TESTING.md",
+ "name": "images/completeness_test_output.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4691e79c8038d8e985610fb613cd2f4799d4740b0a6ca1b72d3266528088a272",
+ "chksum_sha256": "6367684c4b5edd3e1e8fdcb9270d68ca54040d5d17108734f3d3a2b9df5878ba",
"format": 1
},
{
- "name": "bindep.txt",
+ "name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7205dda85d2cd5501b3344e9f18e4acd09583056aab5e8a05554ba29a3b8fad8",
+ "chksum_sha256": "41997bded547ba6a168ee41fd5379a44f20dcd155979067564b79d14af6e3fd3",
"format": 1
},
{
- "name": "images",
+ "name": "test",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "images/completeness_test_output.png",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6367684c4b5edd3e1e8fdcb9270d68ca54040d5d17108734f3d3a2b9df5878ba",
+ "name": "test/awx",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "requirements.txt",
+ "name": "test/awx/test_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2eb11923e1347afc5075a7871e206a8f15a68471c90012f7386e9db0875e70bf",
+ "chksum_sha256": "436c13933936e7b80dd26c61ea1dbf492c13974f2922f1543c4fe6e6b0fab0dd",
"format": 1
},
{
- "name": "meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/test_schedule.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dadfd1c19c4c828dd84128ca484b837c6a904a09e92bcee12cb7cda408562c81",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "test/awx/test_job.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3bf2312f22f63103185491d64c013cc23cb829f9600b8e6876c98a16e7d4a0c1",
+ "chksum_sha256": "76ba45e14438425f7511d196613928d64253e1912a45b71ea842b1cb2c3ca335",
"format": 1
},
{
- "name": "plugins",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/conftest.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b5f885b3aba783c30905d472f18ba4db207ee4955c1107e0621a08d6b57c4271",
"format": 1
},
{
- "name": "plugins/doc_fragments",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/test_bulk.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a10cc6ca47f1cd560e5adff57f88b4628b6ff3ec20874a751ae57d7474cfb7d5",
"format": 1
},
{
- "name": "plugins/doc_fragments/auth_legacy.py",
+ "name": "test/awx/test_team.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c2f10b81ecb89088c7c295430d4a71de26e3700b26e8344cdc7950908a738fd3",
+ "chksum_sha256": "cbbdbdb3be0b0d80dcfcf337ed0095774cf73ef0e937d3e8dc5abab21739db5d",
"format": 1
},
{
- "name": "plugins/doc_fragments/auth.py",
+ "name": "test/awx/test_role_user_assignment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83f0ecfb936c89bd2b589eff8710617080f539e943f38bf57b45b4eb753564e8",
+ "chksum_sha256": "62e76a504873f12b631f99a7275a6673e98964258f0775606860a28d7e3efc73",
"format": 1
},
{
- "name": "plugins/doc_fragments/auth_plugin.py",
+ "name": "test/awx/test_module_utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1c61a9880edca852582f58c635c32264b0d1b3218ebdeb9db9d23081267c070",
+ "chksum_sha256": "a5118e383f1370175dc7900ec3abae2ee53bd77ecf8853ca333ffcbf625b216f",
"format": 1
},
{
- "name": "plugins/inventory",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/test_inventory.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "70eac0cf78806e37406137fcfb97e5a249fd6b091b1f18e812278573049a4111",
"format": 1
},
{
- "name": "plugins/inventory/controller.py",
+ "name": "test/awx/test_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29e7cd36a2b18ee616e31cbbec6a6e103f3f18ebe13f2bb87167b915163ca4bf",
+ "chksum_sha256": "9d4aec4ecb6a4429c08125de3c7b1161db794a2ed32cebd9e593d14fd90375d4",
"format": 1
},
{
- "name": "plugins/lookup",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/test_token.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "118145cdd5f6a03df7a7a608d5f9e510236b2a54f9bcd456f4294ba69f0f4fad",
"format": 1
},
{
- "name": "plugins/lookup/controller_api.py",
+ "name": "test/awx/test_role_definition.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e79f19c9dee4fa0c3a88126a630fa6163249c332d73a44370f64836e22d4b27",
+ "chksum_sha256": "da3d110330ea7f35d99701fb748e71ada4bc458b6c5f66be5a317c433e3976ea",
"format": 1
},
{
- "name": "plugins/lookup/schedule_rrule.py",
+ "name": "test/awx/test_instance_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c3ec7b8f134eca3a9f04156213b584792fc4e3397e3b9f82b5044e9ec662c7a2",
+ "chksum_sha256": "9ce22bf5e6baa63ab096c9377478f8a3af33624def33e52753342e435924e573",
"format": 1
},
{
- "name": "plugins/lookup/schedule_rruleset.py",
+ "name": "test/awx/test_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0141249f5cbe0651f96d3ba0a627a4d7e7376bd0e5b2b29e63ef44f9c243feb3",
+ "chksum_sha256": "1ecf188e82d4c848de64c8f7fd7af2d4adb6887c6a448771ff51bb43c4fa8128",
"format": 1
},
{
- "name": "plugins/module_utils",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/test_credential.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1bfcf99f4de13b3cc4a76c7f33fa3a51fde175afff5730b4743695cbeb69643",
"format": 1
},
{
- "name": "plugins/module_utils/awxkit.py",
+ "name": "test/awx/test_workflow_job_template_node.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b2398e4e7893f203b26f6c85d510cc4c41a79c53e1937710807233e62e35f58",
+ "chksum_sha256": "0806356bfd91b28153baa63ca8cbf8f7da1125dd5150e38e73aa37c65e236f6b",
"format": 1
},
{
- "name": "plugins/module_utils/controller_api.py",
+ "name": "test/awx/test_organization.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5b5a041efc42a832ebef422bac6283da1e846a7d6f3d8dcdcd5ad797de50747",
+ "chksum_sha256": "091d48906e4bf5ffaceead8f49281e05184cefce1546f387ce29a232d6694ec9",
"format": 1
},
{
- "name": "plugins/modules",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "test/awx/test_job_template.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2f0e924d79cd0b2bccbe4664e27ec07cb96a274d289229d80be09687f099171e",
"format": 1
},
{
- "name": "plugins/modules/__init__.py",
+ "name": "test/awx/test_project.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fbb93e524df51b788f12746ffb52bf5105f67b3ae6b89403bed51ed1f2da9c12",
"format": 1
},
{
- "name": "plugins/modules/ad_hoc_command_wait.py",
+ "name": "test/awx/test_export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67bc716ec04dfc77cb751cda6013ee54fa0cd3ed3afabc5ba0d146cc9712c996",
+ "chksum_sha256": "c24a35265af8ff90f6456d39d0cc84cc9ce765d9fc3d45a4a8ac945e2538ff6b",
"format": 1
},
{
- "name": "plugins/modules/controller_meta.py",
+ "name": "test/awx/test_instance.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c66ebbe3a0eab6a9d28d517824ebf8478afdf14981c6c931f08592503c243cdd",
+ "chksum_sha256": "2b60fecf79c63341b0eece0d9941a655dfeac89b3565e78f5bb39c5ec77b91af",
"format": 1
},
{
- "name": "plugins/modules/import.py",
+ "name": "test/awx/test_label.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7a03186251ef644ba03c49e7e23a799f8046abddb9ea20fff68dd09fe759680",
+ "chksum_sha256": "cd957d0b0cab6dd51539baf3fb27b659b91a8e57b20aae4c5cce7eaec9cec494",
"format": 1
},
{
- "name": "plugins/modules/job_cancel.py",
+ "name": "test/awx/test_completeness.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d64f698909919b05c9c47a65f24c861c3cabe33c039944f6120d49a2ac7d40da",
+ "chksum_sha256": "56bc8d4297a9c7c4fdb712fd838803cc9233023288f5555f3e3d229117604eee",
"format": 1
},
{
- "name": "plugins/modules/job_list.py",
+ "name": "test/awx/test_application.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ea8024bfc9612c005745a13a508c40d320b4c204bf18fcd495f72789d9adb40",
+ "chksum_sha256": "a106d5fbffbe1eaec36d8247979ca637ee733a29abf94d955c48be8d2fd16842",
"format": 1
},
{
- "name": "plugins/modules/job_wait.py",
+ "name": "test/awx/test_inventory_source.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e7459abf351f6c172401eec4ba579dc8566f8a55fd022cc8eec9fa5a3399067",
+ "chksum_sha256": "a14f69db1bf6cec594e64c2963b415560b78eac1f9cbe7d4c09586b494e11bde",
"format": 1
},
{
- "name": "plugins/modules/workflow_node_wait.py",
+ "name": "test/awx/test_workflow_job_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b19778b005fbaa3e0a3abc645a6d6452bc0ad52e89fe04141d051f6ddafbb73",
+ "chksum_sha256": "9e23b2e1055900a86dc2fb42165ca567936b319b69d3000c36434df28c6ae007",
"format": 1
},
{
- "name": "plugins/modules/ad_hoc_command_cancel.py",
+ "name": "test/awx/test_ad_hoc_wait.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3338e10af9ccd0e4178b8e1ec1e7064b00ab90e64665f846a2123f10d9d151f4",
+ "chksum_sha256": "daed2a74d3f64fd0300255050dc8c732158db401323f44da66ccb4bf84b59633",
"format": 1
},
{
- "name": "plugins/modules/subscriptions.py",
+ "name": "test/awx/test_notification_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f497ab9ada8f89650422bf85deef386e32b774dfff9e1de07b387fba32d890a8",
+ "chksum_sha256": "f40d5b65fbc78d12570f37799c8e240cfb90d9948421d3db82af6427fd14854f",
"format": 1
},
{
- "name": "plugins/modules/workflow_approval.py",
+ "name": "test/awx/test_credential_type.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "533e52fc20ca99e935154f5ba3ec30c2055f42d1c51c3bc2cf1570af7f951c33",
+ "chksum_sha256": "1fe388a0c19f08006c7718766d5faa79540dd3b14547ced43b5a237a2c2fd877",
"format": 1
},
{
- "name": "plugins/modules/bulk_host_create.py",
+ "name": "test/awx/test_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9398fec791845d1ddb304cd344d308127f89ce6ed5a8341f2c6047b4e2d22ed4",
+ "chksum_sha256": "f9520b058e16e4e4800d3a5f70cd28650a365fa357afa1d41a8c63bf3354027e",
"format": 1
},
{
- "name": "plugins/modules/bulk_job_launch.py",
+ "name": "test/awx/test_role_team_assignment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f19617e62f57b0a81fb9ebbf209583fbd9c74d3e2b85ca0102b2fd1b2bd77b81",
+ "chksum_sha256": "f3ac9236893e4737e6eb32b3a3d217d13d520de63ecd6050debdf157a968ec7a",
"format": 1
},
{
- "name": "plugins/modules/credential_input_source.py",
+ "name": "test/awx/test_credential_input_source.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78ce109c0cc51aa8d66f0146ac9f448559b186cc2b155ee2a8ed96cebe65909d",
+ "chksum_sha256": "9637a418c0b0e59261ec0d1c206ff2d3574a41a8a169068bbf74588e3a4214b2",
"format": 1
},
{
- "name": "plugins/modules/credential_type.py",
+ "name": "COPYING",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1867c68c3b43c0f27a8e87dcbb66b22fdfc1cd659cc1747a6686573f6a7d6be9",
+ "chksum_sha256": "7c50cd9b85e2b7eebaea2b5618b402862b01d5a66befff8e41401ef3f14e471a",
"format": 1
},
{
- "name": "plugins/modules/group.py",
+ "name": "TESTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7dc98af3cbdfc5ea0cf4cffe0c86c846837dfebe15c16939ed863b730cb05578",
+ "chksum_sha256": "4691e79c8038d8e985610fb613cd2f4799d4740b0a6ca1b72d3266528088a272",
"format": 1
},
{
- "name": "plugins/modules/host.py",
+ "name": "bindep.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b973c5f3790dc0c084e97a6f6b0c87209632f1ff348bc36466cc13392e774c79",
+ "chksum_sha256": "7205dda85d2cd5501b3344e9f18e4acd09583056aab5e8a05554ba29a3b8fad8",
"format": 1
},
{
- "name": "plugins/modules/instance_group.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "14c7f97d44a266a9d73d8ef1fc85f8c15ecbab611ff8133026ef943e211ba7c2",
+ "name": "meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/inventory.py",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1a430ae0b6371f884c14a9022552846f5174100ef02601a6ece649cb45a9e7e9",
+ "chksum_sha256": "3bf2312f22f63103185491d64c013cc23cb829f9600b8e6876c98a16e7d4a0c1",
"format": 1
},
{
- "name": "plugins/modules/inventory_source.py",
+ "name": "requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "279efe4103630b4961baeb468c43f9d75c657c0beb36ece73982d31080931403",
+ "chksum_sha256": "2eb11923e1347afc5075a7871e206a8f15a68471c90012f7386e9db0875e70bf",
"format": 1
},
{
- "name": "plugins/modules/inventory_source_update.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e0b79d76d7d9f817f709a8dacbcb2a105a214c33e63449decaec65adebac6d74",
+ "name": "plugins",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/job_launch.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1c26e876232d5658537b9d041879f93e024e52c901551b5e6ad89354d6023d71",
+ "name": "plugins/module_utils",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/job_template.py",
+ "name": "plugins/module_utils/controller_api.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77a9774f8baae9a417aa3f7170f4866307ea3e7f29fbe93a00df98f33e8c99b0",
+ "chksum_sha256": "752c0049712a8a2ed7122f5f84f89123346cc57b7fe3c017d6b89f2d527fdf55",
"format": 1
},
{
- "name": "plugins/modules/label.py",
+ "name": "plugins/module_utils/awxkit.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3bde75b41fd4c92037f759ae00e9ebd76f27c91ab54857f167715db1930b0a8",
+ "chksum_sha256": "8b2398e4e7893f203b26f6c85d510cc4c41a79c53e1937710807233e62e35f58",
"format": 1
},
{
- "name": "plugins/modules/license.py",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/application.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf54b3a3c82fb705cb36a81cdab5e3cc25c8c8a798f3f43d22927c3164e97e69",
+ "chksum_sha256": "ad6aca46b5d2dcd602f0c419fd1e3269dca61c5b896a0742f0283d8b50dd651d",
"format": 1
},
{
- "name": "plugins/modules/notification_template.py",
+ "name": "plugins/modules/workflow_job_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "638dbb75383b11b1227b2120424ee4f9f37861747c9aa5fca6df0665fe97fc86",
+ "chksum_sha256": "18fa1ae60ba7c409cd3baf67215ebf3b8d680e10a1402ca66f362166ab668cb4",
"format": 1
},
{
- "name": "plugins/modules/organization.py",
+ "name": "plugins/modules/instance_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c46808d096cba86747d9e9d69da660d44341c885fb966e4ffad8499b7d8d055",
+ "chksum_sha256": "14c7f97d44a266a9d73d8ef1fc85f8c15ecbab611ff8133026ef943e211ba7c2",
"format": 1
},
{
- "name": "plugins/modules/project.py",
+ "name": "plugins/modules/credential_type.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d65ade1efc240c94eadc37d984cfec8094c655ed4d162243d170e947bad4b425",
+ "chksum_sha256": "1867c68c3b43c0f27a8e87dcbb66b22fdfc1cd659cc1747a6686573f6a7d6be9",
"format": 1
},
{
- "name": "plugins/modules/project_update.py",
+ "name": "plugins/modules/workflow_approval.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "952bedbca07dba7de277849b45eb258f51420ed8e814fa35acd47dc5e5f8f82f",
+ "chksum_sha256": "533e52fc20ca99e935154f5ba3ec30c2055f42d1c51c3bc2cf1570af7f951c33",
"format": 1
},
{
- "name": "plugins/modules/role.py",
+ "name": "plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31aebaae562881a3a9ecea61f5d26747559cbed69dcf45fbe21d9a65412ea56c",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/schedule.py",
+ "name": "plugins/modules/group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "517c76a73dae1fadc7c47c0c8544ae00c6f88daff7a3e72c67aa9501265177ac",
+ "chksum_sha256": "7dc98af3cbdfc5ea0cf4cffe0c86c846837dfebe15c16939ed863b730cb05578",
"format": 1
},
{
- "name": "plugins/modules/team.py",
+ "name": "plugins/modules/subscriptions.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e53a544c12fc2de705bce163e3babf6d95fae6d088f1415a61debeb07d60f991",
+ "chksum_sha256": "f497ab9ada8f89650422bf85deef386e32b774dfff9e1de07b387fba32d890a8",
"format": 1
},
{
- "name": "plugins/modules/token.py",
+ "name": "plugins/modules/notification_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc35a007403827d4994793d9bb4a5d573d9b532c605feee6d97b119037ead203",
+ "chksum_sha256": "79bbaa845c53b3e21d6fd606f9450282ccdf3f6792262dad4b8f2a6a5bc7321e",
"format": 1
},
{
- "name": "plugins/modules/user.py",
+ "name": "plugins/modules/ad_hoc_command_cancel.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd3cc005d12434b3cac4238b3dfc0d2976f53f0c2e8c0dafc02953ad1393e377",
+ "chksum_sha256": "3338e10af9ccd0e4178b8e1ec1e7064b00ab90e64665f846a2123f10d9d151f4",
"format": 1
},
{
- "name": "plugins/modules/workflow_job_template.py",
+ "name": "plugins/modules/schedule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18fa1ae60ba7c409cd3baf67215ebf3b8d680e10a1402ca66f362166ab668cb4",
+ "chksum_sha256": "517c76a73dae1fadc7c47c0c8544ae00c6f88daff7a3e72c67aa9501265177ac",
"format": 1
},
{
- "name": "plugins/modules/workflow_job_template_node.py",
+ "name": "plugins/modules/job_launch.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "023e170fb1db59ec4d4acecee21befc2aa03199110368d4bd0682ad4adf84092",
+ "chksum_sha256": "1c26e876232d5658537b9d041879f93e024e52c901551b5e6ad89354d6023d71",
"format": 1
},
{
- "name": "plugins/modules/ad_hoc_command.py",
+ "name": "plugins/modules/inventory_source_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e0bc12df3832ad4c12eb7977c3dd69bee9eb0afdbd88755c06f01c2c42146869",
+ "chksum_sha256": "e0b79d76d7d9f817f709a8dacbcb2a105a214c33e63449decaec65adebac6d74",
"format": 1
},
{
- "name": "plugins/modules/execution_environment.py",
+ "name": "plugins/modules/role_team_assignment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e00a63a1ddccfef4ad20725799ba67dbdfd8b9c720d9f04a92f05683bf7dc4e1",
+ "chksum_sha256": "a0c07c5132ab9f02701357d5ec0e0bb2ee2d75c36aded59edf8fb422aa868aa1",
"format": 1
},
{
- "name": "plugins/modules/export.py",
+ "name": "plugins/modules/role_user_assignment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b62aa153c8a819461f3bcbd27a4de731e477dffc4d05def8829687901b71aec9",
+ "chksum_sha256": "06a4bf55937ea3b80cb2b418a8f6d858b861cd8391e4f7fe5e6765ff7eb0ab9b",
"format": 1
},
{
- "name": "plugins/modules/settings.py",
+ "name": "plugins/modules/project.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c62170baca6d9ecf0be7a68d148daa75569e87a6ddffb92a39f3bc4ea08e9e0a",
+ "chksum_sha256": "d65ade1efc240c94eadc37d984cfec8094c655ed4d162243d170e947bad4b425",
"format": 1
},
{
- "name": "plugins/modules/application.py",
+ "name": "plugins/modules/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b79e9f80c4ef79fbacd134d4239f6eae08971838c3a57b79143e57a08bd836d9",
+ "chksum_sha256": "cd3cc005d12434b3cac4238b3dfc0d2976f53f0c2e8c0dafc02953ad1393e377",
"format": 1
},
{
- "name": "plugins/modules/bulk_host_delete.py",
+ "name": "plugins/modules/inventory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "546fb1eb2104db87c1d608144590e38d361af2ba734caa62fc61586e49a124ad",
+ "chksum_sha256": "1a430ae0b6371f884c14a9022552846f5174100ef02601a6ece649cb45a9e7e9",
"format": 1
},
{
- "name": "plugins/modules/credential.py",
+ "name": "plugins/modules/role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6d52d95b7e59a41b5d3715c3b55857c1b70baf7a0e8e44e9cc66db67b87685f",
+ "chksum_sha256": "31aebaae562881a3a9ecea61f5d26747559cbed69dcf45fbe21d9a65412ea56c",
"format": 1
},
{
- "name": "plugins/modules/instance.py",
+ "name": "plugins/modules/project_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fb4467c11809837fee04ed77ddcf154414070a0b0458742e8d6dcb3be045a9f3",
+ "chksum_sha256": "952bedbca07dba7de277849b45eb258f51420ed8e814fa35acd47dc5e5f8f82f",
"format": 1
},
{
- "name": "plugins/modules/role_definition.py",
+ "name": "plugins/modules/label.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8a1eada7ef01e9cb72c38a45c42334960cac6fc5bfce1304103a332f8e99537",
+ "chksum_sha256": "f3bde75b41fd4c92037f759ae00e9ebd76f27c91ab54857f167715db1930b0a8",
"format": 1
},
{
- "name": "plugins/modules/role_team_assignment.py",
+ "name": "plugins/modules/workflow_job_template_node.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0c07c5132ab9f02701357d5ec0e0bb2ee2d75c36aded59edf8fb422aa868aa1",
+ "chksum_sha256": "023e170fb1db59ec4d4acecee21befc2aa03199110368d4bd0682ad4adf84092",
"format": 1
},
{
- "name": "plugins/modules/role_user_assignment.py",
+ "name": "plugins/modules/controller_meta.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06a4bf55937ea3b80cb2b418a8f6d858b861cd8391e4f7fe5e6765ff7eb0ab9b",
+ "chksum_sha256": "c66ebbe3a0eab6a9d28d517824ebf8478afdf14981c6c931f08592503c243cdd",
"format": 1
},
{
@@ -505,241 +512,241 @@
"format": 1
},
{
- "name": "test",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/credential_input_source.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "78ce109c0cc51aa8d66f0146ac9f448559b186cc2b155ee2a8ed96cebe65909d",
"format": 1
},
{
- "name": "test/awx",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/token.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cc35a007403827d4994793d9bb4a5d573d9b532c605feee6d97b119037ead203",
"format": 1
},
{
- "name": "test/awx/test_ad_hoc_wait.py",
+ "name": "plugins/modules/inventory_source.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "daed2a74d3f64fd0300255050dc8c732158db401323f44da66ccb4bf84b59633",
+ "chksum_sha256": "279efe4103630b4961baeb468c43f9d75c657c0beb36ece73982d31080931403",
"format": 1
},
{
- "name": "test/awx/test_application.py",
+ "name": "plugins/modules/settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a106d5fbffbe1eaec36d8247979ca637ee733a29abf94d955c48be8d2fd16842",
+ "chksum_sha256": "c62170baca6d9ecf0be7a68d148daa75569e87a6ddffb92a39f3bc4ea08e9e0a",
"format": 1
},
{
- "name": "test/awx/test_credential_input_source.py",
+ "name": "plugins/modules/ad_hoc_command.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9637a418c0b0e59261ec0d1c206ff2d3574a41a8a169068bbf74588e3a4214b2",
+ "chksum_sha256": "bbde58f92c013fbdf9ca5cd697a0505fcde9057e64c9c3efb5a64da8999d58a4",
"format": 1
},
{
- "name": "test/awx/test_credential_type.py",
+ "name": "plugins/modules/execution_environment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1fe388a0c19f08006c7718766d5faa79540dd3b14547ced43b5a237a2c2fd877",
+ "chksum_sha256": "e00a63a1ddccfef4ad20725799ba67dbdfd8b9c720d9f04a92f05683bf7dc4e1",
"format": 1
},
{
- "name": "test/awx/test_group.py",
+ "name": "plugins/modules/job_list.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ecf188e82d4c848de64c8f7fd7af2d4adb6887c6a448771ff51bb43c4fa8128",
+ "chksum_sha256": "2ea8024bfc9612c005745a13a508c40d320b4c204bf18fcd495f72789d9adb40",
"format": 1
},
{
- "name": "test/awx/test_instance_group.py",
+ "name": "plugins/modules/bulk_host_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ce22bf5e6baa63ab096c9377478f8a3af33624def33e52753342e435924e573",
+ "chksum_sha256": "9398fec791845d1ddb304cd344d308127f89ce6ed5a8341f2c6047b4e2d22ed4",
"format": 1
},
{
- "name": "test/awx/test_inventory.py",
+ "name": "plugins/modules/team.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70eac0cf78806e37406137fcfb97e5a249fd6b091b1f18e812278573049a4111",
+ "chksum_sha256": "e53a544c12fc2de705bce163e3babf6d95fae6d088f1415a61debeb07d60f991",
"format": 1
},
{
- "name": "test/awx/test_job.py",
+ "name": "plugins/modules/bulk_job_launch.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76ba45e14438425f7511d196613928d64253e1912a45b71ea842b1cb2c3ca335",
+ "chksum_sha256": "f19617e62f57b0a81fb9ebbf209583fbd9c74d3e2b85ca0102b2fd1b2bd77b81",
"format": 1
},
{
- "name": "test/awx/test_label.py",
+ "name": "plugins/modules/import.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd957d0b0cab6dd51539baf3fb27b659b91a8e57b20aae4c5cce7eaec9cec494",
+ "chksum_sha256": "b4bbaaf0b4c4454bbed2299fa3c58e22d00aafd9e83023f7052c5a53e088046e",
"format": 1
},
{
- "name": "test/awx/test_settings.py",
+ "name": "plugins/modules/job_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "436c13933936e7b80dd26c61ea1dbf492c13974f2922f1543c4fe6e6b0fab0dd",
+ "chksum_sha256": "77a9774f8baae9a417aa3f7170f4866307ea3e7f29fbe93a00df98f33e8c99b0",
"format": 1
},
{
- "name": "test/awx/test_team.py",
+ "name": "plugins/modules/organization.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cbbdbdb3be0b0d80dcfcf337ed0095774cf73ef0e937d3e8dc5abab21739db5d",
+ "chksum_sha256": "0c46808d096cba86747d9e9d69da660d44341c885fb966e4ffad8499b7d8d055",
"format": 1
},
{
- "name": "test/awx/test_token.py",
+ "name": "plugins/modules/job_cancel.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "118145cdd5f6a03df7a7a608d5f9e510236b2a54f9bcd456f4294ba69f0f4fad",
+ "chksum_sha256": "d64f698909919b05c9c47a65f24c861c3cabe33c039944f6120d49a2ac7d40da",
"format": 1
},
{
- "name": "test/awx/test_user.py",
+ "name": "plugins/modules/license.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9520b058e16e4e4800d3a5f70cd28650a365fa357afa1d41a8c63bf3354027e",
+ "chksum_sha256": "cf54b3a3c82fb705cb36a81cdab5e3cc25c8c8a798f3f43d22927c3164e97e69",
"format": 1
},
{
- "name": "test/awx/test_workflow_job_template_node.py",
+ "name": "plugins/modules/host.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0806356bfd91b28153baa63ca8cbf8f7da1125dd5150e38e73aa37c65e236f6b",
+ "chksum_sha256": "b973c5f3790dc0c084e97a6f6b0c87209632f1ff348bc36466cc13392e774c79",
"format": 1
},
{
- "name": "test/awx/test_inventory_source.py",
+ "name": "plugins/modules/bulk_host_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a14f69db1bf6cec594e64c2963b415560b78eac1f9cbe7d4c09586b494e11bde",
+ "chksum_sha256": "546fb1eb2104db87c1d608144590e38d361af2ba734caa62fc61586e49a124ad",
"format": 1
},
{
- "name": "test/awx/test_module_utils.py",
+ "name": "plugins/modules/role_definition.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5118e383f1370175dc7900ec3abae2ee53bd77ecf8853ca333ffcbf625b216f",
+ "chksum_sha256": "e8a1eada7ef01e9cb72c38a45c42334960cac6fc5bfce1304103a332f8e99537",
"format": 1
},
{
- "name": "test/awx/test_project.py",
+ "name": "plugins/modules/export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbb93e524df51b788f12746ffb52bf5105f67b3ae6b89403bed51ed1f2da9c12",
+ "chksum_sha256": "b62aa153c8a819461f3bcbd27a4de731e477dffc4d05def8829687901b71aec9",
"format": 1
},
{
- "name": "test/awx/test_schedule.py",
+ "name": "plugins/modules/instance.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dadfd1c19c4c828dd84128ca484b837c6a904a09e92bcee12cb7cda408562c81",
+ "chksum_sha256": "fb4467c11809837fee04ed77ddcf154414070a0b0458742e8d6dcb3be045a9f3",
"format": 1
},
{
- "name": "test/awx/test_credential.py",
+ "name": "plugins/modules/ad_hoc_command_wait.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1bfcf99f4de13b3cc4a76c7f33fa3a51fde175afff5730b4743695cbeb69643",
+ "chksum_sha256": "67bc716ec04dfc77cb751cda6013ee54fa0cd3ed3afabc5ba0d146cc9712c996",
"format": 1
},
{
- "name": "test/awx/test_job_template.py",
+ "name": "plugins/modules/workflow_node_wait.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f0e924d79cd0b2bccbe4664e27ec07cb96a274d289229d80be09687f099171e",
+ "chksum_sha256": "5b19778b005fbaa3e0a3abc645a6d6452bc0ad52e89fe04141d051f6ddafbb73",
"format": 1
},
{
- "name": "test/awx/test_organization.py",
+ "name": "plugins/modules/credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "091d48906e4bf5ffaceead8f49281e05184cefce1546f387ce29a232d6694ec9",
+ "chksum_sha256": "e6d52d95b7e59a41b5d3715c3b55857c1b70baf7a0e8e44e9cc66db67b87685f",
"format": 1
},
{
- "name": "test/awx/test_notification_template.py",
+ "name": "plugins/modules/job_wait.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f40d5b65fbc78d12570f37799c8e240cfb90d9948421d3db82af6427fd14854f",
+ "chksum_sha256": "7e7459abf351f6c172401eec4ba579dc8566f8a55fd022cc8eec9fa5a3399067",
"format": 1
},
{
- "name": "test/awx/conftest.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9627cca4a40d87832000ec2335ab68888394f6f3bc1d04358147ec6971a69ea2",
+ "name": "plugins/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "test/awx/test_bulk.py",
+ "name": "plugins/inventory/controller.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a10cc6ca47f1cd560e5adff57f88b4628b6ff3ec20874a751ae57d7474cfb7d5",
+ "chksum_sha256": "29e7cd36a2b18ee616e31cbbec6a6e103f3f18ebe13f2bb87167b915163ca4bf",
"format": 1
},
{
- "name": "test/awx/test_completeness.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "56bc8d4297a9c7c4fdb712fd838803cc9233023288f5555f3e3d229117604eee",
+ "name": "plugins/lookup",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "test/awx/test_export.py",
+ "name": "plugins/lookup/controller_api.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c24a35265af8ff90f6456d39d0cc84cc9ce765d9fc3d45a4a8ac945e2538ff6b",
+ "chksum_sha256": "5e79f19c9dee4fa0c3a88126a630fa6163249c332d73a44370f64836e22d4b27",
"format": 1
},
{
- "name": "test/awx/test_instance.py",
+ "name": "plugins/lookup/schedule_rruleset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b60fecf79c63341b0eece0d9941a655dfeac89b3565e78f5bb39c5ec77b91af",
+ "chksum_sha256": "0141249f5cbe0651f96d3ba0a627a4d7e7376bd0e5b2b29e63ef44f9c243feb3",
"format": 1
},
{
- "name": "test/awx/test_role.py",
+ "name": "plugins/lookup/schedule_rrule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d4aec4ecb6a4429c08125de3c7b1161db794a2ed32cebd9e593d14fd90375d4",
+ "chksum_sha256": "c3ec7b8f134eca3a9f04156213b584792fc4e3397e3b9f82b5044e9ec662c7a2",
"format": 1
},
{
- "name": "test/awx/test_role_definition.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "da3d110330ea7f35d99701fb748e71ada4bc458b6c5f66be5a317c433e3976ea",
+ "name": "plugins/doc_fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "test/awx/test_role_team_assignment.py",
+ "name": "plugins/doc_fragments/auth_plugin.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3ac9236893e4737e6eb32b3a3d217d13d520de63ecd6050debdf157a968ec7a",
+ "chksum_sha256": "f1c61a9880edca852582f58c635c32264b0d1b3218ebdeb9db9d23081267c070",
"format": 1
},
{
- "name": "test/awx/test_role_user_assignment.py",
+ "name": "plugins/doc_fragments/auth_legacy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62e76a504873f12b631f99a7275a6673e98964258f0775606860a28d7e3efc73",
+ "chksum_sha256": "c2f10b81ecb89088c7c295430d4a71de26e3700b26e8344cdc7950908a738fd3",
"format": 1
},
{
- "name": "test/awx/test_workflow_job_template.py",
+ "name": "plugins/doc_fragments/auth.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e23b2e1055900a86dc2fb42165ca567936b319b69d3000c36434df28c6ae007",
+ "chksum_sha256": "83f0ecfb936c89bd2b589eff8710617080f539e943f38bf57b45b4eb753564e8",
"format": 1
},
{
@@ -750,6 +757,13 @@
"format": 1
},
{
+ "name": "tests/config.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4cb8bf065737689916cda6a2856fcfb8bc27f49224a4b2c2fde842e3b0e76fbb",
+ "format": 1
+ },
+ {
"name": "tests/integration",
"ftype": "dir",
"chksum_type": null,
@@ -764,66 +778,87 @@
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command",
+ "name": "tests/integration/targets/project",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command/tasks",
+ "name": "tests/integration/targets/project/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command/tasks/main.yml",
+ "name": "tests/integration/targets/project/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87ec6c33a66b6dd969c6ed08693a02a51fc43c21b3980a81a5edd29d644050aa",
+ "chksum_sha256": "c2f8af4ca361cc63c465d7b0895ef2ad7a3741f36e30133b5c33696ba2cf7474",
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command_cancel",
+ "name": "tests/integration/targets/bulk_host_create",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command_cancel/tasks",
+ "name": "tests/integration/targets/bulk_host_create/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command_cancel/tasks/main.yml",
+ "name": "tests/integration/targets/bulk_host_create/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "655ba448d6b451adbb4006c964c3ca21c7303787b3c336bd946752122bdb8d94",
+ "chksum_sha256": "39be6c097c03152ec1873732d8e09e34639464acad4f9d42ef3e13b6f87d47d1",
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command_wait",
+ "name": "tests/integration/targets/credential_input_source",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command_wait/tasks",
+ "name": "tests/integration/targets/credential_input_source/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ad_hoc_command_wait/tasks/main.yml",
+ "name": "tests/integration/targets/credential_input_source/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e5ced853c1d15d2db163bd5dbb56b1ca419363327dc32bd6a1448560e194c2f",
+ "chksum_sha256": "83dae6f438515cce80bf4b25d24d472de42feafe46b9f8faacaa7cdf18c1eb16",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/notification_template",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/notification_template/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/notification_template/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d4d3cef57b6e950d9af710bb519050823ef67fd6aa1c6b60c6f2fc33387dc67",
"format": 1
},
{
@@ -848,66 +883,66 @@
"format": 1
},
{
- "name": "tests/integration/targets/credential",
+ "name": "tests/integration/targets/team",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential/tasks",
+ "name": "tests/integration/targets/team/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential/tasks/main.yml",
+ "name": "tests/integration/targets/team/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "113f6f4c35885fe2a1ebca33523c9a6284a36ae470963e41a12909e444e0281d",
+ "chksum_sha256": "f59433918d9495fd5a3b296a0a70963f539571e742e3c790e5d88912eec8e2ef",
"format": 1
},
{
- "name": "tests/integration/targets/credential_input_source",
+ "name": "tests/integration/targets/host",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential_input_source/tasks",
+ "name": "tests/integration/targets/host/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential_input_source/tasks/main.yml",
+ "name": "tests/integration/targets/host/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83dae6f438515cce80bf4b25d24d472de42feafe46b9f8faacaa7cdf18c1eb16",
+ "chksum_sha256": "bc0849abd5d11fe3fa3039ed0ae93d4e812d915e66c47488f91cc4309b2d7d77",
"format": 1
},
{
- "name": "tests/integration/targets/credential_type",
+ "name": "tests/integration/targets/instance_group",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential_type/tasks",
+ "name": "tests/integration/targets/instance_group/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential_type/tasks/main.yml",
+ "name": "tests/integration/targets/instance_group/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d5026a4ca77513f73aa620ea14b74e0248c0dde7b5d75b318970154d7f4bf56",
+ "chksum_sha256": "5d38320dd4563f17fffd12b510a91dd26e40bfd5f408f75f96b930a38fcfd135",
"format": 1
},
{
@@ -932,24 +967,24 @@
"format": 1
},
{
- "name": "tests/integration/targets/execution_environment",
+ "name": "tests/integration/targets/user",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/execution_environment/tasks",
+ "name": "tests/integration/targets/user/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/execution_environment/tasks/main.yml",
+ "name": "tests/integration/targets/user/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "950122a04ad3b7406fa9a140e02ab190a0f7f53dfec791fdcb1fb2781ebae54d",
+ "chksum_sha256": "76733e6cbaba31ccf3510bea4ec1166c93195d5ef385035bb773fba854db3659",
"format": 1
},
{
@@ -981,801 +1016,773 @@
"format": 1
},
{
- "name": "tests/integration/targets/group",
+ "name": "tests/integration/targets/bulk_job_launch",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/group/tasks",
+ "name": "tests/integration/targets/bulk_job_launch/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/group/tasks/main.yml",
+ "name": "tests/integration/targets/bulk_job_launch/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe499cf6b248289df031a4d32328dbe45184f4fe7d92423522b1705715010904",
+ "chksum_sha256": "bbb7480d55d9d2261e22d0c55de3a0bada691608a6542385b234633f0493be75",
"format": 1
},
{
- "name": "tests/integration/targets/host",
+ "name": "tests/integration/targets/credential_type",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/host/tasks",
+ "name": "tests/integration/targets/credential_type/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/host/tasks/main.yml",
+ "name": "tests/integration/targets/credential_type/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bc0849abd5d11fe3fa3039ed0ae93d4e812d915e66c47488f91cc4309b2d7d77",
+ "chksum_sha256": "6d5026a4ca77513f73aa620ea14b74e0248c0dde7b5d75b318970154d7f4bf56",
"format": 1
},
{
- "name": "tests/integration/targets/import",
+ "name": "tests/integration/targets/inventory_source",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/import/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "52e1315ef042495cdf2b0ce22d8ba47f726dce15b968e301a795be1f69045f20",
- "format": 1
- },
- {
- "name": "tests/integration/targets/import/tasks",
+ "name": "tests/integration/targets/inventory_source/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/import/tasks/main.yml",
+ "name": "tests/integration/targets/inventory_source/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "59c0ace95e680d9874fe15c76889c1b4beb38d2d3c66a11499581b0f328ec25a",
+ "chksum_sha256": "7e581c4634196ff64cee14278c55b2292534605006e5a20edadcfe17e778caa9",
"format": 1
},
{
- "name": "tests/integration/targets/instance_group",
+ "name": "tests/integration/targets/bulk_host_delete",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_group/tasks",
+ "name": "tests/integration/targets/bulk_host_delete/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_group/tasks/main.yml",
+ "name": "tests/integration/targets/bulk_host_delete/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d38320dd4563f17fffd12b510a91dd26e40bfd5f408f75f96b930a38fcfd135",
+ "chksum_sha256": "2a55e10b05593a8ad6f5e2325c3d51f76b5ef040a81990dc3c7a4adaeebcf545",
"format": 1
},
{
- "name": "tests/integration/targets/inventory",
+ "name": "tests/integration/targets/schedule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory/tasks",
+ "name": "tests/integration/targets/schedule/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory/tasks/main.yml",
+ "name": "tests/integration/targets/schedule/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21f6d6fa5e76c8578949fd204edef4cdb77b5bff310b681bcd75ac1873a67f18",
- "format": 1
- },
- {
- "name": "tests/integration/targets/inventory_source",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "7419baf00681606be0a250140ace5f7d07804f8b650ecb5f0937147064e307f0",
"format": 1
},
{
- "name": "tests/integration/targets/inventory_source/tasks",
+ "name": "tests/integration/targets/import",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory_source/tasks/main.yml",
+ "name": "tests/integration/targets/import/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e581c4634196ff64cee14278c55b2292534605006e5a20edadcfe17e778caa9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/inventory_source_update",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "52e1315ef042495cdf2b0ce22d8ba47f726dce15b968e301a795be1f69045f20",
"format": 1
},
{
- "name": "tests/integration/targets/inventory_source_update/tasks",
+ "name": "tests/integration/targets/import/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory_source_update/tasks/main.yml",
+ "name": "tests/integration/targets/import/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06a737371470d7a996e2eb092d820fe536dac73f3abcec56acf002da62fe05d9",
+ "chksum_sha256": "59c0ace95e680d9874fe15c76889c1b4beb38d2d3c66a11499581b0f328ec25a",
"format": 1
},
{
- "name": "tests/integration/targets/job_cancel",
+ "name": "tests/integration/targets/workflow_job_template",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_cancel/tasks",
+ "name": "tests/integration/targets/workflow_job_template/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_cancel/tasks/main.yml",
+ "name": "tests/integration/targets/workflow_job_template/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c8b9d511b1ce96b7d5a359b1043ffc18004c5d3c23052b82423512b46ecd7b4",
+ "chksum_sha256": "872f3cd4d0d7656ddf7f9239c1139351af6341e5e40e0ac2981ed152fff26a5b",
"format": 1
},
{
- "name": "tests/integration/targets/job_launch",
+ "name": "tests/integration/targets/token",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_launch/tasks",
+ "name": "tests/integration/targets/token/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_launch/tasks/main.yml",
+ "name": "tests/integration/targets/token/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbb658e8212fcb120200db84212f6b469fff700c1fd233b486ec82a816572323",
+ "chksum_sha256": "e7245f5039915d720baba505b4ab74421c7db138bb45b50917942f3eb05799af",
"format": 1
},
{
- "name": "tests/integration/targets/job_list",
+ "name": "tests/integration/targets/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_list/tasks",
+ "name": "tests/integration/targets/module_utils/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_list/tasks/main.yml",
+ "name": "tests/integration/targets/module_utils/tasks/test_named_reference.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "668f25abe2486218893b6137f5b765301229f649ed3a779a6756496c14f42595",
+ "chksum_sha256": "0e1babbc9e57e06629675f6d2b0d4fe891880b177a572d355391753e670c5156",
"format": 1
},
{
- "name": "tests/integration/targets/job_template",
+ "name": "tests/integration/targets/module_utils/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2fb7f33e9695e68ec87d2b9bc52ad2196c2d542bccb7d5b185ebc8e54a8cc237",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/job_list",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_template/tasks",
+ "name": "tests/integration/targets/job_list/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_template/tasks/main.yml",
+ "name": "tests/integration/targets/job_list/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1232f2cc28b0ff4ff742c2c5847a649d3d9fcc716d3611a938b277088a0e30b",
+ "chksum_sha256": "668f25abe2486218893b6137f5b765301229f649ed3a779a6756496c14f42595",
"format": 1
},
{
- "name": "tests/integration/targets/job_wait",
+ "name": "tests/integration/targets/label",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_wait/tasks",
+ "name": "tests/integration/targets/label/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/job_wait/tasks/main.yml",
+ "name": "tests/integration/targets/label/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76cf3f8c1cfac81eaaf0f5b76494fc3b7605a15eeb73f6fc83efbbe28a4073b3",
+ "chksum_sha256": "19b0b776638ca5722ddc8fc85fc90fba2639abecbd5dc5bbc6d7ee3c53b5ff16",
"format": 1
},
{
- "name": "tests/integration/targets/label",
+ "name": "tests/integration/targets/ad_hoc_command",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/label/tasks",
+ "name": "tests/integration/targets/ad_hoc_command/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/label/tasks/main.yml",
+ "name": "tests/integration/targets/ad_hoc_command/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19b0b776638ca5722ddc8fc85fc90fba2639abecbd5dc5bbc6d7ee3c53b5ff16",
+ "chksum_sha256": "87ec6c33a66b6dd969c6ed08693a02a51fc43c21b3980a81a5edd29d644050aa",
"format": 1
},
{
- "name": "tests/integration/targets/lookup_api_plugin",
+ "name": "tests/integration/targets/inventory_source_update",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/lookup_api_plugin/tasks",
+ "name": "tests/integration/targets/inventory_source_update/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/lookup_api_plugin/tasks/main.yml",
+ "name": "tests/integration/targets/inventory_source_update/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48c0f03d42f73933cc99e584057db0ca9720c280d4fe8a414795ee1f30da68a1",
+ "chksum_sha256": "06a737371470d7a996e2eb092d820fe536dac73f3abcec56acf002da62fe05d9",
"format": 1
},
{
- "name": "tests/integration/targets/lookup_rruleset",
+ "name": "tests/integration/targets/job_wait",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/lookup_rruleset/tasks",
+ "name": "tests/integration/targets/job_wait/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/lookup_rruleset/tasks/main.yml",
+ "name": "tests/integration/targets/job_wait/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30edd075f50be0fbf9c57c9d7db10ae7086749d78817e8a24b680301ba339cb5",
+ "chksum_sha256": "76cf3f8c1cfac81eaaf0f5b76494fc3b7605a15eeb73f6fc83efbbe28a4073b3",
"format": 1
},
{
- "name": "tests/integration/targets/notification_template",
+ "name": "tests/integration/targets/job_template",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/notification_template/tasks",
+ "name": "tests/integration/targets/job_template/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/notification_template/tasks/main.yml",
+ "name": "tests/integration/targets/job_template/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d4d3cef57b6e950d9af710bb519050823ef67fd6aa1c6b60c6f2fc33387dc67",
+ "chksum_sha256": "b1232f2cc28b0ff4ff742c2c5847a649d3d9fcc716d3611a938b277088a0e30b",
"format": 1
},
{
- "name": "tests/integration/targets/organization",
+ "name": "tests/integration/targets/schedule_rrule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/organization/tasks",
+ "name": "tests/integration/targets/schedule_rrule/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/organization/tasks/main.yml",
+ "name": "tests/integration/targets/schedule_rrule/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e18cbff8118f617a9b594f630739356922030c2d802a0eb92df0ddbe4f1e1fe",
+ "chksum_sha256": "d17ccb32f5b43b887164df4a328a6e70ab222a295b916ac5f46770543f8d4019",
"format": 1
},
{
- "name": "tests/integration/targets/project",
+ "name": "tests/integration/targets/ad_hoc_command_wait",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/project/tasks",
+ "name": "tests/integration/targets/ad_hoc_command_wait/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/project/tasks/main.yml",
+ "name": "tests/integration/targets/ad_hoc_command_wait/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c2f8af4ca361cc63c465d7b0895ef2ad7a3741f36e30133b5c33696ba2cf7474",
+ "chksum_sha256": "1e5ced853c1d15d2db163bd5dbb56b1ca419363327dc32bd6a1448560e194c2f",
"format": 1
},
{
- "name": "tests/integration/targets/project_update",
+ "name": "tests/integration/targets/role_team_assignment",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/project_update/tasks",
+ "name": "tests/integration/targets/role_team_assignment/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/project_update/tasks/main.yml",
+ "name": "tests/integration/targets/role_team_assignment/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fd0d5b30de4d13f2d2e5fb7497c0fbbc13fa92a31fc43d4a21b19d3f53249ce",
+ "chksum_sha256": "f5fd6177577695507040f8a0654b0a0fbb2198f3c880788db054d6a4a13ecdcf",
"format": 1
},
{
- "name": "tests/integration/targets/role",
+ "name": "tests/integration/targets/job_cancel",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role/tasks",
+ "name": "tests/integration/targets/job_cancel/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role/tasks/main.yml",
+ "name": "tests/integration/targets/job_cancel/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "24e699037b51baceceb9f18e1e43464608766127f5eeb4df1fed5991a35aea7f",
+ "chksum_sha256": "0c8b9d511b1ce96b7d5a359b1043ffc18004c5d3c23052b82423512b46ecd7b4",
"format": 1
},
{
- "name": "tests/integration/targets/schedule",
+ "name": "tests/integration/targets/lookup_api_plugin",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/schedule/tasks",
+ "name": "tests/integration/targets/lookup_api_plugin/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/schedule/tasks/main.yml",
+ "name": "tests/integration/targets/lookup_api_plugin/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7419baf00681606be0a250140ace5f7d07804f8b650ecb5f0937147064e307f0",
+ "chksum_sha256": "48c0f03d42f73933cc99e584057db0ca9720c280d4fe8a414795ee1f30da68a1",
"format": 1
},
{
- "name": "tests/integration/targets/schedule_rrule",
+ "name": "tests/integration/targets/role_user_assignment",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/schedule_rrule/tasks",
+ "name": "tests/integration/targets/role_user_assignment/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/schedule_rrule/tasks/main.yml",
+ "name": "tests/integration/targets/role_user_assignment/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d17ccb32f5b43b887164df4a328a6e70ab222a295b916ac5f46770543f8d4019",
+ "chksum_sha256": "288362d519c2a8b9fbf83be627abb4b3d3eb9f7cb89a65cbdabd2039fdcab988",
"format": 1
},
{
- "name": "tests/integration/targets/settings",
+ "name": "tests/integration/targets/instance",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/settings/tasks",
+ "name": "tests/integration/targets/instance/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/settings/tasks/main.yml",
+ "name": "tests/integration/targets/instance/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f781f919fb476162d13503c6a6aa952b46fd61fc60b38988f161d0025bbbd900",
+ "chksum_sha256": "dfa2174eba47b8edb3a65912224adc7d5e9fd9d180d0da5db2e05dab7f3520a5",
"format": 1
},
{
- "name": "tests/integration/targets/team",
+ "name": "tests/integration/targets/project_update",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/team/tasks",
+ "name": "tests/integration/targets/project_update/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/team/tasks/main.yml",
+ "name": "tests/integration/targets/project_update/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f59433918d9495fd5a3b296a0a70963f539571e742e3c790e5d88912eec8e2ef",
+ "chksum_sha256": "6fd0d5b30de4d13f2d2e5fb7497c0fbbc13fa92a31fc43d4a21b19d3f53249ce",
"format": 1
},
{
- "name": "tests/integration/targets/token",
+ "name": "tests/integration/targets/credential",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/token/tasks",
+ "name": "tests/integration/targets/credential/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/token/tasks/main.yml",
+ "name": "tests/integration/targets/credential/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7245f5039915d720baba505b4ab74421c7db138bb45b50917942f3eb05799af",
+ "chksum_sha256": "113f6f4c35885fe2a1ebca33523c9a6284a36ae470963e41a12909e444e0281d",
"format": 1
},
{
- "name": "tests/integration/targets/user",
+ "name": "tests/integration/targets/workflow_approval",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user/tasks",
+ "name": "tests/integration/targets/workflow_approval/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user/tasks/main.yml",
+ "name": "tests/integration/targets/workflow_approval/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76733e6cbaba31ccf3510bea4ec1166c93195d5ef385035bb773fba854db3659",
+ "chksum_sha256": "2ccc6f9f0aafef620896c56a32e1dd07f2f83f61c6b8c4d4bbaa6a220495b91d",
"format": 1
},
{
- "name": "tests/integration/targets/workflow_job_template",
+ "name": "tests/integration/targets/job_launch",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/workflow_job_template/tasks",
+ "name": "tests/integration/targets/job_launch/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/workflow_job_template/tasks/main.yml",
+ "name": "tests/integration/targets/job_launch/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "872f3cd4d0d7656ddf7f9239c1139351af6341e5e40e0ac2981ed152fff26a5b",
+ "chksum_sha256": "fbb658e8212fcb120200db84212f6b469fff700c1fd233b486ec82a816572323",
"format": 1
},
{
- "name": "tests/integration/targets/workflow_launch",
+ "name": "tests/integration/targets/inventory",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/workflow_launch/tasks",
+ "name": "tests/integration/targets/inventory/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/workflow_launch/tasks/main.yml",
+ "name": "tests/integration/targets/inventory/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ceb05ce9ac229472ee12b492754aeacba13b8060a8e973467aba8a9d3d793b6",
+ "chksum_sha256": "21f6d6fa5e76c8578949fd204edef4cdb77b5bff310b681bcd75ac1873a67f18",
"format": 1
},
{
- "name": "tests/integration/targets/instance",
+ "name": "tests/integration/targets/ad_hoc_command_cancel",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks",
+ "name": "tests/integration/targets/ad_hoc_command_cancel/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks/main.yml",
+ "name": "tests/integration/targets/ad_hoc_command_cancel/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dfa2174eba47b8edb3a65912224adc7d5e9fd9d180d0da5db2e05dab7f3520a5",
+ "chksum_sha256": "655ba448d6b451adbb4006c964c3ca21c7303787b3c336bd946752122bdb8d94",
"format": 1
},
{
- "name": "tests/integration/targets/workflow_approval",
+ "name": "tests/integration/targets/lookup_rruleset",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/workflow_approval/tasks",
+ "name": "tests/integration/targets/lookup_rruleset/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/workflow_approval/tasks/main.yml",
+ "name": "tests/integration/targets/lookup_rruleset/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ccc6f9f0aafef620896c56a32e1dd07f2f83f61c6b8c4d4bbaa6a220495b91d",
+ "chksum_sha256": "30edd075f50be0fbf9c57c9d7db10ae7086749d78817e8a24b680301ba339cb5",
"format": 1
},
{
- "name": "tests/integration/targets/bulk_host_create",
+ "name": "tests/integration/targets/role_definition",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bulk_host_create/tasks",
+ "name": "tests/integration/targets/role_definition/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bulk_host_create/tasks/main.yml",
+ "name": "tests/integration/targets/role_definition/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39be6c097c03152ec1873732d8e09e34639464acad4f9d42ef3e13b6f87d47d1",
+ "chksum_sha256": "53d19702349573c129db2e53baf99602bb846787490aab19e2f8f29c892f5735",
"format": 1
},
{
- "name": "tests/integration/targets/bulk_job_launch",
+ "name": "tests/integration/targets/settings",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bulk_job_launch/tasks",
+ "name": "tests/integration/targets/settings/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bulk_job_launch/tasks/main.yml",
+ "name": "tests/integration/targets/settings/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bbb7480d55d9d2261e22d0c55de3a0bada691608a6542385b234633f0493be75",
+ "chksum_sha256": "f781f919fb476162d13503c6a6aa952b46fd61fc60b38988f161d0025bbbd900",
"format": 1
},
{
- "name": "tests/integration/targets/module_utils",
+ "name": "tests/integration/targets/group",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/module_utils/tasks",
+ "name": "tests/integration/targets/group/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/module_utils/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2fb7f33e9695e68ec87d2b9bc52ad2196c2d542bccb7d5b185ebc8e54a8cc237",
- "format": 1
- },
- {
- "name": "tests/integration/targets/module_utils/tasks/test_named_reference.yml",
+ "name": "tests/integration/targets/group/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e1babbc9e57e06629675f6d2b0d4fe891880b177a572d355391753e670c5156",
+ "chksum_sha256": "fe499cf6b248289df031a4d32328dbe45184f4fe7d92423522b1705715010904",
"format": 1
},
{
- "name": "tests/integration/targets/bulk_host_delete",
+ "name": "tests/integration/targets/role",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bulk_host_delete/tasks",
+ "name": "tests/integration/targets/role/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bulk_host_delete/tasks/main.yml",
+ "name": "tests/integration/targets/role/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a55e10b05593a8ad6f5e2325c3d51f76b5ef040a81990dc3c7a4adaeebcf545",
+ "chksum_sha256": "24e699037b51baceceb9f18e1e43464608766127f5eeb4df1fed5991a35aea7f",
"format": 1
},
{
- "name": "tests/integration/targets/role_definition",
+ "name": "tests/integration/targets/execution_environment",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role_definition/tasks",
+ "name": "tests/integration/targets/execution_environment/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role_definition/tasks/main.yml",
+ "name": "tests/integration/targets/execution_environment/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53d19702349573c129db2e53baf99602bb846787490aab19e2f8f29c892f5735",
+ "chksum_sha256": "950122a04ad3b7406fa9a140e02ab190a0f7f53dfec791fdcb1fb2781ebae54d",
"format": 1
},
{
- "name": "tests/integration/targets/role_team_assignment",
+ "name": "tests/integration/targets/organization",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role_team_assignment/tasks",
+ "name": "tests/integration/targets/organization/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role_team_assignment/tasks/main.yml",
+ "name": "tests/integration/targets/organization/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5fd6177577695507040f8a0654b0a0fbb2198f3c880788db054d6a4a13ecdcf",
+ "chksum_sha256": "6e18cbff8118f617a9b594f630739356922030c2d802a0eb92df0ddbe4f1e1fe",
"format": 1
},
{
- "name": "tests/integration/targets/role_user_assignment",
+ "name": "tests/integration/targets/workflow_launch",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role_user_assignment/tasks",
+ "name": "tests/integration/targets/workflow_launch/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/role_user_assignment/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "288362d519c2a8b9fbf83be627abb4b3d3eb9f7cb89a65cbdabd2039fdcab988",
- "format": 1
- },
- {
- "name": "tests/config.yml",
+ "name": "tests/integration/targets/workflow_launch/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4cb8bf065737689916cda6a2856fcfb8bc27f49224a4b2c2fde842e3b0e76fbb",
+ "chksum_sha256": "4ceb05ce9ac229472ee12b492754aeacba13b8060a8e973467aba8a9d3d793b6",
"format": 1
},
{
@@ -1786,10 +1793,10 @@
"format": 1
},
{
- "name": "tests/sanity/ignore-2.14.txt",
+ "name": "tests/sanity/ignore-2.16.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05b621f6ff40c091ab1c07947c43d817ed37af7acfc0f8bef7b1453eb03b3aa7",
+ "chksum_sha256": "f6dce33e05558d94ecc8ebacc8a5011e9defc1b197fcc13c4335868b6d6c4952",
"format": 1
},
{
@@ -1800,17 +1807,17 @@
"format": 1
},
{
- "name": "tests/sanity/ignore-2.16.txt",
+ "name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6dce33e05558d94ecc8ebacc8a5011e9defc1b197fcc13c4335868b6d6c4952",
+ "chksum_sha256": "05b621f6ff40c091ab1c07947c43d817ed37af7acfc0f8bef7b1453eb03b3aa7",
"format": 1
},
{
- "name": "README.md",
+ "name": "tests/sanity/ignore-2.17.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41997bded547ba6a168ee41fd5379a44f20dcd155979067564b79d14af6e3fd3",
+ "chksum_sha256": "05b621f6ff40c091ab1c07947c43d817ed37af7acfc0f8bef7b1453eb03b3aa7",
"format": 1
}
],
diff --git a/ansible_collections/awx/awx/MANIFEST.json b/ansible_collections/awx/awx/MANIFEST.json
index 4c999edb6..a0e3f39c9 100644
--- a/ansible_collections/awx/awx/MANIFEST.json
+++ b/ansible_collections/awx/awx/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "awx",
"name": "awx",
- "version": "24.3.1",
+ "version": "24.5.0",
"authors": [
"AWX Project Contributors <awx-project@googlegroups.com>"
],
@@ -29,7 +29,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c65f7b80630613726dd21cd3d4a41265ca38ec6b254150d0f7c59be171fc27ee",
+ "chksum_sha256": "d48dd332a0566666b74475b3bd965842bfa41c8de34d2dba4b1e5631953874c3",
"format": 1
},
"format": 1
diff --git a/ansible_collections/awx/awx/plugins/module_utils/controller_api.py b/ansible_collections/awx/awx/plugins/module_utils/controller_api.py
index 0f48fc2df..4bcb13a6d 100644
--- a/ansible_collections/awx/awx/plugins/module_utils/controller_api.py
+++ b/ansible_collections/awx/awx/plugins/module_utils/controller_api.py
@@ -17,7 +17,7 @@ import time
import re
from json import loads, dumps
from os.path import isfile, expanduser, split, join, exists, isdir
-from os import access, R_OK, getcwd, environ
+from os import access, R_OK, getcwd, environ, getenv
try:
@@ -107,7 +107,7 @@ class ControllerModule(AnsibleModule):
# Perform magic depending on whether controller_oauthtoken is a string or a dict
if self.params.get('controller_oauthtoken'):
token_param = self.params.get('controller_oauthtoken')
- if type(token_param) is dict:
+ if isinstance(token_param, dict):
if 'token' in token_param:
self.oauth_token = self.params.get('controller_oauthtoken')['token']
else:
@@ -148,9 +148,10 @@ class ControllerModule(AnsibleModule):
# Make sure we start with /api/vX
if not endpoint.startswith("/"):
endpoint = "/{0}".format(endpoint)
- prefix = self.url_prefix.rstrip("/")
- if not endpoint.startswith(prefix + "/api/"):
- endpoint = prefix + "/api/v2{0}".format(endpoint)
+ hostname_prefix = self.url_prefix.rstrip("/")
+ api_path = self.api_path()
+ if not endpoint.startswith(hostname_prefix + api_path):
+ endpoint = hostname_prefix + f"{api_path}v2{endpoint}"
if not endpoint.endswith('/') and '?' not in endpoint:
endpoint = "{0}/".format(endpoint)
@@ -215,7 +216,7 @@ class ControllerModule(AnsibleModule):
try:
config_data = yaml.load(config_string, Loader=yaml.SafeLoader)
# If this is an actual ini file, yaml will return the whole thing as a string instead of a dict
- if type(config_data) is not dict:
+ if not isinstance(config_data, dict):
raise AssertionError("The yaml config file is not properly formatted as a dict.")
try_config_parsing = False
@@ -257,7 +258,7 @@ class ControllerModule(AnsibleModule):
if honorred_setting in config_data:
# Verify SSL must be a boolean
if honorred_setting == 'verify_ssl':
- if type(config_data[honorred_setting]) is str:
+ if isinstance(config_data[honorred_setting], str):
setattr(self, honorred_setting, strtobool(config_data[honorred_setting]))
else:
setattr(self, honorred_setting, bool(config_data[honorred_setting]))
@@ -291,7 +292,7 @@ class ControllerModule(AnsibleModule):
class ControllerAPIModule(ControllerModule):
# TODO: Move the collection version check into controller_module.py
# This gets set by the make process so whatever is in here is irrelevant
- _COLLECTION_VERSION = "0.0.1-devel"
+ _COLLECTION_VERSION = "24.5.0"
_COLLECTION_TYPE = "awx"
# This maps the collections type (awx/tower) to the values returned by the API
# Those values can be found in awx/api/generics.py line 204
@@ -603,6 +604,14 @@ class ControllerAPIModule(ControllerModule):
status_code = response.status
return {'status_code': status_code, 'json': response_json}
+ def api_path(self):
+
+ default_api_path = "/api/"
+ if self._COLLECTION_TYPE != "awx":
+ default_api_path = "/api/controller/"
+ prefix = getenv('CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX', default_api_path)
+ return prefix
+
def authenticate(self, **kwargs):
if self.username and self.password:
# Attempt to get a token from /api/v2/tokens/ by giving it our username/password combo
@@ -613,7 +622,7 @@ class ControllerAPIModule(ControllerModule):
"scope": "write",
}
# Preserve URL prefix
- endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/'
+ endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/'
# Post to the tokens endpoint with basic auth to try and get a token
api_token_url = (self.url._replace(path=endpoint)).geturl()
@@ -1002,7 +1011,7 @@ class ControllerAPIModule(ControllerModule):
if self.authenticated and self.oauth_token_id:
# Attempt to delete our current token from /api/v2/tokens/
# Post to the tokens endpoint with basic auth to try and get a token
- endpoint = self.url_prefix.rstrip('/') + '/api/v2/tokens/{0}/'.format(self.oauth_token_id)
+ endpoint = self.url_prefix.rstrip('/') + f'{self.api_path()}v2/tokens/{self.oauth_token_id}/'
api_token_url = (self.url._replace(path=endpoint, query=None)).geturl() # in error cases, fail_json exists before exception handling
try:
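
A minimal sketch of how the new api_path() helper and the endpoint builder in the controller_api.py hunks above fit together. The environment variable name, the /api/ and /api/controller/ defaults, and the prefix/v2 handling are taken from the diff; the standalone function shapes and the example calls are illustrative only.

```python
from os import getenv


def api_path(collection_type="awx"):
    # AWX keeps the bare /api/ prefix; non-awx (controller) builds default
    # to /api/controller/ unless the environment variable overrides it.
    default_api_path = "/api/" if collection_type == "awx" else "/api/controller/"
    return getenv("CONTROLLER_OPTIONAL_API_URLPATTERN_PREFIX", default_api_path)


def build_endpoint(endpoint, url_prefix="/", collection_type="awx"):
    # Mirrors the hunk above: force a leading slash, prepend the host prefix
    # plus API path, default to the v2 API, and append a trailing slash.
    if not endpoint.startswith("/"):
        endpoint = "/{0}".format(endpoint)
    hostname_prefix = url_prefix.rstrip("/")
    path = api_path(collection_type)
    if not endpoint.startswith(hostname_prefix + path):
        endpoint = hostname_prefix + "{0}v2{1}".format(path, endpoint)
    if not endpoint.endswith("/") and "?" not in endpoint:
        endpoint = "{0}/".format(endpoint)
    return endpoint


# build_endpoint("tokens")         -> "/api/v2/tokens/"
# build_endpoint("tokens", "/awx") -> "/awx/api/v2/tokens/"
```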
diff --git a/ansible_collections/awx/awx/plugins/modules/ad_hoc_command.py b/ansible_collections/awx/awx/plugins/modules/ad_hoc_command.py
index 5864d392a..10d1c7e35 100644
--- a/ansible_collections/awx/awx/plugins/modules/ad_hoc_command.py
+++ b/ansible_collections/awx/awx/plugins/modules/ad_hoc_command.py
@@ -163,7 +163,7 @@ def main():
for arg in ['job_type', 'limit', 'forks', 'verbosity', 'extra_vars', 'become_enabled', 'diff_mode']:
if module.params.get(arg):
# extra_var can receive a dict or a string, if a dict convert it to a string
- if arg == 'extra_vars' and type(module.params.get(arg)) is not str:
+ if arg == 'extra_vars' and not isinstance(module.params.get(arg), str):
post_data[arg] = json.dumps(module.params.get(arg))
else:
post_data[arg] = module.params.get(arg)
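
A small, self-contained illustration of the isinstance-based check that the ad_hoc_command hunk above switches to; the helper name is made up for the example.

```python
import json


def normalize_extra_vars(value):
    # extra_vars may be a dict or an already-serialized string; only
    # non-strings are dumped to JSON, matching the isinstance() check above.
    return value if isinstance(value, str) else json.dumps(value)


# normalize_extra_vars({"limit": "web"})   -> '{"limit": "web"}'
# normalize_extra_vars('{"limit": "web"}') -> '{"limit": "web"}'
```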
diff --git a/ansible_collections/awx/awx/plugins/modules/application.py b/ansible_collections/awx/awx/plugins/modules/application.py
index 1933143af..4c858394d 100644
--- a/ansible_collections/awx/awx/plugins/modules/application.py
+++ b/ansible_collections/awx/awx/plugins/modules/application.py
@@ -121,6 +121,7 @@ def main():
client_type = module.params.get('client_type')
organization = module.params.get('organization')
redirect_uris = module.params.get('redirect_uris')
+ skip_authorization = module.params.get('skip_authorization')
state = module.params.get('state')
# Attempt to look up the related items the user specified (these will fail the module if not found)
@@ -146,6 +147,8 @@ def main():
application_fields['description'] = description
if redirect_uris is not None:
application_fields['redirect_uris'] = ' '.join(redirect_uris)
+ if skip_authorization is not None:
+ application_fields['skip_authorization'] = skip_authorization
response = module.create_or_update_if_needed(application, application_fields, endpoint='applications', item_type='application', auto_exit=False)
if 'client_id' in response:
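
A hedged sketch of the pattern the application.py hunk extends: optional parameters are copied into the request body only when the caller actually set them. build_application_fields is an illustrative helper, not part of the module.

```python
def build_application_fields(params):
    # Include optional values only when supplied, so absent parameters
    # never overwrite existing settings on the application.
    fields = {}
    for key in ("description", "skip_authorization"):
        if params.get(key) is not None:
            fields[key] = params[key]
    if params.get("redirect_uris") is not None:
        fields["redirect_uris"] = " ".join(params["redirect_uris"])
    return fields


# build_application_fields({"skip_authorization": True})
#   -> {"skip_authorization": True}
```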
diff --git a/ansible_collections/awx/awx/plugins/modules/import.py b/ansible_collections/awx/awx/plugins/modules/import.py
index fe66b2a7a..ae0180ccd 100644
--- a/ansible_collections/awx/awx/plugins/modules/import.py
+++ b/ansible_collections/awx/awx/plugins/modules/import.py
@@ -56,7 +56,7 @@ import logging
# In this module we don't use EXPORTABLE_RESOURCES, we just want to validate that our installed awxkit has import/export
try:
- from awxkit.api.pages.api import EXPORTABLE_RESOURCES # noqa
+ from awxkit.api.pages.api import EXPORTABLE_RESOURCES # noqa: F401; pylint: disable=unused-import
HAS_EXPORTABLE_RESOURCES = True
except ImportError:
diff --git a/ansible_collections/awx/awx/plugins/modules/notification_template.py b/ansible_collections/awx/awx/plugins/modules/notification_template.py
index bb1df60d3..e44e2be5e 100644
--- a/ansible_collections/awx/awx/plugins/modules/notification_template.py
+++ b/ansible_collections/awx/awx/plugins/modules/notification_template.py
@@ -50,6 +50,7 @@ options:
description:
- The type of notification to be sent.
choices:
+ - 'awssns'
- 'email'
- 'grafana'
- 'irc'
@@ -219,7 +220,7 @@ def main():
copy_from=dict(),
description=dict(),
organization=dict(),
- notification_type=dict(choices=['email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
+ notification_type=dict(choices=['awssns', 'email', 'grafana', 'irc', 'mattermost', 'pagerduty', 'rocketchat', 'slack', 'twilio', 'webhook']),
notification_configuration=dict(type='dict'),
messages=dict(type='dict'),
state=dict(choices=['present', 'absent', 'exists'], default='present'),
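
For reference, a minimal sketch of validating a value against the expanded notification_type choices introduced above; the choices tuple is copied from the hunk, while the validator itself is illustrative.

```python
NOTIFICATION_TYPES = (
    "awssns", "email", "grafana", "irc", "mattermost",
    "pagerduty", "rocketchat", "slack", "twilio", "webhook",
)


def validate_notification_type(value):
    # Reject anything outside the documented choices, including the
    # newly added AWS SNS type.
    if value not in NOTIFICATION_TYPES:
        raise ValueError("unsupported notification_type: {0}".format(value))
    return value


# validate_notification_type("awssns") -> "awssns"
```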
diff --git a/ansible_collections/awx/awx/test/awx/conftest.py b/ansible_collections/awx/awx/test/awx/conftest.py
index b7fb6333d..42500342a 100644
--- a/ansible_collections/awx/awx/test/awx/conftest.py
+++ b/ansible_collections/awx/awx/test/awx/conftest.py
@@ -19,7 +19,7 @@ from ansible.module_utils.six import raise_from
from ansible_base.rbac.models import RoleDefinition, DABPermission
from awx.main.tests.functional.conftest import _request
-from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh # noqa: F401; pylint: disable=unused-variable
+from awx.main.tests.functional.conftest import credentialtype_scm, credentialtype_ssh # noqa: F401; pylint: disable=unused-import
from awx.main.models import (
Organization,
Project,
diff --git a/ansible_collections/awx/awx/tests/sanity/ignore-2.17.txt b/ansible_collections/awx/awx/tests/sanity/ignore-2.17.txt
new file mode 100644
index 000000000..19512ea0c
--- /dev/null
+++ b/ansible_collections/awx/awx/tests/sanity/ignore-2.17.txt
@@ -0,0 +1 @@
+plugins/modules/export.py validate-modules:nonexistent-parameter-documented # needs awxkit to construct argspec
diff --git a/ansible_collections/azure/azcollection/CHANGELOG.md b/ansible_collections/azure/azcollection/CHANGELOG.md
index f271fc377..27c5e9889 100644
--- a/ansible_collections/azure/azcollection/CHANGELOG.md
+++ b/ansible_collections/azure/azcollection/CHANGELOG.md
@@ -1,5 +1,70 @@
# Change Log
+## v2.4.0 (2024-05-30)
+
+### NEW MODULES
+ - azure_rm_storageaccountmanagementpolicy: Add support for managing storage account management policies ([#1536](https://github.com/ansible-collections/azure/pull/1536))
+ - azure_rm_storageaccountmanagementpolicy_info: Add support for fetching storage account management policy facts ([#1536](https://github.com/ansible-collections/azure/pull/1536))
+ - azure_rm_virtualnetworkgatewaynatrule: Add support for managing virtual network gateway NAT rules ([#1525](https://github.com/ansible-collections/azure/pull/1525))
+ - azure_rm_virtualnetworkgatewaynatrule_info: Add support for fetching virtual network gateway NAT rule facts ([#1525](https://github.com/ansible-collections/azure/pull/1525))
+ - azure_rm_localnetworkgateway: Add support for managing local network gateways ([#1523](https://github.com/ansible-collections/azure/pull/1523))
+ - azure_rm_localnetworkgateway_info: Add support for fetching local network gateway facts ([#1523](https://github.com/ansible-collections/azure/pull/1523))
+ - azure_rm_sqlmidatabase: Add support for managing SQL managed databases ([#1548](https://github.com/ansible-collections/azure/pull/1548))
+ - azure_rm_sqlmidatabase_info: Add support for fetching SQL managed database facts ([#1548](https://github.com/ansible-collections/azure/pull/1548))
+ - azure_rm_sqlmidblongtermretentionpolicy: Add support for managing SQL managed database long-term retention policies ([#1548](https://github.com/ansible-collections/azure/pull/1548))
+ - azure_rm_sqlmidblongtermretentionpolicy_info: Add support for fetching SQL managed database long-term retention policy facts ([#1548](https://github.com/ansible-collections/azure/pull/1548))
+ - azure_rm_sqlmidbshorttermretentionpolicy: Add support for managing SQL managed database short-term retention policies ([#1548](https://github.com/ansible-collections/azure/pull/1548))
+ - azure_rm_sqlmidbshorttermretentionpolicy_info: Add support for fetching SQL managed database short-term retention policy facts ([#1548](https://github.com/ansible-collections/azure/pull/1548))
+ - azure_rm_vmsku_info: Add support for listing VM SKUs ([#1546](https://github.com/ansible-collections/azure/pull/1546))
+ - tests/integration/requirements.txt: Symlink requirements-azure.txt from tests/integration ([#1551](https://github.com/ansible-collections/azure/pull/1551))
+
+### FEATURE ENHANCEMENT
+ - azure_rm_aduser: Add support for `on_premises_extension_attributes` ([#1518](https://github.com/ansible-collections/azure/pull/1518))
+ - azure_rm_aduser_info: Add support for `on_premises_extension_attributes` ([#1518](https://github.com/ansible-collections/azure/pull/1518))
+ - azure_keyvault_secret: Add support for `cloud_type` ([#1517](https://github.com/ansible-collections/azure/pull/1517))
+ - azure_rm_postgresqlflexibleserver: Add support for `identity` ([#1528](https://github.com/ansible-collections/azure/pull/1528))
+ - azure_rm_postgresqlflexibleserver_info: Add support for `identity` ([#1528](https://github.com/ansible-collections/azure/pull/1528))
+ - plugins/inventory/azure_rm.py: Expand Inventory filter integration tests ([#1547](https://github.com/ansible-collections/azure/pull/1547))
+ - azure_rm_webapp: Add support for `site_auth_settings` ([#1538](https://github.com/ansible-collections/azure/pull/1538))
+ - azure_rm_webapp_info: Add support for `site_auth_settings` ([#1538](https://github.com/ansible-collections/azure/pull/1538))
+ - azure_rm_aks:
+ - Add support for UserAssigned Identity ([#1543](https://github.com/ansible-collections/azure/pull/1543))
+ - Add `managedNATGateway` and `userAssignedNATGateway` to `outbound_type` ([#1537](https://github.com/ansible-collections/azure/pull/1537))
+ - azure_rm_webappaccessrestriction: Add more parameters to `ip_security_restrictions` ([#1558](https://github.com/ansible-collections/azure/pull/1558))
+ - azure_rm_webappaccessrestriction_info: Add more parameters to `ip_security_restrictions` ([#1558](https://github.com/ansible-collections/azure/pull/1558))
+ - azure_rm_virtualmachine: Add support for attaching existing managed data disks at VM creation ([#1430](https://github.com/ansible-collections/azure/pull/1430))
+ - azure_rm_aksagentpool: Add support for more parameters ([#1477](https://github.com/ansible-collections/azure/pull/1477))
+ - azure_rm_aksagentpool_info: Add support for more parameters ([#1477](https://github.com/ansible-collections/azure/pull/1477))
+ - azure_rm_adgroup: Allow service principals and nested groups to be returned in membership attributes ([#1507](https://github.com/ansible-collections/azure/pull/1507))
+ - azure_rm_adgroup_info: Allow service principals and nested groups to be returned in membership attributes ([#1507](https://github.com/ansible-collections/azure/pull/1507))
+ - azure_rm_backupazurevm: No need to JSON-serialize the response ([#1531](https://github.com/ansible-collections/azure/pull/1531))
+
+### BUG FIXING
+ - azure_rm_adapplication: Fix `optional_claims` handling ([#1480](https://github.com/ansible-collections/azure/pull/1480))
+ - azure_rm_cognitivesearch: Fix failing test ([#1520](https://github.com/ansible-collections/azure/pull/1520))
+ - azure_rm_common.py: Fix the inconsistency between custom classes and Python SDK attributes ([#1554](https://github.com/ansible-collections/azure/pull/1554))
+ - meta/runtime.yml:
+ - Keep action_groups and modules list consistent ([#1553](https://github.com/ansible-collections/azure/pull/1553))
+ - Delete the deprecate modules ([#1556](https://github.com/ansible-collections/azure/pull/1556))
+ - azure_rm_rediscache_info: Fix typo ([#1550](https://github.com/ansible-collections/azure/pull/1550))
+ - plugins/inventory/azure_rm.py: Fix inventory host processing ([#1545](https://github.com/ansible-collections/azure/pull/1545))
+ - azure_rm_accesstoken_info: Fix authorization issue ([#1541](https://github.com/ansible-collections/azure/pull/1541))
+ - azure_rm_adgroup: Support update functionality ([#1530](https://github.com/ansible-collections/azure/pull/1530))
+ - azure_rm_webapp: Delete the imported logging module ([#1567](https://github.com/ansible-collections/azure/pull/1567))
+ - azure_rm_postgresqlflexiblefirewallrule: Delete the logging module ([#1567](https://github.com/ansible-collections/azure/pull/1567))
+ - azure_rm_loadbalancer: Remove functionality which should have been removed for Ansible 2.9 ([#1508](https://github.com/ansible-collections/azure/pull/1508))
+ - azure_rm_networkinterface: Remove functionality which should have been removed for Ansible 2.9 ([#1508](https://github.com/ansible-collections/azure/pull/1508))
+ - azure_rm_localnetworkgateway: Fix documentation mistakes ([#1563](https://github.com/ansible-collections/azure/pull/1563))
+ - azure_rm_virtualmachine: Create `_own_nsg_` tag only if `created_nsg` is `true` ([#1565](https://github.com/ansible-collections/azure/pull/1565))
+ - azure_rm_storageblob: Fix authentication issue when shared keys are disabled ([#1564](https://github.com/ansible-collections/azure/pull/1564))
+ - azure_rm_virtualmachinescalesetinstance_info: Fix failure when obtaining flexible VMSS instances ([#1529](https://github.com/ansible-collections/azure/pull/1529))
+
+### BREAKING CHANGE:
+ - azure_rm_datalakestore: Deprecate `azure-mgmt-datalake-store` ([#1555](https://github.com/ansible-collections/azure/pull/1555))
+ - azure_rm_datalakestore_info: Deprecate `azure_rm_datalakestore_info` ([#1555](https://github.com/ansible-collections/azure/pull/1555))
+ - requirements.txt: Rename `requirements-azure.txt` to `requirements.txt` ([#1552](https://github.com/ansible-collections/azure/pull/1552))
+ - sanity-requirements.txt: Rename `sanity-requirements-azure.txt` to `sanity-requirements.txt` ([#1552](https://github.com/ansible-collections/azure/pull/1552))
+
## v2.3.0 (2024-03-27)
### NEW MODULES
diff --git a/ansible_collections/azure/azcollection/CONTRIBUTING.md b/ansible_collections/azure/azcollection/CONTRIBUTING.md
index 8358024a3..def176635 100644
--- a/ansible_collections/azure/azcollection/CONTRIBUTING.md
+++ b/ansible_collections/azure/azcollection/CONTRIBUTING.md
@@ -17,8 +17,8 @@ When contributing to this repository, please first discuss the change you wish t
. venv/bin/activate
pip3 install -U pip
pip3 install ansible
- pip3 install -r requirements-azure.txt
- pip3 install -r sanity-requirements-azure.txt
+ pip3 install -r requirements.txt
+ pip3 install -r sanity-requirements.txt
```
## Running tests
@@ -51,7 +51,7 @@ Additional `ansible-test` resources:
1. Please provide integration tests showing the changed behavior/functionality under `tests/integration/targets/<relevant-module>/tasks`.
1. Think about updating the documentation and examples for the changed module.
-1. Please run a sanity check. Install prerequisites `pip install -r sanity-requirements-azure.txt`, run with `ansible-test sanity --color -v --junit`. Read more at https://docs.ansible.com/ansible/latest/dev_guide/testing_sanity.html.
+1. Please run a sanity check. Install prerequisites `pip install -r sanity-requirements.txt`, run with `ansible-test sanity --color -v --junit`. Read more at https://docs.ansible.com/ansible/latest/dev_guide/testing_sanity.html.
1. There is a script `tests/utils/ado/ado.sh` for running tests inside an Azure DevOps pipeline. Unfortunately the pipeline and results are not visible for the public. You can perhaps adapt the parts of the script or use a small playbook to run the task list of the integration tests mentioned above.
## Release Process
diff --git a/ansible_collections/azure/azcollection/FILES.json b/ansible_collections/azure/azcollection/FILES.json
index 57fe6593a..89f7b8038 100644
--- a/ansible_collections/azure/azcollection/FILES.json
+++ b/ansible_collections/azure/azcollection/FILES.json
@@ -18,14 +18,14 @@
"name": "meta/execution-environment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ed6d806d5b0456d5d6ab6e46d68bdbe7b46b10b4352a80ae8b8487220337742",
+ "chksum_sha256": "0e30f905b015d1d8a17d3a540cb8892b479fcbf9cb873ac2ff0665fe499f318e",
"format": 1
},
{
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d034d1f9a361fef10f399b0a9e563b6b08df2a190432b69aced0323849298fcf",
+ "chksum_sha256": "b20ca160210ef90dd028b531f408c398538edc3f739ed6acd56f679764074d60",
"format": 1
},
{
@@ -74,7 +74,7 @@
"name": "tests/utils/ado/ado.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e74c310ccc00f71866dc1cd4bc73a66de6a38b2d8dceb2aef6d8bf2a20908f9",
+ "chksum_sha256": "c7dadf203058cd24f2c40f32d0f0a876dac25fad06184ea2bc5fb6e29161d9e4",
"format": 1
},
{
@@ -270,7 +270,7 @@
"name": "tests/integration/targets/azure_rm_webapp/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9774c1a314a6b5fef489d9405550a1395b9468bbea779af6ca95ae47af6386a9",
+ "chksum_sha256": "d462724d603f647080b3432af1e17d420e449077c59df7f3f4609fc5a3b158a2",
"format": 1
},
{
@@ -718,7 +718,7 @@
"name": "tests/integration/targets/azure_rm_aksagentpool/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "88959c5262b2f543793b47314b9bce24d267e32f9c3f419f71a242dbbccadf12",
+ "chksum_sha256": "ee9076297fe0a2b3cadab664bd2ab8f9e79b2a124b3a6d93f1c44a5fe2fb579a",
"format": 1
},
{
@@ -781,7 +781,7 @@
"name": "tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_public_ip.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc7a86147194125bacfe94a37a05af001ff6d68b6ee5b759d6561ceea51b6b33",
+ "chksum_sha256": "1d0616c0d69966ed1c2c8be54be02fa504886f509c926b33325fe2aa2478cd60",
"format": 1
},
{
@@ -806,6 +806,13 @@
"format": 1
},
{
+ "name": "tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_skus.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ffa9c0c2af337f8cec9f7b89ac35d90dc2115029124baccbad99a0448b87228a",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_image_latest.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -865,7 +872,7 @@
"name": "tests/integration/targets/azure_rm_virtualmachine/inventory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dbfa6b4c9d720fa3c687dfb4c1101701708593184aaf0950ea66cca1f7bf63eb",
+ "chksum_sha256": "1026ed40845d0bac9ebe24f762cb7ae7f8a4106f009d7f5dcb78012e9a2b0d04",
"format": 1
},
{
@@ -1082,14 +1089,14 @@
"name": "tests/integration/targets/azure_rm_aks/tasks/minimal-cluster.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b2be2027bb762b22ccf731a32baf3fa8b2675369c08b76c1499b3ea10902add",
+ "chksum_sha256": "498b94fcebf9ede6141bda0bbbadf14ef3eafcca168126018ca02c4a1ee7d169",
"format": 1
},
{
"name": "tests/integration/targets/azure_rm_aks/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "683619ec3541848983b2f513eae4d885c97d3040ac188d252aec7304c3440c0b",
+ "chksum_sha256": "f422e499d76bdabbe76f1b96731abe2fc0dffba34961b40bf351475dae9af0ae",
"format": 1
},
{
@@ -1383,7 +1390,7 @@
"name": "tests/integration/targets/azure_rm_eventhub/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4ef2c774f92b1d1c61c070407cb15cec4492593f03dfc095031136a6fc315e4",
+ "chksum_sha256": "3a85ebeff8f8be5b1241569a00c38d59bd340c0f6fb536d5b00fb34f4b205183",
"format": 1
},
{
@@ -1929,7 +1936,7 @@
"name": "tests/integration/targets/azure_rm_virtualmachineextension/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e3cb1009bcddcf4db403356c71a3862d40d9978395362480795d87961229f8d",
+ "chksum_sha256": "9e14c469074c567713a26ac54f6858c525f4fc093ee99dc47c8a3adad2317611",
"format": 1
},
{
@@ -1992,7 +1999,7 @@
"name": "tests/integration/targets/azure_rm_webappaccessrestriction/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23178ef47e9dbb43a51984becf44d72972a399952958fd2a76d71d6338864252",
+ "chksum_sha256": "487fcfc238a725862690c3b7aa968873a2839e2e2e568e41d113d7204f62580a",
"format": 1
},
{
@@ -2034,7 +2041,7 @@
"name": "tests/integration/targets/azure_rm_mysqlserver/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c74420c4bc9cc5d8af14406a1d97ea762c6b8afa4de8728a4a833b2e742cf41",
+ "chksum_sha256": "e8fa125d2071b693fa257c8ca5976af713c6861aabe41f1aafe898de48e0dcfc",
"format": 1
},
{
@@ -2678,7 +2685,7 @@
"name": "tests/integration/targets/azure_rm_postgresqlflexibleserver/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6fa3f158220b7cd8d38e7a773ee15e4f9781e627df4e3a209142f6c430414f1",
+ "chksum_sha256": "1c183408812df20a1e5416736ab072410277fcec3209eb764f18245ff470ad22",
"format": 1
},
{
@@ -2790,7 +2797,7 @@
"name": "tests/integration/targets/azure_rm_mariadbserver/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f1123b3696957f55774d4316a4461182998975a0db1f960ef289b20b5e926c7",
+ "chksum_sha256": "c2b0c67ab1a27132c26a7d0768c63194b16ef651a91408bfd1f9646a04e41584",
"format": 1
},
{
@@ -2850,6 +2857,48 @@
"format": 1
},
{
+ "name": "tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "109bb8ece8ca54a061f03f7050aca3401610606b79a6e313d2556dcb04fda637",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0dad8e7600bf9afdde5db7736b87f4d29bb3523fd55f72aa1b4357de196f2039",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/azure_rm_routetable",
"ftype": "dir",
"chksum_type": null,
@@ -3014,7 +3063,7 @@
"name": "tests/integration/targets/azure_rm_cognitivesearch/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3712847b582b6a3a3f461055963ce51034e160b1e174d06b8b8f6eeecd4bc19",
+ "chksum_sha256": "9ea4303f86d14b8ce34ef9f49ba3c42c680f3ef236e82d177ec0ca5b1847be6e",
"format": 1
},
{
@@ -3098,7 +3147,7 @@
"name": "tests/integration/targets/azure_rm_cosmosdbaccount/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e44d0c16980fee5a6cca7dc10c26e4bb487b761eb6aa87c5cee17e527c37453d",
+ "chksum_sha256": "e20f224fa3c981a35b36a5d015c3a310e9e9140d4da727e3d5700d4d820646ce",
"format": 1
},
{
@@ -3182,7 +3231,7 @@
"name": "tests/integration/targets/azure_rm_privatelinkservice/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26d7dc158898d9dbc017b66108ef335ff0b4a5ba01d2566cecbec6ad767763be",
+ "chksum_sha256": "0217aa78bbf022c890e4b3b1c4d9e979928b2d89446d236ffa49f0d85b72a214",
"format": 1
},
{
@@ -3266,7 +3315,7 @@
"name": "tests/integration/targets/azure_rm_loadbalancer/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b6dc390a0291155225b4305560c255bf3a36be60542213e2ad2fcc6fb9daf94",
+ "chksum_sha256": "17c0008d7d4478bf2c46548fbc575fe09c5feea4fa1220b0c6ed81c3d9cedca4",
"format": 1
},
{
@@ -3354,6 +3403,48 @@
"format": 1
},
{
+ "name": "tests/integration/targets/azure_rm_sqlmanageddatabase",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_sqlmanageddatabase/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_sqlmanageddatabase/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_sqlmanageddatabase/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be93ff3f283965eee2a98d5254d46fc1f8643c67e5364cc32f30a6278156136c",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_sqlmanageddatabase/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_sqlmanageddatabase/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "978ae8cc363ee0bc2dadb1440a0da469ebbc841ad5b0784b32de2ebc36ba9d0f",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/azure_rm_virtualmachinescaleset",
"ftype": "dir",
"chksum_type": null,
@@ -3392,7 +3483,49 @@
"name": "tests/integration/targets/azure_rm_virtualmachinescaleset/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b560766f54992fab349101fb4528f9bf12022337f573ad7f8bc3a9b80e9dff7",
+ "chksum_sha256": "947482d7d00c73ad0bdf10f3338d5d88545d8c9452d9cdcef949f6517baf2b4d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_storageaccountmanagementpolicy",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_storageaccountmanagementpolicy/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_storageaccountmanagementpolicy/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_storageaccountmanagementpolicy/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "109bb8ece8ca54a061f03f7050aca3401610606b79a6e313d2556dcb04fda637",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_storageaccountmanagementpolicy/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_storageaccountmanagementpolicy/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "66a66526c148b5f7b1199052e5b11748f809af5196473858803740fd7889826f",
"format": 1
},
{
@@ -3581,7 +3714,7 @@
"name": "tests/integration/targets/inventory_azure/playbooks/vars.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23fff20fb969471888915a24760b7a90723a30e3e0e6b7c7745864470c898027",
+ "chksum_sha256": "14e6cd31e3c01c64380ea41da324c28fdb69013e1ab588340a52878521ee45d7",
"format": 1
},
{
@@ -3595,7 +3728,14 @@
"name": "tests/integration/targets/inventory_azure/playbooks/setup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e95740d1317dda6c7a610a482d33b29e2035293ff74e10a2bcc0f2997ba85430",
+ "chksum_sha256": "e52019613e959bd8c5cd2b5acb0893e284c82e914a9c8a4855de39aa490848f7",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory_azure/playbooks/test_inventory_filter.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7fe385bf9ed8dc9a1f673d8d2700d19d4719cef0b372271582a330780f9fcb4d",
"format": 1
},
{
@@ -3616,7 +3756,7 @@
"name": "tests/integration/targets/inventory_azure/playbooks/teardown.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f44c8ef223d0219855c453cd859b44b7cd4b2fa10fb1f8cacac75e020975fc8",
+ "chksum_sha256": "b814aff009dbcc170e45c7eb754e23f0f711ecfcfeee671edec78d8dcd9fb994",
"format": 1
},
{
@@ -3634,6 +3774,13 @@
"format": 1
},
{
+ "name": "tests/integration/targets/inventory_azure/templates/filter.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f5ac04f954afcf425f27a4b3f04a6b251a6232a881ac6d07028027fc1d2a6b28",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/inventory_azure/templates/basic2.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -3644,7 +3791,7 @@
"name": "tests/integration/targets/inventory_azure/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "203fe16581a305486f7d5a2feafad324ed5515a96c26b4f4d84ab34a1cdcb6a3",
+ "chksum_sha256": "c97ddb33d20d39a8792d42d7c10bf940b595eb4c54ccc770e7637d7d7d79b1ad",
"format": 1
},
{
@@ -3742,7 +3889,7 @@
"name": "tests/integration/targets/azure_rm_image/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f15012224b91b0dcb340a2dfcd3d360a36bf2ed291b88a4206914ac5db295466",
+ "chksum_sha256": "000d058a325415d8c5273575da71d0b841945db7e8544dec89abd543cf2a7a5c",
"format": 1
},
{
@@ -4166,6 +4313,48 @@
"format": 1
},
{
+ "name": "tests/integration/targets/azure_rm_localnetworkgateway",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_localnetworkgateway/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_localnetworkgateway/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_localnetworkgateway/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "109bb8ece8ca54a061f03f7050aca3401610606b79a6e313d2556dcb04fda637",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_localnetworkgateway/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/azure_rm_localnetworkgateway/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1671301ff0e021480b49e44a541902ee03e0b19ed146feb4c65e3b72564cc925",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/azure_rm_registrationdefinition",
"ftype": "dir",
"chksum_type": null,
@@ -4372,7 +4561,7 @@
"name": "tests/integration/targets/azure_rm_aduser/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "978e84fe852bac2909bcf4d707ecd577b164e6bd25c694464403ff855562b369",
+ "chksum_sha256": "4007425fe50e73b451fb181b1dc93a4fde08d097a4e0a52198d7113ed0a8e8df",
"format": 1
},
{
@@ -4582,49 +4771,7 @@
"name": "tests/integration/targets/azure_rm_expressroute/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0fee3fb92d49fcebdc6564a4becd35f638cfa294e5d893fc5adf2fff21ac072b",
- "format": 1
- },
- {
- "name": "tests/integration/targets/azure_rm_datalakestore",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/azure_rm_datalakestore/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/azure_rm_datalakestore/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d00bd90476595ae7d97d3ca8f8aa7ff475ebc3dd87d7d12e862bf0527f7bd81f",
- "format": 1
- },
- {
- "name": "tests/integration/targets/azure_rm_datalakestore/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "528893af1cac01f38ab277aec0138b83c74b2576464d96eb7f3da330784edaff",
- "format": 1
- },
- {
- "name": "tests/integration/targets/azure_rm_datalakestore/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/azure_rm_datalakestore/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0731e52280287102127653617210703eeb3bd7abf3125367f33df7fd8d0be818",
+ "chksum_sha256": "454d18a3654dde5beb768bea56fdd4ef2458179bc79a48c1cb62265034158efb",
"format": 1
},
{
@@ -4708,7 +4855,7 @@
"name": "tests/integration/targets/azure_rm_networkinterface/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd2b9fd7ff5b256d26839ba3646b9347fa95f8fd6b1104e7f2835e1d7b7b2624",
+ "chksum_sha256": "baf43905968e29eb26a5d990d8b28ad01bd872e204b0edea4cfaa07f24cd81da",
"format": 1
},
{
@@ -4750,7 +4897,7 @@
"name": "tests/integration/targets/azure_rm_adapplication/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7b04ba312d08b8fdee715292776842a8533aed97ad6aae4c25ebcba04c8ccf2",
+ "chksum_sha256": "01dfe6079ef9bf4766404294c2f63eccea68790e9ed762ffcb0ddf8a1d4e8c55",
"format": 1
},
{
@@ -4960,7 +5107,7 @@
"name": "tests/integration/targets/azure_rm_storageblob/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7175cd6cb47363104377b380cc011fa46f815bbc69b2d81cbd347786159bda9e",
+ "chksum_sha256": "0775e6b9e57bfde86683496fba1c56817948fa396743ab2986f40001e4655694",
"format": 1
},
{
@@ -5146,31 +5293,31 @@
"format": 1
},
{
- "name": "shippable.yml",
+ "name": "tests/integration/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "515333579eea59619360d72e38cc2c5c9a8b43ff59cd3ddcc12c5b0172553b4a",
+ "chksum_sha256": "83fee6c4cd46119ab129ecb1012323a4e3acef73fcc5c4018cbd1aa89d6dca7a",
"format": 1
},
{
- "name": "README.md",
+ "name": "shippable.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "addfda03504e77be9156f1d5a1dec8ec3d9f81d08498ac814e7f782adbdc0e21",
+ "chksum_sha256": "515333579eea59619360d72e38cc2c5c9a8b43ff59cd3ddcc12c5b0172553b4a",
"format": 1
},
{
- "name": "CHANGELOG.md",
+ "name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9968490b2917b41466fcf58ff804ea27213672db0b91ac0f55ce076246a69913",
+ "chksum_sha256": "5514773c36ef505855887df54a38b3c7d496dc6b5532f8474fbbe2160ac5f8de",
"format": 1
},
{
- "name": "requirements-azure.txt",
+ "name": "CHANGELOG.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ed84a767e40fa7e5bdb9dccd739cb355185b1414a1beab6892a1bb8d3e6062d",
+ "chksum_sha256": "7c3f32f6956b0e39738ae3510371f7fcb977bd0665b592b83ee1b9b9a9aa5f24",
"format": 1
},
{
@@ -5198,7 +5345,7 @@
"name": "plugins/doc_fragments/azure.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2146d8b6b05f3dc1571069061663ff89f7527880e36a7f4fc4ac2e87ef0ee68",
+ "chksum_sha256": "f7203fe2730a03b98ee8106966dd80dce7ce8ce4a3e4decd9c106b5462b843ba",
"format": 1
},
{
@@ -5226,7 +5373,7 @@
"name": "plugins/module_utils/azure_rm_common.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f2d416d8b73431c47251128c3a0b7eee12c412cc63f951ffc64caace37920ac",
+ "chksum_sha256": "f7fb7925b32c02007adcd2bbf2359813376f358976d59ca315586f27908b2eff",
"format": 1
},
{
@@ -5324,7 +5471,7 @@
"name": "plugins/modules/azure_rm_adapplication_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e04053b0c584772df10dcd46845772eefb57116b655ced27ee6786fff8334f12",
+ "chksum_sha256": "7f24aa5ded46894cd49b39c28e79b6abbdac4cb47c31ad982667b0ce439e7d2b",
"format": 1
},
{
@@ -5387,7 +5534,7 @@
"name": "plugins/modules/azure_rm_webapp_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a420da6e6d479985a088e2f2c614bc462ad003d5814d81e4a6db489aef906a83",
+ "chksum_sha256": "52040df37e0732e052577d7bca4f92bef544958f674f271d7154d0f9ccb52123",
"format": 1
},
{
@@ -5426,6 +5573,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_virtualnetworkgatewaynatrule.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8760f67750b1eb2994c66d2d8214206a46c153ec8242f93a3e795a3ee77b46d0",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_sqlfirewallrule_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -5436,7 +5590,7 @@
"name": "plugins/modules/azure_rm_adgroup.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "152a3792aa58f812d46db9dc8b8ebfd3c78abf0406297d4b4c274ba23b398a1e",
+ "chksum_sha256": "06eacd6abd4a6dd192976d020a1e0662dbb9049476fef925da7e51131736c129",
"format": 1
},
{
@@ -5447,6 +5601,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_storageaccountmanagementpolicy.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "80c77fc072912a82ff2302343051ee8b7993da45d83f0f7eea51cd80beb616d5",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_firewallpolicy.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -5478,7 +5639,7 @@
"name": "plugins/modules/azure_rm_webappaccessrestriction_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c72ded459dbd8c58642c77aba5cfab56a86782f4e19958ded25f9754a32c1591",
+ "chksum_sha256": "d9349df2a2c45a49fecc4b74897269a65efc75e103f728875ebacafb9d1d27fa",
"format": 1
},
{
@@ -5534,7 +5695,7 @@
"name": "plugins/modules/azure_rm_aduser_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "59f317a97611776b87dd9d822f4234368027d5de8b98730851ba644ee9259031",
+ "chksum_sha256": "fbef98f5a2d79eda9fd59391c2a07c415fd87dee8d778a11923fb4f0ead02b6b",
"format": 1
},
{
@@ -5646,7 +5807,7 @@
"name": "plugins/modules/azure_rm_networkinterface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d6f2859a51857b23dd42dc71b71005de7a540c1b8f68178232237a4c0ac06e14",
+ "chksum_sha256": "4c0051e0e4870a4cdab376c746118b7f6710c9420e4d7a49ebbff7c2d1630b53",
"format": 1
},
{
@@ -5800,7 +5961,7 @@
"name": "plugins/modules/azure_rm_cognitivesearch.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8665ce3ff083d18608c7ed9b5fb778d6248e9cfcc096b464535d041457d9af00",
+ "chksum_sha256": "e160b95200bd079eef2f992b4d452f6cbb3314e8668f4d65a63bdd7047bba5e9",
"format": 1
},
{
@@ -5828,14 +5989,21 @@
"name": "plugins/modules/azure_rm_virtualmachine.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b002dccc80c26c8960dc842680bbcce59ec55561a5678d73c5ab1eb248183ce0",
+ "chksum_sha256": "0b8c74e553344c6be40ff70367051cbd300f04e828e2ecaf46890662cc0b5d5f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/azure_rm_sqlmidblongtermretentionpolicy_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "97734732b444fd1f900cf6f72008101c80e68e863a48d76c5f31aae10d891a1c",
"format": 1
},
{
"name": "plugins/modules/azure_rm_backupazurevm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "697611d8016ee00fdf81ee52c5d2174b670b3aa4a602498220cb4f1ffb455f9b",
+ "chksum_sha256": "6755df5a512af321a7f368cee44850e3107677fdecb549a4b7836d5bdff26651",
"format": 1
},
{
@@ -5874,6 +6042,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_sqlmidatabase_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "346bd24a4823dac19815993673f907de4dd715459aca506e2656d20193d3771d",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_iotdevice.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -5930,6 +6105,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_storageaccountmanagementpolicy_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a43710e515eb2a2d39aff332680a870e9a2375f56750dcccdc7770c1df28a74c",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_containerregistryreplication_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -6003,7 +6185,7 @@
"name": "plugins/modules/azure_rm_postgresqlflexibleserver.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea3b2ce15955406227c0367da00cebae6c44e18b9ef0c47720367e4fdd9397fc",
+ "chksum_sha256": "cf3b99161d8f3db99bb8dd2f1bb5be099217e33f56c00dc32c00bb24312d9acd",
"format": 1
},
{
@@ -6084,6 +6266,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_localnetworkgateway.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adc79bbfab386101bdaa959bc83939b389b50c3a232f5cdc7cdbd5a5a52c47c1",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_ddosprotectionplan_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -6192,7 +6381,7 @@
"name": "plugins/modules/azure_rm_aksagentpool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35eede66b4539d8ebfabaeba04dc70f75614d3bc94b2d19a721b527269e44404",
+ "chksum_sha256": "7a35e2d99594b65d9856cdc8f55077d538014a5b150f2432110aec31650097f3",
"format": 1
},
{
@@ -6234,7 +6423,7 @@
"name": "plugins/modules/azure_rm_aks.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d63a516ee5a970d80bea017097e3d474a59b34c0dc58f0d0ecaced542dfbb5fa",
+ "chksum_sha256": "4856852175a9ed9393112b4b6b9cc78d8a7cedd8cdc22af8f1fe8a9c28e95c53",
"format": 1
},
{
@@ -6420,6 +6609,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_localnetworkgateway_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "933fe6f6ef580ae3027f0baf07ea18f02612f1f13593a0f63c7650f9464cea48",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_mariadbdatabase.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -6549,7 +6745,7 @@
"name": "plugins/modules/azure_rm_webapp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3905f0d95ea3661bb716b8b1f6c7d0806ce92cb5eeec61824d0ec8657d75824f",
+ "chksum_sha256": "3351258e14bcaee9b5b80e739adf8be765ed9e3d2a0d61fa78066bcf2b9492d1",
"format": 1
},
{
@@ -6563,7 +6759,7 @@
"name": "plugins/modules/azure_rm_postgresqlflexibleserver_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8be887b3634bf62a040d6ffbb5d1d5317d672761144d3967856a574cac4c7f17",
+ "chksum_sha256": "72a200cab9e9e88c9fe97ef18a232470582343ee92a6a2076b861fa5769f4454",
"format": 1
},
{
@@ -6728,6 +6924,20 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_vmsku_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9879aed3be63a091c6751c73f5c975958654d93dcd85e51bc57ff548fcd83761",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/azure_rm_sqlmidatabase.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "73afdcfe76387142017c4cea8bf830840a3466aff5f1b88da9e30d48665aef17",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_recoveryservicesvault.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -6735,6 +6945,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_virtualnetworkgatewaynatrule_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9e7b7b26dfc0f117e522521801d6e0219796b0bfa4752cb708eb80ff54910fd5",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_postgresqlfirewallrule.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -6745,7 +6962,7 @@
"name": "plugins/modules/azure_rm_aksagentpool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "92c6e3a0f537164ae87f3cf64dd071238a5c8349d7ca84a16826720fc5b82e44",
+ "chksum_sha256": "8fa9bcb4e95c7eb0832f15411c90bdfbd98fa3f7aa4fe18b8e393aa579a99277",
"format": 1
},
{
@@ -6794,7 +7011,7 @@
"name": "plugins/modules/azure_rm_webappaccessrestriction.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a31793686f398ccb3ea7c32aad8341b808e06449bfe5ed468303d74a5406cf2a",
+ "chksum_sha256": "94a2b9c1365943cd7e693801205a1b35a2c4964d41ab626efce5a673c2f7714f",
"format": 1
},
{
@@ -6829,21 +7046,14 @@
"name": "plugins/modules/azure_rm_aduser.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f455cefd4a7af853f672ef7bffbc09422cd9f659841ac35ddc60a4a29c5cde65",
+ "chksum_sha256": "a81615ef44bc971af8f40ebe36ad5a506540f2ac9de62750cd6059eb1253a4ec",
"format": 1
},
{
"name": "plugins/modules/azure_rm_loadbalancer.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40bcb2a088750276d350e5170a7993a5e92117948835d6bd30dcf5989db2cf11",
- "format": 1
- },
- {
- "name": "plugins/modules/azure_rm_datalakestore_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "64ced3d20472f4e29c76baed328f40e9bc1483583f04fbf7e69e9346b5ed07ad",
+ "chksum_sha256": "3d82e7614ae63f4a2b5b6e5c5154621a51f145431cfc7f8301e7a527c7ad1d49",
"format": 1
},
{
@@ -6892,7 +7102,14 @@
"name": "plugins/modules/azure_rm_virtualmachinescalesetinstance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d79dad68277ae5d3c8c72e1265606e28b17e19c26d2dd94452723a08b22e490d",
+ "chksum_sha256": "6082701501e00cb0ae70bc7e6e6bc41f801688ec9410ed9b3b792298ee1b1182",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7d9987c45827fa732438ce3191a5f130ab1c2a4329010c3b5d1afe2cebac3e3e",
"format": 1
},
{
@@ -6913,7 +7130,7 @@
"name": "plugins/modules/azure_rm_virtualmachinescalesetinstance.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29c4528fce1aea43abda1fed65f9d49ef13b592592174c2305fd581e62cdcf8d",
+ "chksum_sha256": "3bc41c6bdf4497388092f31fa2f93a1714fee34bc31400014d3e48a806f688f9",
"format": 1
},
{
@@ -6973,17 +7190,10 @@
"format": 1
},
{
- "name": "plugins/modules/azure_rm_datalakestore.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "da55af371c87610112c5ff6dd9d8a1c1e3fa38ffd2e3bf8d7887f9a0dfdfd03b",
- "format": 1
- },
- {
"name": "plugins/modules/azure_rm_adapplication.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49e82eb39f31d45623c7713aa5e6f1a8ed2cb7eeffbd6eeca67a19ac8283687a",
+ "chksum_sha256": "ca40adcbe28168f7543c9cd8c591351797010759b7bb261e4bf6287b1114efde",
"format": 1
},
{
@@ -7060,7 +7270,7 @@
"name": "plugins/modules/azure_rm_postgresqlflexiblefirewallrule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8db7ef3ea60e698e7b5a65be465d6a639927cc63d91a2da61e1cb6a5f89fe7b8",
+ "chksum_sha256": "312142b7cbb5bc5885faa3150c6b4f4facbd8947e941ca3471b8c3db11e11b68",
"format": 1
},
{
@@ -7123,7 +7333,7 @@
"name": "plugins/modules/azure_rm_adgroup_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c2802b5f00ffe0edaf458e13a4929c6949f61d4d75e1d2b965445eb923fdf206",
+ "chksum_sha256": "0ca71fa39c2fec612f47f1882439eddfa830d7876ebe83e41f6cd649566e6a3d",
"format": 1
},
{
@@ -7169,6 +7379,13 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_sqlmidblongtermretentionpolicy.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1542e5381089973684f8b436c6450dc76c42ea86823a5365b8c7a92d14c9adae",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_functionapp.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -7207,7 +7424,7 @@
"name": "plugins/modules/azure_rm_accesstoken_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05f2949f236a8f79c474c1caa57df258695e9f0a87f9072abe4aef9ca1d323d6",
+ "chksum_sha256": "fa14d907bbb728e39ba2d2d641bde7432086f82eb8418939535c5020eed063d4",
"format": 1
},
{
@@ -7260,10 +7477,17 @@
"format": 1
},
{
+ "name": "plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96b890442e5096495fb8a20e2b9433fd6264be726bc2d6d7ec505c0971487a9b",
+ "format": 1
+ },
+ {
"name": "plugins/modules/azure_rm_rediscache_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a10c697c6832b6d01d4c3645819ee5a8f254715d5b5ac4bdd8db94dbe8fa5602",
+ "chksum_sha256": "1077222318629d3d75a3cb33f028d94a9dd244de03f8e1c365543191475f0100",
"format": 1
},
{
@@ -7333,7 +7557,7 @@
"name": "plugins/inventory/azure_rm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "47b59006cb06078a81321afbd3b443cb9255af87a78989411a8d42c61f9b66ec",
+ "chksum_sha256": "618f746d6c7754062d64d74b0d062ea2c95a7df483b8c264fe035383fffb6a9a",
"format": 1
},
{
@@ -7347,7 +7571,7 @@
"name": "plugins/lookup/azure_keyvault_secret.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80c27a8f8599afe6734f00e807a39e293e3569cf09517d2e09254996fe8c8818",
+ "chksum_sha256": "6d83b837326b17f2346dc2ce8d5e8fc49b691d99ac19e7a38960632e68b16969",
"format": 1
},
{
@@ -7361,28 +7585,28 @@
"name": "CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "290a3dfba9b53aecbf86badacfdcb7f320c9a924e02a8f0b4824785a551d0a39",
+ "chksum_sha256": "d8deff7133f403ec517ca4c28704781f3949ec6036dafaa664ad92f15672c625",
"format": 1
},
{
- "name": "azure-pipelines.yml",
+ "name": "sanity-requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f55cf75a25502f8322df2fc8640e52f85fed4459b8c26fee3c75295f9c45552",
+ "chksum_sha256": "62326475a7ffd00afcdaf8a3d89158f778593e56e8b694a388daf9f523b53648",
"format": 1
},
{
- "name": "CredScanSuppressions.json",
+ "name": "azure-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "98c0ee6a0b7a115787cf109a8c0e2ea4a61df6751ecda37915b22ffb44a1128d",
+ "chksum_sha256": "1f55cf75a25502f8322df2fc8640e52f85fed4459b8c26fee3c75295f9c45552",
"format": 1
},
{
- "name": "sanity-requirements-azure.txt",
+ "name": "CredScanSuppressions.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62326475a7ffd00afcdaf8a3d89158f778593e56e8b694a388daf9f523b53648",
+ "chksum_sha256": "98c0ee6a0b7a115787cf109a8c0e2ea4a61df6751ecda37915b22ffb44a1128d",
"format": 1
},
{
@@ -7396,7 +7620,7 @@
"name": "pr-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce013c171143675f4d44e20b4b7d3907c5cee12c53f055c2c3bf6edebb1342ea",
+ "chksum_sha256": "7d55584268373d432d198db717c9da339c660ff169d27ff7dfa665f317d933a2",
"format": 1
},
{
@@ -7407,6 +7631,13 @@
"format": 1
},
{
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "83fee6c4cd46119ab129ecb1012323a4e3acef73fcc5c4018cbd1aa89d6dca7a",
+ "format": 1
+ },
+ {
"name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
diff --git a/ansible_collections/azure/azcollection/MANIFEST.json b/ansible_collections/azure/azcollection/MANIFEST.json
index c78a03285..7cff91198 100644
--- a/ansible_collections/azure/azcollection/MANIFEST.json
+++ b/ansible_collections/azure/azcollection/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "azure",
"name": "azcollection",
- "version": "2.3.0",
+ "version": "2.4.0",
"authors": [
"Microsoft <ansible@microsoft.com>"
],
@@ -31,7 +31,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "27255da19a036c3db65d132f41dc1d28980b84543f99724b5d1e8d529804eabe",
+ "chksum_sha256": "639f4d22a875902471bb829d432cc8ce23d4cd45442be62e49782873e19670be",
"format": 1
},
"format": 1
diff --git a/ansible_collections/azure/azcollection/README.md b/ansible_collections/azure/azcollection/README.md
index cdc2953a6..196e4ee7a 100644
--- a/ansible_collections/azure/azcollection/README.md
+++ b/ansible_collections/azure/azcollection/README.md
@@ -24,7 +24,7 @@ ansible-galaxy collection install azure.azcollection
Install dependencies required by the collection (adjust path to collection if necessary):
```bash
-pip3 install -r ~/.ansible/collections/ansible_collections/azure/azcollection/requirements-azure.txt
+pip3 install -r ~/.ansible/collections/ansible_collections/azure/azcollection/requirements.txt
```
To upgrade to the latest version of Azure collection:
diff --git a/ansible_collections/azure/azcollection/meta/execution-environment.yml b/ansible_collections/azure/azcollection/meta/execution-environment.yml
index 08fbf66a3..283dbe334 100644
--- a/ansible_collections/azure/azcollection/meta/execution-environment.yml
+++ b/ansible_collections/azure/azcollection/meta/execution-environment.yml
@@ -1,3 +1,3 @@
dependencies:
- python: requirements-azure.txt
+ python: requirements.txt
version: 1
diff --git a/ansible_collections/azure/azcollection/meta/runtime.yml b/ansible_collections/azure/azcollection/meta/runtime.yml
index 5870f6d89..2da8ce1a3 100644
--- a/ansible_collections/azure/azcollection/meta/runtime.yml
+++ b/ansible_collections/azure/azcollection/meta/runtime.yml
@@ -2,6 +2,7 @@
requires_ansible: '>=2.14.0'
action_groups:
all:
+ - azure.azcollection.azure_rm_accesstoken_info
- azure.azcollection.azure_rm_account_info
- azure.azcollection.azure_rm_adapplication
- azure.azcollection.azure_rm_adapplication_info
@@ -18,6 +19,7 @@ action_groups:
- azure.azcollection.azure_rm_aksagentpool
- azure.azcollection.azure_rm_aksagentpool_info
- azure.azcollection.azure_rm_aksagentpoolversion_info
+ - azure.azcollection.azure_rm_akscredentials_info
- azure.azcollection.azure_rm_aksupgrade_info
- azure.azcollection.azure_rm_aksversion_info
- azure.azcollection.azure_rm_apimanagement
@@ -47,6 +49,7 @@ action_groups:
- azure.azcollection.azure_rm_bastionhost
- azure.azcollection.azure_rm_bastionhost_info
- azure.azcollection.azure_rm_batchaccount
+ - azure.azcollection.azure_rm_batchaccount_info
- azure.azcollection.azure_rm_cdnendpoint
- azure.azcollection.azure_rm_cdnendpoint_info
- azure.azcollection.azure_rm_cdnprofile
@@ -67,8 +70,6 @@ action_groups:
- azure.azcollection.azure_rm_cosmosdbaccount_info
- azure.azcollection.azure_rm_datafactory
- azure.azcollection.azure_rm_datafactory_info
- - azure.azcollection.azure_rm_datalakestore
- - azure.azcollection.azure_rm_datalakestore_info
- azure.azcollection.azure_rm_ddosprotectionplan
- azure.azcollection.azure_rm_ddosprotectionplan_info
- azure.azcollection.azure_rm_deployment
@@ -169,12 +170,20 @@ action_groups:
- azure.azcollection.azure_rm_notificationhub_info
- azure.azcollection.azure_rm_openshiftmanagedcluster
- azure.azcollection.azure_rm_openshiftmanagedcluster_info
+ - azure.azcollection.azure_rm_openshiftmanagedclusterkubeconfig_info
- azure.azcollection.azure_rm_postgresqlconfiguration
- azure.azcollection.azure_rm_postgresqlconfiguration_info
- azure.azcollection.azure_rm_postgresqldatabase
- azure.azcollection.azure_rm_postgresqldatabase_info
- azure.azcollection.azure_rm_postgresqlfirewallrule
- azure.azcollection.azure_rm_postgresqlfirewallrule_info
+ - azure.azcollection.azure_rm_postgresqlflexibleconfiguration_info
+ - azure.azcollection.azure_rm_postgresqlflexibledatabase
+ - azure.azcollection.azure_rm_postgresqlflexibledatabase_info
+ - azure.azcollection.azure_rm_postgresqlflexiblefirewallrule
+ - azure.azcollection.azure_rm_postgresqlflexiblefirewallrule_info
+ - azure.azcollection.azure_rm_postgresqlflexibleserver
+ - azure.azcollection.azure_rm_postgresqlflexibleserver_info
- azure.azcollection.azure_rm_postgresqlserver
- azure.azcollection.azure_rm_postgresqlserver_info
- azure.azcollection.azure_rm_privatednsrecordset
@@ -195,6 +204,8 @@ action_groups:
- azure.azcollection.azure_rm_proximityplacementgroup_info
- azure.azcollection.azure_rm_publicipaddress
- azure.azcollection.azure_rm_publicipaddress_info
+ - azure.azcollection.azure_rm_publicipprefix
+ - azure.azcollection.azure_rm_publicipprefix_info
- azure.azcollection.azure_rm_recoveryservicesvault
- azure.azcollection.azure_rm_recoveryservicesvault_info
- azure.azcollection.azure_rm_rediscache
@@ -231,10 +242,12 @@ action_groups:
- azure.azcollection.azure_rm_sqlelasticpool_info
- azure.azcollection.azure_rm_sqlfirewallrule
- azure.azcollection.azure_rm_sqlfirewallrule_info
- - azure.azcollection.azure_rm_sqlserver
- - azure.azcollection.azure_rm_sqlserver_info
- azure.azcollection.azure_rm_sqlmanagedinstance
- azure.azcollection.azure_rm_sqlmanagedinstance_info
+ - azure.azcollection.azure_rm_sqlserver
+ - azure.azcollection.azure_rm_sqlserver_info
+ - azure.azcollection.azure_rm_sshpublickey
+ - azure.azcollection.azure_rm_sshpublickey_info
- azure.azcollection.azure_rm_storageaccount
- azure.azcollection.azure_rm_storageaccount_info
- azure.azcollection.azure_rm_storageblob
diff --git a/ansible_collections/azure/azcollection/plugins/doc_fragments/azure.py b/ansible_collections/azure/azcollection/plugins/doc_fragments/azure.py
index 74c1286ac..9fc975084 100644
--- a/ansible_collections/azure/azcollection/plugins/doc_fragments/azure.py
+++ b/ansible_collections/azure/azcollection/plugins/doc_fragments/azure.py
@@ -134,7 +134,7 @@ options:
requirements:
- python >= 2.7
- The host that executes this module must have the azure.azcollection collection installed via galaxy
- - All python packages listed in collection's requirements-azure.txt must be installed via pip on the host that executes modules from azure.azcollection
+ - All python packages listed in collection's requirements.txt must be installed via pip on the host that executes modules from azure.azcollection
- Full installation instructions may be found https://galaxy.ansible.com/azure/azcollection
notes:
diff --git a/ansible_collections/azure/azcollection/plugins/inventory/azure_rm.py b/ansible_collections/azure/azcollection/plugins/inventory/azure_rm.py
index 12970dec3..0da9d4fcb 100644
--- a/ansible_collections/azure/azcollection/plugins/inventory/azure_rm.py
+++ b/ansible_collections/azure/azcollection/plugins/inventory/azure_rm.py
@@ -602,7 +602,8 @@ class AzureHost(object):
# set nic-related values from the primary NIC first
for nic in sorted(self.nics, key=lambda n: n.is_primary, reverse=True):
# and from the primary IP config per NIC first
- for ipc in sorted(nic._nic_model['properties']['ipConfigurations'], key=lambda i: i['properties'].get('primary', False), reverse=True):
+ for ipc in sorted(nic._nic_model.get('properties', {}).get('ipConfigurations', []),
+ key=lambda i: i.get('properties', {}).get('primary', False), reverse=True):
try:
subnet = ipc['properties'].get('subnet')
if subnet:
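
The inventory hunk above hardens the primary-NIC walk so that NIC models missing a `properties` or `ipConfigurations` key no longer raise a KeyError. A minimal sketch of the same defensive pattern, using a made-up NIC dict purely for illustration:

```python
# Sketch only (not the plugin's code): chained dict.get() calls fall back to an
# empty dict/list, so a malformed NIC model yields zero iterations instead of a
# KeyError, while primary IP configurations still sort to the front.
nic_model = {"properties": {"ipConfigurations": [
    {"properties": {"primary": False, "privateIPAddress": "10.0.0.5"}},
    {"properties": {"primary": True, "privateIPAddress": "10.0.0.4"}},
]}}

for ipc in sorted(nic_model.get("properties", {}).get("ipConfigurations", []),
                  key=lambda i: i.get("properties", {}).get("primary", False),
                  reverse=True):
    print(ipc["properties"].get("privateIPAddress"))  # primary address (10.0.0.4) prints first
```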
diff --git a/ansible_collections/azure/azcollection/plugins/lookup/azure_keyvault_secret.py b/ansible_collections/azure/azcollection/plugins/lookup/azure_keyvault_secret.py
index 5e693e4b3..6a6dd8f10 100644
--- a/ansible_collections/azure/azcollection/plugins/lookup/azure_keyvault_secret.py
+++ b/ansible_collections/azure/azcollection/plugins/lookup/azure_keyvault_secret.py
@@ -34,6 +34,8 @@ options:
description: Tenant id of service principal.
use_msi:
description: MSI token autodiscover, default is true.
+ cloud_type:
+ description: Specify which cloud, such as C(azure), C(usgovcloudapi).
notes:
- If version is not provided, this plugin will return the latest version of the secret.
- If ansible is running on Azure Virtual Machine with MSI enabled, client_id, secret and tenant isn't required.
@@ -51,6 +53,10 @@ EXAMPLE = """
debug:
msg: msg: "{{ lookup('azure.azcollection.azure_keyvault_secret', 'testsecret', vault_url=key_vault_uri)}}"
+- name: Look up secret with cloud type
+ debug:
+    msg: "{{ lookup('azure.azcollection.azure_keyvault_secret', 'testsecret', cloud_type='usgovcloudapi', vault_url=key_vault_uri)}}"
+
- name: Look up secret when ansible host is MSI enabled Azure VM
debug:
msg: "the value of this secret is {{
@@ -133,15 +139,6 @@ TOKEN_ACQUIRED = False
logger = logging.getLogger("azure.identity").setLevel(logging.ERROR)
-token_params = {
- 'api-version': '2018-02-01',
- 'resource': 'https://vault.azure.net'
-}
-
-token_headers = {
- 'Metadata': 'true'
-}
-
def lookup_secret_non_msi(terms, vault_url, kwargs):
@@ -178,6 +175,15 @@ class LookupModule(LookupBase):
TOKEN_ACQUIRED = False
token = None
+ token_params = {
+ 'api-version': '2018-02-01',
+ 'resource': 'https://vault.{0}.net'.format(kwargs.get('cloud_type', 'azure'))
+ }
+
+ token_headers = {
+ 'Metadata': 'true'
+ }
+
if use_msi:
try:
token_res = requests.get('http://169.254.169.254/metadata/identity/oauth2/token',
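
The lookup hunk above moves the MSI token parameters into the run and derives the Key Vault resource suffix from the new `cloud_type` option instead of hard-coding `vault.azure.net`. A hedged sketch of that construction; the helper name and sample value are illustrative only:

```python
# Sketch only: build the IMDS token request parameters for a given cloud type.
# The default "azure" reproduces the old public-cloud resource URL, while
# "usgovcloudapi" matches the Azure US Government example in the documentation.
def build_token_request(cloud_type="azure"):
    token_params = {
        "api-version": "2018-02-01",
        "resource": "https://vault.{0}.net".format(cloud_type),
    }
    token_headers = {"Metadata": "true"}
    return token_params, token_headers

params, headers = build_token_request("usgovcloudapi")
print(params["resource"])  # https://vault.usgovcloudapi.net
```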
diff --git a/ansible_collections/azure/azcollection/plugins/module_utils/azure_rm_common.py b/ansible_collections/azure/azcollection/plugins/module_utils/azure_rm_common.py
index 79b5167b1..c747fc72c 100644
--- a/ansible_collections/azure/azcollection/plugins/module_utils/azure_rm_common.py
+++ b/ansible_collections/azure/azcollection/plugins/module_utils/azure_rm_common.py
@@ -279,8 +279,6 @@ try:
except ImportError:
import azure.mgmt.recoveryservicesbackup.activestamp.models as RecoveryServicesBackupModels
from azure.mgmt.search import SearchManagementClient
- from azure.mgmt.datalake.store import DataLakeStoreAccountManagementClient
- import azure.mgmt.datalake.store.models as DataLakeStoreAccountModel
from azure.mgmt.notificationhubs import NotificationHubsManagementClient
from azure.mgmt.eventhub import EventHubManagementClient
from azure.mgmt.datafactory import DataFactoryManagementClient
@@ -704,6 +702,12 @@ class AzureRMModuleBase(object):
account = self.storage_client.storage_accounts.get_properties(resource_group_name=resource_group_name, account_name=storage_account_name)
if auth_mode == 'login' and self.azure_auth.credentials.get('credential'):
credential = self.azure_auth.credentials['credential']
+ elif (auth_mode == 'login' and self.azure_auth.credentials.get('tenant')
+ and self.azure_auth.credentials.get('client_id')
+ and self.azure_auth.credentials.get('secret')):
+ credential = client_secret.ClientSecretCredential(tenant_id=self.azure_auth.credentials.get('tenant'),
+ client_id=self.azure_auth.credentials.get('client_id'),
+ client_secret=self.azure_auth.credentials.get('secret'))
else:
account_keys = self.storage_client.storage_accounts.list_keys(resource_group_name=resource_group_name, account_name=storage_account_name)
credential = account_keys.keys[0].value
@@ -947,11 +951,17 @@ class AzureRMModuleBase(object):
def _ansible_get_models(self, *arg, **kwarg):
return self._ansible_models
- setattr(client, '_ansible_models', importlib.import_module(client_type.__module__).models)
+ try:
+ setattr(client, '_ansible_models', importlib.import_module(client_type.__module__).models)
+ except AttributeError:
+ setattr(client, '_ansible_models', importlib.import_module(client_type.__module__)._models)
client.models = types.MethodType(_ansible_get_models, client)
if self.azure_auth._cert_validation_mode == 'ignore':
- client._config.session_configuration_callback = self._validation_ignore_callback
+ if hasattr(client, '_config'):
+ client._config.session_configuration_callback = self._validation_ignore_callback
+ else:
+ client.config.session_configuration_callback = self._validation_ignore_callback
return client
@@ -1361,19 +1371,6 @@ class AzureRMModuleBase(object):
return self._search_client
@property
- def datalake_store_client(self):
- self.log('Getting datalake store client...')
- if not self._datalake_store_client:
- self._datalake_store_client = self.get_mgmt_svc_client(DataLakeStoreAccountManagementClient,
- base_url=self._cloud_environment.endpoints.resource_manager,
- api_version='2016-11-01')
- return self._datalake_store_client
-
- @property
- def datalake_store_models(self):
- return DataLakeStoreAccountModel
-
- @property
def notification_hub_client(self):
self.log('Getting notification hub client')
if not self._notification_hub_client:
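
Alongside dropping the Data Lake Store client, the module_utils hunk above adds two compatibility fallbacks: SDK modules that expose their model classes under `._models` rather than `.models`, and clients that keep their configuration on `_config` instead of `config`. A minimal sketch of the models fallback, assuming a client object and its management-client type are already in hand:

```python
# Sketch only: prefer the SDK module's public "models" attribute and fall back
# to the private "_models" attribute that newer azure-mgmt packages ship.
import importlib
import types


def attach_models(client, client_type):
    module = importlib.import_module(client_type.__module__)
    try:
        client._ansible_models = module.models
    except AttributeError:
        client._ansible_models = module._models  # only the private name exists
    # expose the models via client.models(), mirroring the patched helper
    client.models = types.MethodType(lambda self, *args, **kwargs: self._ansible_models, client)
    return client
```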
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_accesstoken_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_accesstoken_info.py
index cf9569868..c020d4dd1 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_accesstoken_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_accesstoken_info.py
@@ -95,7 +95,7 @@ class AzureRMAccessToken(AzureRMModuleBase):
super(AzureRMAccessToken, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=False,
- is_ad_resource=False)
+ is_ad_resource=True)
def exec_module(self, **kwargs):
for key in list(self.module_arg_spec.keys()):
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication.py
index b428463aa..33270da27 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication.py
@@ -146,33 +146,101 @@ options:
optional_claims:
description:
- Declare the optional claims for the application.
- type: list
- elements: dict
+ type: dict
suboptions:
- name:
- description:
- - The name of the optional claim.
- type: str
- required: True
- source:
+ access_token_claims :
description:
- - The source (directory object) of the claim.
- - There are predefined claims and user-defined claims from extension properties.
- - If the source value is null, the claim is a predefined optional claim.
- - If the source value is user, the value in the name property is the extension property from the user object.
- type: str
- essential:
+ - The optional claims returned in the JWT access token
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The name of the optional claim.
+ type: str
+ required: True
+ source:
+ description:
+ - The source (directory object) of the claim.
+ - There are predefined claims and user-defined claims from extension properties.
+ - If the source value is null, the claim is a predefined optional claim.
+ - If the source value is user, the value in the name property is the extension property from the user object.
+ type: str
+ essential:
+ description:
+ - If the value is true, the claim specified by the client is necessary to ensure a smooth authorization experience\
+ for the specific task requested by the end user.
+ - The default value is false.
+ default: false
+ type: bool
+ additional_properties:
+ description:
+ - Additional properties of the claim.
+ - If a property exists in this collection, it modifies the behavior of the optional claim specified in the name property.
+ type: list
+ elements: str
+ id_token_claims:
description:
- - If the value is true, the claim specified by the client is necessary to ensure a smooth authorization experience
- for the specific task requested by the end user.
- - The default value is false.
- default: false
- type: bool
- additional_properties:
+ - The optional claims returned in the JWT ID token
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The name of the optional claim.
+ type: str
+ required: True
+ source:
+ description:
+ - The source (directory object) of the claim.
+ - There are predefined claims and user-defined claims from extension properties.
+ - If the source value is null, the claim is a predefined optional claim.
+ - If the source value is user, the value in the name property is the extension property from the user object.
+ type: str
+ essential:
+ description:
+ - If the value is true, the claim specified by the client is necessary to ensure a smooth authorization experience\
+ for the specific task requested by the end user.
+ - The default value is false.
+ default: false
+ type: bool
+ additional_properties:
+ description:
+ - Additional properties of the claim.
+ - If a property exists in this collection, it modifies the behavior of the optional claim specified in the name property.
+ type: list
+ elements: str
+ saml2_token_claims:
description:
- - Additional properties of the claim.
- - If a property exists in this collection, it modifies the behavior of the optional claim specified in the name property.
- type: str
+ - The optional claims returned in the SAML token
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The name of the optional claim.
+ type: str
+ required: True
+ source:
+ description:
+ - The source (directory object) of the claim.
+ - There are predefined claims and user-defined claims from extension properties.
+ - If the source value is null, the claim is a predefined optional claim.
+                            - If the source value is user, the value in the name property is the extension property from the user object.
+ type: str
+ essential:
+ description:
+ - If the value is true, the claim specified by the client is necessary to ensure a smooth authorization experience\
+ for the specific task requested by the end user.
+ - The default value is false.
+ default: false
+ type: bool
+ additional_properties:
+ description:
+ - Additional properties of the claim.
+ - If a property exists in this collection, it modifies the behavior of the optional claim specified in the name property.
+ type: list
+ elements: str
password:
description:
- App password, aka 'client secret'.
@@ -293,87 +361,99 @@ EXAMPLES = '''
'''
RETURN = '''
-output:
+display_name:
+ description:
+ - Object's display name or its prefix.
+ type: str
+ returned: always
+ sample: fredAKSCluster
+app_id:
+ description:
+ - The application ID.
+ returned: always
+ type: str
+ sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+object_id:
+ description:
+ - Object ID of the application
+ returned: always
+ type: str
+ sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+sign_in_audience:
description:
- - Current state of the adapplication.
+ - The application can be used from any Azure AD tenants.
+ returned: always
+ type: str
+ sample: AzureADandPersonalMicrosoftAccount
+available_to_other_tenants:
+ description:
+ - The application can be used from any Azure AD tenants.
+ returned: always
+ type: str
+ sample: AzureADandPersonalMicrosoftAccount
+homepage:
+ description:
+ - The url where users can sign in and use your app.
+ returned: always
+ type: str
+ sample: null
+identifier_uris:
+ description:
+ - Space-separated unique URIs that Azure AD can use for this app.
+ returned: always
+ type: list
+ sample: []
+oauth2_allow_implicit_flow:
+ description:
+ - Whether to allow implicit grant flow for OAuth2.
+ returned: always
+ type: bool
+ sample: false
+public_client_reply_urls:
+ description:
+ - The public client redirect urls.
+ - Space-separated URIs to which Azure AD will redirect in response to an OAuth 2.0 request.
+ returned: always
+ type: list
+ sample: []
+web_reply_urls:
+ description:
+ - The web redirect urls.
+ - Space-separated URIs to which Azure AD will redirect in response to an OAuth 2.0 request.
+ returned: always
+ type: list
+ sample: []
+spa_reply_urls:
+ description:
+ - The spa redirect urls.
+ - Space-separated URIs to which Azure AD will redirect in response to an OAuth 2.0 request.
+ returned: always
+ type: list
+ sample: []
+optional_claims:
+ description:
+ - Declare the optional claims for the application.
type: complex
- returned: awalys
+ returned: always
contains:
- display_name:
- description:
- - Object's display name or its prefix.
- type: str
- returned: always
- sample: fredAKSCluster
- app_id:
- description:
- - The application ID.
- returned: always
- type: str
- sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- object_id:
- description:
- - Object ID of the application
- returned: always
- type: str
- sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- sign_in_audience:
- description:
- - The application can be used from any Azure AD tenants.
- returned: always
- type: str
- sample: AzureADandPersonalMicrosoftAccount
- available_to_other_tenants:
- description:
- - The application can be used from any Azure AD tenants.
- returned: always
- type: str
- sample: AzureADandPersonalMicrosoftAccount
- homepage:
- description:
- - The url where users can sign in and use your app.
- returned: always
- type: str
- sample: null
- identifier_uris:
+ access_token_claims :
description:
- - Space-separated unique URIs that Azure AD can use for this app.
- returned: always
+ - The optional claims returned in the JWT access token
type: list
- sample: []
- oauth2_allow_implicit_flow:
- description:
- - Whether to allow implicit grant flow for OAuth2.
returned: always
- type: bool
- sample: false
- optional_claims:
+ sample: ['name': 'aud', 'source': null, 'essential': false, 'additional_properties': []]
+ id_token_claims:
description:
- - The optional claims for the application.
- returned: always
+ - The optional claims returned in the JWT ID token
type: list
- sample: []
- public_client_reply_urls:
- description:
- - The public client redirect urls.
- - Space-separated URIs to which Azure AD will redirect in response to an OAuth 2.0 request.
returned: always
- type: list
- sample: []
- web_reply_urls:
+ sample: ['name': 'acct', 'source': null, 'essential': false, 'additional_properties': []]
+ saml2_token_claims:
description:
- - The web redirect urls.
- - Space-separated URIs to which Azure AD will redirect in response to an OAuth 2.0 request.
- returned: always
+ - The optional claims returned in the SAML token
type: list
- sample: []
- spa_reply_urls:
- description:
- - The spa redirect urls.
- - Space-separated URIs to which Azure AD will redirect in response to an OAuth 2.0 request.
returned: always
- type: list
- sample: []
+ sample: ['name': 'acct', 'source': null, 'essential': false, 'additional_properties': []]
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBaseExt
@@ -395,6 +475,8 @@ try:
from msgraph.generated.models.spa_application import SpaApplication
from msgraph.generated.models.public_client_application import PublicClientApplication
from msgraph.generated.models.implicit_grant_settings import ImplicitGrantSettings
+ from msgraph.generated.models.optional_claim import OptionalClaim
+ from msgraph.generated.models.optional_claims import OptionalClaims
except ImportError:
# This is handled in azure_rm_common
pass
@@ -419,7 +501,7 @@ app_role_spec = dict(
)
)
-optional_claims_spec = dict(
+claims_spec = dict(
name=dict(
type='str',
required=True
@@ -432,9 +514,11 @@ optional_claims_spec = dict(
default=False
),
additional_properties=dict(
- type='str'
+ type='list',
+ elements='str'
)
)
+
required_resource_accesses_spec = dict(
resource_app_id=dict(
type='str'
@@ -481,7 +565,14 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
key_value=dict(type='str', no_log=True),
native_app=dict(type='bool'),
oauth2_allow_implicit_flow=dict(type='bool'),
- optional_claims=dict(type='list', elements='dict', options=optional_claims_spec),
+ optional_claims=dict(
+ type='dict',
+ options=dict(
+ access_token_claims=dict(type='list', elements='dict', no_log=True, options=claims_spec),
+ id_token_claims=dict(type='list', elements='dict', no_log=True, options=claims_spec),
+ saml2_token_claims=dict(type='list', elements='dict', no_log=True, options=claims_spec),
+ )
+ ),
password=dict(type='str', no_log=True),
public_client_reply_urls=dict(type='list', elements='str'),
web_reply_urls=dict(type='list', elements='str', aliases=['reply_urls']),
@@ -559,6 +650,9 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
if self.app_roles:
app_roles = self.build_app_roles(self.app_roles)
+ if self.optional_claims:
+ optional_claims = self.build_optional_claims(self.optional_claims)
+
create_app = Application(
sign_in_audience=self.sign_in_audience,
web=WebApplication(
@@ -576,7 +670,7 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
password_credentials=password_creds,
required_resource_access=required_accesses,
app_roles=app_roles,
- optional_claims=self.optional_claims
+ optional_claims=optional_claims
# allow_guests_sign_in=self.allow_guests_sign_in,
)
response = asyncio.get_event_loop().run_until_complete(self.create_application(create_app))
@@ -603,6 +697,9 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
if self.app_roles:
app_roles = self.build_app_roles(self.app_roles)
+ if self.optional_claims:
+ optional_claims = self.build_optional_claims(self.optional_claims)
+
app_update_param = Application(
sign_in_audience=self.sign_in_audience,
web=WebApplication(
@@ -621,7 +718,7 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
required_resource_access=required_accesses,
# allow_guests_sign_in=self.allow_guests_sign_in,
app_roles=app_roles,
- optional_claims=self.optional_claims)
+ optional_claims=optional_claims)
asyncio.get_event_loop().run_until_complete(self.update_application(
obj_id=old_response['object_id'], update_app=app_update_param))
@@ -665,6 +762,15 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
return True
return False
+ def serialize_claims(self, claims):
+ if claims is None:
+ return None
+ return [{
+ "additional_properties": claim.additional_properties,
+ "essential": claim.essential,
+ "name": claim.name,
+ "source": claim.source} for claim in claims]
+
def to_dict(self, object):
app_roles = [{
'id': app_role.id,
@@ -673,6 +779,11 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
'value': app_role.value,
"description": app_role.description
} for app_role in object.app_roles]
+ optional_claims = {
+ "access_token": self.serialize_claims(object.optional_claims.access_token),
+ "id_token": self.serialize_claims(object.optional_claims.id_token),
+ "saml2_token": self.serialize_claims(object.optional_claims.saml2_token)
+ } if object.optional_claims is not None else object.optional_claims
return dict(
app_id=object.app_id,
object_id=object.id,
@@ -683,7 +794,7 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
homepage=object.web.home_page_url,
identifier_uris=object.identifier_uris,
oauth2_allow_implicit_flow=object.web.implicit_grant_settings.enable_access_token_issuance,
- optional_claims=object.optional_claims,
+ optional_claims=optional_claims,
# allow_guests_sign_in=object.allow_guests_sign_in,
web_reply_urls=object.web.redirect_uris,
spa_reply_urls=object.spa.redirect_uris,
@@ -762,6 +873,25 @@ class AzureRMADApplication(AzureRMModuleBaseExt):
result.append(role)
return result
+ def build_optional_claims(self, optional_claims):
+
+ def build_claims(claims_dict):
+ if claims_dict is None:
+ return None
+ return [OptionalClaim(
+ essential=claim.get("essential"),
+ name=claim.get("name"),
+ source=claim.get("source"),
+ additional_properties=claim.get("additional_properties")
+ ) for claim in claims_dict]
+
+ claims = OptionalClaims(
+ access_token=build_claims(optional_claims.get("access_token_claims")),
+ id_token=build_claims(optional_claims.get("id_token_claims")),
+ saml2_token=build_claims(optional_claims.get("saml2_token_claims"))
+ )
+ return claims
+
async def create_application(self, create_app):
return await self._client.applications.post(body=create_app)
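Editorial note: the hunk above reworks optional_claims from a flat list into a dict keyed by token type. Below is a minimal, self-contained Python sketch of how that new argument shape is expected to map onto the Graph payload; plain dicts stand in for the msgraph OptionalClaim/OptionalClaims models, and the values are illustrative only.

    # Sketch only: mirrors the structure of build_optional_claims in the diff.
    module_params = {
        "access_token_claims": [
            {"name": "aud", "essential": False, "source": None, "additional_properties": []},
        ],
        "id_token_claims": [
            {"name": "acct", "essential": False, "source": None, "additional_properties": []},
        ],
        "saml2_token_claims": None,  # omitted token types stay None
    }

    def build_optional_claims(optional_claims):
        def build_claims(claims_list):
            if claims_list is None:
                return None
            # the real module builds OptionalClaim(...) objects here
            return [dict(claim) for claim in claims_list]
        return {
            "access_token": build_claims(optional_claims.get("access_token_claims")),
            "id_token": build_claims(optional_claims.get("id_token_claims")),
            "saml2_token": build_claims(optional_claims.get("saml2_token_claims")),
        }

    print(build_optional_claims(module_params))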
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication_info.py
index 167b82552..e3eb53aac 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adapplication_info.py
@@ -129,6 +129,30 @@ applications:
returned: always
type: list
sample: []
+ optional_claims:
+ description:
+ - Declare the optional claims for the application.
+ type: complex
+ returned: always
+ contains:
+ access_token_claims:
+ description:
+ - The optional claims returned in the JWT access token
+ type: list
+ returned: always
+ sample: [{'name': 'aud', 'source': null, 'essential': false, 'additional_properties': []}]
+ id_token_claims:
+ description:
+ - The optional claims returned in the JWT ID token
+ type: list
+ returned: always
+ sample: [{'name': 'acct', 'source': null, 'essential': false, 'additional_properties': []}]
+ saml2_token_claims:
+ description:
+ - The optional claims returned in the SAML token
+ type: list
+ returned: always
+ sample: [{'name': 'acct', 'source': null, 'essential': false, 'additional_properties': []}]
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBase
@@ -191,8 +215,17 @@ class AzureRMADApplicationInfo(AzureRMModuleBase):
return self.results
+ def serialize_claims(self, claims):
+ if claims is None:
+ return None
+ return [{
+ "additional_properties": claim.additional_properties,
+ "essential": claim.essential,
+ "name": claim.name,
+ "source": claim.source} for claim in claims]
+
def to_dict(self, object):
- return dict(
+ response = dict(
app_id=object.app_id,
object_id=object.id,
app_display_name=object.display_name,
@@ -201,9 +234,16 @@ class AzureRMADApplicationInfo(AzureRMModuleBase):
sign_in_audience=object.sign_in_audience,
web_reply_urls=object.web.redirect_uris,
spa_reply_urls=object.spa.redirect_uris,
- public_client_reply_urls=object.public_client.redirect_uris
+ public_client_reply_urls=object.public_client.redirect_uris,
+ optional_claims=dict(access_token=[], id_token=[], saml2_token=[])
)
+ if object.optional_claims is not None:
+ response['optional_claims']['id_token'] = self.serialize_claims(object.optional_claims.id_token)
+ response['optional_claims']['saml2_token'] = self.serialize_claims(object.optional_claims.saml2_token)
+ response['optional_claims']['access_token'] = self.serialize_claims(object.optional_claims.access_token)
+ return response
+
async def get_application(self, obj_id):
return await self._client.applications.by_application_id(obj_id).get()
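Editorial note: the info-module change above makes optional_claims always present in the return value, defaulting to empty lists and only populated when the application actually defines claims. A short sketch of that contract, using plain dicts in place of the Graph SDK objects (not part of the diff):

    def serialize_claims(claims):
        if claims is None:
            return None
        return [{"name": c.get("name"), "source": c.get("source"),
                 "essential": c.get("essential", False),
                 "additional_properties": c.get("additional_properties", [])} for c in claims]

    app_optional_claims = None  # what Graph returns for an app with no optional claims
    response = {"optional_claims": {"access_token": [], "id_token": [], "saml2_token": []}}
    if app_optional_claims is not None:
        response["optional_claims"]["id_token"] = serialize_claims(app_optional_claims.get("id_token"))
    print(response)  # empty lists are returned rather than omitting the key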
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup.py
index 1693794a7..4f7f3ed5a 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup.py
@@ -63,6 +63,12 @@ options:
- The azure ad objects asserted to not be owners of the group.
type: list
elements: str
+ raw_membership:
+ description:
+ - By default the group_members return value is flattened and partially filtered to remove non-User objects before being returned. \
+ Setting this option to true disables those transformations.
+ default: false
+ type: bool
description:
description:
- An optional description for the group.
@@ -109,6 +115,15 @@ EXAMPLES = '''
- "{{ ad_object_1_object_id }}"
- "{{ ad_object_2_object_id }}"
+- name: Ensure Users are Members of a Group using object_id and return unfiltered group membership
+ azure_rm_adgroup:
+ object_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ state: 'present'
+ present_members:
+ - "{{ ad_object_1_object_id }}"
+ - "{{ ad_object_2_object_id }}"
+ raw_membership: true
+
- name: Ensure Users are not Members of a Group using display_name and mail_nickname
azure_rm_adgroup:
display_name: "Group-Name"
@@ -117,7 +132,7 @@ EXAMPLES = '''
absent_members:
- "{{ ad_object_1_object_id }}"
-- name: Ensure Users are Members of a Group using object_id
+- name: Ensure Users are not Members of a Group using object_id
azure_rm_adgroup:
object_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
state: 'present'
@@ -150,7 +165,7 @@ EXAMPLES = '''
- "{{ ad_object_1_object_id }}"
- "{{ ad_object_2_object_id }}"
-- name: Ensure Users are Owners of a Group using object_id
+- name: Ensure Users are not Owners of a Group using object_id
azure_rm_adgroup:
object_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
state: 'present'
@@ -203,7 +218,7 @@ group_owners:
type: list
group_members:
description:
- - The members of the group.
+ - The members of the group. If raw_membership is false, this contains the transitive members property. Otherwise, it contains the members property.
returned: always
type: list
description:
@@ -222,6 +237,7 @@ try:
from msgraph.generated.models.group import Group
from msgraph.generated.groups.item.transitive_members.transitive_members_request_builder import \
TransitiveMembersRequestBuilder
+ from msgraph.generated.groups.item.group_item_request_builder import GroupItemRequestBuilder
from msgraph.generated.models.reference_create import ReferenceCreate
except ImportError:
# This is handled in azure_rm_common
@@ -239,6 +255,7 @@ class AzureRMADGroup(AzureRMModuleBase):
present_owners=dict(type='list', elements='str'),
absent_members=dict(type='list', elements='str'),
absent_owners=dict(type='list', elements='str'),
+ raw_membership=dict(type='bool', default=False),
description=dict(type='str'),
state=dict(
type='str',
@@ -257,6 +274,7 @@ class AzureRMADGroup(AzureRMModuleBase):
self.state = None
self.results = dict(changed=False)
self._client = None
+ self.raw_membership = False
super(AzureRMADGroup, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=False,
@@ -267,9 +285,6 @@ class AzureRMADGroup(AzureRMModuleBase):
for key in list(self.module_arg_spec.keys()):
setattr(self, key, kwargs[key])
- # TODO remove ad_groups return. Returns as one object always
- ad_groups = []
-
try:
self._client = self.get_msgraph_client()
ad_groups = []
@@ -280,12 +295,38 @@ class AzureRMADGroup(AzureRMModuleBase):
if ad_groups:
self.object_id = ad_groups[0].id
- elif self.object_id:
+ if self.object_id:
ad_groups = [asyncio.get_event_loop().run_until_complete(self.get_group(self.object_id))]
if ad_groups:
if self.state == "present":
self.results["changed"] = False
+
+ if self.description is not None and self.description != ad_groups[0].description:
+ self.results["changed"] = True
+ else:
+ self.description = ad_groups[0].description
+ if self.display_name is not None and self.display_name != ad_groups[0].display_name:
+ self.results["changed"] = True
+ else:
+ self.display_name = ad_groups[0].display_name
+ if self.mail_nickname is not None and self.mail_nickname != ad_groups[0].mail_nickname:
+ self.results["changed"] = True
+ else:
+ self.mail_nickname = ad_groups[0].mail_nickname
+ if self.results["changed"]:
+ group = Group(
+ mail_enabled=False,
+ security_enabled=True,
+ group_types=[],
+ display_name=self.display_name,
+ mail_nickname=self.mail_nickname,
+ description=self.description
+ )
+
+ asyncio.get_event_loop().run_until_complete(self.update_group(ad_groups[0].id, group))
+ ad_groups = [asyncio.get_event_loop().run_until_complete(self.get_group(self.object_id))]
+
elif self.state == "absent":
asyncio.get_event_loop().run_until_complete(self.delete_group(self.object_id))
ad_groups = []
@@ -325,7 +366,7 @@ class AzureRMADGroup(AzureRMModuleBase):
if self.present_members or self.absent_members:
ret = asyncio.get_event_loop().run_until_complete(self.get_group_members(group_id))
- current_members = [object.id for object in ret.value]
+ current_members = [object.id for object in ret]
if self.present_members:
present_members_by_object_id = self.dictionary_from_object_urls(self.present_members)
@@ -361,7 +402,7 @@ class AzureRMADGroup(AzureRMModuleBase):
if owners_to_add:
for owner_object_id in owners_to_add:
asyncio.get_event_loop().run_until_complete(
- self.add_gropup_owner(group_id, present_owners_by_object_id[owner_object_id]))
+ self.add_group_owner(group_id, present_owners_by_object_id[owner_object_id]))
self.results["changed"] = True
if self.absent_owners:
@@ -369,7 +410,7 @@ class AzureRMADGroup(AzureRMModuleBase):
if owners_to_remove:
for owner in owners_to_remove:
- asyncio.get_event_loop().run_until_complete(self.remove_gropup_owner(group_id, owner))
+ asyncio.get_event_loop().run_until_complete(self.remove_group_owner(group_id, owner))
self.results["changed"] = True
def dictionary_from_object_urls(self, object_urls):
@@ -439,10 +480,13 @@ class AzureRMADGroup(AzureRMModuleBase):
if results["object_id"] and (self.present_members or self.absent_members):
ret = asyncio.get_event_loop().run_until_complete(self.get_group_members(results["object_id"]))
- results["group_members"] = [self.result_to_dict(object) for object in ret.value]
+ results["group_members"] = [self.result_to_dict(object) for object in ret]
return results
+ async def update_group(self, group_id, group):
+ return await self._client.groups.by_group_id(group_id).patch(body=group)
+
async def create_group(self, create_group):
return await self._client.groups.post(body=create_group)
@@ -469,6 +513,12 @@ class AzureRMADGroup(AzureRMModuleBase):
return []
async def get_group_members(self, group_id, filters=None):
+ if self.raw_membership:
+ return await self.get_raw_group_members(group_id, filters)
+ else:
+ return await self.get_transitive_group_members(group_id, filters)
+
+ async def get_transitive_group_members(self, group_id, filters=None):
request_configuration = TransitiveMembersRequestBuilder.TransitiveMembersRequestBuilderGetRequestConfiguration(
query_parameters=TransitiveMembersRequestBuilder.TransitiveMembersRequestBuilderGetQueryParameters(
count=True,
@@ -476,8 +526,22 @@ class AzureRMADGroup(AzureRMModuleBase):
)
if filters:
request_configuration.query_parameters.filter = filters
- return await self._client.groups.by_group_id(group_id).transitive_members.get(
+ response = await self._client.groups.by_group_id(group_id).transitive_members.get(
request_configuration=request_configuration)
+ return response.value
+
+ async def get_raw_group_members(self, group_id, filters=None):
+ request_configuration = GroupItemRequestBuilder.GroupItemRequestBuilderGetRequestConfiguration(
+ query_parameters=GroupItemRequestBuilder.GroupItemRequestBuilderGetQueryParameters(
+ # this ensures service principals are returned
+ # see https://learn.microsoft.com/en-us/graph/api/group-list-members?view=graph-rest-1.0&tabs=http
+ expand=["members"]
+ ),
+ )
+ if filters:
+ request_configuration.query_parameters.filter = filters
+ group = await self._client.groups.by_group_id(group_id).get(request_configuration=request_configuration)
+ return group.members
async def add_group_member(self, group_id, obj_id):
request_body = ReferenceCreate(
@@ -496,13 +560,13 @@ class AzureRMADGroup(AzureRMModuleBase):
)
return await self._client.groups.by_group_id(group_id).owners.get(request_configuration=request_configuration)
- async def add_gropup_owner(self, group_id, obj_id):
+ async def add_group_owner(self, group_id, obj_id):
request_body = ReferenceCreate(
odata_id="https://graph.microsoft.com/v1.0/users/{0}".format(obj_id),
)
await self._client.groups.by_group_id(group_id).owners.ref.post(body=request_body)
- async def remove_gropup_owner(self, group_id, obj_id):
+ async def remove_group_owner(self, group_id, obj_id):
await self._client.groups.by_group_id(group_id).owners.by_directory_object_id(obj_id).ref.delete()
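Editorial note: the azure_rm_adgroup changes above add an in-place update path: each supplied property is compared with the existing group, omitted parameters fall back to the group's current values, and a PATCH is only issued when something differs. A simplified, runnable sketch of that decision (names and data are illustrative, not taken from the collection):

    def plan_group_update(params, existing):
        desired = dict(existing)
        changed = False
        for key in ("description", "display_name", "mail_nickname"):
            # None means the parameter was not supplied, so keep the current value
            if params.get(key) is not None and params[key] != existing[key]:
                desired[key] = params[key]
                changed = True
        return changed, desired

    changed, desired = plan_group_update(
        {"description": "Updated", "display_name": None, "mail_nickname": None},
        {"description": "Old", "display_name": "Group-Name", "mail_nickname": "group"},
    )
    print(changed, desired)  # True, with only the description replaced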
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup_info.py
index 3525bdf1b..04393c02e 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_adgroup_info.py
@@ -55,6 +55,12 @@ options:
- Indicate whether the groups in which a groups is a member should be returned with the returned groups.
default: False
type: bool
+ raw_membership:
+ description:
+ - By default the group_members return value is flattened and partially filtered to remove non-User objects before being returned.\
+ Setting this option to true disables those transformations.
+ default: false
+ type: bool
all:
description:
- If True, will return all groups in tenant.
@@ -84,6 +90,12 @@ EXAMPLES = '''
object_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
return_owners: true
return_group_members: true
+- name: Return a specific group using object_id and return the owners and members of the group. Return service principals and nested groups.
+ azure_rm_adgroup_info:
+ object_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ return_owners: true
+ return_group_members: true
+ raw_membership: true
- name: Return a specific group using object_id and return the groups the group is a member of
azure_rm_adgroup_info:
@@ -153,7 +165,7 @@ group_owners:
type: list
group_members:
description:
- - The members of the group.
+ - The members of the group. If raw_membership is set, this field may contain non-user objects (groups, service principals, and so on).
returned: always
type: list
description:
@@ -173,6 +185,7 @@ try:
TransitiveMembersRequestBuilder
from msgraph.generated.groups.item.get_member_groups.get_member_groups_post_request_body import \
GetMemberGroupsPostRequestBody
+ from msgraph.generated.groups.item.group_item_request_builder import GroupItemRequestBuilder
except ImportError:
# This is handled in azure_rm_common
pass
@@ -190,6 +203,7 @@ class AzureRMADGroupInfo(AzureRMModuleBase):
return_owners=dict(type='bool', default=False),
return_group_members=dict(type='bool', default=False),
return_member_groups=dict(type='bool', default=False),
+ raw_membership=dict(type='bool', default=False),
all=dict(type='bool', default=False),
)
@@ -201,6 +215,7 @@ class AzureRMADGroupInfo(AzureRMModuleBase):
self.return_owners = False
self.return_group_members = False
self.return_member_groups = False
+ self.raw_membership = False
self.all = False
self.results = dict(changed=False)
@@ -301,7 +316,7 @@ class AzureRMADGroupInfo(AzureRMModuleBase):
if results["object_id"] and self.return_group_members:
ret = asyncio.get_event_loop().run_until_complete(self.get_group_members(results["object_id"]))
- results["group_members"] = [self.result_to_dict(object) for object in ret.value]
+ results["group_members"] = [self.result_to_dict(object) for object in ret]
if results["object_id"] and self.return_member_groups:
ret = asyncio.get_event_loop().run_until_complete(self.get_member_groups(results["object_id"]))
@@ -310,7 +325,7 @@ class AzureRMADGroupInfo(AzureRMModuleBase):
if results["object_id"] and self.check_membership:
filter = "id eq '{0}' ".format(self.check_membership)
ret = asyncio.get_event_loop().run_until_complete(self.get_group_members(results["object_id"], filter))
- results["is_member_of"] = True if ret.value and len(ret.value) != 0 else False
+ results["is_member_of"] = True if ret and len(ret) != 0 else False
return results
@@ -352,17 +367,34 @@ class AzureRMADGroupInfo(AzureRMModuleBase):
return await self._client.groups.by_group_id(group_id).owners.get(request_configuration=request_configuration)
async def get_group_members(self, group_id, filters=None):
+ if self.raw_membership:
+ return await self.get_raw_group_members(group_id, filters)
+ else:
+ return await self.get_transitive_group_members(group_id, filters)
+
+ async def get_transitive_group_members(self, group_id, filters=None):
request_configuration = TransitiveMembersRequestBuilder.TransitiveMembersRequestBuilderGetRequestConfiguration(
query_parameters=TransitiveMembersRequestBuilder.TransitiveMembersRequestBuilderGetQueryParameters(
count=True,
- select=['id', 'displayName', 'userPrincipalName', 'mailNickname', 'mail', 'accountEnabled', 'userType',
- 'appId', 'appRoleAssignmentRequired']
-
),
)
if filters:
request_configuration.query_parameters.filter = filters
- return await self._client.groups.by_group_id(group_id).transitive_members.get(
+ response = await self._client.groups.by_group_id(group_id).transitive_members.get(
+ request_configuration=request_configuration)
+ return response.value
+
+ async def get_raw_group_members(self, group_id, filters=None):
+ request_configuration = GroupItemRequestBuilder.GroupItemRequestBuilderGetRequestConfiguration(
+ query_parameters=GroupItemRequestBuilder.GroupItemRequestBuilderGetQueryParameters(
+ # this ensures service principals are returned
+ # see https://learn.microsoft.com/en-us/graph/api/group-list-members?view=graph-rest-1.0&tabs=http
+ expand=["members"]
+ ),
+ )
+ if filters:
+ request_configuration.query_parameters.filter = filters
+ return await self._client.groups.by_group_id(group_id).members.get(
request_configuration=request_configuration)
async def get_member_groups(self, obj_id):
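Editorial note: in both group modules, raw_membership switches member retrieval from the flattened transitiveMembers relationship to expanding the direct members relationship, which preserves nested groups and service principals. A toy sketch of the difference, with hard-coded stand-ins for the Graph responses (none of this data comes from the diff):

    transitive_members = [
        {"id": "u1", "odata_type": "#microsoft.graph.user"},
        {"id": "u2", "odata_type": "#microsoft.graph.user"},  # user pulled in via a nested group
    ]
    direct_members = [
        {"id": "u1", "odata_type": "#microsoft.graph.user"},
        {"id": "g2", "odata_type": "#microsoft.graph.group"},
        {"id": "sp1", "odata_type": "#microsoft.graph.servicePrincipal"},
    ]

    def get_group_members(raw_membership):
        # the real modules call transitive_members.get() or expand=["members"] on the group
        return direct_members if raw_membership else transitive_members

    print([m["id"] for m in get_group_members(raw_membership=False)])  # flattened, users only
    print([m["id"] for m in get_group_members(raw_membership=True)])   # raw: groups and SPs kept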
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser.py
index 1e0a238c0..e1c792649 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser.py
@@ -119,6 +119,18 @@ options:
- The maximum length is 64 characters. Returned only on $select.
- Supports $filter (eq, ne, not, ge, le, in, startsWith, and eq on null values).
type: str
+ on_premises_extension_attributes:
+ description:
+ - Contains extensionAttributes1-15 for the user.
+ - These extension attributes are also known as Exchange custom attributes 1-15.
+ - For an onPremisesSyncEnabled user, the source of authority for this set of properties is on-premises Active Directory, and the properties are read-only.
+ - For a cloud-only user (where onPremisesSyncEnabled is false), these properties can be set during the creation or update of a user object.
+ - For a cloud-only user previously synced from on-premises Active Directory, these properties are read-only in Microsoft Graph\
+ but can be fully managed through the Exchange Admin Center or the Exchange Online V2 module in PowerShell.
+ type: dict
+ aliases:
+ - extension_attributes
+
extends_documentation_fragment:
- azure.azcollection.azure
@@ -143,6 +155,10 @@ EXAMPLES = '''
usage_location: "US"
mail: "{{ user_principal_name }}@contoso.com"
company_name: 'Test Company'
+ on_premises_extension_attributes:
+ extension_attribute1: "test_extension_attribute1"
+ extension_attribute2: "test_extension_attribute2"
+ extension_attribute11: "test_extension_attribute11"
- name: Update user with new value for account_enabled
azure_rm_aduser:
@@ -205,6 +221,17 @@ company_name:
type: str
returned: always
sample: 'Test Company'
+on_premises_extension_attributes:
+ description:
+ - Contains extensionAttributes1-15 for the user.
+ - These extension attributes are also known as Exchange custom attributes 1-15.
+ - For an onPremisesSyncEnabled user, the source of authority for this set of properties is on-premises Active Directory, and the properties are read-only.
+ - For a cloud-only user (where onPremisesSyncEnabled is false), these properties can be set during the creation or update of a user object.
+ - For a cloud-only user previously synced from on-premises Active Directory, these properties are read-only in Microsoft Graph\
+ but can be fully managed through the Exchange Admin Center or the Exchange Online V2 module in PowerShell.
+ type: dict
+ returned: always
+ sample: {}
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBase
@@ -212,6 +239,7 @@ from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common
try:
import asyncio
from msgraph.generated.models.password_profile import PasswordProfile
+ from msgraph.generated.models.on_premises_extension_attributes import OnPremisesExtensionAttributes
from msgraph.generated.models.user import User
from msgraph.generated.users.users_request_builder import UsersRequestBuilder
except ImportError:
@@ -239,7 +267,8 @@ class AzureRMADUser(AzureRMModuleBase):
surname=dict(type='str'),
user_type=dict(type='str'),
mail=dict(type='str'),
- company_name=dict(type='str')
+ company_name=dict(type='str'),
+ on_premises_extension_attributes=dict(type='dict', aliases=['extension_attributes'])
)
self.user_principal_name = None
@@ -259,6 +288,7 @@ class AzureRMADUser(AzureRMModuleBase):
self.user_type = None
self.mail = None
self.company_name = None
+ self.on_premises_extension_attributes = None
self.log_path = None
self.log_mode = None
@@ -288,6 +318,13 @@ class AzureRMADUser(AzureRMModuleBase):
if self.state == 'present':
+ extension_attributes = None
+
+ if self.on_premises_extension_attributes:
+ extension_attributes = OnPremisesExtensionAttributes(
+ **self.on_premises_extension_attributes
+ )
+
if ad_user: # Update, changed
password = None
@@ -298,7 +335,6 @@ class AzureRMADUser(AzureRMModuleBase):
)
should_update = False
-
if self.on_premises_immutable_id and ad_user.on_premises_immutable_id != self.on_premises_immutable_id:
should_update = True
if should_update or self.usage_location and ad_user.usage_location != self.usage_location:
@@ -321,9 +357,12 @@ class AzureRMADUser(AzureRMModuleBase):
should_update = True
if should_update or self.company_name and ad_user.company_name != self.company_name:
should_update = True
-
+ if should_update or (
+ self.on_premises_extension_attributes and
+ self.on_premises_extension_attributes_to_dict(ad_user.on_premises_extension_attributes) != self.on_premises_extension_attributes):
+ should_update = True
if should_update:
- asyncio.get_event_loop().run_until_complete(self.update_user(ad_user, password))
+ asyncio.get_event_loop().run_until_complete(self.update_user(ad_user, password, extension_attributes))
self.results['changed'] = True
@@ -335,7 +374,7 @@ class AzureRMADUser(AzureRMModuleBase):
self.results['changed'] = False
else: # Create, changed
- asyncio.get_event_loop().run_until_complete(self.create_user())
+ asyncio.get_event_loop().run_until_complete(self.create_user(extension_attributes))
self.results['changed'] = True
ad_user = self.get_exisiting_user()
@@ -391,6 +430,16 @@ class AzureRMADUser(AzureRMModuleBase):
raise
return ad_user
+ def on_premises_extension_attributes_to_dict(self, on_premises_extension_attributes):
+ extension_attributes = {}
+ for index in range(1, 15 + 1):  # extension_attribute1 .. extension_attribute15
+ attribute_name = f'extension_attribute{index}'
+ if hasattr(on_premises_extension_attributes, attribute_name):
+ attr_value = getattr(on_premises_extension_attributes, attribute_name)
+ if attr_value is not None:
+ extension_attributes[attribute_name] = attr_value
+ return extension_attributes
+
def to_dict(self, object):
return dict(
object_id=object.id,
@@ -400,10 +449,11 @@ class AzureRMADUser(AzureRMModuleBase):
mail=object.mail,
account_enabled=object.account_enabled,
user_type=object.user_type,
- company_name=object.company_name
+ company_name=object.company_name,
+ on_premises_extension_attributes=self.on_premises_extension_attributes_to_dict(object.on_premises_extension_attributes)
)
- async def update_user(self, ad_user, password):
+ async def update_user(self, ad_user, password, extension_attributes):
request_body = User(
on_premises_immutable_id=self.on_premises_immutable_id,
usage_location=self.usage_location,
@@ -415,11 +465,12 @@ class AzureRMADUser(AzureRMModuleBase):
password_profile=password,
user_principal_name=self.user_principal_name,
mail_nickname=self.mail_nickname,
- company_name=self.company_name
+ company_name=self.company_name,
+ on_premises_extension_attributes=extension_attributes
)
return await self._client.users.by_user_id(ad_user.id).patch(body=request_body)
- async def create_user(self):
+ async def create_user(self, extension_attributes):
password = PasswordProfile(
password=self.password_profile
)
@@ -435,7 +486,8 @@ class AzureRMADUser(AzureRMModuleBase):
surname=self.surname,
user_type=self.user_type,
mail=self.mail,
- company_name=self.company_name
+ company_name=self.company_name,
+ on_premises_extension_attributes=extension_attributes
)
return await self._client.users.post(body=request_body)
@@ -446,7 +498,8 @@ class AzureRMADUser(AzureRMModuleBase):
request_configuration = UsersRequestBuilder.UsersRequestBuilderGetRequestConfiguration(
query_parameters=UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName", "userType",
- "onPremisesImmutableId", "usageLocation", "givenName", "surname", "companyName"]
+ "onPremisesImmutableId", "usageLocation", "givenName", "surname", "companyName",
+ "OnPremisesExtensionAttributes"]
),
)
return await self._client.users.by_user_id(object).get(request_configuration=request_configuration)
@@ -457,7 +510,8 @@ class AzureRMADUser(AzureRMModuleBase):
query_parameters=UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
filter=filter,
select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName",
- "userType", "onPremisesImmutableId", "usageLocation", "givenName", "surname", "companyName"],
+ "userType", "onPremisesImmutableId", "usageLocation", "givenName", "surname", "companyName",
+ "OnPremisesExtensionAttributes"],
count=True
),
headers={'ConsistencyLevel': "eventual", }
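Editorial note: the aduser change reduces the Graph user's on-premises extension attributes to a dict of the non-empty extension_attribute1..15 values, which is then compared with the module parameter to decide whether an update is needed. A self-contained sketch of that comparison; FakeOnPremExt is a stand-in for the msgraph OnPremisesExtensionAttributes model and the values are illustrative:

    class FakeOnPremExt:
        extension_attribute1 = "test_extension_attribute1"
        extension_attribute2 = None
        extension_attribute11 = "test_extension_attribute11"

    def to_dict(obj):
        result = {}
        for index in range(1, 15 + 1):
            name = f"extension_attribute{index}"
            value = getattr(obj, name, None)
            if value is not None:  # only non-empty attributes are returned
                result[name] = value
        return result

    desired = {"extension_attribute1": "test_extension_attribute1",
               "extension_attribute11": "test_extension_attribute11"}
    print(to_dict(FakeOnPremExt()) != desired)  # False -> no update needed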
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser_info.py
index 98c30be57..e71066a89 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aduser_info.py
@@ -143,6 +143,17 @@ company_name:
type: str
returned: always
sample: "Test Company"
+on_premises_extension_attributes:
+ description:
+ - Contains extensionAttributes1-15 for the user.
+ - These extension attributes are also known as Exchange custom attributes 1-15.
+ - For an onPremisesSyncEnabled user, the source of authority for this set of properties is on-premises Active Directory, and the properties are read-only.
+ - For a cloud-only user (where onPremisesSyncEnabled is false), these properties can be set during the creation or update of a user object.
+ - For a cloud-only user previously synced from on-premises Active Directory, these properties are read-only in Microsoft Graph\
+ but can be fully managed through the Exchange Admin Center or the Exchange Online V2 module in PowerShell.
+ type: dict
+ returned: always
+ sample: {}
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBase
@@ -224,7 +235,6 @@ class AzureRMADUserInfo(AzureRMModuleBase):
elif self.all:
# this returns as a list, since we parse multiple pages
ad_users = asyncio.get_event_loop().run_until_complete(self.get_users())
-
self.results['ad_users'] = [self.to_dict(user) for user in ad_users]
except Exception as e:
@@ -232,6 +242,16 @@ class AzureRMADUserInfo(AzureRMModuleBase):
return self.results
+ def on_premises_extension_attributes_to_dict(self, on_premises_extension_attributes):
+ extension_attributes = {}
+ for index in range(1, 15 + 1):  # extension_attribute1 .. extension_attribute15
+ attribute_name = f'extension_attribute{index}'
+ if hasattr(on_premises_extension_attributes, attribute_name):
+ attr_value = getattr(on_premises_extension_attributes, attribute_name)
+ if attr_value is not None:
+ extension_attributes[attribute_name] = attr_value
+ return extension_attributes
+
def to_dict(self, object):
return dict(
object_id=object.id,
@@ -241,13 +261,15 @@ class AzureRMADUserInfo(AzureRMModuleBase):
mail=object.mail,
account_enabled=object.account_enabled,
user_type=object.user_type,
- company_name=object.company_name
+ company_name=object.company_name,
+ on_premises_extension_attributes=self.on_premises_extension_attributes_to_dict(object.on_premises_extension_attributes)
)
async def get_user(self, object):
request_configuration = UsersRequestBuilder.UsersRequestBuilderGetRequestConfiguration(
query_parameters=UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
- select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName", "userType", "companyName"]
+ select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName",
+ "userType", "companyName", "onPremisesExtensionAttributes"]
),
)
return await self._client.users.by_user_id(object).get(request_configuration=request_configuration)
@@ -255,7 +277,8 @@ class AzureRMADUserInfo(AzureRMModuleBase):
async def get_users(self):
request_configuration = UsersRequestBuilder.UsersRequestBuilderGetRequestConfiguration(
query_parameters=UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
- select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName", "userType", "companyName"]
+ select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName",
+ "userType", "companyName", "onPremisesExtensionAttributes"]
),
)
users = []
@@ -276,7 +299,7 @@ class AzureRMADUserInfo(AzureRMModuleBase):
query_parameters=UsersRequestBuilder.UsersRequestBuilderGetQueryParameters(
filter=filter,
select=["accountEnabled", "displayName", "mail", "mailNickname", "id", "userPrincipalName",
- "userType", "companyName"],
+ "userType", "companyName", "onPremisesExtensionAttributes"],
count=True
),
))
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aks.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aks.py
index 0fb5095fe..0e1565a2c 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aks.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aks.py
@@ -15,6 +15,8 @@ version_added: "0.1.2"
short_description: Manage a managed Azure Container Service (AKS) instance
description:
- Create, update and delete a managed Azure Container Service (AKS) instance.
+ - You can only specify C(identity) or C(service_principal), not both. If neither is specified, the cluster
+ defaults to a system-assigned managed identity (I(identity.type=SystemAssigned)).
options:
resource_group:
@@ -170,7 +172,7 @@ options:
type: str
service_principal:
description:
- - The service principal suboptions. If not provided - use system-assigned managed identity.
+ - The service principal suboptions.
type: dict
suboptions:
client_id:
@@ -182,6 +184,25 @@ options:
description:
- The secret password associated with the service principal.
type: str
+ identity:
+ description:
+ - The managed identity for the AKS cluster.
+ type: dict
+ version_added: '2.4.0'
+ suboptions:
+ type:
+ description:
+ - The type of managed identity.
+ required: false
+ choices:
+ - UserAssigned
+ - SystemAssigned
+ default: SystemAssigned
+ type: str
+ user_assigned_identities:
+ description:
+ - The user-assigned managed identity, referenced by its resource ID.
+ type: str
enable_rbac:
description:
- Enable RBAC.
@@ -247,6 +268,8 @@ options:
choices:
- loadBalancer
- userDefinedRouting
+ - managedNATGateway
+ - userAssignedNATGateway
api_server_access_profile:
description:
- Profile of API Access configuration.
@@ -590,6 +613,9 @@ state:
provisioning_state: Succeeded
service_principal_profile:
client_id: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+ identity:
+ "type": "UserAssigned"
+ "user_assigned_identities": {}
pod_identity_profile: {
"allow_network_plugin_kubenet": false,
"user_assigned_identities": [
@@ -633,6 +659,7 @@ def create_aks_dict(aks):
kubernetes_version=aks.kubernetes_version,
tags=aks.tags,
linux_profile=create_linux_profile_dict(aks.linux_profile),
+ identity=aks.identity.as_dict() if aks.identity else None,
service_principal_profile=create_service_principal_profile_dict(
aks.service_principal_profile),
provisioning_state=aks.provisioning_state,
@@ -810,7 +837,7 @@ network_profile_spec = dict(
dns_service_ip=dict(type='str'),
docker_bridge_cidr=dict(type='str'),
load_balancer_sku=dict(type='str', choices=['standard', 'basic']),
- outbound_type=dict(type='str', default='loadBalancer', choices=['userDefinedRouting', 'loadBalancer'])
+ outbound_type=dict(type='str', default='loadBalancer', choices=['userDefinedRouting', 'loadBalancer', 'userAssignedNATGateway', 'managedNATGateway'])
)
@@ -830,6 +857,19 @@ api_server_access_profile_spec = dict(
)
+managed_identity_spec = dict(
+ type=dict(type='str', choices=['SystemAssigned', 'UserAssigned'], default='SystemAssigned'),
+ user_assigned_identities=dict(type='str'),
+)
+
+
+class dotdict(dict):
+ """dot.notation access to dictionary attributes"""
+ __getattr__ = dict.get
+ __setattr__ = dict.__setitem__
+ __delattr__ = dict.__delitem__
+
+
class AzureRMManagedCluster(AzureRMModuleBaseExt):
"""Configuration class for an Azure RM container service (AKS) resource"""
@@ -870,6 +910,14 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
type='dict',
options=service_principal_spec
),
+ identity=dict(
+ type='dict',
+ options=managed_identity_spec,
+ required_if=[
+ ('type', 'UserAssigned', [
+ 'user_assigned_identities']),
+ ]
+ ),
enable_rbac=dict(
type='bool',
default=False
@@ -930,6 +978,7 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
self.linux_profile = None
self.agent_pool_profiles = None
self.service_principal = None
+ self.identity = None
self.enable_rbac = False
self.network_profile = None
self.aad_profile = None
@@ -938,6 +987,8 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
self.node_resource_group = None
self.pod_identity_profile = None
+ mutually_exclusive = [('identity', 'service_principal')]
+
required_if = [
('state', 'present', [
'dns_prefix', 'agent_pool_profiles'])
@@ -948,7 +999,8 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
super(AzureRMManagedCluster, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True,
- required_if=required_if)
+ required_if=required_if,
+ mutually_exclusive=mutually_exclusive)
def exec_module(self, **kwargs):
"""Main module execution method"""
@@ -972,6 +1024,11 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
available_versions = self.get_all_versions()
if not response:
to_be_updated = True
+ # Default to SystemAssigned if service_principal is not specified
+ if not self.service_principal and not self.identity:
+ self.identity = dotdict({'type': 'SystemAssigned'})
+ if self.identity:
+ changed, self.identity = self.update_identity(self.identity, {})
if self.kubernetes_version not in available_versions.keys():
self.fail("Unsupported kubernetes version. Expected one of {0} but got {1}".format(available_versions.keys(), self.kubernetes_version))
else:
@@ -1118,6 +1175,14 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
else:
self.pod_identity_profile = response['pod_identity_profile']
+ # Default to SystemAssigned if service_principal is not specified
+ if not self.service_principal and not self.identity:
+ self.identity = dotdict({'type': 'SystemAssigned'})
+ if self.identity:
+ changed, self.identity = self.update_identity(self.identity, response['identity'])
+ if changed:
+ to_be_updated = True
+
if update_agentpool:
self.log("Need to update agentpool")
if not self.check_mode:
@@ -1177,12 +1242,12 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
if self.agent_pool_profiles:
agentpools = [self.create_agent_pool_profile_instance(profile) for profile in self.agent_pool_profiles]
+ # Only service_principal or identity can be specified, but default to SystemAssigned if none specified.
if self.service_principal:
service_principal_profile = self.create_service_principal_profile_instance(self.service_principal)
identity = None
else:
service_principal_profile = None
- identity = self.managedcluster_models.ManagedClusterIdentity(type='SystemAssigned')
if self.linux_profile:
linux_profile = self.create_linux_profile_instance(self.linux_profile)
@@ -1206,7 +1271,7 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
service_principal_profile=service_principal_profile,
agent_pool_profiles=agentpools,
linux_profile=linux_profile,
- identity=identity,
+ identity=self.identity,
enable_rbac=self.enable_rbac,
network_profile=self.create_network_profile_instance(self.network_profile),
aad_profile=self.create_aad_profile_instance(self.aad_profile),
@@ -1386,6 +1451,34 @@ class AzureRMManagedCluster(AzureRMModuleBaseExt):
result[name] = self.managedcluster_models.ManagedClusterAddonProfile(config=config, enabled=config['enabled'])
return result
+ # AKS only supports a single UserAssigned Identity
+ def update_identity(self, param_identity, curr_identity):
+ user_identity = None
+ changed = False
+ current_managed_type = curr_identity.get('type', 'SystemAssigned')
+ current_managed_identity = curr_identity.get('user_assigned_identities', {})
+ param_managed_identity = param_identity.get('user_assigned_identities')
+
+ # If type is set to SystemAssigned and the resource already uses SystemAssigned, nothing to do
+ if 'SystemAssigned' in param_identity.get('type') and current_managed_type == 'SystemAssigned':
+ pass
+ # If type is set to SystemAssigned but the resource currently has a different identity, remove the UserAssigned identity
+ elif param_identity.get('type') == 'SystemAssigned':
+ changed = True
+ # If type in module args contains 'UserAssigned'
+ elif 'UserAssigned' in param_identity.get('type'):
+ if param_managed_identity not in current_managed_identity.keys():
+ user_identity = {param_managed_identity: {}}
+ changed = True
+
+ new_identity = self.managedcluster_models.ManagedClusterIdentity(
+ type=param_identity.get('type'),
+ )
+ if user_identity:
+ new_identity.user_assigned_identities = user_identity
+
+ return changed, new_identity
+
def main():
"""Main execution"""
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool.py
index f84362e95..aaf4f9876 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool.py
@@ -102,6 +102,337 @@ options:
description:
- Maximum number of pods that can run on a node.
type: int
+ kubelet_disk_type:
+ description:
+ - Determines the placement of emptyDir volumes, container runtime data root, and Kubelet ephemeral storage.
+ type: str
+ choices:
+ - OS
+ - Temporary
+ workload_runtime:
+ description:
+ - Determines the type of workload a node can run.
+ type: str
+ choices:
+ - OCIContainer
+ - WasmWasi
+ os_sku:
+ description:
+ - Specifies an OS SKU.
+ - This value must not be specified if OSType is Windows.
+ type: str
+ choices:
+ - Ubuntu
+ - CBLMariner
+ scale_down_mode:
+ description:
+ - This also affects the cluster autoscaler behavior.
+ - If not specified, it defaults to C(Delete).
+ type: str
+ default: Delete
+ choices:
+ - Delete
+ - Deallocate
+ upgrade_settings:
+ description:
+ - Settings for upgrading the agentpool.
+ type: dict
+ suboptions:
+ max_surge:
+ description:
+ - This can either be set to an integer, such as C(5), or a percentage, such as C(50%).
+ - If a percentage is specified, it is the percentage of the total agent pool size at the time of the upgrade.
+ - For percentages, fractional nodes are rounded up.
+ - If not specified, the default is C(1).
+ type: str
+ power_state:
+ description:
+ - When an Agent Pool is first created it is initially C(Running).
+ - The Agent Pool can be stopped by setting this field to C(Stopped).
+ - A stopped Agent Pool stops all of its VMs and does not accrue billing charges.
+ - An Agent Pool can only be stopped if it is Running and provisioning state is Succeeded.
+ type: dict
+ suboptions:
+ code:
+ description:
+ - Tells whether the cluster is C(Running) or C(Stopped).
+ type: str
+ choices:
+ - Running
+ - Stopped
+ enable_node_public_ip:
+ description:
+ - Some scenarios may require nodes in a node pool to receive their own dedicated public IP addresses.
+ - A common scenario is for gaming workloads, where a console needs to make a direct connection to a cloud virtual machine to minimize hops.
+ type: bool
+ scale_set_priority:
+ description:
+ - The Virtual Machine Scale Set priority.
+ - If not specified, the default is C(Regular).
+ type: str
+ choices:
+ - Spot
+ - Regular
+ node_public_ip_prefix_id:
+ description:
+ - The Azure Public IP prefix's ID.
+ type: str
+ scale_set_eviction_policy:
+ description:
+ - This cannot be specified unless the I(scale_set_priority=Spot).
+ - If not specified, the default is C(Delete).
+ type: str
+ choices:
+ - Delete
+ - Deallocate
+ spot_max_price:
+ description:
+ - Possible values are any decimal value greater than zero or -1.
+ - Indicates the willingness to pay any on-demand price.
+ type: float
+ proximity_placement_group_id:
+ description:
+ - The ID for Proximity Placement Group.
+ type: str
+ kubelet_config:
+ description:
+ - The Kubelet configuration on the agent pool nodes.
+ type: dict
+ suboptions:
+ cpu_manager_policy:
+ description:
+ - Kubernetes CPU management policies.
+ - The default is C(none).
+ type: str
+ default: none
+ choices:
+ - none
+ - static
+ cpu_cfs_quota:
+ description:
+ - The default is C(true).
+ type: bool
+ default: true
+ cpu_cfs_quota_period:
+ description:
+ - The default is C(100ms).
+ - Valid values are a sequence of decimal numbers with an optional fraction and a unit suffix.
+ type: str
+ default: 100ms
+ image_gc_high_threshold:
+ description:
+ - To disable image garbage collection, set to C(100).
+ - The default is C(85)
+ type: int
+ default: 85
+ image_gc_low_threshold:
+ description:
+ - This cannot be set higher than imageGcHighThreshold.
+ - The default is C(80).
+ type: int
+ default: 80
+ topology_manager_policy:
+ description:
+ - Kubernetes Topology Manager policies.
+ - The default is C(none).
+ type: str
+ default: none
+ choices:
+ - none
+ - best-effort
+ - restricted
+ - single-numa-node
+ allowed_unsafe_sysctls:
+ description:
+ - Allowed list of unsafe sysctls or unsafe sysctl patterns.
+ type: list
+ elements: str
+ fail_swap_on:
+ description:
+ - If set to true it will make the Kubelet fail to start if swap is enabled on the node.
+ type: bool
+ container_log_max_size_mb:
+ description:
+ - The maximum size of container log file before it is rotated.
+ type: int
+ container_log_max_files:
+ description:
+ - The maximum number of container log files that can be present for a container. The number must be ≥ 2.
+ type: int
+ pod_max_pids:
+ description:
+ - The maximum number of processes per pod.
+ type: int
+ linux_os_config:
+ description:
+ - The OS configuration of Linux agent nodes.
+ type: dict
+ suboptions:
+ sysctls:
+ description:
+ - Sysctl settings for Linux agent nodes.
+ type: dict
+ suboptions:
+ net_core_somaxconn:
+ description:
+ - Sysctl setting net.core.somaxconn.
+ type: int
+ net_core_netdev_max_backlog:
+ description:
+ - Sysctl setting net.core.netdev_max_backlog.
+ type: int
+ net_core_rmem_default:
+ description:
+ - Sysctl setting net.core.rmem_default.
+ type: int
+ net_core_rmem_max:
+ description:
+ - Sysctl setting net.core.rmem_max.
+ type: int
+ net_core_wmem_default:
+ description:
+ - Sysctl setting net.core.wmem_default.
+ type: int
+ net_core_wmem_max:
+ description:
+ - Sysctl setting net.core.wmem_max.
+ type: int
+ net_core_optmem_max:
+ description:
+ - Sysctl setting net.core.optmem_max.
+ type: int
+ net_ipv4_tcp_max_syn_backlog:
+ description:
+ - Sysctl setting net.ipv4.tcp_max_syn_backlog.
+ type: int
+ net_ipv4_tcp_max_tw_buckets:
+ description:
+ - Sysctl setting net.ipv4.tcp_max_tw_buckets.
+ type: int
+ net_ipv4_tcp_fin_timeout:
+ description:
+ - Sysctl setting net.ipv4.tcp_fin_timeout.
+ type: int
+ net_ipv4_tcp_keepalive_time:
+ description:
+ - Sysctl setting net.ipv4.tcp_keepalive_time.
+ type: int
+ net_ipv4_tcp_keepalive_probes:
+ description:
+ - Sysctl setting net.ipv4.tcp_keepalive_probes.
+ type: int
+ net_ipv4_tcpkeepalive_intvl:
+ description:
+ - Sysctl setting net.ipv4.tcp_keepalive_intvl.
+ type: int
+ net_ipv4_tcp_tw_reuse:
+ description:
+ - Sysctl setting net.ipv4.tcp_tw_reuse.
+ type: bool
+ net_ipv4_ip_local_port_range:
+ description:
+ - Sysctl setting net.ipv4.ip_local_port_range.
+ type: str
+ net_ipv4_neigh_default_gc_thresh1:
+ description:
+ - Sysctl setting net.ipv4.neigh.default.gc_thresh1.
+ type: int
+ net_ipv4_neigh_default_gc_thresh2:
+ description:
+ - Sysctl setting net.ipv4.neigh.default.gc_thresh2.
+ type: int
+ net_ipv4_neigh_default_gc_thresh3:
+ description:
+ - Sysctl setting net.ipv4.neigh.default.gc_thresh3.
+ type: int
+ fs_inotify_max_user_watches:
+ description:
+ - Sysctl setting fs.inotify.max_user_watches.
+ type: int
+ fs_file_max:
+ description:
+ - Sysctl setting fs.file-max.
+ type: int
+ fs_aio_max_nr:
+ description:
+ - Sysctl setting fs.aio-max-nr.
+ type: int
+ fs_nr_open:
+ description:
+ - Sysctl setting fs.nr_open.
+ type: int
+ kernel_threads_max:
+ description:
+ - Sysctl setting kernel.threads-max.
+ type: int
+ vm_max_map_count:
+ description:
+ - Sysctl setting vm.max_map_count.
+ type: int
+ vm_swappiness:
+ description:
+ - Sysctl setting vm.swappiness.
+ type: int
+ vm_vfs_cache_pressure:
+ description:
+ - Sysctl setting vm.vfs_cache_pressure.
+ type: int
+ net_netfilter_nf_conntrack_max:
+ description:
+ - sysctl setting net.netfilter.nf_conntrack_max.
+ type: int
+ net_netfilter_nf_conntrack_buckets:
+ description:
+ - Sysctl setting net.netfilter.nf_conntrack_buckets.
+ type: int
+ transparent_huge_page_enabled:
+ description:
+ - The node agent pool transparent hugepage.
+ - The default is C(always).
+ type: str
+ default: always
+ choices:
+ - always
+ - madvise
+ - never
+ transparent_huge_page_defrag:
+ description:
+ - The node agent pool transparent huge page defrag.
+ - The default is C(madvise).
+ type: str
+ default: madvise
+ choices:
+ - always
+ - defer
+ - defer+madvise
+ - madvise
+ - never
+ swap_file_size_mb:
+ description:
+ - The size in MB of a swap file that will be created on each node.
+ type: int
+ enable_encryption_at_host:
+ description:
+ - This is only supported on certain VM sizes and in certain Azure regions.
+ type: bool
+ enable_ultra_ssd:
+ description:
+ - Whether to enable UltraSSD.
+ type: bool
+ enable_fips:
+ description:
+ - Whether to enable a FIPS-enabled node pool.
+ type: bool
+ gpu_instance_profile:
+ description:
+ - GPUInstanceProfile to be used to specify GPU MIG instance profile for supported GPU VM SKU.
+ type: str
+ choices:
+ - MIG1g
+ - MIG2g
+ - MIG3g
+ - MIG4g
+ - MIG7g
state:
description:
- State of the automation runbook. Use C(present) to create or update a automation runbook and use C(absent) to delete.
@@ -137,6 +468,31 @@ EXAMPLES = '''
availability_zones:
- 1
- 2
+- name: Create a node agent pool with custom config
+ azure_rm_aksagentpool:
+ resource_group: "{{ resource_group }}"
+ cluster_name: "min{{ rpfx }}"
+ name: default-new2
+ count: 1
+ vm_size: Standard_B2s
+ type_properties_type: VirtualMachineScaleSets
+ mode: System
+ node_labels: {"release":"stable"}
+ max_pods: 42
+ enable_auto_scaling: true
+ min_count: 1
+ max_count: 10
+ orchestrator_version: 1.23.5
+ availability_zones:
+ - 1
+ kubelet_config:
+ cpu_manager_policy: static
+ cpu_cfs_quota: true
+ fail_swap_on: false
+ linux_os_config:
+ transparent_huge_page_enabled: madvise
+ swap_file_size_mb: 1500
+ transparent_huge_page_defrag: defer+madvise
- name: Delete node agent pool
azure_rm_aksagentpool:
resource_group: "{{ resource_group }}"
@@ -313,6 +669,97 @@ aks_agent_pools:
type: str
returned: always
sample: null
+ kubelet_config:
+ description:
+ - The Kubelet configuration on the agent pool nodes.
+ type: dict
+ returned: always
+ sample: {
+ cpu_cfs_quota: true,
+ cpu_cfs_quota_period: 100ms,
+ cpu_manager_policy: static,
+ fail_swap_on: false,
+ image_gc_high_threshold: 85,
+ image_gc_low_threshold: 80,
+ topology_manager_policy: none
+ }
+ linux_os_config:
+ description:
+ - The OS configuration of Linux agent nodes.
+ type: dict
+ returned: always
+ sample: {
+ swap_file_size_mb: 1500,
+ sysctls: {},
+ transparent_huge_page_defrag: defer+madvise,
+ transparent_huge_page_enabled: madvise
+ }
+ power_state:
+ description:
+ - The agent pool's power state.
+ type: dict
+ returned: always
+ sample: {code: Running}
+ os_sku:
+ description:
+ - The node agent pool's SKU.
+ type: str
+ returned: always
+ sample: Ubuntu
+ tags:
+ description:
+ - The tags of the node agent pool.
+ type: dict
+ returned: always
+ sample: {key1: value1, key2: value2}
+ kubelet_disk_type:
+ description:
+ - Determines the placement of emptyDir volumes, container runtime data root, and Kubelet ephemeral storage.
+ type: str
+ returned: always
+ sample: OS
+ workload_runtime:
+ description:
+ - Determines the type of workload a node can run.
+ type: str
+ returned: always
+ sample: OCIContainer
+ scale_down_mode:
+ description:
+ - This also affects the cluster autoscaler behavior.
+ type: str
+ returned: always
+ sample: Delete
+ node_public_ip_prefix_id:
+ description:
+ - The Azure Public IP prefix's ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/myRG/providers/Microsoft.Network/publicIPPrefixes/pip01"
+ proximity_placement_group_id:
+ description:
+ - The ID for Proximity Placement Group.
+ type: str
+ returned: always
+ sample: /subscriptions/xxx-xxx/resourceGroups/myRG/providers/Microsoft.Compute/proximityPlacementGroups/proxi01
+ enable_encryption_at_host:
+ description:
+ - This is only supported on certain VM sizes and in certain Azure regions.
+ type: bool
+ returned: always
+ sample: false
+ enable_ultra_ssd:
+ description:
+ - Whether UltraSSD is enabled on the node pool.
+ type: bool
+ returned: always
+ sample: false
+ gpu_instance_profile:
+ description:
+ - GPUInstanceProfile to be used to specify GPU MIG instance profile for supported GPU VM SKU.
+ type: str
+ returned: always
+ sample: MIG1g
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
@@ -387,6 +834,144 @@ class AzureRMAksAgentPool(AzureRMModuleBase):
max_pods=dict(
type='int'
),
+ kubelet_disk_type=dict(
+ type='str', choices=['OS', 'Temporary']
+ ),
+ workload_runtime=dict(
+ type='str', choices=['OCIContainer', 'WasmWasi']
+ ),
+ os_sku=dict(
+ type='str', choices=["Ubuntu", "CBLMariner"]
+ ),
+ scale_down_mode=dict(
+ type='str',
+ choices=['Delete', 'Deallocate'],
+ default='Delete'
+ ),
+ upgrade_settings=dict(
+ type='dict',
+ options=dict(
+ max_surge=dict(
+ type='str'
+ )
+ )
+ ),
+ power_state=dict(
+ type='dict',
+ options=dict(
+ code=dict(
+ type='str',
+ choices=['Running', 'Stopped']
+ )
+ )
+ ),
+ enable_node_public_ip=dict(
+ type='bool'
+ ),
+ scale_set_priority=dict(
+ type='str',
+ choices=["Spot", "Regular"],
+ ),
+ node_public_ip_prefix_id=dict(
+ type='str'
+ ),
+ scale_set_eviction_policy=dict(
+ type='str',
+ choices=['Delete', 'Deallocate'],
+ ),
+ spot_max_price=dict(
+ type='float'
+ ),
+ proximity_placement_group_id=dict(
+ type='str'
+ ),
+ kubelet_config=dict(
+ type='dict',
+ options=dict(
+ cpu_manager_policy=dict(type='str', choices=['none', 'static'], default='none'),
+ cpu_cfs_quota=dict(type='bool', default=True),
+ cpu_cfs_quota_period=dict(type='str', default='100ms'),
+ image_gc_high_threshold=dict(type='int', default=85),
+ image_gc_low_threshold=dict(type='int', default=80),
+ topology_manager_policy=dict(
+ type='str',
+ default='none',
+ choices=['none', 'best-effort', 'restricted', 'single-numa-node']
+ ),
+ allowed_unsafe_sysctls=dict(
+ type='list',
+ elements='str'
+ ),
+ fail_swap_on=dict(type='bool'),
+ container_log_max_size_mb=dict(type='int'),
+ container_log_max_files=dict(type='int'),
+ pod_max_pids=dict(type='int')
+ )
+ ),
+ linux_os_config=dict(
+ type='dict',
+ options=dict(
+ sysctls=dict(
+ type='dict',
+ options=dict(
+ net_core_somaxconn=dict(type='int'),
+ net_core_netdev_max_backlog=dict(type='int'),
+ net_core_rmem_default=dict(type='int'),
+ net_core_rmem_max=dict(type='int'),
+ net_core_wmem_default=dict(type='int'),
+ net_core_wmem_max=dict(type='int'),
+ net_core_optmem_max=dict(type='int'),
+ net_ipv4_tcp_max_syn_backlog=dict(type='int'),
+ net_ipv4_tcp_max_tw_buckets=dict(type='int'),
+ net_ipv4_tcp_fin_timeout=dict(type='int'),
+ net_ipv4_tcp_keepalive_time=dict(type='int'),
+ net_ipv4_tcp_keepalive_probes=dict(type='int'),
+ net_ipv4_tcpkeepalive_intvl=dict(type='int'),
+ net_ipv4_tcp_tw_reuse=dict(type='bool'),
+ net_ipv4_ip_local_port_range=dict(type='str'),
+ net_ipv4_neigh_default_gc_thresh1=dict(type='int'),
+ net_ipv4_neigh_default_gc_thresh2=dict(type='int'),
+ net_ipv4_neigh_default_gc_thresh3=dict(type='int'),
+ net_netfilter_nf_conntrack_max=dict(type='int'),
+ net_netfilter_nf_conntrack_buckets=dict(type='int'),
+ fs_inotify_max_user_watches=dict(type='int'),
+ fs_file_max=dict(type='int'),
+ fs_aio_max_nr=dict(type='int'),
+ fs_nr_open=dict(type='int'),
+ kernel_threads_max=dict(type='int'),
+ vm_max_map_count=dict(type='int'),
+ vm_swappiness=dict(type='int'),
+ vm_vfs_cache_pressure=dict(type='int')
+ )
+ ),
+ transparent_huge_page_enabled=dict(
+ type='str',
+ choices=['always', 'madvise', 'never'],
+ default='always'
+ ),
+ swap_file_size_mb=dict(
+ type='int'
+ ),
+ transparent_huge_page_defrag=dict(
+ type='str',
+ default='madvise',
+ choices=['always', 'defer', 'defer+madvise', 'madvise', 'never']
+ )
+ )
+ ),
+ enable_encryption_at_host=dict(
+ type='bool'
+ ),
+ enable_ultra_ssd=dict(
+ type='bool'
+ ),
+ enable_fips=dict(
+ type='bool'
+ ),
+ gpu_instance_profile=dict(
+ type='str',
+ choices=["MIG1g", "MIG2g", "MIG3g", "MIG4g", "MIG7g"]
+ ),
state=dict(
type='str',
choices=['present', 'absent'],
@@ -413,13 +998,32 @@ class AzureRMAksAgentPool(AzureRMModuleBase):
self.node_labels = None
self.min_count = None
self.max_pods = None
+ self.tags = None
+ self.kubelet_disk_type = None
+ self.workload_runtime = None
+ self.os_sku = None
+ self.scale_down_mode = None
+ self.upgrade_settings = None
+ self.power_state = None
+ self.enable_node_public_ip = None
+ self.scale_set_priority = None
+ self.node_public_ip_prefix_id = None
+ self.scale_set_eviction_policy = None
+ self.spot_max_price = None
+ self.proximity_placement_group_id = None
+ self.kubelet_config = None
+ self.linux_os_config = None
+ self.enable_encryption_at_host = None
+ self.enable_ultra_ssd = None
+ self.enable_fips = None
+ self.gpu_instance_profile = None
self.body = dict()
super(AzureRMAksAgentPool, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=True)
def exec_module(self, **kwargs):
- for key in list(self.module_arg_spec):
+ for key in list(self.module_arg_spec) + ['tags']:
setattr(self, key, kwargs[key])
if key not in ['resource_group', 'cluster_name', 'name', 'state']:
self.body[key] = kwargs[key]
@@ -430,8 +1034,16 @@ class AzureRMAksAgentPool(AzureRMModuleBase):
if self.state == 'present':
if agent_pool:
+ update_tags, self.body['tags'] = self.update_tags(agent_pool.get('tags'))
for key in self.body.keys():
- if self.body[key] is not None and self.body[key] != agent_pool[key]:
+ if key == 'tags':
+ if update_tags:
+ changed = True
+ elif self.body[key] is not None and isinstance(self.body[key], dict):
+ for item in self.body[key].keys():
+ if self.body[key][item] is not None and self.body[key][item] != agent_pool[key].get(item):
+ changed = True
+ elif self.body[key] is not None and self.body[key] != agent_pool[key] and key not in ['scale_set_priority', 'spot_max_price']:
changed = True
else:
self.body[key] = agent_pool[key]
@@ -509,14 +1121,54 @@ class AzureRMAksAgentPool(AzureRMModuleBase):
spot_max_price=agent_pool.spot_max_price,
node_labels=agent_pool.node_labels,
node_taints=agent_pool.node_taints,
+ tags=agent_pool.tags,
+ kubelet_disk_type=agent_pool.kubelet_disk_type,
+ workload_runtime=agent_pool.workload_runtime,
+ os_sku=agent_pool.os_sku,
+ scale_down_mode=agent_pool.scale_down_mode,
+ power_state=dict(),
+ node_public_ip_prefix_id=agent_pool.node_public_ip_prefix_id,
+ proximity_placement_group_id=agent_pool.proximity_placement_group_id,
+ kubelet_config=dict(),
+ linux_os_config=dict(),
+ enable_encryption_at_host=agent_pool.enable_encryption_at_host,
+ enable_ultra_ssd=agent_pool.enable_ultra_ssd,
+ enable_fips=agent_pool.enable_fips,
+ gpu_instance_profile=agent_pool.gpu_instance_profile
)
if agent_pool.upgrade_settings is not None:
agent_pool_dict['upgrade_settings']['max_surge'] = agent_pool.upgrade_settings.max_surge
+ else:
+ agent_pool_dict['upgrade_settings'] = None
if agent_pool.availability_zones is not None:
for key in agent_pool.availability_zones:
agent_pool_dict['availability_zones'].append(int(key))
+ else:
+ agent_pool_dict['availability_zones'] = None
+
+ if agent_pool.kubelet_config is not None:
+ agent_pool_dict['kubelet_config'] = agent_pool.kubelet_config.as_dict()
+ else:
+ agent_pool_dict['kubelet_config'] = None
+
+ if agent_pool.power_state is not None:
+ agent_pool_dict['power_state']['code'] = agent_pool.power_state.code
+ else:
+ agent_pool_dict['power_state'] = None
+
+ if agent_pool.linux_os_config is not None:
+ agent_pool_dict['linux_os_config']['transparent_huge_page_enabled'] = agent_pool.linux_os_config.transparent_huge_page_enabled
+ agent_pool_dict['linux_os_config']['transparent_huge_page_defrag'] = agent_pool.linux_os_config.transparent_huge_page_defrag
+ agent_pool_dict['linux_os_config']['swap_file_size_mb'] = agent_pool.linux_os_config.swap_file_size_mb
+ agent_pool_dict['linux_os_config']['sysctls'] = dict()
+ if agent_pool.linux_os_config.sysctls is not None:
+ agent_pool_dict['linux_os_config']['sysctls'] = agent_pool.linux_os_config.sysctls.as_dict()
+ else:
+ agent_pool_dict['linux_os_config']['sysctls'] = None
+ else:
+ agent_pool_dict['linux_os_config'] = None
return agent_pool_dict
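
The new pool tuning options above map one-to-one onto playbook keys. A minimal task sketch using only option names visible in this diff (resource names and values are placeholders; required pool options such as the VM size and node count are omitted for brevity):

- name: Update an AKS agent pool with kubelet and Linux OS settings
  azure_rm_aksagentpool:
    resource_group: myResourceGroup
    cluster_name: myCluster
    name: nodepool1
    kubelet_config:
      cpu_cfs_quota_period: 100ms
      image_gc_high_threshold: 85
      image_gc_low_threshold: 80
      topology_manager_policy: none
      fail_swap_on: false
    linux_os_config:
      transparent_huge_page_enabled: madvise
      transparent_huge_page_defrag: madvise
      swap_file_size_mb: 1500
      sysctls:
        vm_max_map_count: 262144
    enable_encryption_at_host: true
    tags:
      key1: value1
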
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool_info.py
index 59f3b696d..99ba08254 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_aksagentpool_info.py
@@ -168,6 +168,12 @@ aks_agent_pools:
type: str
returned: always
sample: Linux
+ os_sku:
+ description:
+ - The OS SKU used by the agent pool to specify the OS type.
+ type: str
+ returned: always
+ sample: Windows2022
provisioning_state:
description:
- The current deployment or provisioning state, which only appears in the response.
@@ -222,6 +228,91 @@ aks_agent_pools:
type: str
returned: always
sample: null
+ kubelet_config:
+ description:
+ - The Kubelet configuration on the agent pool nodes.
+ type: dict
+ returned: always
+ sample: {
+ cpu_cfs_quota: true,
+ cpu_cfs_quota_period: 100ms,
+ cpu_manager_policy: static,
+ fail_swap_on: false,
+ image_gc_high_threshold: 85,
+ image_gc_low_threshold: 80,
+ topology_manager_policy: none
+ }
+ linux_os_config:
+ description:
+ - The OS configuration of Linux agent nodes.
+ type: dict
+ returned: always
+ sample: {
+ swap_file_size_mb: 1500,
+ sysctls: {},
+ transparent_huge_page_defrag: defer+madvise,
+ transparent_huge_page_enabled: madvise
+ }
+ power_state:
+ description:
+ - The agent pool's power state.
+ type: dict
+ returned: always
+ sample: {code: Running}
+ tags:
+ description:
+ - The tags of the node agent pool.
+ type: dict
+ returned: always
+ sample: {key1: value1, key2: value2}
+ kubelet_disk_type:
+ description:
+ - Determines the placement of emptyDir volumes, container runtime data root, and Kubelet ephemeral storage.
+ type: str
+ returned: always
+ sample: OS
+ workload_runtime:
+ description:
+ - Determines the type of workload a node can run.
+ type: str
+ returned: always
+ sample: OCIContainer
+ scale_down_mode:
+ description:
+ - This also affects the cluster autoscaler behavior.
+ type: str
+ returned: always
+ sample: Delete
+ node_public_ip_prefix_id:
+ description:
+ - The Azure Public IP prefix's ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/myRG/providers/Microsoft.Network/publicIPPrefixes/pip01"
+ proximity_placement_group_id:
+ description:
+ - The ID for Proximity Placement Group.
+ type: str
+ returned: always
+ sample: /subscriptions/xxx-xxx/resourceGroups/myRG/providers/Microsoft.Compute/proximityPlacementGroups/proxi01
+ enable_encryption_at_host:
+ description:
+ - Whether to enable host-based OS and data drive encryption; this is only supported on certain VM sizes and in certain Azure regions.
+ type: bool
+ returned: always
+ sample: false
+ enable_ultra_ssd:
+ description:
+ - Whether to enable UltraSSD on the agent pool nodes.
+ type: bool
+ returned: always
+ sample: false
+ gpu_instance_profile:
+ description:
+ - The GPU MIG instance profile to use for supported GPU VM SKUs.
+ type: str
+ returned: always
+ sample: MIG1g
'''
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
@@ -301,6 +392,7 @@ class AzureRMAgentPoolInfo(AzureRMModuleBase):
vnet_subnet_id=agent_pool.vnet_subnet_id,
max_pods=agent_pool.max_pods,
os_type=agent_pool.os_type,
+ os_sku=agent_pool.os_sku,
max_count=agent_pool.max_count,
min_count=agent_pool.min_count,
enable_auto_scaling=agent_pool.enable_auto_scaling,
@@ -317,14 +409,53 @@ class AzureRMAgentPoolInfo(AzureRMModuleBase):
spot_max_price=agent_pool.spot_max_price,
node_labels=agent_pool.node_labels,
node_taints=agent_pool.node_taints,
+ tags=agent_pool.tags,
+ kubelet_disk_type=agent_pool.kubelet_disk_type,
+ workload_runtime=agent_pool.workload_runtime,
+ scale_down_mode=agent_pool.scale_down_mode,
+ power_state=dict(),
+ node_public_ip_prefix_id=agent_pool.node_public_ip_prefix_id,
+ proximity_placement_group_id=agent_pool.proximity_placement_group_id,
+ kubelet_config=dict(),
+ linux_os_config=dict(),
+ enable_encryption_at_host=agent_pool.enable_encryption_at_host,
+ enable_ultra_ssd=agent_pool.enable_ultra_ssd,
+ enable_fips=agent_pool.enable_fips,
+ gpu_instance_profile=agent_pool.gpu_instance_profile
)
if agent_pool.upgrade_settings is not None:
agent_pool_dict['upgrade_settings']['max_surge'] = agent_pool.upgrade_settings.max_surge
+ else:
+ agent_pool_dict['upgrade_settings'] = None
if agent_pool.availability_zones is not None:
for key in agent_pool.availability_zones:
agent_pool_dict['availability_zones'].append(int(key))
+ else:
+ agent_pool_dict['availability_zones'] = None
+
+ if agent_pool.kubelet_config is not None:
+ agent_pool_dict['kubelet_config'] = agent_pool.kubelet_config.as_dict()
+ else:
+ agent_pool_dict['kubelet_config'] = None
+
+ if agent_pool.linux_os_config is not None:
+ agent_pool_dict['linux_os_config']['transparent_huge_page_enabled'] = agent_pool.linux_os_config.transparent_huge_page_enabled
+ agent_pool_dict['linux_os_config']['transparent_huge_page_defrag'] = agent_pool.linux_os_config.transparent_huge_page_defrag
+ agent_pool_dict['linux_os_config']['swap_file_size_mb'] = agent_pool.linux_os_config.swap_file_size_mb
+ agent_pool_dict['linux_os_config']['sysctls'] = dict()
+ if agent_pool.linux_os_config.sysctls is not None:
+ agent_pool_dict['linux_os_config']['sysctls'] = agent_pool.linux_os_config.sysctls.as_dict()
+ else:
+ agent_pool_dict['linux_os_config']['sysctls'] = None
+ else:
+ agent_pool_dict['linux_os_config'] = None
+
+ if agent_pool.power_state is not None:
+ agent_pool_dict['power_state']['code'] = agent_pool.power_state.code
+ else:
+ agent_pool_dict['power_state'] = None
return agent_pool_dict
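
The info module now returns the same new properties. A short sketch of reading them back, assuming the module's existing resource_group, cluster_name and name parameters and the aks_agent_pools return key documented above (names are placeholders):

- name: Gather facts for a single agent pool
  azure_rm_aksagentpool_info:
    resource_group: myResourceGroup
    cluster_name: myCluster
    name: nodepool1
  register: pool_info

- name: Inspect the newly returned power state and kubelet configuration
  ansible.builtin.debug:
    msg: "{{ pool_info.aks_agent_pools[0].power_state }} - {{ pool_info.aks_agent_pools[0].kubelet_config }}"
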
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_backupazurevm.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_backupazurevm.py
index 863839329..eb6c297d4 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_backupazurevm.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_backupazurevm.py
@@ -379,14 +379,8 @@ class BackupAzureVM(AzureRMModuleBaseExt):
self.log('Error attempting to backup azure vm.')
self.fail(
'Error while taking on-demand backup: {0}'.format(str(e)))
-
- if hasattr(response, 'body'):
- response = json.loads(response.body())
- elif hasattr(response, 'context'):
- response = response.context['deserialized_data']
- else:
- self.fail("Create or Updating fail, no match message return, return info as {0}".format(response))
-
+ # The API call returns None and only triggers the backup; the backup itself takes some time to complete.
+ response = dict(msg='The backup has been triggered successfully; monitor the backup progress on the Backup Jobs page')
return response
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_cognitivesearch.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_cognitivesearch.py
index a8852c583..01dda868e 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_cognitivesearch.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_cognitivesearch.py
@@ -395,31 +395,45 @@ class AzureRMSearch(AzureRMModuleBase):
self.results['changed'] = True
search_update_model.identity = self.search_client.services.models.Identity(type=self.identity)
+ network_update = False
if self.network_rule_set:
for rule in self.network_rule_set:
if len(self.network_rule_set) != len(self.account_dict.get('network_rule_set')) or rule not in self.account_dict.get('network_rule_set'):
self.results['changed'] = True
+ network_update = True
self.firewall_list.append(self.search_client.services.models.IpRule(value=rule))
search_update_model.network_rule_set = dict(ip_rules=self.firewall_list)
+ elif not network_update:
+ firewall_list = []
+ for rule in self.account_dict.get('network_rule_set', []):
+ firewall_list.append(self.search_client.services.models.IpRule(value=rule))
+ search_update_model.network_rule_set = dict(ip_rules=firewall_list)
if self.partition_count and self.account_dict.get('partition_count') != self.partition_count:
self.results['changed'] = True
search_update_model.partition_count = self.partition_count
+ else:
+ search_update_model.partition_count = self.account_dict.get('partition_count')
if self.public_network_access and self.account_dict.get('public_network_access').lower() != self.public_network_access.lower():
self.results['changed'] = True
search_update_model.public_network_access = self.public_network_access
+ else:
+ search_update_model.public_network_access = self.account_dict.get('public_network_access')
if self.replica_count and self.account_dict.get('replica_count') != self.replica_count:
self.results['changed'] = True
search_update_model.replica_count = self.replica_count
+ else:
+ search_update_model.replica_count = self.account_dict.get('replica_count')
if self.sku and self.account_dict.get('sku') != self.sku:
self.fail("Updating sku of an existing search service is not allowed.")
- if self.tags and self.account_dict.get('tags') != self.tags:
+ update_tags, new_tags = self.update_tags(self.account_dict.get('tags'))
+ if update_tags:
self.results['changed'] = True
- search_update_model.tags = self.tags
+ search_update_model.tags = new_tags
self.log('Updating search {0}'.format(self.name))
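
With the switch to update_tags(), tag changes on an existing search service follow the collection's usual tag-merge semantics. A hedged sketch (service and group names are placeholders; append_tags is the common option from the azure_tags documentation fragment):

- name: Merge an extra tag into an existing search service without touching other settings
  azure_rm_cognitivesearch:
    resource_group: myResourceGroup
    name: mysearchservice
    append_tags: true
    tags:
      env: test
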
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore.py
deleted file mode 100644
index b46907339..000000000
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore.py
+++ /dev/null
@@ -1,807 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright (c) 2020 David Duque Hernández, (@next-davidduquehernandez)
-#
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-DOCUMENTATION = '''
-module: azure_rm_datalakestore
-version_added: "1.4.0"
-short_description: Manage Azure data lake store
-description:
- - Create, update or delete a data lake store.
-options:
- default_group:
- description:
- - The default owner group for all new folders and files created in the Data Lake Store account.
- type: str
- encryption_config:
- description:
- - The Key Vault encryption configuration.
- type: dict
- suboptions:
- type:
- description:
- - The type of encryption configuration being used.
- choices:
- - UserManaged
- - ServiceManaged
- required: true
- type: str
- key_vault_meta_info:
- description:
- - The Key Vault information for connecting to user managed encryption keys.
- type: dict
- suboptions:
- key_vault_resource_id:
- description:
- - The resource identifier for the user managed Key Vault being used to encrypt.
- type: str
- required: true
- encryption_key_name:
- description:
- - The name of the user managed encryption key.
- type: str
- required: true
- encryption_key_version:
- description:
- - The version of the user managed encryption key.
- type: str
- required: true
- encryption_state:
- description:
- - The current state of encryption for this Data Lake Store account.
- choices:
- - Enabled
- - Disabled
- type: str
- firewall_allow_azure_ips:
- description:
- - The current state of allowing or disallowing IPs originating within Azure through the firewall.
- - If the firewall is disabled, this is not enforced.
- choices:
- - Enabled
- - Disabled
- type: str
- firewall_rules:
- description:
- - The list of firewall rules associated with this Data Lake Store account.
- type: list
- elements: dict
- suboptions:
- name:
- description:
- - The unique name of the firewall rule to create.
- type: str
- required: true
- start_ip_address:
- description:
- - The start IP address for the firewall rule.
- - This can be either ipv4 or ipv6.
- - Start and End should be in the same protocol.
- type: str
- required: true
- end_ip_address:
- description:
- - The end IP address for the firewall rule.
- - This can be either ipv4 or ipv6.
- - Start and End should be in the same protocol.
- type: str
- required: true
- firewall_state:
- description:
- - The current state of the IP address firewall for this Data Lake Store account.
- choices:
- - Enabled
- - Disabled
- type: str
- identity:
- description:
- - The Key Vault encryption identity, if any.
- choices:
- - SystemAssigned
- type: str
- location:
- description:
- - The resource location.
- type: str
- name:
- description:
- - The name of the Data Lake Store account.
- type: str
- required: true
- new_tier:
- description:
- - The commitment tier to use for next month.
- choices:
- - Consumption
- - Commitment_1TB
- - Commitment_10TB
- - Commitment_100TB
- - Commitment_500TB
- - Commitment_1PB
- - Commitment_5PB
- type: str
- resource_group:
- description:
- - The name of the Azure resource group to use.
- required: true
- type: str
- aliases:
- - resource_group_name
- state:
- description:
- - State of the data lake store. Use C(present) to create or update a data lake store and use C(absent) to delete it.
- default: present
- choices:
- - absent
- - present
- type: str
- virtual_network_rules:
- description:
- - The list of virtual network rules associated with this Data Lake Store account.
- type: list
- elements: dict
- suboptions:
- name:
- description:
- - The unique name of the virtual network rule to create.
- type: str
- required: true
- subnet_id:
- description:
- - The resource identifier for the subnet.
- type: str
- required: true
-
-extends_documentation_fragment:
- - azure.azcollection.azure
- - azure.azcollection.azure_tags
-
-author:
- - David Duque Hernández (@next-davidduquehernandez)
-'''
-
-EXAMPLES = '''
-- name: Create Azure Data Lake Store
- azure_rm_datalakestore:
- resource_group: myResourceGroup
- name: myDataLakeStore
-'''
-
-RETURN = '''
-state:
- description:
- - Facts for Azure Data Lake Store created/updated.
- returned: always
- type: complex
- contains:
- account_id:
- description:
- - The unique identifier associated with this Data Lake Store account.
- returned: always
- type: str
- sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- creation_time:
- description:
- - The account creation time.
- returned: always
- type: str
- sample: '2020-01-01T00:00:00.000000+00:00'
- current_tier:
- description:
- - The commitment tier in use for the current month.
- type: str
- returned: always
- sample: Consumption
- default_group:
- description:
- - The default owner group for all new folders and files created in the Data Lake Store account.
- type: str
- sample: null
- encryption_config:
- description:
- - The Key Vault encryption configuration.
- type: complex
- contains:
- type:
- description:
- - The type of encryption configuration being used.
- type: str
- returned: always
- sample: ServiceManaged
- key_vault_meta_info:
- description:
- - The Key Vault information for connecting to user managed encryption keys.
- type: complex
- contains:
- key_vault_resource_id:
- description:
- - The resource identifier for the user managed Key Vault being used to encrypt.
- type: str
- returned: always
- sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/tstkv
- encryption_key_name:
- description:
- - The name of the user managed encryption key.
- type: str
- returned: always
- sample: KeyName
- encryption_key_version:
- description:
- - The version of the user managed encryption key.
- type: str
- returned: always
- sample: 86a1e3b7406f45afa0d54e21eff47e39
- encryption_provisioning_state:
- description:
- - The current state of encryption provisioning for this Data Lake Store account.
- type: str
- sample: Succeeded
- encryption_state:
- description:
- - The current state of encryption for this Data Lake Store account.
- type: str
- returned: always
- sample: Enabled
- endpoint:
- description:
- - The full CName endpoint for this account.
- returned: always
- type: str
- sample: testaccount.azuredatalakestore.net
- firewall_allow_azure_ips:
- description:
- - The current state of allowing or disallowing IPs originating within Azure through the firewall.
- - If the firewall is disabled, this is not enforced.
- type: str
- returned: always
- sample: Disabled
- firewall_rules:
- description:
- - The list of firewall rules associated with this Data Lake Store account.
- type: list
- returned: always
- contains:
- name:
- description:
- - The resource name.
- type: str
- returned: always
- sample: Example Name
- start_ip_address:
- description:
- - The start IP address for the firewall rule.
- - This can be either ipv4 or ipv6.
- - Start and End should be in the same protocol.
- type: str
- returned: always
- sample: 192.168.1.1
- end_ip_address:
- description:
- - The end IP address for the firewall rule.
- - This can be either ipv4 or ipv6.
- - Start and End should be in the same protocol.
- type: str
- returned: always
- sample: 192.168.1.254
- firewall_state:
- description:
- - The current state of the IP address firewall for this Data Lake Store account.
- type: str
- returned: always
- sample: Enabled
- id:
- description:
- - The resource identifier.
- returned: always
- type: str
- sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount
- identity:
- description:
- - The Key Vault encryption identity, if any.
- type: complex
- contains:
- type:
- description:
- - The type of encryption being used.
- type: str
- sample: SystemAssigned
- principal_id:
- description:
- - The principal identifier associated with the encryption.
- type: str
- sample: 00000000-0000-0000-0000-000000000000
- tenant_id:
- description:
- - The tenant identifier associated with the encryption.
- type: str
- sample: 00000000-0000-0000-0000-000000000000
- last_modified_time:
- description:
- - The account last modified time.
- returned: always
- type: str
- sample: '2020-01-01T00:00:00.000000+00:00'
- location:
- description:
- - The resource location.
- returned: always
- type: str
- sample: westeurope
- name:
- description:
- - The resource name.
- returned: always
- type: str
- sample: testaccount
- new_tier:
- description:
- - The commitment tier to use for next month.
- type: str
- returned: always
- sample: Consumption
- provisioning_state:
- description:
- - The provisioning status of the Data Lake Store account.
- returned: always
- type: str
- sample: Succeeded
- state:
- description:
- - The state of the Data Lake Store account.
- returned: always
- type: str
- sample: Active
- tags:
- description:
- - The resource tags.
- returned: always
- type: dict
- sample: { "tag1":"abc" }
- trusted_id_providers:
- description:
- - The current state of the trusted identity provider feature for this Data Lake Store account.
- type: list
- returned: always
- contains:
- id:
- description:
- - The resource identifier.
- type: str
- name:
- description:
- - The resource name.
- type: str
- type:
- description:
- - The resource type.
- type: str
- id_provider:
- description:
- - The URL of this trusted identity provider.
- type: str
- trusted_id_provider_state:
- description:
- - The list of trusted identity providers associated with this Data Lake Store account.
- type: str
- returned: always
- sample: Enabled
- type:
- description:
- - The resource type.
- returned: always
- type: str
- sample: Microsoft.DataLakeStore/accounts
- virtual_network_rules:
- description:
- - The list of virtual network rules associated with this Data Lake Store account.
- type: list
- returned: always
- contains:
- name:
- description:
- - The resource name.
- type: str
- sample: Rule Name
- subnet_id:
- description:
- - The resource identifier for the subnet.
- type: str
- sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default
-
-'''
-
-from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
-
-try:
- from azure.core.exceptions import ResourceNotFoundError
-except ImportError:
- # This is handled in azure_rm_common
- pass
-
-firewall_rules_item = dict(
- name=dict(type='str', required=True),
- start_ip_address=dict(type='str', required=True),
- end_ip_address=dict(type='str', required=True)
-)
-
-virtual_network_rules_item = dict(
- name=dict(type='str', required=True),
- subnet_id=dict(type='str', required=True)
-)
-
-
-class AzureRMDatalakeStore(AzureRMModuleBase):
- def __init__(self):
-
- self.module_arg_spec = dict(
- default_group=dict(type='str'),
- encryption_config=dict(
- type='dict',
- options=dict(
- type=dict(type='str', choices=['UserManaged', 'ServiceManaged'], required=True),
- key_vault_meta_info=dict(
- type='dict',
- no_log=True,
- options=dict(
- key_vault_resource_id=dict(type='str', required=True),
- encryption_key_name=dict(type='str', required=True),
- encryption_key_version=dict(type='str', no_log=True, required=True)
- )
- ),
- )
- ),
- encryption_state=dict(type='str', choices=['Enabled', 'Disabled']),
- firewall_allow_azure_ips=dict(type='str', choices=['Enabled', 'Disabled']),
- firewall_rules=dict(
- type='list',
- elements='dict',
- options=firewall_rules_item
- ),
- firewall_state=dict(type='str', choices=['Enabled', 'Disabled']),
- identity=dict(type='str', choices=['SystemAssigned']),
- location=dict(type='str'),
- name=dict(type='str', required=True),
- new_tier=dict(type='str', choices=['Consumption', 'Commitment_1TB', 'Commitment_10TB', 'Commitment_100TB',
- 'Commitment_500TB', 'Commitment_1PB', 'Commitment_5PB']),
- resource_group=dict(type='str', required=True, aliases=['resource_group_name']),
- state=dict(type='str', default='present', choices=['present', 'absent']),
- virtual_network_rules=dict(
- type='list',
- elements='dict',
- options=virtual_network_rules_item
- ),
- )
-
- self.state = None
- self.name = None
- self.resource_group = None
- self.location = None
- self.new_tier = None
- self.default_group = None
- self.encryption_config = dict()
- self.encryption_config_model = None
- self.encryption_state = None
- self.firewall_state = None
- self.firewall_allow_azure_ips = None
- self.firewall_rules = None
- self.firewall_rules_model = None
- self.virtual_network_rules = None
- self.virtual_network_rules_model = None
- self.identity = None
- self.identity_model = None
-
- self.results = dict(changed=False)
- self.account_dict = None
-
- super(AzureRMDatalakeStore, self).__init__(derived_arg_spec=self.module_arg_spec,
- supports_check_mode=False,
- supports_tags=True)
-
- def exec_module(self, **kwargs):
- self.module.deprecate("The azure_rm_datalakestore.py will deprecated. Azure Data Lake Storage Gen1 retired on February 29,2024", version=(2.3, ))
- for key in list(self.module_arg_spec.keys()) + ['tags']:
- setattr(self, key, kwargs[key])
-
- if self.encryption_config:
- key_vault_meta_info_model = None
- if self.encryption_config.get('key_vault_meta_info'):
- key_vault_meta_info_model = self.datalake_store_models.KeyVaultMetaInfo(
- key_vault_resource_id=self.encryption_config.get('key_vault_meta_info').get('key_vault_resource_id'),
- encryption_key_name=self.encryption_config.get('key_vault_meta_info').get('encryption_key_name'),
- encryption_key_version=self.encryption_config.get('key_vault_meta_info').get('encryption_key_version')
- )
- self.encryption_config_model = self.datalake_store_models.EncryptionConfig(type=self.encryption_config.get('type'),
- key_vault_meta_info=key_vault_meta_info_model)
-
- if self.identity is not None:
- self.identity_model = self.datalake_store_models.EncryptionIdentity(
- type=self.identity
- )
-
- resource_group = self.get_resource_group(self.resource_group)
- if not self.location:
- self.location = resource_group.location
-
- self.account_dict = self.get_datalake_store()
-
- if self.account_dict is not None:
- self.results['state'] = self.account_dict
- else:
- self.results['state'] = dict()
-
- if self.state == 'present':
- if not self.account_dict:
- self.results['state'] = self.create_datalake_store()
- else:
- self.results['state'] = self.update_datalake_store()
- else:
- self.delete_datalake_store()
- self.results['state'] = dict(state='Deleted')
-
- return self.results
-
- def check_name_availability(self):
- self.log('Checking name availability for {0}'.format(self.name))
- try:
- response = self.datalake_store_client.accounts.check_name_availability(self.location, parameters={'name': self.name})
- except Exception as e:
- self.log('Error attempting to validate name.')
- self.fail("Error checking name availability: {0}".format(str(e)))
- if not response.name_available:
- self.log('Error name not available.')
- self.fail("{0} - {1}".format(response.message, response.reason))
-
- def create_datalake_store(self):
- self.log("Creating datalake store {0}".format(self.name))
-
- if not self.location:
- self.fail('Parameter error: location required when creating a datalake store account.')
-
- self.check_name_availability()
- self.results['changed'] = True
-
- if self.check_mode:
- account_dict = dict(
- name=self.name,
- resource_group=self.resource_group,
- location=self.location
- )
- return account_dict
-
- if self.firewall_rules is not None:
- self.firewall_rules_model = list()
- for rule in self.firewall_rules:
- rule_model = self.datalake_store_models.CreateFirewallRuleWithAccountParameters(
- name=rule.get('name'),
- start_ip_address=rule.get('start_ip_address'),
- end_ip_address=rule.get('end_ip_address'))
- self.firewall_rules_model.append(rule_model)
-
- if self.virtual_network_rules is not None:
- self.virtual_network_rules_model = list()
- for vnet_rule in self.virtual_network_rules:
- vnet_rule_model = self.datalake_store_models.CreateVirtualNetworkRuleWithAccountParameters(
- name=vnet_rule.get('name'),
- subnet_id=vnet_rule.get('subnet_id'))
- self.virtual_network_rules_model.append(vnet_rule_model)
-
- parameters = self.datalake_store_models.CreateDataLakeStoreAccountParameters(
- default_group=self.default_group,
- encryption_config=self.encryption_config_model,
- encryption_state=self.encryption_state,
- firewall_allow_azure_ips=self.firewall_allow_azure_ips,
- firewall_rules=self.firewall_rules_model,
- firewall_state=self.firewall_state,
- identity=self.identity_model,
- location=self.location,
- new_tier=self.new_tier,
- tags=self.tags,
- virtual_network_rules=self.virtual_network_rules_model
- )
-
- self.log(str(parameters))
- try:
- poller = self.datalake_store_client.accounts.begin_create(self.resource_group, self.name, parameters)
- self.get_poller_result(poller)
- except Exception as e:
- self.log('Error creating datalake store.')
- self.fail("Failed to create datalake store: {0}".format(str(e)))
-
- return self.get_datalake_store()
-
- def update_datalake_store(self):
- self.log("Updating datalake store {0}".format(self.name))
-
- parameters = self.datalake_store_models.UpdateDataLakeStoreAccountParameters()
-
- if self.tags:
- update_tags, self.account_dict['tags'] = self.update_tags(self.account_dict['tags'])
- if update_tags:
- self.results['changed'] = True
- parameters.tags = self.account_dict['tags']
-
- if self.new_tier and self.account_dict.get('new_tier') != self.new_tier:
- self.results['changed'] = True
- parameters.new_tier = self.new_tier
-
- if self.default_group and self.account_dict.get('default_group') != self.default_group:
- self.results['changed'] = True
- parameters.default_group = self.default_group
-
- if self.encryption_state and self.account_dict.get('encryption_state') != self.encryption_state:
- self.fail("Encryption type cannot be updated.")
-
- if self.encryption_config:
- if (
- self.encryption_config.get('type') == 'UserManaged'
- and self.encryption_config.get('key_vault_meta_info') != self.account_dict.get('encryption_config').get('key_vault_meta_info')
- ):
- self.results['changed'] = True
- key_vault_meta_info_model = self.datalake_store_models.UpdateKeyVaultMetaInfo(
- encryption_key_version=self.encryption_config.get('key_vault_meta_info').get('encryption_key_version')
- )
- encryption_config_model = self.datalake_store_models.UpdateEncryptionConfig = key_vault_meta_info_model
- parameters.encryption_config = encryption_config_model
-
- if self.firewall_state and self.account_dict.get('firewall_state') != self.firewall_state:
- self.results['changed'] = True
- parameters.firewall_state = self.firewall_state
-
- if self.firewall_allow_azure_ips and self.account_dict.get('firewall_allow_azure_ips') != self.firewall_allow_azure_ips:
- self.results['changed'] = True
- parameters.firewall_allow_azure_ips = self.firewall_allow_azure_ips
-
- if self.firewall_rules is not None:
- if not self.compare_lists(self.firewall_rules, self.account_dict.get('firewall_rules')):
- self.firewall_rules_model = list()
- for rule in self.firewall_rules:
- rule_model = self.datalake_store_models.UpdateFirewallRuleWithAccountParameters(
- name=rule.get('name'),
- start_ip_address=rule.get('start_ip_address'),
- end_ip_address=rule.get('end_ip_address'))
- self.firewall_rules_model.append(rule_model)
- self.results['changed'] = True
- parameters.firewall_rules = self.firewall_rules_model
-
- if self.virtual_network_rules is not None:
- if not self.compare_lists(self.virtual_network_rules, self.account_dict.get('virtual_network_rules')):
- self.virtual_network_rules_model = list()
- for vnet_rule in self.virtual_network_rules:
- vnet_rule_model = self.datalake_store_models.UpdateVirtualNetworkRuleWithAccountParameters(
- name=vnet_rule.get('name'),
- subnet_id=vnet_rule.get('subnet_id'))
- self.virtual_network_rules_model.append(vnet_rule_model)
- self.results['changed'] = True
- parameters.virtual_network_rules = self.virtual_network_rules_model
-
- if self.identity_model is not None:
- self.results['changed'] = True
- parameters.identity = self.identity_model
-
- self.log(str(parameters))
- if self.results['changed']:
- try:
- poller = self.datalake_store_client.accounts.begin_update(self.resource_group, self.name, parameters)
- self.get_poller_result(poller)
- except Exception as e:
- self.log('Error creating datalake store.')
- self.fail("Failed to create datalake store: {0}".format(str(e)))
-
- return self.get_datalake_store()
-
- def delete_datalake_store(self):
- self.log('Delete datalake store {0}'.format(self.name))
-
- self.results['changed'] = True if self.account_dict is not None else False
- if not self.check_mode and self.account_dict is not None:
- try:
- status = self.datalake_store_client.accounts.begin_delete(self.resource_group, self.name)
- self.log("delete status: ")
- self.log(str(status))
- except Exception as e:
- self.fail("Failed to delete datalake store: {0}".format(str(e)))
-
- return True
-
- def get_datalake_store(self):
- self.log('Get properties for datalake store {0}'.format(self.name))
- datalake_store_obj = None
- account_dict = None
-
- try:
- datalake_store_obj = self.datalake_store_client.accounts.get(self.resource_group, self.name)
- except ResourceNotFoundError:
- pass
-
- if datalake_store_obj:
- account_dict = self.account_obj_to_dict(datalake_store_obj)
-
- return account_dict
-
- def account_obj_to_dict(self, datalake_store_obj):
- account_dict = dict(
- account_id=datalake_store_obj.account_id,
- creation_time=datalake_store_obj.creation_time,
- current_tier=datalake_store_obj.current_tier,
- default_group=datalake_store_obj.default_group,
- encryption_config=None,
- encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state,
- encryption_state=datalake_store_obj.encryption_state,
- endpoint=datalake_store_obj.endpoint,
- firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips,
- firewall_rules=None,
- firewall_state=datalake_store_obj.firewall_state,
- id=datalake_store_obj.id,
- identity=None,
- last_modified_time=datalake_store_obj.last_modified_time,
- location=datalake_store_obj.location,
- name=datalake_store_obj.name,
- new_tier=datalake_store_obj.new_tier,
- provisioning_state=datalake_store_obj.provisioning_state,
- state=datalake_store_obj.state,
- tags=datalake_store_obj.tags,
- trusted_id_providers=datalake_store_obj.trusted_id_providers,
- trusted_id_provider_state=datalake_store_obj.trusted_id_provider_state,
- type=datalake_store_obj.type,
- virtual_network_rules=None
- )
-
- account_dict['firewall_rules'] = list()
- if datalake_store_obj.firewall_rules:
- for rule in datalake_store_obj.firewall_rules:
- rule_item = dict(
- name=rule.name,
- start_ip_address=rule.start_ip_address,
- end_ip_address=rule.end_ip_address
- )
- account_dict['firewall_rules'].append(rule_item)
-
- account_dict['virtual_network_rules'] = list()
- if datalake_store_obj.virtual_network_rules:
- for vnet_rule in datalake_store_obj.virtual_network_rules:
- vnet_rule_item = dict(
- name=vnet_rule.name,
- subnet_id=vnet_rule.subnet_id
- )
- account_dict['virtual_network_rules'].append(vnet_rule_item)
-
- if datalake_store_obj.identity:
- account_dict['identity'] = dict(
- type=datalake_store_obj.identity.type,
- principal_id=datalake_store_obj.identity.principal_id,
- tenant_id=datalake_store_obj.identity.tenant_id
- )
-
- if datalake_store_obj.encryption_config:
- if datalake_store_obj.encryption_config.key_vault_meta_info:
- account_dict['encryption_config'] = dict(
- key_vault_meta_info=dict(
- key_vault_resource_id=datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id,
- encryption_key_name=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name,
- encryption_key_version=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version
- )
- )
-
- return account_dict
-
- def compare_lists(self, list1, list2):
- if len(list1) != len(list2):
- return False
- for element in list1:
- if element not in list2:
- return False
- return True
-
-
-def main():
- AzureRMDatalakeStore()
-
-
-if __name__ == '__main__':
- main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore_info.py
deleted file mode 100644
index 8444a4c1c..000000000
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_datalakestore_info.py
+++ /dev/null
@@ -1,468 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright (c) 2020 David Duque Hernández, (@next-davidduquehernandez)
-#
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import absolute_import, division, print_function
-__metaclass__ = type
-
-
-DOCUMENTATION = '''
----
-module: azure_rm_datalakestore_info
-version_added: "1.4.0"
-short_description: Get Azure Data Lake Store info
-description:
- - Get Azure Data Lake Store info.
-
-options:
- resource_group:
- description:
- - The name of the Azure resource group.
- type: str
- aliases:
- - resource_group_name
- name:
- description:
- - The name of the Data Lake Store account.
- type: str
-
-extends_documentation_fragment:
- - azure.azcollection.azure
-
-author:
- - David Duque Hernández (@next-davidduquehernandez)
-
-'''
-
-EXAMPLES = '''
-- name: Get Azure Data Lake Store info from resource group 'myResourceGroup' and name 'myDataLakeStore'
- azure_rm_datalakestore_info:
- resource_group: myResourceGroup
- name: myDataLakeStore
-
-- name: Get Azure Data Lake Store info from resource group 'myResourceGroup'
- azure_rm_datalakestore_info:
- resource_group: myResourceGroup
-
-- name: Get Azure Data Lake Store info
- azure_rm_datalakestore_info:
-'''
-
-RETURN = '''
-datalake:
- description:
- - A list of dictionaries containing facts for Azure Data Lake Store.
- returned: always
- type: complex
- contains:
- account_id:
- description:
- - The unique identifier associated with this Data Lake Store account.
- returned: always
- type: str
- sample: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
- creation_time:
- description:
- - The account creation time.
- returned: always
- type: str
- sample: '2020-01-01T00:00:00.000000+00:00'
- current_tier:
- description:
- - The commitment tier in use for the current month.
- type: str
- sample: Consumption
- default_group:
- description:
- - The default owner group for all new folders and files created in the Data Lake Store account.
- type: str
- sample: null
- encryption_config:
- description:
- - The Key Vault encryption configuration.
- type: complex
- contains:
- type:
- description:
- - The type of encryption configuration being used.
- type: str
- returned: always
- sample: ServiceManaged
- key_vault_meta_info:
- description:
- - The Key Vault information for connecting to user managed encryption keys.
- type: complex
- contains:
- key_vault_resource_id:
- description:
- - The resource identifier for the user managed Key Vault being used to encrypt.
- type: str
- returned: always
- sample: /subscriptions/{subscriptionId}/resourceGroups/myRG/providers/Microsoft.KeyVault/vaults/testkv
- encryption_key_name:
- description:
- - The name of the user managed encryption key.
- type: str
- returned: always
- sample: KeyName
- encryption_key_version:
- description:
- - The version of the user managed encryption key.
- type: str
- returned: always
- sample: 86a1e3b7406f45afa0d54e21eff47e39
- encryption_provisioning_state:
- description:
- - The current state of encryption provisioning for this Data Lake Store account.
- type: str
- sample: Succeeded
- encryption_state:
- description:
- - The current state of encryption for this Data Lake Store account.
- type: str
- sample: Enabled
- endpoint:
- description:
- - The full CName endpoint for this account.
- returned: always
- type: str
- sample: testaccount.azuredatalakestore.net
- firewall_allow_azure_ips:
- description:
- - The current state of allowing or disallowing IPs originating within Azure through the firewall.
- type: str
- sample: Disabled
- firewall_rules:
- description:
- - The list of firewall rules associated with this Data Lake Store account.
- type: list
- contains:
- name:
- description:
- - The resource name.
- type: str
- returned: always
- sample: Example Name
- start_ip_address:
- description:
- - The start IP address for the firewall rule.
- - This can be either ipv4 or ipv6.
- - Start and End should be in the same protocol.
- type: str
- returned: always
- sample: 192.168.1.1
- end_ip_address:
- description:
- - The end IP address for the firewall rule.
- - This can be either ipv4 or ipv6.
- - Start and End should be in the same protocol.
- type: str
- returned: always
- sample: 192.168.1.254
- firewall_state:
- description:
- - The current state of the IP address firewall for this Data Lake Store account.
- type: str
- sample: Enabled
- id:
- description:
- - The resource identifier.
- returned: always
- type: str
- sample: /subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.DataLakeStore/accounts/testaccount
- identity:
- description:
- - The Key Vault encryption identity, if any.
- type: complex
- contains:
- type:
- description:
- - The type of encryption being used.
- type: str
- sample: SystemAssigned
- principal_id:
- description:
- - The principal identifier associated with the encryption.
- type: str
- sample: 00000000-0000-0000-0000-000000000000
- tenant_id:
- description:
- - The tenant identifier associated with the encryption.
- type: str
- sample: 00000000-0000-0000-0000-000000000000
- last_modified_time:
- description:
- - The account last modified time.
- returned: always
- type: str
- sample: '2020-01-01T00:00:00.000000+00:00'
- location:
- description:
- - The resource location.
- returned: always
- type: str
- sample: westeurope
- name:
- description:
- - The resource name.
- returned: always
- type: str
- sample: testaccount
- new_tier:
- description:
- - The commitment tier to use for next month.
- type: str
- sample: Consumption
- provisioning_state:
- description:
- - The provisioning status of the Data Lake Store account.
- returned: always
- type: str
- sample: Succeeded
- state:
- description:
- - The state of the Data Lake Store account.
- returned: always
- type: str
- sample: Active
- tags:
- description:
- - The resource tags.
- returned: always
- type: dict
- sample: { "tag1":"abc" }
- trusted_id_providers:
- description:
- - The current state of the trusted identity provider feature for this Data Lake Store account.
- type: list
- contains:
- id:
- description:
- - The resource identifier.
- type: str
- name:
- description:
- - The resource name.
- type: str
- type:
- description:
- - The resource type.
- type: str
- id_provider:
- description:
- - The URL of this trusted identity provider.
- type: str
- trusted_id_provider_state:
- description:
- - The list of trusted identity providers associated with this Data Lake Store account.
- type: str
- sample: Enabled
- type:
- description:
- - The resource type.
- returned: always
- type: str
- sample: Microsoft.DataLakeStore/accounts
- virtual_network_rules:
- description:
- - The list of virtual network rules associated with this Data Lake Store account.
- type: list
- contains:
- name:
- description:
- - The resource name.
- type: str
- sample: Rule Name
- subnet_id:
- description:
- - The resource identifier for the subnet.
- type: str
- sample: /subscriptions/{subscriptionId}/resourceGroups/myRG/providers/Microsoft.Network/virtualNetworks/vnet/subnets/default
-'''
-
-try:
- from azure.core.exceptions import ResourceNotFoundError
-except Exception:
- # This is handled in azure_rm_common
- pass
-
-from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
-
-
-class AzureRMDatalakeStoreInfo(AzureRMModuleBase):
- def __init__(self):
-
- self.module_arg_spec = dict(
- name=dict(type='str'),
- resource_group=dict(type='str', aliases=['resource_group_name'])
- )
-
- self.results = dict(
- changed=False,
- datalake=[]
- )
-
- self.name = None
- self.resource_group = None
-
- super(AzureRMDatalakeStoreInfo, self).__init__(self.module_arg_spec,
- supports_check_mode=True,
- supports_tags=False)
-
- def exec_module(self, **kwargs):
- self.module.deprecate("The azure_rm_datalakestore_info.py will deprecated. Azure Data Lake Storage Gen1 retired on February 29,2024", version=(2.3, ))
- for key in self.module_arg_spec:
- setattr(self, key, kwargs[key])
-
- if self.name and not self.resource_group:
- self.fail("Parameter error: resource group required when filtering by name.")
-
- results = []
- if self.name:
- results = self.get_datalake_store()
- elif self.resource_group:
- results = self.list_resource_group()
- else:
- results = self.list_all()
-
- self.results['datalake'] = results
- return self.results
-
- def get_datalake_store(self):
- self.log('Get properties for datalake store {0}'.format(self.name))
- datalake_store_obj = None
-
- try:
- datalake_store_obj = self.datalake_store_client.accounts.get(self.resource_group, self.name)
- except ResourceNotFoundError:
- pass
-
- if datalake_store_obj:
- return [self.account_obj_to_dict(datalake_store_obj)]
-
- return list()
-
- def list_resource_group(self):
- self.log('Get basic properties for datalake store in resource group {0}'.format(self.resource_group))
- datalake_store_obj = None
- results = list()
-
- try:
- datalake_store_obj = self.datalake_store_client.accounts.list_by_resource_group(self.resource_group)
- except Exception:
- pass
-
- if datalake_store_obj:
- for datalake_item in datalake_store_obj:
- results.append(self.account_obj_to_dict_basic(datalake_item))
- return results
-
- return list()
-
- def list_all(self):
- self.log('Get basic properties for all datalake store')
- datalake_store_obj = None
- results = list()
-
- try:
- datalake_store_obj = self.datalake_store_client.accounts.list()
- except Exception:
- pass
-
- if datalake_store_obj:
- for datalake_item in datalake_store_obj:
- results.append(self.account_obj_to_dict_basic(datalake_item))
- return results
-
- return list()
-
- def account_obj_to_dict(self, datalake_store_obj):
- account_dict = dict(
- account_id=datalake_store_obj.account_id,
- creation_time=datalake_store_obj.creation_time,
- current_tier=datalake_store_obj.current_tier,
- default_group=datalake_store_obj.default_group,
- encryption_config=dict(type=datalake_store_obj.encryption_config.type,
- key_vault_meta_info=None),
- encryption_provisioning_state=datalake_store_obj.encryption_provisioning_state,
- encryption_state=datalake_store_obj.encryption_state,
- endpoint=datalake_store_obj.endpoint,
- firewall_allow_azure_ips=datalake_store_obj.firewall_allow_azure_ips,
- firewall_rules=None,
- firewall_state=datalake_store_obj.firewall_state,
- id=datalake_store_obj.id,
- identity=None,
- last_modified_time=datalake_store_obj.last_modified_time,
- location=datalake_store_obj.location,
- name=datalake_store_obj.name,
- new_tier=datalake_store_obj.new_tier,
- provisioning_state=datalake_store_obj.provisioning_state,
- state=datalake_store_obj.state,
- tags=datalake_store_obj.tags,
- trusted_id_providers=datalake_store_obj.trusted_id_providers,
- trusted_id_provider_state=datalake_store_obj.trusted_id_provider_state,
- type=datalake_store_obj.type,
- virtual_network_rules=None
- )
-
- account_dict['firewall_rules'] = list()
- for rule in datalake_store_obj.firewall_rules:
- rule_item = dict(
- name=rule.name,
- start_ip_address=rule.start_ip_address,
- end_ip_address=rule.end_ip_address
- )
- account_dict['firewall_rules'].append(rule_item)
-
- account_dict['virtual_network_rules'] = list()
- for vnet_rule in datalake_store_obj.virtual_network_rules:
- vnet_rule_item = dict(
- name=vnet_rule.name,
- subnet_id=vnet_rule.subnet_id
- )
- account_dict['virtual_network_rules'].append(vnet_rule_item)
-
- if datalake_store_obj.identity:
- account_dict['identity'] = dict(
- type=datalake_store_obj.identity.type,
- principal_id=datalake_store_obj.identity.principal_id,
- tenant_id=datalake_store_obj.identity.tenant_id
- )
-
- if datalake_store_obj.encryption_config.key_vault_meta_info:
- account_dict['encryption_config'] = dict(
- key_vault_meta_info=dict(
- key_vault_resource_id=datalake_store_obj.encryption_config.key_vault_meta_info.key_vault_resource_id,
- encryption_key_name=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_name,
- encryption_key_version=datalake_store_obj.encryption_config.key_vault_meta_info.encryption_key_version
- )
- )
-
- return account_dict
-
- def account_obj_to_dict_basic(self, datalake_store_obj):
- account_dict = dict(
- account_id=datalake_store_obj.account_id,
- creation_time=datalake_store_obj.creation_time,
- endpoint=datalake_store_obj.endpoint,
- id=datalake_store_obj.id,
- last_modified_time=datalake_store_obj.last_modified_time,
- location=datalake_store_obj.location,
- name=datalake_store_obj.name,
- provisioning_state=datalake_store_obj.provisioning_state,
- state=datalake_store_obj.state,
- tags=datalake_store_obj.tags,
- type=datalake_store_obj.type
- )
-
- return account_dict
-
-
-def main():
- AzureRMDatalakeStoreInfo()
-
-
-if __name__ == '__main__':
- main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_loadbalancer.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_loadbalancer.py
index 2c473778c..c6c56291a 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_loadbalancer.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_loadbalancer.py
@@ -328,99 +328,6 @@ options:
- Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination.
- This element is only used when I(protocol=Tcp).
type: bool
- public_ip_address_name:
- description:
- - (deprecated) Name of an existing public IP address object to associate with the security group.
- - This option has been deprecated, and will be removed in 2.9. Use I(frontend_ip_configurations) instead.
- type: str
- aliases:
- - public_ip_address
- - public_ip_name
- - public_ip
- probe_port:
- description:
- - (deprecated) The port that the health probe will use.
- - This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
- type: int
- probe_protocol:
- description:
- - (deprecated) The protocol to use for the health probe.
- - This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
- type: str
- choices:
- - Tcp
- - Http
- - Https
- probe_interval:
- description:
- - (deprecated) Time (in seconds) between endpoint health probes.
- - This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
- type: int
- default: 15
- probe_fail_count:
- description:
- - (deprecated) The amount of probe failures for the load balancer to make a health determination.
- - This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
- default: 3
- type: int
- probe_request_path:
- description:
- - (deprecated) The URL that an HTTP probe or HTTPS probe will use (only relevant if I(probe_protocol=Http) or I(probe_protocol=Https)).
- - This option has been deprecated, and will be removed in 2.9. Use I(probes) instead.
- type: str
- protocol:
- description:
- - (deprecated) The protocol (TCP or UDP) that the load balancer will use.
- - This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
- type: str
- choices:
- - Tcp
- - Udp
- load_distribution:
- description:
- - (deprecated) The type of load distribution that the load balancer will employ.
- - This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
- type: str
- choices:
- - Default
- - SourceIP
- - SourceIPProtocol
- frontend_port:
- description:
- - (deprecated) Frontend port that will be exposed for the load balancer.
- - This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
- type: int
- backend_port:
- description:
- - (deprecated) Backend port that will be exposed for the load balancer.
- - This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
- type: int
- idle_timeout:
- description:
- - (deprecated) Timeout for TCP idle connection in minutes.
- - This option has been deprecated, and will be removed in 2.9. Use I(load_balancing_rules) instead.
- type: int
- default: 4
- natpool_frontend_port_start:
- description:
- - (deprecated) Start of the port range for a NAT pool.
- - This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
- type: int
- natpool_frontend_port_end:
- description:
- - (deprecated) End of the port range for a NAT pool.
- - This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
- type: int
- natpool_backend_port:
- description:
- - (deprecated) Backend port used by the NAT pool.
- - This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
- type: int
- natpool_protocol:
- description:
- - (deprecated) The protocol for the NAT pool.
- - This option has been deprecated, and will be removed in 2.9. Use I(inbound_nat_pools) instead.
- type: str
extends_documentation_fragment:
- azure.azcollection.azure
- azure.azcollection.azure_tags
@@ -710,58 +617,6 @@ class AzureRMLoadBalancer(AzureRMModuleBase):
type='list',
elements='dict',
options=load_balancing_rule_spec
- ),
- public_ip_address_name=dict(
- type='str',
- aliases=['public_ip_address', 'public_ip_name', 'public_ip']
- ),
- probe_port=dict(
- type='int'
- ),
- probe_protocol=dict(
- type='str',
- choices=['Tcp', 'Http', 'Https']
- ),
- probe_interval=dict(
- type='int',
- default=15
- ),
- probe_fail_count=dict(
- type='int',
- default=3
- ),
- probe_request_path=dict(
- type='str'
- ),
- protocol=dict(
- type='str',
- choices=['Tcp', 'Udp']
- ),
- load_distribution=dict(
- type='str',
- choices=['Default', 'SourceIP', 'SourceIPProtocol']
- ),
- frontend_port=dict(
- type='int'
- ),
- backend_port=dict(
- type='int'
- ),
- idle_timeout=dict(
- type='int',
- default=4
- ),
- natpool_frontend_port_start=dict(
- type='int'
- ),
- natpool_frontend_port_end=dict(
- type='int'
- ),
- natpool_backend_port=dict(
- type='int'
- ),
- natpool_protocol=dict(
- type='str'
)
)
@@ -775,22 +630,7 @@ class AzureRMLoadBalancer(AzureRMModuleBase):
self.inbound_nat_rules = None
self.inbound_nat_pools = None
self.load_balancing_rules = None
- self.public_ip_address_name = None
self.state = None
- self.probe_port = None
- self.probe_protocol = None
- self.probe_interval = None
- self.probe_fail_count = None
- self.probe_request_path = None
- self.protocol = None
- self.load_distribution = None
- self.frontend_port = None
- self.backend_port = None
- self.idle_timeout = None
- self.natpool_frontend_port_start = None
- self.natpool_frontend_port_end = None
- self.natpool_backend_port = None
- self.natpool_protocol = None
self.tags = None
self.results = dict(changed=False, state=dict())
@@ -814,54 +654,6 @@ class AzureRMLoadBalancer(AzureRMModuleBase):
load_balancer = self.get_load_balancer()
if self.state == 'present':
- # compatible parameters
- is_compatible_param = not self.frontend_ip_configurations and not self.backend_address_pools and not self.probes and not self.inbound_nat_pools
- is_compatible_param = is_compatible_param and not load_balancer # the instance should not be exist
- is_compatible_param = is_compatible_param or self.public_ip_address_name or self.probe_protocol or self.natpool_protocol or self.protocol
- if is_compatible_param:
- self.deprecate('Discrete load balancer config settings are deprecated and will be removed.'
- ' Use frontend_ip_configurations, backend_address_pools, probes, inbound_nat_pools lists instead.', version=(2, 9))
- frontend_ip_name = 'frontendip0'
- backend_address_pool_name = 'backendaddrp0'
- prob_name = 'prob0'
- inbound_nat_pool_name = 'inboundnatp0'
- lb_rule_name = 'lbr'
- self.frontend_ip_configurations = [dict(
- name=frontend_ip_name,
- public_ip_address=self.public_ip_address_name
- )]
- self.backend_address_pools = [dict(
- name=backend_address_pool_name
- )]
- self.probes = [dict(
- name=prob_name,
- port=self.probe_port,
- protocol=self.probe_protocol,
- interval=self.probe_interval,
- fail_count=self.probe_fail_count,
- request_path=self.probe_request_path
- )] if self.probe_protocol else None
- self.inbound_nat_pools = [dict(
- name=inbound_nat_pool_name,
- frontend_ip_configuration_name=frontend_ip_name,
- protocol=self.natpool_protocol,
- frontend_port_range_start=self.natpool_frontend_port_start,
- frontend_port_range_end=self.natpool_frontend_port_end,
- backend_port=self.natpool_backend_port
- )] if self.natpool_protocol else None
- self.load_balancing_rules = [dict(
- name=lb_rule_name,
- frontend_ip_configuration=frontend_ip_name,
- backend_address_pool=backend_address_pool_name,
- probe=prob_name,
- protocol=self.protocol,
- load_distribution=self.load_distribution,
- frontend_port=self.frontend_port,
- backend_port=self.backend_port,
- idle_timeout=self.idle_timeout,
- enable_floating_ip=False,
- )] if self.protocol else None
-
# create new load balancer structure early, so it can be easily compared
if not load_balancer:
frontend_ip_configurations_param = [self.network_models.FrontendIPConfiguration(
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway.py
new file mode 100644
index 000000000..6f88fff94
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway.py
@@ -0,0 +1,427 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_localnetworkgateway
+
+version_added: "2.4.0"
+
+short_description: Manage Azure Local Network Gateway in a resource group
+
+description:
+    - Create, update or delete Azure Local Network Gateway in a resource group.
+
+options:
+ resource_group:
+ description:
+ - The local network gateway's resource group.
+ type: str
+ required: true
+ name:
+ description:
+ - The name of the local network gateway.
+ type: str
+ required: true
+ location:
+ description:
+ - The location of the local network gateway.
+ type: str
+ local_network_address_space:
+ description:
+ - Local network site address space.
+ type: dict
+ suboptions:
+ address_prefixes:
+ description:
+ - A list of address blocks reserved for this virtual network in CIDR notation.
+ type: list
+ elements: str
+ gateway_ip_address:
+ description:
+ - IP address of local network gateway.
+ type: str
+ fqdn:
+ description:
+ - FQDN of local network gateway.
+ type: str
+ bgp_settings:
+ description:
+ - Local network gateway's BGP speaker settings.
+ type: dict
+ suboptions:
+ asn:
+ description:
+ - The BGP speaker's ASN.
+ type: int
+ bgp_peering_address:
+ description:
+ - The BGP peering address and BGP identifier of this BGP speaker.
+ type: str
+ peer_weight:
+ description:
+ - The weight added to routes learned from this BGP speaker.
+ type: int
+ state:
+ description:
+ - Use C(present) to create or update a local network gateway.
+ - Use C(absent) to delete the local network gateway.
+ type: str
+ default: present
+ choices:
+ - absent
+ - present
+extends_documentation_fragment:
+ - azure.azcollection.azure
+ - azure.azcollection.azure_tags
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred Sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Create a new local network gateway
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "localgateway-name"
+ local_network_address_space:
+ address_prefixes:
+ - 10.0.0.0/24
+ - 20.0.0.0/24
+ fqdn: fredtest.com
+ tags:
+ key: value
+ bgp_settings:
+ asn: 8
+ bgp_peering_address: 10.3.0.1
+ peer_weight: 3
+
+- name: Delete local network gateway
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "localgateway-name"
+ state: absent
+'''
+
+RETURN = '''
+state:
+ description:
+ - Current state of the Azure Local Network Gateway resource.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - The resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxxx-xxxx/resourceGroups/testRG/providers/Microsoft.Network/localNetworkGateways/testgateway"
+ bgp_settings:
+ description:
+ - Local network gateway's BGP speaker settings.
+ type: complex
+ contains:
+ asn:
+ description:
+ - The BGP speaker's ASN.
+ type: int
+ returned: always
+ sample: 10
+ bgp_peering_address:
+ description:
+ - The BGP peering address and BGP identifier of this BGP speaker.
+ type: str
+ returned: always
+ sample: 10.0.0.3
+ peer_weight:
+ description:
+ - The weight added to routes learned from this BGP speaker.
+ type: int
+ returned: always
+ sample: 0
+ fqdn:
+ description:
+ - FQDN of local network gateway.
+ type: str
+ returned: always
+ sample: testfqdn.com
+ gateway_ip_address:
+ description:
+ - IP address of local network gateway.
+ type: str
+ returned: always
+ sample: 10.1.1.1
+ etag:
+ description:
+ - A unique read-only string that changes whenever the resource is updated.
+ type: str
+ returned: always
+ sample: b5a32693-2e75-49e0-9137-ded19db658d6
+ local_network_address_space:
+ description:
+ - Local network site address space.
+ type: complex
+ contains:
+ address_prefixes:
+ description:
+ - A list of address blocks reserved for this virtual network in CIDR notation.
+ type: list
+ returned: always
+ sample: ["10.0.0.0/24", "20.0.0.0/24"]
+ location:
+ description:
+ - The resource location.
+ type: str
+ returned: always
+ sample: eastus
+ name:
+ description:
+ - The resource name.
+ type: str
+ returned: always
+ sample: testgateway
+ provisioning_state:
+ description:
+ - The provisioning state of the local network gateway resource.
+ type: str
+ returned: always
+ sample: Succeeded
+ tags:
+ description:
+ - The resource tags.
+ type: str
+ returned: always
+ sample: {'key1': 'value1', 'key2': 'value2'}
+ type:
+ description:
+ - The resource type.
+ type: str
+ returned: always
+ sample: Microsoft.Network/localNetworkGateways
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+ from azure.core.polling import LROPoller
+except Exception:
+ # handled in azure_rm_common
+ pass
+
+
+bgp_settings_spec = dict(
+ asn=dict(type='int'),
+ bgp_peering_address=dict(type='str'),
+ peer_weight=dict(type='int'),
+)
+
+
+local_network_address_space_spec = dict(
+ address_prefixes=dict(type='list', elements='str')
+)
+
+
+class AzureRMNetworkGateWay(AzureRMModuleBase):
+ """Utility class to get Azure Kubernetes Service Credentials facts"""
+
+ def __init__(self):
+
+ self.module_arg_spec = dict(
+ name=dict(type='str', required=True),
+ resource_group=dict(type='str', required=True),
+ location=dict(type='str'),
+ local_network_address_space=dict(type='dict', options=local_network_address_space_spec),
+ gateway_ip_address=dict(type='str'),
+ fqdn=dict(type='str'),
+ bgp_settings=dict(type='dict', options=bgp_settings_spec),
+ state=dict(type='str', default='present', choices=['present', 'absent'])
+ )
+
+ self.name = None
+ self.location = None
+ self.local_network_address_space = None
+ self.gateway_ip_address = None
+ self.fqdn = None
+ self.tags = None
+ self.bgp_settings = None
+
+ self.results = dict(
+ changed=False,
+ state=[],
+ )
+ mutually_exclusive = [['gateway_ip_address', 'fqdn']]
+
+ super(AzureRMNetworkGateWay, self).__init__(derived_arg_spec=self.module_arg_spec,
+ supports_check_mode=True,
+ mutually_exclusive=mutually_exclusive,
+ supports_tags=True,
+ facts_module=False)
+
+ def exec_module(self, **kwargs):
+
+ for key in list(self.module_arg_spec) + ['tags']:
+ setattr(self, key, kwargs[key])
+
+ if not self.location:
+ # Set default location
+ resource_group = self.get_resource_group(self.resource_group)
+ self.location = resource_group.location
+
+ old_response = self.get_local_network_gateway()
+ changed = False
+ update_tags = False
+
+ response = None
+ if self.state == 'present':
+ if old_response is not None:
+ if self.fqdn is not None and self.fqdn != old_response['fqdn']:
+ changed = True
+ else:
+ self.fqdn = old_response['fqdn']
+ if self.gateway_ip_address is not None and self.gateway_ip_address != old_response['gateway_ip_address']:
+ changed = True
+ else:
+ self.gateway_ip_address = old_response['gateway_ip_address']
+ if self.bgp_settings is not None and\
+ not all(self.bgp_settings.get(key) == old_response['bgp_settings'].get(key) for key in self.bgp_settings.keys()):
+ changed = True
+ if self.local_network_address_space is not None:
+ if old_response['local_network_address_space'].get('address_prefixes') is not None:
+ new_address = list(set(self.local_network_address_space['address_prefixes'] +
+ old_response['local_network_address_space']['address_prefixes']))
+ if len(new_address) > len(old_response['local_network_address_space'].get('address_prefixes')):
+ changed = True
+ self.local_network_address_space['address_prefixes'] = new_address
+ else:
+ changed = True
+ else:
+                    self.local_network_address_space = dict(address_prefixes=old_response['local_network_address_space'].get('address_prefixes'))
+
+ update_tags, new_tags = self.update_tags(old_response.get('tags'))
+ if update_tags:
+ # response = self.update_local_network_gateway_tags(new_tags)
+                    self.fail("Cannot update the local network gateway tags; the service rejects the update with AllPropertiesAreReadOnly")
+ changed = True
+ else:
+ changed = True
+
+ local_network_address_space = None
+ if self.local_network_address_space is not None:
+ local_network_address_space = self.network_models.AddressSpace(address_prefixes=self.local_network_address_space['address_prefixes'])
+ bgp_settings = None
+ if self.bgp_settings is not None:
+ bgp_settings = self.network_models.BgpSettings(asn=self.bgp_settings.get('asn'),
+ bgp_peering_address=self.bgp_settings.get('bgp_peering_address'),
+ peer_weight=self.bgp_settings.get('peer_weight'))
+
+ gateway_resource = self.network_models.LocalNetworkGateway(location=self.location,
+ tags=self.tags,
+ gateway_ip_address=self.gateway_ip_address,
+ fqdn=self.fqdn,
+ local_network_address_space=local_network_address_space,
+ bgp_settings=bgp_settings)
+ if changed:
+ if not self.check_mode:
+ response = self.create_or_update_local_network_gateway(gateway_resource)
+
+ if old_response is not None:
+ update_tags, new_tags = self.update_tags(old_response.get('tags'))
+ if update_tags:
+ if not self.check_mode:
+ response = self.update_local_network_gateway_tags(new_tags)
+ changed = True
+ else:
+ if not self.check_mode:
+ if old_response is not None:
+ self.delete_local_network_gateway()
+ changed = True
+ response = None
+ else:
+ changed = True
+
+ if response is None:
+ response = old_response
+ self.results['state'] = response
+ self.results['changed'] = changed
+ return self.results
+
+ def get_local_network_gateway(self):
+ """Gets the specified local network gateway in a resource group"""
+ response = None
+ try:
+ response = self.network_client.local_network_gateways.get(self.resource_group, self.name)
+ except HttpResponseError as ec:
+ self.log("Gets the specified local network gateway in a resource group Failed, Exception as {0}".format(ec))
+ return None
+ return self.format_response(response)
+
+ def create_or_update_local_network_gateway(self, body):
+ """Create or Update local network gateway"""
+ response = None
+ try:
+ response = self.network_client.local_network_gateways.begin_create_or_update(self.resource_group, self.name, body)
+ if isinstance(response, LROPoller):
+ response = self.get_poller_result(response)
+ except HttpResponseError as ec:
+ self.fail("Create or Updated a local network gateway in a resource group Failed, Exception as {0}".format(ec))
+
+ return self.format_response(response)
+
+ def update_local_network_gateway_tags(self, tags):
+ """Updates a local network gateway tags"""
+ response = None
+ try:
+ response = self.network_client.local_network_gateways.update_tags(self.resource_group, self.name, tags)
+ except HttpResponseError as ec:
+ self.fail("Update a local network gateway tags Failed, Exception as {0}".format(ec))
+ return self.format_response(response)
+
+ def delete_local_network_gateway(self):
+ """Deletes the specified local network gateway"""
+ try:
+ self.network_client.local_network_gateways.begin_delete(self.resource_group, self.name)
+ except HttpResponseError as ec:
+ self.fail("Deletes the specified local network gateway Failed, Exception as {0}".format(ec))
+ return None
+
+ def format_response(self, item):
+ result = dict(
+ id=item.id,
+ name=item.name,
+ location=item.location,
+ type=item.type,
+ tags=item.tags,
+ etag=item.etag,
+ local_network_address_space=dict(),
+ gateway_ip_address=item.gateway_ip_address,
+ fqdn=item.fqdn,
+ provisioning_state=item.provisioning_state,
+ bgp_settings=dict(),
+ )
+
+ if item.local_network_address_space is not None:
+ result['local_network_address_space']['address_prefixes'] = item.local_network_address_space.address_prefixes
+ if item.bgp_settings is not None:
+ result['bgp_settings']['asn'] = item.bgp_settings.asn
+ result['bgp_settings']['bgp_peering_address'] = item.bgp_settings.bgp_peering_address
+ result['bgp_settings']['peer_weight'] = item.bgp_settings.peer_weight
+ return result
+
+
+def main():
+ """Main module execution code path"""
+
+ AzureRMNetworkGateWay()
+
+
+if __name__ == '__main__':
+ main()
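For reference, a minimal standalone sketch of the address-prefix comparison that AzureRMNetworkGateWay.exec_module performs above: the requested prefixes are unioned with the prefixes already on the gateway, and a change is flagged only when that union grows. The function and variable names below are illustrative and are not part of the collection code.

# Illustrative sketch only: mirrors the address_prefixes merge logic in
# AzureRMNetworkGateWay.exec_module above; it is not part of the collection.
def merge_address_prefixes(requested, existing):
    """Return (changed, merged) for a local network gateway address space."""
    requested = requested or []
    existing = existing or []
    merged = sorted(set(requested + existing))
    # A change is only needed when the union contains prefixes the existing
    # gateway does not already carry.
    changed = len(merged) > len(existing)
    return changed, merged


if __name__ == '__main__':
    print(merge_address_prefixes(['10.0.0.0/24'], ['10.0.0.0/24', '20.0.0.0/24']))
    # -> (False, ['10.0.0.0/24', '20.0.0.0/24']): nothing new was requested
    print(merge_address_prefixes(['30.0.0.0/24'], ['10.0.0.0/24']))
    # -> (True, ['10.0.0.0/24', '30.0.0.0/24']): the union grew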
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway_info.py
new file mode 100644
index 000000000..445c2a237
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_localnetworkgateway_info.py
@@ -0,0 +1,267 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_localnetworkgateway_info
+
+version_added: "2.4.0"
+
+short_description: Get or list the specified local network gateway in a resource group
+
+description:
+    - Get or list the specified local network gateway in a resource group.
+
+options:
+ resource_group:
+ description:
+ - The local network gateway's resource group.
+ type: str
+ required: true
+ name:
+ description:
+ - The name of the local network gateway.
+ type: str
+ tags:
+ description:
+ - Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
+ type: list
+ elements: str
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred Sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Gets the specified local network gateway in a resource group
+ azure_rm_localnetworkgateway_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ local_networkgateway_name }}"
+
+- name: Gets all the local network gateways in a resource group
+ azure_rm_localnetworkgateway_info:
+ resource_group: "{{ resource_group }}"
+
+- name: Gets all the local network gateways in a resource group and filter by tags
+ azure_rm_localnetworkgateway_info:
+ resource_group: "{{ resource_group }}"
+ tags:
+ - foo
+'''
+
+RETURN = '''
+state:
+ description:
+ - Current state of the Azure Local Network Gateway resource.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - The resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxxx-xxxx/resourceGroups/testRG/providers/Microsoft.Network/localNetworkGateways/testgateway"
+ bgp_settings:
+ description:
+ - Local network gateway's BGP speaker settings.
+ type: complex
+ contains:
+ asn:
+ description:
+ - The BGP speaker's ASN.
+ type: int
+ returned: always
+ sample: 10
+ bgp_peering_address:
+ description:
+ - The BGP peering address and BGP identifier of this BGP speaker.
+ type: str
+ returned: always
+ sample: 10.0.0.3
+ peer_weight:
+ description:
+ - The weight added to routes learned from this BGP speaker.
+ type: int
+ returned: always
+ sample: 0
+ fqdn:
+ description:
+ - FQDN of local network gateway.
+ type: str
+ returned: always
+ sample: testfqdn.com
+ gateway_ip_address:
+ description:
+ - IP address of local network gateway.
+ type: str
+ returned: always
+ sample: 10.1.1.1
+ etag:
+ description:
+ - A unique read-only string that changes whenever the resource is updated.
+ type: str
+ returned: always
+ sample: b5a32693-2e75-49e0-9137-ded19db658d6
+ local_network_address_space:
+ description:
+ - Local network site address space.
+ type: complex
+ contains:
+ address_prefixes:
+ description:
+ - A list of address blocks reserved for this virtual network in CIDR notation.
+ type: list
+ returned: always
+ sample: ["10.0.0.0/24", "20.0.0.0/24"]
+ location:
+ description:
+ - The resource location.
+ type: str
+ returned: always
+ sample: eastus
+ name:
+ description:
+ - The resource name.
+ type: str
+ returned: always
+ sample: testgateway
+ provisioning_state:
+ description:
+ - The provisioning state of the local network gateway resource.
+ type: str
+ returned: always
+ sample: Succeeded
+ tags:
+ description:
+ - The resource tags.
+ type: str
+ returned: always
+ sample: {'key1': 'value1', 'key2': 'value2'}
+ type:
+ description:
+ - The resource type.
+ type: str
+ returned: always
+ sample: Microsoft.Network/localNetworkGateways
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except Exception:
+ # handled in azure_rm_common
+ pass
+
+
+class AzureRMNetworkGateWayInfo(AzureRMModuleBase):
+ """Utility class to get Azure Kubernetes Service Credentials facts"""
+
+ def __init__(self):
+
+ self.module_args = dict(
+ name=dict(type='str'),
+ resource_group=dict(type='str', required=True),
+ tags=dict(type='list', elements='str'),
+ )
+
+ self.name = None
+ self.tags = None
+
+ self.results = dict(
+ changed=False,
+ state=[],
+ )
+
+ super(AzureRMNetworkGateWayInfo, self).__init__(derived_arg_spec=self.module_args,
+ supports_check_mode=True,
+ supports_tags=False,
+ facts_module=True)
+
+ def exec_module(self, **kwargs):
+
+ for key in self.module_args:
+ setattr(self, key, kwargs[key])
+
+ if self.name is not None:
+ self.results['state'] = self.get_local_network_gateway()
+ else:
+ self.results['state'] = self.list_local_network_gateway()
+
+ return self.results
+
+ def get_local_network_gateway(self):
+ """Gets the specified local network gateway in a resource group"""
+ response = None
+
+ try:
+ response = self.network_client.local_network_gateways.get(self.resource_group, self.name)
+ except HttpResponseError as ec:
+ self.log("Gets the specified local network gateway in a resource group Failed, Exception as {0}".format(ec))
+ if response and self.has_tags(response.tags, self.tags):
+ return [self.format_response(response)]
+ else:
+ return []
+
+ def list_local_network_gateway(self):
+ """Gets all the local network gateways in a resource group"""
+ response = None
+
+ try:
+ response = self.network_client.local_network_gateways.list(self.resource_group)
+ except HttpResponseError as ec:
+ self.log("Gets all the local network gateways in a resource group Failed, Exception as {0}".format(ec))
+
+ if response:
+ results = []
+ for item in response:
+ if self.has_tags(item.tags, self.tags):
+ results.append(self.format_response(item))
+ return results
+ else:
+ return []
+
+ def format_response(self, item):
+ result = dict(
+ id=item.id,
+ name=item.name,
+ location=item.location,
+ type=item.type,
+ tags=item.tags,
+ etag=item.etag,
+ local_network_address_space=dict(),
+ gateway_ip_address=item.gateway_ip_address,
+ fqdn=item.fqdn,
+ provisioning_state=item.provisioning_state,
+ bgp_settings=dict(),
+ )
+
+ if item.local_network_address_space is not None:
+ result['local_network_address_space']['address_prefixes'] = item.local_network_address_space.address_prefixes
+ if item.bgp_settings is not None:
+ result['bgp_settings']['asn'] = item.bgp_settings.asn
+ result['bgp_settings']['bgp_peering_address'] = item.bgp_settings.bgp_peering_address
+ result['bgp_settings']['peer_weight'] = item.bgp_settings.peer_weight
+ return result
+
+
+def main():
+ """Main module execution code path"""
+
+ AzureRMNetworkGateWayInfo()
+
+
+if __name__ == '__main__':
+ main()
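The info module above filters results with AzureRMModuleBase.has_tags(). Below is a hedged standalone sketch of that filtering, assuming only the documented 'key' or 'key:value' filter format; the helper name and logic are illustrative, not the collection's implementation.

# Illustrative sketch only: approximates the tag filtering the *_info module
# delegates to AzureRMModuleBase.has_tags(); it assumes the documented
# 'key' / 'key:value' filter format and is not the collection's implementation.
def matches_tag_filter(resource_tags, tag_filters):
    """Return True when the resource tags satisfy every requested filter."""
    resource_tags = resource_tags or {}
    for tag_filter in tag_filters or []:
        key, _, value = tag_filter.partition(':')
        if key not in resource_tags:
            return False
        if value and resource_tags[key] != value:
            return False
    return True


gateways = [
    {'name': 'gw-a', 'tags': {'foo': 'bar'}},
    {'name': 'gw-b', 'tags': {'env': 'prod'}},
]
# Keep only the gateways carrying the 'foo' tag, as the tags option would.
print([g['name'] for g in gateways if matches_tag_filter(g['tags'], ['foo'])])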
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_networkinterface.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_networkinterface.py
index 3343d5ac1..beba7810b 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_networkinterface.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_networkinterface.py
@@ -79,47 +79,6 @@ options:
- Windows
- Linux
default: Linux
- private_ip_address:
- description:
- - (Deprecate) Valid IPv4 address that falls within the specified subnet.
- - This option will be deprecated in 2.9, use I(ip_configurations) instead.
- type: str
- private_ip_allocation_method:
- description:
- - (Deprecate) Whether or not the assigned IP address is permanent.
- - When creating a network interface, if you specify I(private_ip_address=Static), you must provide a value for I(private_ip_address).
- - You can update the allocation method to C(Static) after a dynamic private IP address has been assigned.
- - This option will be deprecated in 2.9, use I(ip_configurations) instead.
- default: Dynamic
- type: str
- choices:
- - Dynamic
- - Static
- public_ip:
- description:
- - (Deprecate) When creating a network interface, if no public IP address name is provided a default public IP address will be created.
- - Set to C(false) if you do not want a public IP address automatically created.
- - This option will be deprecated in 2.9, use I(ip_configurations) instead.
- type: bool
- default: 'yes'
- public_ip_address_name:
- description:
- - (Deprecate) Name of an existing public IP address object to associate with the security group.
- - This option will be deprecated in 2.9, use I(ip_configurations) instead.
- type: str
- aliases:
- - public_ip_address
- - public_ip_name
- public_ip_allocation_method:
- description:
- - (Deprecate) If a I(public_ip_address_name) is not provided, a default public IP address will be created.
- - The allocation method determines whether or not the public IP address assigned to the network interface is permanent.
- - This option will be deprecated in 2.9, use I(ip_configurations) instead.
- type: str
- choices:
- - Dynamic
- - Static
- default: Dynamic
ip_configurations:
description:
- List of IP configurations. Each configuration object should include
@@ -323,7 +282,6 @@ EXAMPLES = '''
virtual_network: vnet001
subnet_name: subnet001
create_with_security_group: false
- public_ip: false
ip_configurations:
- name: default
primary: true
@@ -614,13 +572,8 @@ class AzureRMNetworkInterface(AzureRMModuleBase):
create_with_security_group=dict(type='bool', default=True),
security_group=dict(type='raw', aliases=['security_group_name']),
state=dict(default='present', choices=['present', 'absent']),
- private_ip_address=dict(type='str'),
- private_ip_allocation_method=dict(type='str', choices=['Dynamic', 'Static'], default='Dynamic'),
- public_ip_address_name=dict(type='str', aliases=['public_ip_address', 'public_ip_name']),
- public_ip=dict(type='bool', default=True),
subnet_name=dict(type='str', aliases=['subnet']),
virtual_network=dict(type='raw', aliases=['virtual_network_name']),
- public_ip_allocation_method=dict(type='str', choices=['Dynamic', 'Static'], default='Dynamic'),
ip_configurations=dict(type='list', default=[], elements='dict', options=ip_configuration_spec),
os_type=dict(type='str', choices=['Windows', 'Linux'], default='Linux'),
open_ports=dict(type='list', elements='str'),
@@ -638,13 +591,8 @@ class AzureRMNetworkInterface(AzureRMModuleBase):
self.create_with_security_group = None
self.enable_accelerated_networking = None
self.security_group = None
- self.private_ip_address = None
- self.private_ip_allocation_method = None
- self.public_ip_address_name = None
- self.public_ip = None
self.subnet_name = None
self.virtual_network = None
- self.public_ip_allocation_method = None
self.state = None
self.tags = None
self.os_type = None
@@ -703,17 +651,13 @@ class AzureRMNetworkInterface(AzureRMModuleBase):
if len(asgs) > 0:
config['application_security_groups'] = asgs
+        # If ip_configurations is not specified then provide the default
+ # private interface
if self.state == 'present' and not self.ip_configurations:
- # construct the ip_configurations array for compatible
- self.deprecate('Setting ip_configuration flatten is deprecated and will be removed.'
- ' Using ip_configurations list to define the ip configuration', version=(2, 9))
self.ip_configurations = [
dict(
- private_ip_address=self.private_ip_address,
- private_ip_allocation_method=self.private_ip_allocation_method,
- public_ip_address_name=self.public_ip_address_name if self.public_ip else None,
- public_ip_allocation_method=self.public_ip_allocation_method,
name='default',
+ private_ip_allocation_method='Dynamic',
primary=True
)
]
@@ -875,7 +819,7 @@ class AzureRMNetworkInterface(AzureRMModuleBase):
def get_or_create_public_ip_address(self, ip_config):
name = ip_config.get('public_ip_address_name')
- if not (self.public_ip and name):
+ if not name:
return None
pip = self.get_public_ip_address(name)
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexiblefirewallrule.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexiblefirewallrule.py
index c73843c46..1fba876f9 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexiblefirewallrule.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexiblefirewallrule.py
@@ -113,8 +113,6 @@ try:
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
from azure.core.exceptions import ResourceNotFoundError
from azure.core.polling import LROPoller
- import logging
- logging.basicConfig(filename='log.log', level=logging.INFO)
except ImportError:
# This is handled in azure_rm_common
pass
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver.py
index 335dc53c8..9d443deec 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver.py
@@ -201,6 +201,42 @@ options:
- Whether to start the Post gresql server.
type: bool
default: False
+ identity:
+ description:
+ - Identity for the Server.
+ type: dict
+ version_added: '2.4.0'
+ suboptions:
+ type:
+ description:
+                    - Type of the managed identity.
+ required: false
+ choices:
+ - UserAssigned
+ - None
+ default: None
+ type: str
+ user_assigned_identities:
+ description:
+                    - User Assigned Managed Identities and their options.
+ required: false
+ type: dict
+ default: {}
+ suboptions:
+ id:
+ description:
+                            - List of the user assigned identity IDs to associate with the server.
+ required: false
+ type: list
+ elements: str
+ default: []
+ append:
+ description:
+                            - Whether the list of identities is appended to the current identities (true) or replaces them (false).
+ required: false
+ type: bool
+ default: True
+
extends_documentation_fragment:
- azure.azcollection.azure
@@ -489,6 +525,7 @@ servers:
try:
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+ import azure.mgmt.rdbms.postgresql_flexibleservers.models as PostgreSQLFlexibleModels
from azure.core.exceptions import ResourceNotFoundError
from azure.core.polling import LROPoller
except ImportError:
@@ -534,6 +571,18 @@ storage_spec = dict(
)
+user_assigned_identities_spec = dict(
+ id=dict(type='list', default=[], elements='str'),
+ append=dict(type='bool', default=True)
+)
+
+
+managed_identity_spec = dict(
+ type=dict(type='str', choices=['UserAssigned', 'None'], default='None'),
+ user_assigned_identities=dict(type='dict', options=user_assigned_identities_spec, default={}),
+)
+
+
class AzureRMPostgreSqlFlexibleServers(AzureRMModuleBase):
"""Configuration class for an Azure RM PostgreSQL Flexible Server resource"""
@@ -613,6 +662,7 @@ class AzureRMPostgreSqlFlexibleServers(AzureRMModuleBase):
source_server_resource_id=dict(
type='str'
),
+ identity=dict(type='dict', options=managed_identity_spec),
state=dict(
type='str',
default='present',
@@ -628,6 +678,7 @@ class AzureRMPostgreSqlFlexibleServers(AzureRMModuleBase):
self.is_start = None
self.is_stop = None
self.is_restart = None
+ self.identity = None
self.results = dict(changed=False)
self.state = None
@@ -663,6 +714,10 @@ class AzureRMPostgreSqlFlexibleServers(AzureRMModuleBase):
self.log("PostgreSQL Flexible Server instance doesn't exist")
if self.state == 'present':
if not self.check_mode:
+ if self.identity:
+ update_identity, new_identity = self.update_identities({})
+ if update_identity:
+ self.parameters['identity'] = new_identity
response = self.create_postgresqlflexibleserver(self.parameters)
if self.is_stop:
self.stop_postgresqlflexibleserver()
@@ -712,6 +767,12 @@ class AzureRMPostgreSqlFlexibleServers(AzureRMModuleBase):
else:
self.update_parameters['maintenance_window'][key] = old_response['maintenance_window'].get(key)
+ if self.identity:
+ update_identity, new_identity = self.update_identities(old_response.get('identity', {}))
+ if update_identity:
+ self.update_parameters['identity'] = new_identity
+ update_flag = True
+
update_tags, new_tags = self.update_tags(old_response['tags'])
self.update_parameters['tags'] = new_tags
if update_tags:
@@ -915,9 +976,50 @@ class AzureRMPostgreSqlFlexibleServers(AzureRMModuleBase):
result['maintenance_window']['start_minute'] = item.maintenance_window.start_minute
result['maintenance_window']['start_hour'] = item.maintenance_window.start_hour
result['maintenance_window']['day_of_week'] = item.maintenance_window.day_of_week
+ if item.identity is not None:
+ result['identity'] = item.identity.as_dict()
+ else:
+ result['identity'] = PostgreSQLFlexibleModels.UserAssignedIdentity(type='None').as_dict()
return result
+ def update_identities(self, curr_identity):
+ new_identities = []
+ changed = False
+ current_managed_type = curr_identity.get('type', 'None')
+ current_managed_identities = set(curr_identity.get('user_assigned_identities', {}).keys())
+ param_identity = self.module.params.get('identity')
+ param_identities = set(param_identity.get('user_assigned_identities', {}).get('id', []))
+ new_identities = param_identities
+
+ # If type set to None, and Resource has None, nothing to do
+ if 'None' in param_identity.get('type') and current_managed_type == 'None':
+ pass
+ # If type set to None, and Resource has current identities, remove UserAssigned identities
+ elif param_identity.get('type') == 'None':
+ changed = True
+ # If type in module args contains 'UserAssigned'
+ elif 'UserAssigned' in param_identity.get('type'):
+ if param_identity.get('user_assigned_identities', {}).get('append', False) is True:
+ new_identities = param_identities.union(current_managed_identities)
+ if len(current_managed_identities) != len(new_identities):
+ # update identities
+ changed = True
+ # If new identities have to overwrite current identities
+ else:
+ # Check if module args identities are different as current ones
+ if current_managed_identities.difference(new_identities) != set():
+ changed = True
+
+ # Append identities to the model
+ user_assigned_identities_dict = {uami: dict() for uami in new_identities}
+ new_identity = PostgreSQLFlexibleModels.UserAssignedIdentity(
+ type=param_identity.get('type'),
+ user_assigned_identities=user_assigned_identities_dict
+ )
+
+ return changed, new_identity
+
def main():
"""Main execution"""
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver_info.py
index 50fe9adc5..8d5f2b636 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_postgresqlflexibleserver_info.py
@@ -282,6 +282,30 @@ servers:
returned: always
sample: null
type: str
+ identity:
+ description:
+ - Identity for the Server.
+ type: complex
+ returned: when available
+ contains:
+ type:
+ description:
+                        - Type of the managed identity.
+ returned: always
+ sample: UserAssigned
+ type: str
+ user_assigned_identities:
+ description:
+                        - User Assigned Managed Identities and their options.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+                                - Dict of the user assigned identity IDs associated with the resource.
+ returned: always
+ type: dict
+ elements: dict
tags:
description:
- Tags assigned to the resource. Dictionary of string:string pairs.
@@ -293,6 +317,7 @@ servers:
try:
from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+ import azure.mgmt.rdbms.postgresql_flexibleservers.models as PostgreSQLFlexibleModels
from azure.core.exceptions import ResourceNotFoundError
except ImportError:
# This is handled in azure_rm_common
@@ -431,6 +456,10 @@ class AzureRMPostgreSqlFlexibleServersInfo(AzureRMModuleBase):
result['maintenance_window']['start_minute'] = item.maintenance_window.start_minute
result['maintenance_window']['start_hour'] = item.maintenance_window.start_hour
result['maintenance_window']['day_of_week'] = item.maintenance_window.day_of_week
+ if item.identity is not None:
+ result['identity'] = item.identity.as_dict()
+ else:
+ result['identity'] = PostgreSQLFlexibleModels.UserAssignedIdentity(type='None').as_dict()
return result
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_rediscache_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_rediscache_info.py
index 782fb0417..d1019dd65 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_rediscache_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_rediscache_info.py
@@ -198,13 +198,13 @@ rediscaches:
contains:
primary:
description:
- - The current primary key that clients can use to authenticate the Redis cahce.
+ - The current primary key that clients can use to authenticate the Redis cache.
returned: always
type: str
sample: X2xXXxx7xxxxxx5xxxx0xxxxx75xxxxxxxxXXXxxxxx=
secondary:
description:
- - The current secondary key that clients can use to authenticate the Redis cahce.
+ - The current secondary key that clients can use to authenticate the Redis cache.
returned: always
type: str
sample: X2xXXxx7xxxxxx5xxxx0xxxxx75xxxxxxxxXXXxxxxx=
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase.py
new file mode 100644
index 000000000..83d40fc5a
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase.py
@@ -0,0 +1,392 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_sqlmidatabase
+version_added: "2.4.0"
+short_description: Manage SQL Managed Instance databases
+description:
+ - Manage SQL Managed Instance databases.
+
+options:
+ resource_group:
+ description:
+ - The name of the resource group that contains the resource.
+ type: str
+ required: true
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ required: true
+ database_name:
+ description:
+ - The name of the SQL managed instance database.
+ type: str
+ required: true
+ collation:
+ description:
+ - The collation of the Azure SQL Managed Database collation to use.
+ - For example C(SQL_Latin1_General_CP1_CI_AS) or C(Latin1_General_100_CS_AS_SC).
+ type: str
+ location:
+ description:
+ - The resource location.
+ type: str
+ state:
+ description:
+ - State of the SQL Managed Database.
+            - Use C(present) to create or update a SQL managed instance database and use C(absent) to delete it.
+ type: str
+ default: present
+ choices:
+ - present
+ - absent
+extends_documentation_fragment:
+ - azure.azcollection.azure
+ - azure.azcollection.azure_tags
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Create a SQL managed instance database
+ azure_rm_sqlmidatabase:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+ collation: SQL_Latin1_General_CP1_CI_AS
+ location: eastus
+ tags:
+ key2: value2
+
+- name: Delete the SQL managed instance database
+ azure_rm_sqlmidatabase:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+ state: absent
+'''
+
+RETURN = '''
+database:
+ description:
+ - A dictionary containing facts for SQL Managed Instance database info.
+ returned: always
+ type: complex
+ contains:
+ auto_complete_restore:
+ description:
+ - Whether to auto complete restore of this managed database.
+ type: bool
+ returned: always
+ sample: null
+ catalog_collation:
+ description:
+ - Collation of the metadata catalog.
+ type: str
+ returned: always
+ sample: null
+ create_mode:
+ description:
+ - Managed database create mode.
+ type: str
+ returned: always
+ sample: null
+        creation_date:
+ description:
+ - Creation date of the database.
+ type: str
+ returned: always
+ sample: "2024-05-06T23:59:49.770Z"
+ database_name:
+ description:
+                - The SQL managed instance database name.
+ type: str
+ returned: always
+ sample: fredtest
+ default_secondary_location:
+ description:
+ - Geo paired region.
+ type: str
+ returned: always
+ sample: westus
+ id:
+ description:
+ - The resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxxx/resourceGroups/testRG/providers/Microsoft.Sql/managedInstances/fredsqlmin/databases/fredtest"
+ last_backup_name:
+ description:
+ - Last backup file name for restore of this managed database.
+ type: str
+ returned: always
+ sample: null
+ location:
+ description:
+ - The resource's location.
+ type: str
+ returned: always
+ sample: eastus
+ long_term_retention_backup_resource_id:
+ description:
+ - The name of the Long Term Retention backup to be used for restore of this managed database.
+ type: str
+ returned: always
+ sample: null
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ returned: always
+ sample: fredsqlmin
+ recoverable_database_id:
+ description:
+ - The resource identifier of the recoverable database associated with the database.
+ type: str
+ returned: always
+ sample: null
+ resource_group:
+ description:
+ - The resource's resource group.
+ type: str
+ returned: always
+ sample: testRG
+ restorable_dropped_database_id:
+ description:
+ - The restorable dropped database resource id.
+ type: str
+ returned: always
+ sample: null
+ restore_point_in_time:
+ description:
+ - Specifies the point in time (ISO8601 format) of the source database.
+ type: str
+ returned: always
+ sample: null
+ source_database_id:
+ description:
+ - The resource identifier of the source database associated with create operation of this database.
+ type: str
+ returned: always
+ sample: null
+ status:
+ description:
+ - Status of the database.
+ type: str
+ returned: always
+ sample: online
+ storage_container_sas_token:
+ description:
+ - Specifies the storage container sas token.
+ type: str
+ returned: always
+ sample: null
+ storage_container_uri:
+ description:
+                - Specifies the URI of the storage container where backups for this restore are stored.
+ type: str
+ returned: always
+ sample: null
+ tags:
+ description:
+                - The resource's tags.
+ type: str
+ returned: always
+ sample: {key1: value1}
+ type:
+ description:
+ - The resource type.
+ type: str
+ returned: always
+ sample: "Microsoft.Sql/managedInstances/databases"
+
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMSqlMIDatabase(AzureRMModuleBase):
+ def __init__(self):
+ # define user inputs into argument
+ self.module_arg_spec = dict(
+ resource_group=dict(
+ type='str',
+ required=True,
+ ),
+ managed_instance_name=dict(
+ type='str',
+ required=True,
+ ),
+ database_name=dict(
+ type='str',
+ required=True,
+ ),
+ collation=dict(
+ type='str'
+ ),
+ location=dict(
+ type='str'
+ ),
+ state=dict(
+ type='str',
+ choices=['present', 'absent'],
+ default='present'
+ ),
+ )
+ # store the results of the module operation
+ self.results = dict(
+ changed=False
+ )
+ self.resource_group = None
+ self.managed_instance_name = None
+ self.database_name = None
+ self.state = None
+ self.parameters = dict()
+
+ super(AzureRMSqlMIDatabase, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=True, facts_module=False)
+
+ def exec_module(self, **kwargs):
+ for key in list(self.module_arg_spec.keys()) + ['tags']:
+ if hasattr(self, key):
+ setattr(self, key, kwargs[key])
+ elif kwargs.get(key) is not None:
+ self.parameters[key] = kwargs.get(key)
+
+ changed = False
+ resource_group = self.get_resource_group(self.resource_group)
+ if self.parameters.get('location') is None:
+ # Set default location
+ self.parameters['location'] = resource_group.location
+
+ old_response = self.get()
+ if old_response is None:
+ if self.state == 'present':
+ changed = True
+ if not self.check_mode:
+ self.results['database'] = self.create_database()
+ else:
+ update_tags, tags = self.update_tags(old_response.get('tags'))
+ if update_tags:
+ changed = True
+ self.parameters['tags'] = tags
+ for key in self.parameters.keys():
+ if key != 'tags' and self.parameters[key] != old_response.get(key):
+                    self.fail("The collation and location cannot be updated")
+ if self.state == 'present':
+ if changed and not self.check_mode:
+ self.results['database'] = self.update_database()
+ else:
+ self.results['database'] = old_response
+ else:
+ changed = True
+ if not self.check_mode:
+ self.results['database'] = self.delete_database()
+
+ self.results['changed'] = changed
+ return self.results
+
+ def create_database(self):
+ response = None
+ try:
+ response = self.sql_client.managed_databases.begin_create_or_update(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ parameters=self.parameters)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+ self.fail('Create the SQL managed instance database failed, exception as {0}'.format(ec))
+
+ return self.format_item(self.get_poller_result(response))
+
+ def update_database(self):
+ response = None
+ try:
+ response = self.sql_client.managed_databases.begin_update(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ parameters=self.parameters)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+ self.fail('Update the SQL managed instance database failed, exception as {0}'.format(ec))
+
+ return self.format_item(self.get_poller_result(response))
+
+ def get(self):
+ response = None
+ try:
+ response = self.sql_client.managed_databases.get(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+ self.log('Could not get facts for SQL managed instance database. Exception as {0}'.format(ec))
+
+ return self.format_item(response)
+
+ def delete_database(self):
+ response = None
+ try:
+ response = self.sql_client.managed_databases.begin_delete(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+            self.fail('Delete the SQL managed instance database failed, exception as {0}'.format(ec))
+
+ return self.format_item(self.get_poller_result(response))
+
+ def format_item(self, item):
+ if item is None:
+ return
+ d = item.as_dict()
+ d = {
+ 'resource_group': self.resource_group,
+ 'managed_instance_name': self.managed_instance_name,
+ 'database_name': d.get('name'),
+ 'id': d.get('id', None),
+ 'type': d.get('type', None),
+ 'location': d.get('location'),
+ 'tags': d.get('tags'),
+ 'collation': d.get('collation'),
+ 'status': d.get('status'),
+ 'creation_date': d.get('creation_date'),
+ 'restore_point_in_time': d.get('restore_point_in_time'),
+ 'default_secondary_location': d.get('default_secondary_location'),
+ 'catalog_collation': d.get('catalog_collation'),
+ 'create_mode': d.get('create_mode'),
+ 'storage_container_uri': d.get('storage_container_uri'),
+ 'source_database_id': d.get('source_database_id'),
+ 'restorable_dropped_database_id': d.get('restorable_dropped_database_id'),
+ 'storage_container_sas_token': d.get('storage_container_sas_token'),
+ 'recoverable_database_id': d.get('recoverable_database_id'),
+ 'long_term_retention_backup_resource_id': d.get('long_term_retention_backup_resource_id'),
+ 'auto_complete_restore': d.get('auto_complete_restore'),
+ 'last_backup_name': d.get('last_backup_name')
+ }
+ return d
+
+
+def main():
+ AzureRMSqlMIDatabase()
+
+
+if __name__ == '__main__':
+ main()
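A hedged standalone sketch of the create/update/delete decision made in AzureRMSqlMIDatabase.exec_module above, with the Azure SDK calls stubbed out and the tag merge simplified to plain equality; collation and location are treated as immutable, matching the failure path above. Names are illustrative.

# Illustrative sketch only: the create/update/delete decision made in
# AzureRMSqlMIDatabase.exec_module above, with the Azure SDK calls stubbed
# out and the tag merge simplified to plain equality.
def plan_sqlmi_database(state, existing, requested,
                        immutable=('collation', 'location')):
    """Return 'create', 'update', 'delete' or 'noop'; raise on immutable drift."""
    if existing is None:
        return 'create' if state == 'present' else 'noop'
    if state == 'absent':
        return 'delete'
    for key in immutable:
        if key in requested and requested[key] != existing.get(key):
            raise ValueError('collation and location cannot be updated')
    if requested.get('tags') and requested['tags'] != existing.get('tags'):
        return 'update'
    return 'noop'


print(plan_sqlmi_database('present', None, {'collation': 'SQL_Latin1_General_CP1_CI_AS'}))
# -> 'create'
print(plan_sqlmi_database('absent', {'collation': 'SQL_Latin1_General_CP1_CI_AS'}, {}))
# -> 'delete'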
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase_info.py
new file mode 100644
index 000000000..d6fe211a0
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidatabase_info.py
@@ -0,0 +1,305 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_sqlmidatabase_info
+version_added: "2.4.0"
+short_description: Get Azure SQL managed instance database facts
+description:
+    - Get facts of Azure SQL managed instance databases.
+
+options:
+ resource_group:
+ description:
+ - The name of the resource group that contains the resource.
+ type: str
+ required: true
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ required: true
+ database_name:
+ description:
+ - The name of the SQL managed instance database.
+ type: str
+ tags:
+ description:
+ - Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
+ type: list
+ elements: str
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Get SQL managed instance database by name
+ azure_rm_sqlmidatabase_info:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+'''
+
+RETURN = '''
+database:
+ description:
+ - A dictionary containing facts for SQL Managed Instance database info.
+ returned: always
+ type: complex
+ contains:
+ auto_complete_restore:
+ description:
+ - Whether to auto complete restore of this managed database.
+ type: bool
+ returned: always
+ sample: null
+ catalog_collation:
+ description:
+ - Collation of the metadata catalog.
+ type: str
+ returned: always
+ sample: null
+ create_mode:
+ description:
+ - Managed database create mode.
+ type: str
+ returned: always
+ sample: null
+        creation_date:
+ description:
+ - Creation date of the database.
+ type: str
+ returned: always
+ sample: "2024-05-06T23:59:49.770Z"
+ database_name:
+ description:
+                - The SQL managed instance database name.
+ type: str
+ returned: always
+ sample: fredtest
+ default_secondary_location:
+ description:
+ - Geo paired region.
+ type: str
+ returned: always
+ sample: westus
+ id:
+ description:
+ - The resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxxx/resourceGroups/testRG/providers/Microsoft.Sql/managedInstances/fredsqlmin/databases/fredtest"
+ last_backup_name:
+ description:
+ - Last backup file name for restore of this managed database.
+ type: str
+ returned: always
+ sample: null
+ location:
+ description:
+ - The resource's location.
+ type: str
+ returned: always
+ sample: eastus
+ long_term_retention_backup_resource_id:
+ description:
+ - The name of the Long Term Retention backup to be used for restore of this managed database.
+ type: str
+ returned: always
+ sample: null
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ returned: always
+ sample: fredsqlmin
+ recoverable_database_id:
+ description:
+ - The resource identifier of the recoverable database associated with the database.
+ type: str
+ returned: always
+ sample: null
+ resource_group:
+ description:
+ - The resource's resource group.
+ type: str
+ returned: always
+ sample: testRG
+ restorable_dropped_database_id:
+ description:
+ - The restorable dropped database resource id.
+ type: str
+ returned: always
+ sample: null
+ restore_point_in_time:
+ description:
+ - Specifies the point in time (ISO8601 format) of the source database.
+ type: str
+ returned: always
+ sample: null
+ source_database_id:
+ description:
+ - The resource identifier of the source database associated with create operation of this database.
+ type: str
+ returned: always
+ sample: null
+ status:
+ description:
+ - Status of the database.
+ type: str
+ returned: always
+ sample: online
+ storage_container_sas_token:
+ description:
+ - Specifies the storage container sas token.
+ type: str
+ returned: always
+ sample: null
+ storage_container_uri:
+ description:
+                - Specifies the URI of the storage container where backups for this restore are stored.
+ type: str
+ returned: always
+ sample: null
+ tags:
+ description:
+                - The resource's tags.
+ type: str
+ returned: always
+ sample: {key1: value1}
+ type:
+ description:
+ - The resource type.
+ type: str
+ returned: always
+ sample: "Microsoft.Sql/managedInstances/databases"
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMSqlMIDatabaseInfo(AzureRMModuleBase):
+ def __init__(self):
+ # define user inputs into argument
+ self.module_arg_spec = dict(
+ resource_group=dict(
+ type='str',
+ required=True,
+ ),
+ managed_instance_name=dict(
+ type='str',
+ required=True,
+ ),
+ database_name=dict(
+ type='str',
+ ),
+ tags=dict(
+ type='list',
+ elements='str'
+ ),
+ )
+ # store the results of the module operation
+ self.results = dict(
+ changed=False
+ )
+ self.resource_group = None
+ self.managed_instance_name = None
+ self.database_name = None
+ self.tags = None
+
+ super(AzureRMSqlMIDatabaseInfo, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=False, facts_module=True)
+
+ def exec_module(self, **kwargs):
+ for key in self.module_arg_spec:
+ setattr(self, key, kwargs[key])
+
+ if self.database_name is not None:
+ self.results['database'] = self.get()
+ else:
+ self.results['database'] = self.list_by_instance()
+ return self.results
+
+ def list_by_instance(self):
+ response = None
+ results = []
+ try:
+ response = self.sql_client.managed_databases.list_by_instance(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError:
+ self.log('Could not get facts for SQL managed instance database.')
+
+ if response is not None:
+ for item in response:
+ if self.has_tags(item.tags, self.tags):
+ results.append(self.format_item(item))
+ return results
+
+ def get(self):
+ response = None
+ try:
+ response = self.sql_client.managed_databases.get(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+            self.log('Could not get facts for SQL managed instance database. Exception as {0}'.format(ec))
+
+ if response is not None and self.has_tags(response.tags, self.tags):
+ return [self.format_item(response)]
+
+ def format_item(self, item):
+ d = item.as_dict()
+ d = {
+ 'resource_group': self.resource_group,
+ 'managed_instance_name': self.managed_instance_name,
+ 'database_name': d.get('name'),
+ 'id': d.get('id', None),
+ 'type': d.get('type', None),
+ 'location': d.get('location'),
+ 'tags': d.get('tags'),
+ 'collation': d.get('collation'),
+ 'status': d.get('status'),
+ 'creation_date': d.get('creation_date'),
+ 'earliest_restore_point': d.get('earliest_restore_point'),
+ 'restore_point_in_time': d.get('restore_point_in_time'),
+ 'default_secondary_location': d.get('default_secondary_location'),
+ 'catalog_collation': d.get('catalog_collation'),
+ 'create_mode': d.get('create_mode'),
+ 'storage_container_uri': d.get('storage_container_uri'),
+ 'source_database_id': d.get('source_database_id'),
+ 'restorable_dropped_database_id': d.get('restorable_dropped_database_id'),
+ 'storage_container_sas_token': d.get('storage_container_sas_token'),
+ 'failover_group_id': d.get('failover_group_id'),
+ 'recoverable_database_id': d.get('recoverable_database_id'),
+ 'long_term_retention_backup_resource_id': d.get('long_term_retention_backup_resource_id'),
+ 'auto_complete_restore': d.get('auto_complete_restore'),
+ 'last_backup_name': d.get('last_backup_name')
+ }
+ return d
+
+
+def main():
+ AzureRMSqlMIDatabaseInfo()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy.py
new file mode 100644
index 000000000..04b05b1c2
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy.py
@@ -0,0 +1,277 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_sqlmidblongtermretentionpolicy
+version_added: "2.4.0"
+short_description: Manage Azure SQL Managed Instance long-term backup retention
+description:
+ - Manage Azure SQL Managed Instance long-term backup retention.
+
+options:
+ resource_group:
+ description:
+ - The name of the resource group that contains the resource.
+ type: str
+ required: true
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ required: true
+ database_name:
+ description:
+ - The name of the SQL managed instance database.
+ type: str
+ required: true
+ policy_name:
+ description:
+ - The name of the SQL managed instance long term retention policy.
+ type: str
+ required: true
+ choices:
+ - default
+ monthly_retention:
+ description:
+ - The monthly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ yearly_retention:
+ description:
+ - The yearly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ weekly_retention:
+ description:
+ - The weekly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ week_of_year:
+ description:
+ - The week of year to take the yearly backup in an ISO 8601 format.
+ type: int
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Update SQL managed instance long term retention policy's retention_days
+ azure_rm_sqlmidblongtermretentionpolicy:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+ policy_name: default
+ monthly_retention: P3M
+ week_of_year: 17
+ weekly_retention: P13W
+ yearly_retention: P6Y
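+
+# Additional illustrative example: only the settings supplied here are compared with the
+# existing policy, and the task reports changed when any supplied value differs.
+- name: Update only the weekly retention of the long term retention policy
+  azure_rm_sqlmidblongtermretentionpolicy:
+    resource_group: testrg
+    managed_instance_name: testinstancename
+    database_name: newdatabase
+    policy_name: default
+    weekly_retention: P4W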
+'''
+
+RETURN = '''
+long_term_retention_policy:
+ description:
+        - A dictionary containing facts for SQL Managed Instance Long Term Retention Policies.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - Resource ID.
+ returned: always
+ type: str
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Sql/
+                     managedInstances/fredsqlmi/databases/newdatabase/backupLongTermRetentionPolicies/default"
+ database_name:
+ description:
+ - SQL managed instance database name.
+ returned: always
+ type: str
+ sample: newdatabase
+ policy_name:
+ description:
+                - SQL managed instance long term retention policy name.
+ returned: always
+ type: str
+ sample: default
+ managed_instance_name:
+ description:
+ - SQL managed instance name.
+ returned: always
+ type: str
+ sample: testmanagedinstance
+ type:
+ description:
+                - The resource type of the SQL managed instance long term retention policy.
+            type: str
+            returned: always
+            sample: "Microsoft.Sql/managedInstances/databases/backupLongTermRetentionPolicies"
+ resource_group:
+ description:
+                - The resource group of the resource.
+ type: str
+ returned: always
+ sample: testRG
+ week_of_year:
+ description:
+ - The week of year to take the yearly backup in an ISO 8601 format.
+ type: int
+ sample: 7
+ returned: always
+ weekly_retention:
+ description:
+ - The weekly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ sample: P13W
+ returned: always
+ monthly_retention:
+ description:
+ - The monthly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ sample: P3M
+ returned: always
+ yearly_retention:
+ description:
+ - The yearly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ sample: P6Y
+ returned: always
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMSqMILongTermRetentionPolicy(AzureRMModuleBase):
+ def __init__(self):
+ # define user inputs into argument
+ self.module_arg_spec = dict(
+ resource_group=dict(
+ type='str',
+ required=True,
+ ),
+ managed_instance_name=dict(
+ type='str',
+ required=True,
+ ),
+ database_name=dict(
+ type='str',
+ required=True,
+ ),
+ policy_name=dict(
+ type='str',
+ required=True,
+ choices=['default']
+ ),
+ weekly_retention=dict(
+ type='str',
+ ),
+ monthly_retention=dict(
+ type='str'
+ ),
+ yearly_retention=dict(
+ type='str'
+ ),
+ week_of_year=dict(
+ type='int'
+ )
+ )
+ # store the results of the module operation
+ self.parameters = dict()
+ self.results = dict(
+ changed=False,
+ diff=[]
+ )
+ self.resource_group = None
+ self.managed_instance_name = None
+ self.database_name = None
+ self.policy_name = None
+
+ super(AzureRMSqMILongTermRetentionPolicy, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=False, facts_module=True)
+
+ def exec_module(self, **kwargs):
+ for key in self.module_arg_spec:
+ if hasattr(self, key):
+ setattr(self, key, kwargs[key])
+ else:
+ self.parameters[key] = kwargs.get(key)
+
+ old_response = self.get()
+
+ if old_response is not None:
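+            # Compare each supplied retention setting with the existing policy and record the keys that differ.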
+ for key in self.parameters.keys():
+ if self.parameters[key] is not None and old_response[key] != self.parameters[key]:
+ self.results['changed'] = True
+ self.results['diff'].append(key)
+ if self.results['changed'] and not self.check_mode:
+ self.results['long_term_retention_policy'] = self.create_or_update_policy()
+ else:
+ self.results['changed'] = True
+ if not self.check_mode:
+ self.results['long_term_retention_policy'] = self.create_or_update_policy()
+ return self.results
+
+ def get(self):
+ response = None
+ try:
+ response = self.sql_client.managed_instance_long_term_retention_policies.get(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError:
+            self.log('Could not get facts for SQL managed instance long term retention policies.')
+
+ return self.format_item(response) if response is not None else None
+
+ def create_or_update_policy(self):
+ response = None
+ try:
+ response = self.sql_client.managed_instance_long_term_retention_policies.begin_create_or_update(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name,
+ parameters=self.parameters)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+            self.fail('Could not create the SQL managed instance long term retention policy. Exception info as {0}'.format(ec))
+
+ return self.format_item(self.get_poller_result(response))
+
+ def format_item(self, item):
+ d = item.as_dict()
+ d = {
+ 'resource_group': self.resource_group,
+ 'managed_instance_name': self.managed_instance_name,
+ 'database_name': self.database_name,
+ 'id': d.get('id', None),
+ 'name': d.get('name', None),
+ 'type': d.get('type', None),
+ "monthly_retention": d.get("monthly_retention"),
+ "week_of_year": d.get("week_of_year"),
+ "weekly_retention": d.get("weekly_retention"),
+ "yearly_retention": d.get("yearly_retention")
+ }
+ return d
+
+
+def main():
+ AzureRMSqMILongTermRetentionPolicy()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy_info.py
new file mode 100644
index 000000000..11f7bce16
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidblongtermretentionpolicy_info.py
@@ -0,0 +1,228 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_sqlmidblongtermretentionpolicy_info
+version_added: "2.4.0"
+short_description: Get Azure SQL managed instance long term retention policy facts
+description:
+    - Get facts of Azure SQL managed instance long term retention policies.
+
+options:
+ resource_group:
+ description:
+ - The name of the resource group that contains the resource.
+ type: str
+ required: true
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ required: true
+ database_name:
+ description:
+ - The name of the SQL managed instance database.
+ type: str
+ required: true
+ policy_name:
+ description:
+            - The name of the SQL managed instance long term retention policy.
+ type: str
+ choices:
+ - default
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Get SQL managed instance long term retention policy by name
+ azure_rm_sqlmidblongtermretentionpolicy_info:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+ policy_name: default
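+
+# Additional illustrative example: when policy_name is omitted, the module lists the
+# long term retention policies of the given database instead of fetching one by name.
+- name: List SQL managed instance long term retention policies for a database
+  azure_rm_sqlmidblongtermretentionpolicy_info:
+    resource_group: testrg
+    managed_instance_name: testinstancename
+    database_name: newdatabase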
+'''
+
+RETURN = '''
+long_term_retention_policy:
+    description:
+        - A dictionary containing facts for SQL Managed Instance Long Term Retention Policies.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - Resource ID.
+ returned: always
+ type: str
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Sql/
+                     managedInstances/fredsqlmi/databases/newdatabase/backupLongTermRetentionPolicies/default"
+ database_name:
+ description:
+ - SQL managed instance database name.
+ returned: always
+ type: str
+ sample: newdatabase
+ policy_name:
+ description:
+            - SQL managed instance long term retention policy name.
+ returned: always
+ type: str
+ sample: default
+ managed_instance_name:
+ description:
+ - SQL managed instance name.
+ returned: always
+ type: str
+ sample: testmanagedinstance
+ type:
+ description:
+                - The SQL managed instance long term retention policy type.
+            type: str
+            returned: always
+            sample: "Microsoft.Sql/managedInstances/databases/backupLongTermRetentionPolicies"
+ resource_group:
+ description:
+            - The resource group of the resource.
+ type: str
+ returned: always
+ sample: testRG
+ week_of_year:
+ description:
+ - The week of year to take the yearly backup in an ISO 8601 format.
+ type: int
+ sample: 7
+ returned: always
+ weekly_retention:
+ description:
+ - The weekly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ sample: P13W
+ returned: always
+ monthly_retention:
+ description:
+ - The monthly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ sample: P3M
+ returned: always
+ yearly_retention:
+ description:
+ - The yearly retention policy for an LTR backup in an ISO 8601 format.
+ type: str
+ sample: P6Y
+ returned: always
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMSqMILongTermRetentionPolicyInfo(AzureRMModuleBase):
+ def __init__(self):
+ # define user inputs into argument
+ self.module_arg_spec = dict(
+ resource_group=dict(
+ type='str',
+ required=True,
+ ),
+ managed_instance_name=dict(
+ type='str',
+ required=True,
+ ),
+ database_name=dict(
+ type='str',
+ required=True,
+ ),
+ policy_name=dict(
+ type='str',
+ choices=['default']
+ ),
+ )
+ # store the results of the module operation
+ self.results = dict(
+ changed=False
+ )
+ self.resource_group = None
+ self.managed_instance_name = None
+ self.database_name = None
+ self.policy_name = None
+
+ super(AzureRMSqMILongTermRetentionPolicyInfo, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=False, facts_module=True)
+
+ def exec_module(self, **kwargs):
+ for key in self.module_arg_spec:
+ setattr(self, key, kwargs[key])
+
+ if self.policy_name is not None:
+ self.results['long_term_retention_policy'] = self.get()
+ else:
+ self.results['long_term_retention_policy'] = self.list_by_database()
+ return self.results
+
+ def list_by_database(self):
+ response = None
+ try:
+ response = self.sql_client.managed_instance_long_term_retention_policies.list_by_database(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError:
+            self.log('Could not get facts for SQL managed instance long term retention policies.')
+
+ return [self.format_item(item) for item in response] if response is not None else []
+
+ def get(self):
+ response = None
+ try:
+ response = self.sql_client.managed_instance_long_term_retention_policies.get(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name)
+ self.log("Response : {0}".format(response))
+        except HttpResponseError as ec:
+            self.fail('Could not get facts for the SQL managed instance long term retention policy. Exception info as {0}'.format(ec))
+
+ return [self.format_item(response)] if response is not None else None
+
+ def format_item(self, item):
+ d = item.as_dict()
+ d = {
+ 'resource_group': self.resource_group,
+ 'managed_instance_name': self.managed_instance_name,
+ 'database_name': self.database_name,
+ 'id': d.get('id', None),
+ 'name': d.get('name', None),
+ 'type': d.get('type', None),
+ "monthly_retention": d.get("monthly_retention"),
+ "week_of_year": d.get("week_of_year"),
+ "weekly_retention": d.get("weekly_retention"),
+ "yearly_retention": d.get("yearly_retention")
+ }
+ return d
+
+
+def main():
+ AzureRMSqMILongTermRetentionPolicyInfo()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy.py
new file mode 100644
index 000000000..477c393bb
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy.py
@@ -0,0 +1,244 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_sqlmidbshorttermretentionpolicy
+version_added: "2.4.0"
+short_description: Manage SQL Managed Instance database backup short term retention policy
+description:
+ - Manage SQL Managed Instance database backup short term retention policy.
+
+options:
+ resource_group:
+ description:
+ - The name of the resource group that contains the resource.
+ type: str
+ required: true
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ required: true
+ database_name:
+ description:
+ - The name of the SQL managed instance database.
+ type: str
+ required: true
+ policy_name:
+ description:
+ - The name of the SQL managed instance short term retention policy.
+ type: str
+ required: true
+ choices:
+ - default
+ retention_days:
+ description:
+            - The backup retention period in days. This is how many days Point-in-Time Restore will be supported.
+ type: int
+ default: 7
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Update SQL managed instance short term retention policy's retention_days
+ azure_rm_sqlmidbshorttermretentionpolicy:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+ policy_name: default
+ retention_days: 3
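+
+# Additional illustrative example: retention_days defaults to 7 when omitted, so this
+# task sets the policy back to the default retention window.
+- name: Reset SQL managed instance short term retention policy to the default of 7 days
+  azure_rm_sqlmidbshorttermretentionpolicy:
+    resource_group: testrg
+    managed_instance_name: testinstancename
+    database_name: newdatabase
+    policy_name: default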
+'''
+
+RETURN = '''
+short_term_retention_policy:
+ description:
+ - A dictionary containing facts for SQL Managed Instance Short Term Retention Policies.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - Resource ID.
+ returned: always
+ type: str
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Sql/
+ managedInstances/fredsqlmi/databases/newdatabase/backupShortTermRetentionPolicies/default"
+ database_name:
+ description:
+ - SQL managed instance database name.
+ returned: always
+ type: str
+ sample: newdatabase
+ policy_name:
+ description:
+            - SQL managed instance short term retention policy name.
+ returned: always
+ type: str
+ sample: default
+ managed_instance_name:
+ description:
+ - SQL managed instance name.
+ returned: always
+ type: str
+ sample: testmanagedinstance
+ type:
+ description:
+                - The SQL managed instance short term retention policy type.
+            type: str
+            returned: always
+            sample: "Microsoft.Sql/managedInstances/databases/backupShortTermRetentionPolicies"
+ resource_group:
+ description:
+            - The resource group of the resource.
+ type: str
+ returned: always
+ sample: testRG
+ retention_days:
+ description:
+            - The backup retention period in days. This is how many days Point-in-Time Restore will be supported.
+ type: int
+ sample: 7
+ returned: always
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMSqMIShortTermRetentionPolicy(AzureRMModuleBase):
+ def __init__(self):
+ # define user inputs into argument
+ self.module_arg_spec = dict(
+ resource_group=dict(
+ type='str',
+ required=True,
+ ),
+ managed_instance_name=dict(
+ type='str',
+ required=True,
+ ),
+ database_name=dict(
+ type='str',
+ required=True,
+ ),
+ policy_name=dict(
+ type='str',
+ required=True,
+ choices=['default']
+ ),
+ retention_days=dict(
+ type='int',
+ default=7
+ ),
+ )
+ # store the results of the module operation
+ self.results = dict(
+ changed=False,
+ diff=[]
+ )
+ self.resource_group = None
+ self.managed_instance_name = None
+ self.database_name = None
+ self.policy_name = None
+ self.retention_days = None
+
+ super(AzureRMSqMIShortTermRetentionPolicy, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=False, facts_module=True)
+
+ def exec_module(self, **kwargs):
+ for key in self.module_arg_spec:
+ setattr(self, key, kwargs[key])
+
+ old_response = self.get()
+
+ if old_response is not None:
+ if self.retention_days is not None and old_response['retention_days'] != self.retention_days:
+ self.results['changed'] = True
+ self.results['diff'].append('retention_days')
+ if not self.check_mode:
+ self.results['short_term_retention_policy'] = self.update_policy()
+ else:
+ self.results['changed'] = True
+ if not self.check_mode:
+ self.results['short_term_retention_policy'] = self.create_policy()
+ return self.results
+
+ def get(self):
+ response = None
+ try:
+ response = self.sql_client.managed_backup_short_term_retention_policies.get(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError:
+            self.log('Could not get facts for SQL managed instance short term retention policies.')
+
+ return self.format_item(response) if response is not None else None
+
+ def update_policy(self):
+ response = None
+ try:
+ response = self.sql_client.managed_backup_short_term_retention_policies.begin_update(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name,
+ parameters=dict(retention_days=self.retention_days))
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+            self.fail('Could not update the SQL managed instance short term retention policy. Exception as {0}'.format(ec))
+
+ return self.format_item(self.get_poller_result(response))
+
+ def create_policy(self):
+ response = None
+ try:
+ response = self.sql_client.managed_backup_short_term_retention_policies.begin_create_or_update(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name,
+ parameters=dict(retention_days=self.retention_days))
+ self.log("Response : {0}".format(response))
+ except HttpResponseError as ec:
+            self.fail('Could not create the SQL managed instance short term retention policy. Exception as {0}'.format(ec))
+
+ return self.format_item(self.get_poller_result(response))
+
+ def format_item(self, item):
+ d = item.as_dict()
+ d = {
+ 'resource_group': self.resource_group,
+ 'managed_instance_name': self.managed_instance_name,
+ 'database_name': self.database_name,
+ 'id': d.get('id', None),
+ 'name': d.get('name', None),
+ 'type': d.get('type', None),
+ 'retention_days': d.get('retention_days', None),
+ }
+ return d
+
+
+def main():
+ AzureRMSqMIShortTermRetentionPolicy()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy_info.py
new file mode 100644
index 000000000..b3665b66d
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_sqlmidbshorttermretentionpolicy_info.py
@@ -0,0 +1,206 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_sqlmidbshorttermretentionpolicy_info
+version_added: "2.4.0"
+short_description: Get Azure SQL managed instance short term retention policy
+description:
+ - Get Azure SQL managed instance short term retention policy.
+
+options:
+ resource_group:
+ description:
+ - The name of the resource group that contains the resource.
+ type: str
+ required: true
+ managed_instance_name:
+ description:
+ - The name of the SQL managed instance.
+ type: str
+ required: true
+ database_name:
+ description:
+ - The name of the SQL managed instance database.
+ type: str
+ required: true
+ policy_name:
+ description:
+ - The name of the SQL managed instance short term retention policy.
+ type: str
+ choices:
+ - default
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Get SQL managed instance short term retention policy by name
+ azure_rm_sqlmidbshorttermretentionpolicy_info:
+ resource_group: testrg
+ managed_instance_name: testinstancename
+ database_name: newdatabase
+ policy_name: default
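+
+# Additional illustrative example: when policy_name is omitted, the module lists the
+# short term retention policies of the given database instead of fetching one by name.
+- name: List SQL managed instance short term retention policies for a database
+  azure_rm_sqlmidbshorttermretentionpolicy_info:
+    resource_group: testrg
+    managed_instance_name: testinstancename
+    database_name: newdatabase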
+'''
+
+RETURN = '''
+short_term_retention_policy:
+ description:
+ - A dictionary containing facts for SQL Managed Instance Short Term Retention Policies.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - Resource ID.
+ returned: always
+ type: str
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Sql/
+ managedInstances/fredsqlmi/databases/newdatabase/backupShortTermRetentionPolicies/default"
+ database_name:
+ description:
+ - SQL managed instance database name.
+ returned: always
+ type: str
+ sample: newdatabase
+ policy_name:
+ description:
+            - SQL managed instance short term retention policy name.
+ returned: always
+ type: str
+ sample: default
+ managed_instance_name:
+ description:
+ - SQL managed instance name.
+ returned: always
+ type: str
+ sample: testmanagedinstance
+ type:
+ description:
+ - The SQL managed instance short term retention policy type.
+ type: str
+ returned: always
+ sample: "Microsoft.Sql/managedInstances/databases/backupShortTermRetentionPolicies"
+ resource_group:
+ description:
+            - The resource group of the resource.
+ type: str
+ returned: always
+ sample: testRG
+ retention_days:
+ description:
+            - The backup retention period in days. This is how many days Point-in-Time Restore will be supported.
+ type: int
+ sample: 7
+ returned: always
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMSqMIShortTermRetentionPolicyInfo(AzureRMModuleBase):
+ def __init__(self):
+ # define user inputs into argument
+ self.module_arg_spec = dict(
+ resource_group=dict(
+ type='str',
+ required=True,
+ ),
+ managed_instance_name=dict(
+ type='str',
+ required=True,
+ ),
+ database_name=dict(
+ type='str',
+ required=True,
+ ),
+ policy_name=dict(
+ type='str',
+ choices=['default']
+ ),
+ )
+ # store the results of the module operation
+ self.results = dict(
+ changed=False
+ )
+ self.resource_group = None
+ self.managed_instance_name = None
+ self.database_name = None
+ self.policy_name = None
+
+ super(AzureRMSqMIShortTermRetentionPolicyInfo, self).__init__(self.module_arg_spec, supports_check_mode=True, supports_tags=False, facts_module=True)
+
+ def exec_module(self, **kwargs):
+ for key in self.module_arg_spec:
+ setattr(self, key, kwargs[key])
+
+ if self.policy_name is not None:
+ self.results['short_term_retention_policy'] = self.get()
+ else:
+ self.results['short_term_retention_policy'] = self.list_by_database()
+ return self.results
+
+ def list_by_database(self):
+ response = None
+ try:
+ response = self.sql_client.managed_backup_short_term_retention_policies.list_by_database(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name)
+ self.log("Response : {0}".format(response))
+ except HttpResponseError:
+            self.log('Could not get facts for SQL managed instance short term retention policies.')
+
+ return [self.format_item(item) for item in response] if response is not None else []
+
+ def get(self):
+ response = None
+ try:
+ response = self.sql_client.managed_backup_short_term_retention_policies.get(resource_group_name=self.resource_group,
+ managed_instance_name=self.managed_instance_name,
+ database_name=self.database_name,
+ policy_name=self.policy_name)
+ self.log("Response : {0}".format(response))
+        except HttpResponseError:
+            self.log('Could not get facts for SQL managed instance short term retention policies.')
+
+ return [self.format_item(response)] if response is not None else None
+
+ def format_item(self, item):
+ d = item.as_dict()
+ d = {
+ 'resource_group': self.resource_group,
+ 'managed_instance_name': self.managed_instance_name,
+ 'database_name': self.database_name,
+ 'id': d.get('id', None),
+ 'name': d.get('name', None),
+ 'type': d.get('type', None),
+ 'retention_days': d.get('retention_days', None),
+ }
+ return d
+
+
+def main():
+ AzureRMSqMIShortTermRetentionPolicyInfo()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy.py
new file mode 100644
index 000000000..c0efaaf0b
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy.py
@@ -0,0 +1,672 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_storageaccountmanagementpolicy
+version_added: "2.4.0"
+short_description: Manage storage account management policies
+description:
+ - Create, update or delete storage account management policies.
+options:
+ resource_group:
+ description:
+ - Name of the resource group to use.
+ required: true
+ type: str
+ aliases:
+ - resource_group_name
+ storage_account_name:
+ description:
+ - Name of the storage account.
+ type: str
+ required: true
+ rules:
+ description:
+ - The Storage Account ManagementPolicies Rules.
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The name of the policy rule.
+                    - A rule name can contain any combination of alphanumeric characters.
+ type: str
+ required: true
+ type:
+ description:
+ - The type of the policy rule.
+ type: str
+ required: true
+ choices:
+ - Lifecycle
+ enabled:
+ description:
+                    - Whether the rule is enabled.
+ type: bool
+ definition:
+ description:
+                    - An object that defines the Lifecycle rule.
+ required: true
+ type: dict
+ suboptions:
+ actions:
+ description:
+ - An object that defines the action set.
+ type: dict
+ required: true
+ suboptions:
+ base_blob:
+ description:
+ - The management policy action for base blob.
+ type: dict
+ suboptions:
+ tier_to_cool:
+ description:
+ - The function to tier blobs to cool storage.
+ - Support blobs currently at Hot tier.
+ type: dict
+ suboptions:
+ days_after_modification_greater_than:
+ description:
+ - Value indicating the age in days after last modification.
+ type: float
+ days_after_last_access_time_greater_than:
+ description:
+ - This property can only be used in conjunction with last access time tracking policy.
+ type: float
+ tier_to_archive:
+ description:
+ - The function to tier blobs to archive storage.
+ - Support blobs currently at Hot or Cool tier.
+ type: dict
+ suboptions:
+ days_after_modification_greater_than:
+ description:
+ - Value indicating the age in days after last modification.
+ type: float
+ days_after_last_access_time_greater_than:
+ description:
+ - This property can only be used in conjunction with last access time tracking policy.
+ type: float
+ delete:
+ description:
+ - The function to delete the blob.
+ type: dict
+ suboptions:
+ days_after_modification_greater_than:
+ description:
+ - Value indicating the age in days after last modification.
+ type: float
+ days_after_last_access_time_greater_than:
+ description:
+ - This property can only be used in conjunction with last access time tracking policy.
+ type: float
+ enable_auto_tier_to_hot_from_cool:
+ description:
+ - This property enables auto tiering of a blob from cool to hot on a blob access.
+ type: bool
+ snapshot:
+ description:
+ - The management policy action for snapshot.
+ type: dict
+ suboptions:
+ tier_to_cool:
+ description:
+ - The function to tier blob snapshot to cool storage.
+ - Support blob snapshot at Hot tier.
+ type: dict
+ suboptions:
+ days_after_creation_greater_than:
+ description:
+ - Value indicating the age in days after creation.
+ type: float
+ required: true
+ tier_to_archive:
+ description:
+ - The function to tier blob snapshot to archive storage.
+ - Support blob snapshot currently at Hot or Cool tier.
+ type: dict
+ suboptions:
+ days_after_creation_greater_than:
+ description:
+ - Value indicating the age in days after creation.
+ type: float
+ required: true
+ delete:
+ description:
+ - The function to delete the blob snapshot.
+ type: dict
+ suboptions:
+ days_after_creation_greater_than:
+ description:
+ - Value indicating the age in days after creation.
+ type: float
+ required: true
+ version:
+ description:
+ - The management policy action for version.
+ type: dict
+ suboptions:
+ tier_to_cool:
+ description:
+ - The function to tier blob version to cool storage.
+ - Support blob version currently at Hot tier.
+ type: dict
+ suboptions:
+ days_after_creation_greater_than:
+ description:
+ - Value indicating the age in days after creation.
+ type: float
+ required: true
+ tier_to_archive:
+ description:
+ - The function to tier blob version to archive storage.
+ - Support blob version currently at Hot or Cool tier.
+ type: dict
+ suboptions:
+ days_after_creation_greater_than:
+ description:
+ - Value indicating the age in days after creation.
+ type: float
+ required: true
+ delete:
+ description:
+ - The function to delete the blob version.
+ type: dict
+ suboptions:
+ days_after_creation_greater_than:
+ description:
+ - Value indicating the age in days after creation.
+ type: float
+ required: true
+ filters:
+ description:
+ - An object that defines the filter set.
+ type: dict
+ suboptions:
+ prefix_match:
+ description:
+                            - An array of strings for prefixes to be matched.
+ type: list
+ elements: str
+ blob_types:
+ description:
+ - An array of predefined enum values.
+ - Currently blockBlob supports all tiering and delete actions. Only delete actions are supported for C(appendBlob).
+ type: list
+ required: true
+ elements: str
+ choices:
+ - blockBlob
+ - appendBlob
+ blob_index_match:
+ description:
+ - An array of blob index tag based filters, there can be at most 10 tag filters.
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+                                    - This is the filter tag name. It can have 1 to 128 characters.
+ type: str
+ required: true
+ op:
+ description:
+ - This is the comparison operator which is used for object comparison and filtering.
+ - Only C(==) (equality operator) is currently supported.
+ type: str
+ required: true
+ value:
+ description:
+ - This is the filter tag value field used for tag based filtering.
+ - It can have 0-256 characters.
+ type: str
+ required: true
+ state:
+ description:
+            - State of the storage account management policy. Use C(present) to add or update policy rules.
+ - Use C(absent) to delete all policy rules.
+ default: present
+ type: str
+ choices:
+ - absent
+ - present
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Create storage account management policy with multi parameters
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: testRG
+ storage_account_name: testaccount
+ rules:
+ - name: olcmtest5
+ type: Lifecycle
+ enabled: false
+ definition:
+ actions:
+ base_blob:
+ enable_auto_tier_to_hot_from_cool: true
+ delete:
+ days_after_modification_greater_than: 33
+ days_after_last_access_time_greater_than: 33
+ tier_to_cool:
+ days_after_modification_greater_than: 33
+ days_after_last_access_time_greater_than: 33
+ tier_to_archive:
+ days_after_modification_greater_than: 33
+ days_after_last_access_time_greater_than: 33
+ snapshot:
+ tier_to_cool:
+ days_after_creation_greater_than: 33
+ tier_to_archive:
+ days_after_creation_greater_than: 33
+ delete:
+ days_after_creation_greater_than: 33
+ version:
+ tier_to_archive:
+ days_after_creation_greater_than: 33
+ tier_to_cool:
+ days_after_creation_greater_than: 33
+ delete:
+ days_after_creation_greater_than: 33
+ filters:
+ prefix_match:
+ - olcmtestcontainer2
+ blob_types:
+ - blockBlob
+ - appendBlob
+ blob_index_match:
+ - name: tags3
+ op: '=='
+ value: value3
+
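+# A minimal illustrative sketch (rule name and threshold are hypothetical): a single
+# Lifecycle rule that deletes block blobs 30 days after their last modification.
+- name: Create a storage account management policy with a single delete rule
+  azure_rm_storageaccountmanagementpolicy:
+    resource_group: testRG
+    storage_account_name: testaccount
+    rules:
+      - name: deleteoldblobs
+        type: Lifecycle
+        enabled: true
+        definition:
+          actions:
+            base_blob:
+              delete:
+                days_after_modification_greater_than: 30
+          filters:
+            blob_types:
+              - blockBlob
+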
+- name: Delete management policy rules
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ state: absent
+'''
+
+
+RETURN = '''
+state:
+ description:
+ - The data policy rules associated with the specified storage account.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - The data policy's ID.
+ returned: always
+ type: str
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Storage/storageAccounts/sttest/managementPolicies/default"
+ resource_group:
+ description:
+ - The resource group name.
+ returned: always
+ type: str
+ sample: testRG
+ storage_account_name:
+ description:
+ - The storage account name.
+ returned: always
+ type: str
+ sample: teststname
+ type:
+ description:
+ - The type of the resource.
+ returned: always
+ type: str
+ sample: "Microsoft.Storage/storageAccounts/managementPolicies"
+ last_modified_time:
+ description:
+                - Returns the date and time the management policy was last modified.
+ returned: always
+ type: str
+ sample: "2024-04-12T11:40:10.376465+00:00"
+ name:
+ description:
+ - The name of the resource.
+ returned: always
+ type: str
+ sample: DefaultManagementPolicy
+ policy:
+ description:
+ - The Storage Account ManagementPolicy.
+ returned: always
+ type: complex
+ contains:
+ rules:
+ description:
+ - The Storage Account ManagementPolicies Rules.
+ type: list
+ returned: always
+ sample: [
+ {
+ "definition": {
+ "actions": {
+ "base_blob": {
+ "delete": {
+ "days_after_last_access_time_greater_than": 33.0,
+ "days_after_modification_greater_than": 33.0
+ },
+ "enable_auto_tier_to_hot_from_cool": true,
+ "tier_to_archive": {
+ "days_after_last_access_time_greater_than": 33.0,
+ "days_after_modification_greater_than": 33.0
+ },
+ "tier_to_cool": {
+ "days_after_last_access_time_greater_than": 33.0,
+ "days_after_modification_greater_than": 33.0
+ }
+ },
+ "snapshot": {
+ "delete": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_archive": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_cool": {
+ "days_after_creation_greater_than": 33.0
+ }
+ },
+ "version": {
+ "delete": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_archive": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_cool": {
+ "days_after_creation_greater_than": 33.0
+ }
+ }
+ },
+ "filters": {
+ "blob_index_match": [
+ {
+ "name": "tags3",
+ "op": "==",
+ "value": "value3"
+ }
+ ],
+ "blob_types": [
+ "blockBlob",
+ "appendBlob"
+ ],
+ "prefix_match": [
+ "olcmtestcontainer2"
+ ]
+ }
+ },
+ "enabled": false,
+ "name": "olcmtest5",
+ "type": "Lifecycle"
+ }
+ ]
+'''
+
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBaseExt
+try:
+ from azure.core.exceptions import ResourceNotFoundError
+except Exception:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMStorageAccountManagementPolicy(AzureRMModuleBaseExt):
+
+ def __init__(self):
+
+ self.module_arg_spec = dict(
+ resource_group=dict(required=True, type='str', aliases=['resource_group_name']),
+ storage_account_name=dict(type='str', required=True),
+ state=dict(default='present', choices=['present', 'absent']),
+ rules=dict(
+ type='list',
+ elements='dict',
+ options=dict(
+ enabled=dict(type='bool'),
+ name=dict(type='str', required=True),
+ type=dict(type='str', required=True, choices=['Lifecycle']),
+ definition=dict(
+ type='dict',
+ required=True,
+ options=dict(
+ actions=dict(
+ type='dict',
+ required=True,
+ options=dict(
+ base_blob=dict(
+ type='dict',
+ options=dict(
+ tier_to_cool=dict(
+ type='dict',
+ options=dict(
+ days_after_modification_greater_than=dict(type='float'),
+ days_after_last_access_time_greater_than=dict(type='float')
+ )
+ ),
+ tier_to_archive=dict(
+ type='dict',
+ options=dict(
+ days_after_modification_greater_than=dict(type='float'),
+ days_after_last_access_time_greater_than=dict(type='float')
+ )
+ ),
+ delete=dict(
+ type='dict',
+ options=dict(
+ days_after_modification_greater_than=dict(type='float'),
+ days_after_last_access_time_greater_than=dict(type='float')
+ )
+ ),
+ enable_auto_tier_to_hot_from_cool=dict(type='bool')
+ )
+ ),
+ snapshot=dict(
+ type='dict',
+ options=dict(
+ tier_to_cool=dict(
+ type='dict',
+ options=dict(
+ days_after_creation_greater_than=dict(type='float', required=True)
+ )
+ ),
+ tier_to_archive=dict(
+ type='dict',
+ options=dict(
+ days_after_creation_greater_than=dict(type='float', required=True)
+ )
+ ),
+ delete=dict(
+ type='dict',
+ options=dict(
+ days_after_creation_greater_than=dict(type='float', required=True)
+ )
+ )
+ )
+ ),
+ version=dict(
+ type='dict',
+ options=dict(
+ tier_to_cool=dict(
+ type='dict',
+ options=dict(
+ days_after_creation_greater_than=dict(
+ type='float',
+ required=True
+ )
+ )
+ ),
+ tier_to_archive=dict(
+ type='dict',
+ options=dict(
+ days_after_creation_greater_than=dict(
+ type='float',
+ required=True
+ )
+ )
+ ),
+ delete=dict(
+ type='dict',
+ options=dict(
+ days_after_creation_greater_than=dict(
+ type='float',
+ required=True
+ )
+ )
+ )
+ )
+ )
+ )
+ ),
+ filters=dict(
+ type='dict',
+ options=dict(
+ prefix_match=dict(type='list', elements='str'),
+ blob_types=dict(type='list', elements='str', choices=['blockBlob', 'appendBlob'], required=True),
+ blob_index_match=dict(
+ type='list',
+ elements='dict',
+ options=dict(
+ name=dict(type='str', required=True),
+ op=dict(type='str', required=True),
+ value=dict(type='str', required=True)
+ )
+ )
+ )
+ )
+ )
+ )
+ )
+ )
+ )
+
+ self.results = dict(
+ changed=False,
+ state=dict()
+ )
+
+ self.resource_group = None
+ self.storage_account_name = None
+ self.state = None
+ self.rules = []
+
+ super(AzureRMStorageAccountManagementPolicy, self).__init__(self.module_arg_spec,
+ supports_tags=False,
+ supports_check_mode=True)
+
+ def exec_module(self, **kwargs):
+
+ for key in list(self.module_arg_spec.keys()):
+ setattr(self, key, kwargs[key])
+
+ managed_policy = self.get_management_policy()
+ changed = False
+
+ if self.state == 'present':
+ if managed_policy is not None:
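+                # Merge the requested rules into the existing policy: rules whose name matches an existing
+                # rule replace it when their content differs, and rules with new names are appended.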
+ rules = []
+ for item in managed_policy['policy']['rules']:
+ rules.append(item)
+ rules_name = [item['name'] for item in rules]
+ for item in self.rules:
+ if item['name'] in rules_name:
+ for tt in managed_policy['policy']['rules']:
+ if item['name'] == tt['name']:
+ old_item = tt
+ if not self.default_compare({}, item, old_item, '', dict(compare=[])):
+ rules.remove(old_item)
+ rules.append(item)
+ changed = True
+ else:
+ rules.append(item)
+ changed = True
+ if changed and not self.check_mode:
+ self.create_or_update_management_policy(rules)
+ else:
+ changed = True
+ if not self.check_mode:
+ self.create_or_update_management_policy(self.rules)
+ else:
+ if managed_policy is not None:
+ changed = True
+ if not self.check_mode:
+ self.delete_management_policy()
+
+ self.results['state'] = self.get_management_policy()
+ self.results['changed'] = changed
+
+ return self.results
+
+ def get_management_policy(self):
+ self.log('Get info for storage account management policy')
+
+ response = None
+ try:
+ response = self.storage_client.management_policies.get(self.resource_group, self.storage_account_name, 'default')
+ except ResourceNotFoundError as ec:
+ self.log("Failed to obtain the storage acount management policy, detail as {0}".format(ec))
+ return None
+ return self.format_to_dict(response)
+
+ def create_or_update_management_policy(self, rules):
+ self.log("Creating or updating storage account mangement policy")
+
+ try:
+ self.storage_client.management_policies.create_or_update(resource_group_name=self.resource_group,
+ account_name=self.storage_account_name,
+ management_policy_name='default',
+ properties=dict(policy=dict(rules=rules)))
+ except Exception as e:
+ self.log('Error creating or updating storage account management policy.')
+ self.fail("Failed to create or updating storage account management policy: {0}".format(str(e)))
+ return self.get_management_policy()
+
+ def delete_management_policy(self):
+ try:
+ self.storage_client.management_policies.delete(self.resource_group, self.storage_account_name, 'default')
+ except Exception as e:
+ self.fail("Failed to delete the storage account management policy: {0}".format(str(e)))
+
+ def format_to_dict(self, obj):
+ result = dict()
+ result['id'] = obj.id
+ result['resource_group'] = self.resource_group
+ result['storage_account_name'] = self.storage_account_name
+ result['name'] = obj.name
+ result['type'] = obj.type
+ result['last_modified_time'] = obj.last_modified_time
+ result['policy'] = dict(rules=[])
+ if obj.policy is not None:
+ result['policy'] = obj.policy.as_dict()
+
+ return result
+
+
+def main():
+ AzureRMStorageAccountManagementPolicy()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy_info.py
new file mode 100644
index 000000000..1ffa1d21f
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_storageaccountmanagementpolicy_info.py
@@ -0,0 +1,245 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_storageaccountmanagementpolicy_info
+
+version_added: "2.4.0"
+
+short_description: Get the data policy rules associated with the specified storage account
+
+description:
+ - Get the data policy rules associated with the specified storage account.
+
+options:
+ resource_group:
+ description:
+ - Name of the resource group to use.
+ required: true
+ type: str
+ aliases:
+ - resource_group_name
+ storage_account_name:
+ description:
+            - Name of the storage account.
+ type: str
+ required: true
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred-sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Get the data policy rules associated with the specified storage account
+ azure_rm_storageaccountmanagementpolicy_info:
+ resource_group: myResourceGroup
+ storage_account_name: testaccount
+'''
+
+
+RETURN = '''
+state:
+ description:
+ - The data policy rules associated with the specified storage account.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - The data policy's ID.
+ returned: always
+ type: str
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Storage/storageAccounts/sttest/managementPolicies/default"
+ resource_group:
+ description:
+ - The resource group name.
+ returned: always
+ type: str
+ sample: testRG
+ storage_account_name:
+ description:
+ - The storage account name.
+ returned: always
+ type: str
+ sample: teststname
+ type:
+ description:
+ - The type of the resource.
+ returned: always
+ type: str
+ sample: "Microsoft.Storage/storageAccounts/managementPolicies"
+ last_modified_time:
+ description:
+                - Returns the date and time the management policy was last modified.
+ returned: always
+ type: str
+ sample: "2024-04-12T11:40:10.376465+00:00"
+ name:
+ description:
+ - The name of the resource.
+ returned: always
+ type: str
+ sample: DefaultManagementPolicy
+ policy:
+ description:
+ - The Storage Account ManagementPolicy.
+ returned: always
+ type: complex
+ contains:
+ rules:
+ description:
+ - The Storage Account ManagementPolicies Rules.
+ type: list
+ returned: always
+ sample: [
+ {
+ "definition": {
+ "actions": {
+ "base_blob": {
+ "delete": {
+ "days_after_last_access_time_greater_than": 33.0,
+ "days_after_modification_greater_than": 33.0
+ },
+ "enable_auto_tier_to_hot_from_cool": true,
+ "tier_to_archive": {
+ "days_after_last_access_time_greater_than": 33.0,
+ "days_after_modification_greater_than": 33.0
+ },
+ "tier_to_cool": {
+ "days_after_last_access_time_greater_than": 33.0,
+ "days_after_modification_greater_than": 33.0
+ }
+ },
+ "snapshot": {
+ "delete": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_archive": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_cool": {
+ "days_after_creation_greater_than": 33.0
+ }
+ },
+ "version": {
+ "delete": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_archive": {
+ "days_after_creation_greater_than": 33.0
+ },
+ "tier_to_cool": {
+ "days_after_creation_greater_than": 33.0
+ }
+ }
+ },
+ "filters": {
+ "blob_index_match": [
+ {
+ "name": "tags3",
+ "op": "==",
+ "value": "value3"
+ }
+ ],
+ "blob_types": [
+ "blockBlob",
+ "appendBlob"
+ ],
+ "prefix_match": [
+ "olcmtestcontainer2"
+ ]
+ }
+ },
+ "enabled": false,
+ "name": "olcmtest5",
+ "type": "Lifecycle"
+ }
+ ]
+'''
+
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+try:
+ from azure.core.exceptions import ResourceNotFoundError
+except Exception:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMStorageAccountManagementPolicyInfo(AzureRMModuleBase):
+
+ def __init__(self):
+
+ self.module_arg_spec = dict(
+ resource_group=dict(required=True, type='str', aliases=['resource_group_name']),
+ storage_account_name=dict(type='str', required=True),
+ )
+
+ self.results = dict(
+ changed=False,
+ state=dict()
+ )
+
+ self.resource_group = None
+ self.storage_account_name = None
+ self.state = None
+ self.rules = None
+
+ super(AzureRMStorageAccountManagementPolicyInfo, self).__init__(self.module_arg_spec,
+ supports_tags=False,
+ supports_check_mode=True)
+
+ def exec_module(self, **kwargs):
+
+ for key in list(self.module_arg_spec.keys()):
+ setattr(self, key, kwargs[key])
+
+ self.results['state'] = self.get_management_policy()
+
+ return self.results
+
+ def get_management_policy(self):
+ self.log('Get info for storage account management policy')
+
+ response = None
+ try:
+ response = self.storage_client.management_policies.get(self.resource_group, self.storage_account_name, 'default')
+ except ResourceNotFoundError as ec:
+ self.log("Failed to obtain the storage acount management policy, detail as {0}".format(ec))
+ return
+
+ return self.format_to_dict(response)
+
+ def format_to_dict(self, obj):
+ result = dict()
+ result['id'] = obj.id
+ result['resource_group'] = self.resource_group
+ result['storage_account_name'] = self.storage_account_name
+ result['name'] = obj.name
+ result['type'] = obj.type
+ result['last_modified_time'] = obj.last_modified_time
+ result['policy'] = dict(rules=[])
+ if obj.policy is not None:
+ result['policy'] = obj.policy.as_dict()
+
+ return result
+
+
+def main():
+ AzureRMStorageAccountManagementPolicyInfo()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachine.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachine.py
index e845e2fa1..d11dbd185 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachine.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachine.py
@@ -221,6 +221,7 @@ options:
- Type of OS disk caching.
type: str
choices:
+ - None
- ReadOnly
- ReadWrite
aliases:
@@ -270,6 +271,11 @@ options:
description:
- ID of disk encryption set for data disk.
type: str
+ managed_disk_id:
+ description:
+ - The ID of the existing data disk.
+ - If specified, attach mode will be chosen.
+ type: str
managed_disk_type:
description:
- Managed data disk type.
@@ -313,6 +319,7 @@ options:
- Type of data disk caching.
type: str
choices:
+ - None
- ReadOnly
- ReadWrite
public_ip_allocation_method:
@@ -1145,7 +1152,7 @@ class AzureRMVirtualMachine(AzureRMModuleBase):
storage_account_name=dict(type='str', aliases=['storage_account']),
storage_container_name=dict(type='str', aliases=['storage_container'], default='vhds'),
storage_blob_name=dict(type='str', aliases=['storage_blob']),
- os_disk_caching=dict(type='str', aliases=['disk_caching'], choices=['ReadOnly', 'ReadWrite']),
+ os_disk_caching=dict(type='str', aliases=['disk_caching'], choices=['None', 'ReadOnly', 'ReadWrite']),
os_disk_size_gb=dict(type='int'),
os_disk_encryption_set=dict(type='str'),
managed_disk_type=dict(type='str', choices=['Standard_LRS', 'StandardSSD_LRS', 'StandardSSD_ZRS', 'Premium_LRS', 'Premium_ZRS', 'UltraSSD_LRS']),
@@ -1181,12 +1188,13 @@ class AzureRMVirtualMachine(AzureRMModuleBase):
lun=dict(type='int', required=True),
disk_size_gb=dict(type='int'),
disk_encryption_set=dict(type='str'),
+ managed_disk_id=dict(type='str'),
managed_disk_type=dict(type='str', choices=['Standard_LRS', 'StandardSSD_LRS',
'StandardSSD_ZRS', 'Premium_LRS', 'Premium_ZRS', 'UltraSSD_LRS']),
storage_account_name=dict(type='str'),
storage_container_name=dict(type='str', default='vhds'),
storage_blob_name=dict(type='str'),
- caching=dict(type='str', choices=['ReadOnly', 'ReadWrite'])
+ caching=dict(type='str', choices=['None', 'ReadOnly', 'ReadWrite'])
)
),
plan=dict(type='dict'),
@@ -1965,41 +1973,49 @@ class AzureRMVirtualMachine(AzureRMModuleBase):
count = 0
for data_disk in self.data_disks:
- if not data_disk.get('managed_disk_type'):
- if not data_disk.get('storage_blob_name'):
- data_disk['storage_blob_name'] = self.name + '-data-' + str(count) + '.vhd'
- count += 1
+ data_disk_vhd = None
+ disk_name = None
- if data_disk.get('storage_account_name'):
- data_disk_storage_account = self.get_storage_account(self.resource_group, data_disk['storage_account_name'])
- else:
- data_disk_storage_account = self.create_default_storage_account()
- self.log("data disk storage account:")
- self.log(self.serialize_obj(data_disk_storage_account, 'StorageAccount'), pretty_print=True)
-
- if not data_disk.get('storage_container_name'):
- data_disk['storage_container_name'] = 'vhds'
-
- data_disk_requested_vhd_uri = 'https://{0}.blob.{1}/{2}/{3}'.format(
- data_disk_storage_account.name,
- self._cloud_environment.suffixes.storage_endpoint,
- data_disk['storage_container_name'],
- data_disk['storage_blob_name']
- )
-
- if not data_disk.get('managed_disk_type'):
- data_disk_managed_disk = None
- disk_name = data_disk['storage_blob_name']
- data_disk_vhd = self.compute_models.VirtualHardDisk(uri=data_disk_requested_vhd_uri)
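+            # Attach the existing managed disk when managed_disk_id is supplied; otherwise create an empty data disk.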
+ if data_disk.get('managed_disk_id'):
+ create_option = self.compute_models.DiskCreateOptionTypes.attach
+ data_disk_managed_disk = self.compute_models.ManagedDiskParameters(id=data_disk.get('managed_disk_id'))
else:
- data_disk_vhd = None
- data_disk_managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=data_disk['managed_disk_type'])
- if data_disk.get('disk_encryption_set'):
- data_disk_managed_disk.disk_encryption_set = self.compute_models.DiskEncryptionSetParameters(
- id=data_disk['disk_encryption_set']
+ create_option = self.compute_models.DiskCreateOptionTypes.empty
+
+ if not data_disk.get('managed_disk_type'):
+ if not data_disk.get('storage_blob_name'):
+ data_disk['storage_blob_name'] = self.name + '-data-' + str(count) + '.vhd'
+ count += 1
+
+ if data_disk.get('storage_account_name'):
+ data_disk_storage_account = self.get_storage_account(self.resource_group, data_disk['storage_account_name'])
+ else:
+ data_disk_storage_account = self.create_default_storage_account()
+ self.log("data disk storage account:")
+ self.log(self.serialize_obj(data_disk_storage_account, 'StorageAccount'), pretty_print=True)
+
+ if not data_disk.get('storage_container_name'):
+ data_disk['storage_container_name'] = 'vhds'
+
+ data_disk_requested_vhd_uri = 'https://{0}.blob.{1}/{2}/{3}'.format(
+ data_disk_storage_account.name,
+ self._cloud_environment.suffixes.storage_endpoint,
+ data_disk['storage_container_name'],
+ data_disk['storage_blob_name']
)
- disk_name = self.name + "-datadisk-" + str(count)
- count += 1
+
+ if not data_disk.get('managed_disk_type'):
+ data_disk_managed_disk = None
+ disk_name = data_disk['storage_blob_name']
+ data_disk_vhd = self.compute_models.VirtualHardDisk(uri=data_disk_requested_vhd_uri)
+ else:
+ data_disk_managed_disk = self.compute_models.ManagedDiskParameters(storage_account_type=data_disk['managed_disk_type'])
+ if data_disk.get('disk_encryption_set'):
+ data_disk_managed_disk.disk_encryption_set = self.compute_models.DiskEncryptionSetParameters(
+ id=data_disk['disk_encryption_set']
+ )
+ disk_name = self.name + "-datadisk-" + str(count)
+ count += 1
data_disk['caching'] = data_disk.get(
'caching', 'ReadOnly'
@@ -2010,7 +2026,7 @@ class AzureRMVirtualMachine(AzureRMModuleBase):
name=disk_name,
vhd=data_disk_vhd,
caching=data_disk['caching'],
- create_option=self.compute_models.DiskCreateOptionTypes.empty,
+ create_option=create_option,
disk_size_gb=data_disk['disk_size_gb'],
managed_disk=data_disk_managed_disk,
))
@@ -2941,8 +2957,6 @@ class AzureRMVirtualMachine(AzureRMModuleBase):
pip = self.network_models.PublicIPAddress(id=pip_facts.id, location=pip_facts.location, resource_guid=pip_facts.resource_guid, sku=sku)
self.tags['_own_pip_'] = self.name + '01'
- self.tags['_own_nsg_'] = self.name + '01'
-
parameters = self.network_models.NetworkInterface(
location=self.location,
ip_configurations=[
@@ -2961,6 +2975,7 @@ class AzureRMVirtualMachine(AzureRMModuleBase):
parameters.network_security_group = self.network_models.NetworkSecurityGroup(id=group.id,
location=group.location,
resource_guid=group.resource_guid)
+ self.tags['_own_nsg_'] = self.name + '01'
parameters.ip_configurations[0].public_ip_address = pip
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance.py
index 402af0072..97878dcf4 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance.py
@@ -295,16 +295,35 @@ class AzureRMVirtualMachineScaleSetInstance(AzureRMModuleBase):
def format_response(self, item):
d = item.as_dict()
- iv = self.mgmt_client.virtual_machine_scale_set_vms.get_instance_view(resource_group_name=self.resource_group,
- vm_scale_set_name=self.vmss_name,
- instance_id=d.get('instance_id', None)).as_dict()
- power_state = ""
- for index in range(len(iv['statuses'])):
- code = iv['statuses'][index]['code'].split('/')
- if code[0] == 'PowerState':
- power_state = code[1]
- break
- d = {
+ instance = None
+ power_state = ''
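+        # Instances listed without a provisioning state belong to a Flexible orchestration scale set,
+        # so their power state is read from the standalone virtual machine APIs instead of the scale set instance view.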
+ if d.get('provisioning_state', None) is not None:
+ iv = self.mgmt_client.virtual_machine_scale_set_vms.get_instance_view(resource_group_name=self.resource_group,
+ vm_scale_set_name=self.vmss_name,
+ instance_id=d.get('instance_id', None)).as_dict()
+ for index in range(len(iv['statuses'])):
+ code = iv['statuses'][index]['code'].split('/')
+ if code[0] == 'PowerState':
+ power_state = code[1]
+ break
+ else:
+ try:
+ instance = self.compute_client.virtual_machines.instance_view(self.resource_group, d.get('instance_id', None)).as_dict()
+ vm_instance = self.compute_client.virtual_machines.get(self.resource_group, d.get('instance_id', None)).as_dict()
+ except Exception as exc:
+ self.fail("Getting Flexible VMSS instance instance failed, name {0} instance view - {1}".format(d.get('instance_id'), str(exc)))
+
+ for index in range(len(instance['statuses'])):
+ code = instance['statuses'][index]['code'].split('/')
+ if code[0] == 'PowerState':
+ power_state = code[1]
+ elif code[0] == 'OSState' and code[1] == 'generalized':
+ power_state = 'generalized'
+ break
+ elif code[0] == 'ProvisioningState' and code[1] == 'failed':
+ power_state = ''
+ break
+ dd = {
'id': d.get('id'),
'tags': d.get('tags'),
'instance_id': d.get('instance_id'),
@@ -312,7 +331,9 @@ class AzureRMVirtualMachineScaleSetInstance(AzureRMModuleBase):
'power_state': power_state,
'protection_policy': d.get('protection_policy')
}
- return d
+ if d.get('provisioning_state') is None:
+ dd['tags'] = vm_instance.get('tags', None)
+ return dd
def main():
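Side note on the Flexible-orchestration branch above: power_state is derived by scanning the instance-view status codes. A minimal standalone sketch of that parsing (illustrative only, with plain dicts standing in for the SDK response), assuming codes of the form 'PowerState/running', 'OSState/generalized' or 'ProvisioningState/failed':

def parse_power_state(statuses):
    # Mirrors the precedence used above: a generalized OS state or a failed
    # provisioning state ends the scan early, otherwise the last PowerState wins.
    power_state = ''
    for status in statuses:
        prefix, _, value = status['code'].partition('/')
        if prefix == 'PowerState':
            power_state = value
        elif prefix == 'OSState' and value == 'generalized':
            return 'generalized'
        elif prefix == 'ProvisioningState' and value == 'failed':
            return ''
    return power_state


print(parse_power_state([{'code': 'ProvisioningState/succeeded'},
                         {'code': 'PowerState/running'}]))  # -> running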
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance_info.py
index 47a3d3318..ba94461cb 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualmachinescalesetinstance_info.py
@@ -211,16 +211,35 @@ class AzureRMVirtualMachineScaleSetVMInfo(AzureRMModuleBase):
def format_response(self, item):
d = item.as_dict()
- iv = self.mgmt_client.virtual_machine_scale_set_vms.get_instance_view(resource_group_name=self.resource_group,
- vm_scale_set_name=self.vmss_name,
- instance_id=d.get('instance_id', None)).as_dict()
- power_state = ""
- for index in range(len(iv['statuses'])):
- code = iv['statuses'][index]['code'].split('/')
- if code[0] == 'PowerState':
- power_state = code[1]
- break
- d = {
+ instance = None
+ power_state = ''
+ if d.get('provisioning_state') is not None:
+ iv = self.mgmt_client.virtual_machine_scale_set_vms.get_instance_view(resource_group_name=self.resource_group,
+ vm_scale_set_name=self.vmss_name,
+ instance_id=d.get('instance_id', None)).as_dict()
+ for index in range(len(iv['statuses'])):
+ code = iv['statuses'][index]['code'].split('/')
+ if code[0] == 'PowerState':
+ power_state = code[1]
+ break
+ else:
+ try:
+ instance = self.compute_client.virtual_machines.instance_view(self.resource_group, d.get('instance_id', None)).as_dict()
+ vm_instance = self.compute_client.virtual_machines.get(self.resource_group, d.get('instance_id', None)).as_dict()
+ except Exception as exc:
+ self.fail("Getting Flexible VMSS instance instance failed, name {0} instance view - {1}".format(d.get('instance_id'), str(exc)))
+
+ for index in range(len(instance['statuses'])):
+ code = instance['statuses'][index]['code'].split('/')
+ if code[0] == 'PowerState':
+ power_state = code[1]
+ elif code[0] == 'OSState' and code[1] == 'generalized':
+ power_state = 'generalized'
+ break
+ elif code[0] == 'ProvisioningState' and code[1] == 'failed':
+ power_state = ''
+ break
+ dd = {
'resource_group': self.resource_group,
'id': d.get('id', None),
'tags': d.get('tags', None),
@@ -230,10 +249,17 @@ class AzureRMVirtualMachineScaleSetVMInfo(AzureRMModuleBase):
'provisioning_state': d.get('provisioning_state', None),
'power_state': power_state,
'vm_id': d.get('vm_id', None),
- 'image_reference': d.get('storage_profile').get('image_reference', None),
- 'computer_name': d.get('os_profile').get('computer_name', None)
}
- return d
+ if d.get('provisioning_state') is not None:
+ dd['image_reference'] = d.get('storage_profile').get('image_reference', None)
+ dd['computer_name'] = d.get('os_profile').get('computer_name', None)
+ else:
+ dd['image_reference'] = vm_instance.get('storage_profile').get('image_reference', None)
+ dd['computer_name'] = vm_instance.get('os_profile').get('computer_name', None)
+ dd['provisioning_state'] = vm_instance.get('provisioning_state', None)
+ dd['tags'] = vm_instance.get('tags', None)
+ dd['vm_id'] = vm_instance.get('vm_id')
+ return dd
def main():
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule.py
new file mode 100644
index 000000000..51b7da15e
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule.py
@@ -0,0 +1,354 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_virtualnetworkgatewaynatrule
+
+version_added: "2.4.0"
+
+short_description: Create, update or delete a NAT rule for a virtual network gateway
+
+description:
+ - Create, update or delete the specified NAT rule on a virtual network gateway.
+
+options:
+ resource_group:
+ description:
+ - The resource group of the virtual network gateway.
+ type: str
+ required: true
+ virtual_network_gateway_name:
+ description:
+ - The name of the virtual network gateway.
+ type: str
+ required: true
+ name:
+ description:
+ - The name of the resource that is unique within a resource group.
+ type: str
+ required: true
+ type_properties_type:
+ description:
+ - The type of NAT rule for VPN NAT.
+ type: str
+ choices:
+ - Dynamic
+ - Static
+ mode:
+ description:
+ - The Source NAT direction of a VPN NAT.
+ type: str
+ choices:
+ - EgressSnat
+ - IngressSnat
+ ip_configuration_id:
+ description:
+ - The IP Configuration ID this NAT rule applies to.
+ type: str
+ external_mappings:
+ description:
+ - The private IP address external mapping for NAT.
+ type: list
+ elements: str
+ internal_mappings:
+ description:
+ - The private IP address internal mapping for NAT.
+ type: list
+ elements: str
+ state:
+ description:
+ - Use C(present) to create or update the virtual network gateway nat rule.
+ - Use C(absent) to delete the nat rule.
+ type: str
+ default: present
+ choices:
+ - absent
+ - present
+extends_documentation_fragment:
+ - azure.azcollection.azure
+ - azure.azcollection.azure_tags
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred Sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Create a virtual network NAT rule
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ type_properties_type: Dynamic
+ ip_configuration_id: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworkGateways/testRG/ipConfigurations/ipconfig"
+ mode: EgressSnat
+ internal_mappings:
+ - 10.1.0.0/24
+ external_mappings:
+ - 192.168.1.0/24
+
+- name: Delete the virtual network NAT rule
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ state: absent
+'''
+
+RETURN = '''
+state:
+ description:
+ - The current state of the virtual network gateway NAT rule.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - The resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworkGateways/vng01/natRules/natrule"
+ internal_mappings:
+ description:
+ - The private IP address internal mapping for NAT.
+ type: list
+ returned: always
+ sample: ["10.1.0.0/24"]
+ external_mappings:
+ description:
+ - The private IP address external mapping for NAT.
+ type: list
+ returned: always
+ sample: ["192.168.1.0/24"]
+ ip_configuration_id:
+ description:
+ - The IP Configuration ID this NAT rule applies to.
+ type: str
+ returned: always
+ sample: "/subscriptions/subid/resourceGroups/rg1/providers/Microsoft.Network/virtualNetworkGateways/gateway1/ipConfigurations/default"
+ type_properties_type:
+ description:
+ - The type of NAT rule for VPN NAT.
+ type: str
+ returned: always
+ sample: Static
+ mode:
+ description:
+ - The Source NAT direction of a VPN NAT.
+ type: str
+ returned: always
+ sample: EgressSnat
+ name:
+ description:
+ - The resource name.
+ type: str
+ returned: always
+ sample: natrule_name
+ resource_group:
+ description:
+ - The resource group name.
+ type: str
+ returned: always
+ sample: testRG
+ etag:
+ description:
+ - A unique read-only string that changes whenever the resource is updated.
+ type: str
+ returned: always
+ sample: b5a32693-2e75-49e0-9137-ded19db658d6
+ provisioning_state:
+ description:
+ - The provisioning state of the nat rule resource.
+ type: str
+ returned: always
+ sample: Succeeded
+ type:
+ description:
+ - The resource type.
+ type: str
+ returned: always
+ sample: Microsoft.Network/virtualNetworkGateways/natRules
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+ from azure.core.polling import LROPoller
+except Exception:
+ # handled in azure_rm_common
+ pass
+
+
+class AzureRMVirtualNetworkNatGateway(AzureRMModuleBase):
+ """Utility class to get Azure Kubernetes Service Credentials facts"""
+
+ def __init__(self):
+
+ self.module_arg_spec = dict(
+ name=dict(type='str', required=True),
+ resource_group=dict(type='str', required=True),
+ virtual_network_gateway_name=dict(type='str', required=True),
+ type_properties_type=dict(type='str', choices=['Dynamic', 'Static']),
+ mode=dict(type='str', choices=['EgressSnat', 'IngressSnat']),
+ ip_configuration_id=dict(type='str'),
+ external_mappings=dict(type='list', elements='str'),
+ internal_mappings=dict(type='list', elements='str'),
+ state=dict(type='str', default='present', choices=['present', 'absent'])
+ )
+
+ self.type_properties_type = None
+ self.mode = None
+ self.ip_configuration_id = None
+ self.external_mappings = None
+ self.internal_mappings = None
+
+ self.results = dict(
+ changed=False,
+ state=[],
+ )
+ required_if = [('type_properties_type', 'Dynamic', ['ip_configuration_id'])]
+
+ super(AzureRMVirtualNetworkNatGateway, self).__init__(derived_arg_spec=self.module_arg_spec,
+ supports_check_mode=True,
+ required_if=required_if,
+ supports_tags=True,
+ facts_module=False)
+
+ def exec_module(self, **kwargs):
+
+ for key in list(self.module_arg_spec):
+ setattr(self, key, kwargs[key])
+
+ old_response = self.get_nat_rule()
+ changed = False
+ response = None
+
+ if self.state == 'present':
+ if old_response is not None:
+ if self.type_properties_type is not None and self.type_properties_type != old_response['type_properties_type']:
+ self.fail("NAT type_properties_type cannot be changed.")
+ else:
+ self.type_properties_type = old_response['type_properties_type']
+ if self.mode is not None and self.mode != old_response['mode']:
+ self.fail("NAT mode cannot be changed.")
+ else:
+ self.mode = old_response['mode']
+ if self.ip_configuration_id is not None and self.ip_configuration_id != old_response['ip_configuration_id']:
+ changed = True
+ else:
+ self.ip_configuration_id = old_response['ip_configuration_id']
+ if self.internal_mappings is not None and old_response['internal_mappings'] != self.internal_mappings:
+ changed = True
+ else:
+ self.internal_mappings = old_response['internal_mappings']
+
+ if self.external_mappings is not None and self.external_mappings != old_response['external_mappings']:
+ changed = True
+ else:
+ self.external_mappings = old_response['external_mappings']
+ else:
+ changed = True
+
+ internal_mappings = None
+ external_mappings = None
+ if self.internal_mappings is not None:
+ internal_mappings = [self.network_models.VpnNatRuleMapping(address_space=item) for item in self.internal_mappings]
+ if self.external_mappings is not None:
+ external_mappings = [self.network_models.VpnNatRuleMapping(address_space=item) for item in self.external_mappings]
+
+ natrule_resource = self.network_models.VirtualNetworkGatewayNatRule(name=self.name,
+ type_properties_type=self.type_properties_type,
+ mode=self.mode,
+ ip_configuration_id=self.ip_configuration_id,
+ internal_mappings=internal_mappings,
+ external_mappings=external_mappings)
+ if changed:
+ if not self.check_mode:
+ response = self.create_or_update_local_network_gateway(natrule_resource)
+ else:
+ if not self.check_mode:
+ if old_response is not None:
+ self.delete_local_network_gateway()
+ changed = True
+ response = None
+ else:
+ changed = True
+
+ if response is None:
+ response = old_response
+ self.results['state'] = response
+ self.results['changed'] = changed
+ return self.results
+
+ def get_nat_rule(self):
+ """Gets the specified nat rule"""
+ response = None
+ try:
+ response = self.network_client.virtual_network_gateway_nat_rules.get(self.resource_group, self.virtual_network_gateway_name, self.name)
+ except HttpResponseError as ec:
+ self.log("Gets the specified local network gateway in a resource group Failed, Exception as {0}".format(ec))
+ return None
+ return self.format_response(response)
+
+ def create_or_update_local_network_gateway(self, body):
+ """Create or Update local network gateway"""
+ response = None
+ try:
+ response = self.network_client.virtual_network_gateway_nat_rules.begin_create_or_update(self.resource_group,
+ self.virtual_network_gateway_name, self.name, body)
+ if isinstance(response, LROPoller):
+ response = self.get_poller_result(response)
+ except HttpResponseError as ec:
+ self.fail("Create or Updated a local network gateway in a resource group Failed, Exception as {0}".format(ec))
+
+ return self.format_response(response)
+
+ def delete_local_network_gateway(self):
+ """Deletes the specified local network gateway"""
+ try:
+ self.network_client.virtual_network_gateway_nat_rules.begin_delete(self.resource_group, self.virtual_network_gateway_name, self.name)
+ except HttpResponseError as ec:
+ self.fail("Deletes the specified nat rule, Exception as {0}".format(ec))
+ return None
+
+ def format_response(self, item):
+ result = dict(
+ resource_group=self.resource_group,
+ id=item.id,
+ name=item.name,
+ type=item.type,
+ etag=item.etag,
+ provisioning_state=item.provisioning_state,
+ type_properties_type=item.type_properties_type,
+ mode=item.mode,
+ internal_mappings=list(),
+ external_mappings=list(),
+ ip_configuration_id=item.ip_configuration_id
+ )
+
+ if item.internal_mappings is not None:
+ for value in item.internal_mappings:
+ result['internal_mappings'].append(value.address_space)
+ if item.external_mappings is not None:
+ for value in item.external_mappings:
+ result['external_mappings'].append(value.address_space)
+ return result
+
+
+def main():
+ """Main module execution code path"""
+
+ AzureRMVirtualNetworkNatGateway()
+
+
+if __name__ == '__main__':
+ main()
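As a note on the update logic in exec_module above: options left unset fall back to the existing rule's values, and any supplied option that differs marks the rule as changed. A rough standalone sketch of that pattern (hypothetical helper, plain dicts standing in for the formatted NAT rule; not the module itself):

def needs_update(desired, existing):
    # None means the option was not supplied, so it inherits the current value.
    changed = False
    for key in ('ip_configuration_id', 'internal_mappings', 'external_mappings'):
        if desired.get(key) is None:
            desired[key] = existing[key]
        elif desired[key] != existing[key]:
            changed = True
    return changed


print(needs_update({'ip_configuration_id': None, 'internal_mappings': ['10.1.0.0/24'], 'external_mappings': None},
                   {'ip_configuration_id': 'ipconfig1', 'internal_mappings': ['10.2.0.0/24'], 'external_mappings': ['192.168.1.0/24']}))  # -> True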
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule_info.py
new file mode 100644
index 000000000..8aeb6649d
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_virtualnetworkgatewaynatrule_info.py
@@ -0,0 +1,236 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024 xuzhang3 (@xuzhang3), Fred-sun (@Fred-sun)
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = '''
+---
+module: azure_rm_virtualnetworkgatewaynatrule_info
+
+version_added: "2.4.0"
+
+short_description: Get or list NAT rules for a particular virtual network gateway
+
+description:
+ - Get or list NAT rules for a particular virtual network gateway.
+
+options:
+ resource_group:
+ description:
+ - The resource group of the virtual network gateway.
+ type: str
+ required: true
+ virtual_network_gateway_name:
+ description:
+ - The name of the virtual network gateway.
+ type: str
+ required: true
+ name:
+ description:
+ - The name of the nat rule.
+ type: str
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - xuzhang3 (@xuzhang3)
+ - Fred Sun (@Fred-sun)
+'''
+
+EXAMPLES = '''
+- name: Get the NAT rule by name
+ azure_rm_virtualnetworkgatewaynatrule_info:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ local_networkgateway_name }}"
+ name: "{{ name }}"
+
+- name: List all nat rules for a particular virtual network gateway
+ azure_rm_virtualnetworkgatewaynatrule_info:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ local_networkgateway_name }}"
+'''
+
+RETURN = '''
+state:
+ description:
+ - The NAT rules for a particular virtual network gateway.
+ returned: always
+ type: complex
+ contains:
+ id:
+ description:
+ - The resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworkGateways/vng01/natRules/natrule"
+ internal_mappings:
+ description:
+ - The private IP address internal mapping for NAT.
+ type: list
+ returned: always
+ sample: ["10.1.0.0/24"]
+ external_mappings:
+ description:
+ - The private IP address external mapping for NAT.
+ type: list
+ returned: always
+ sample: ["192.168.1.0/24"]
+ ip_configuration_id:
+ description:
+ - The IP Configuration ID this NAT rule applies to.
+ type: str
+ returned: always
+ sample: "/subscriptions/subid/resourceGroups/rg1/providers/Microsoft.Network/virtualNetworkGateways/gateway1/ipConfigurations/default"
+ type_properties_type:
+ description:
+ - The type of NAT rule for VPN NAT.
+ type: str
+ returned: always
+ sample: Static
+ mode:
+ description:
+ - The Source NAT direction of a VPN NAT.
+ type: str
+ returned: always
+ sample: EgressSnat
+ name:
+ description:
+ - The resource name.
+ type: str
+ returned: always
+ sample: natrule_name
+ resource_group:
+ description:
+ - The resource group name.
+ type: str
+ returned: always
+ sample: testRG
+ etag:
+ description:
+ - A unique read-only string that changes whenever the resource is updated.
+ type: str
+ returned: always
+ sample: b5a32693-2e75-49e0-9137-ded19db658d6
+ provisioning_state:
+ description:
+ - The provisioning state of the nat rule resource.
+ type: str
+ returned: always
+ sample: Succeeded
+ type:
+ description:
+ - The resource type.
+ type: str
+ returned: always
+ sample: Microsoft.Network/virtualNetworkGateways/natRules
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.core.exceptions import HttpResponseError
+except Exception:
+ # handled in azure_rm_common
+ pass
+
+
+class AzureRMVirtualNetworkGatewayNatRuleInfo(AzureRMModuleBase):
+ """Utility class to get Azure Kubernetes Service Credentials facts"""
+
+ def __init__(self):
+
+ self.module_args = dict(
+ name=dict(type='str'),
+ resource_group=dict(type='str', required=True),
+ virtual_network_gateway_name=dict(type='str', required=True),
+ )
+
+ self.name = None
+
+ self.results = dict(
+ changed=False,
+ state=[],
+ )
+
+ super(AzureRMVirtualNetworkGatewayNatRuleInfo, self).__init__(derived_arg_spec=self.module_args,
+ supports_check_mode=True,
+ supports_tags=False,
+ facts_module=True)
+
+ def exec_module(self, **kwargs):
+
+ for key in self.module_args:
+ setattr(self, key, kwargs[key])
+
+ if self.name is not None:
+ self.results['state'] = self.get_by_name()
+ else:
+ self.results['state'] = self.list_by_virtual_network_gateway()
+
+ return self.results
+
+ def get_by_name(self):
+ """Gets the nat rule by name"""
+ response = None
+
+ try:
+ response = self.network_client.virtual_network_gateway_nat_rules.get(self.resource_group, self.virtual_network_gateway_name, self.name)
+ except HttpResponseError as ec:
+ self.log("Gets the nat rule by name got a Exception, Exception as {0}".format(ec))
+ if response:
+ return [self.format_response(response)]
+ else:
+ return []
+
+ def list_by_virtual_network_gateway(self):
+ """Gets all the nat rule in the local network gateway"""
+ response = None
+ try:
+ response = self.network_client.virtual_network_gateway_nat_rules.list_by_virtual_network_gateway(self.resource_group,
+ self.virtual_network_gateway_name)
+ except HttpResponseError as ec:
+ self.log("Gets all nat rule by the local network gateway got Exception, Exception as {0}".format(ec))
+
+ if response:
+ return [self.format_response(item) for item in response]
+ else:
+ return []
+
+ def format_response(self, item):
+ result = dict(
+ resource_group=self.resource_group,
+ id=item.id,
+ name=item.name,
+ type=item.type,
+ etag=item.etag,
+ provisioning_state=item.provisioning_state,
+ type_properties_type=item.type_properties_type,
+ mode=item.mode,
+ internal_mappings=list(),
+ external_mappings=list(),
+ ip_configuration_id=item.ip_configuration_id
+ )
+
+ if item.internal_mappings is not None:
+ for value in item.internal_mappings:
+ result['internal_mappings'].append(value.address_space)
+ if item.external_mappings is not None:
+ for value in item.external_mappings:
+ result['external_mappings'].append(value.address_space)
+ return result
+
+
+def main():
+ """Main module execution code path"""
+
+ AzureRMVirtualNetworkGatewayNatRuleInfo()
+
+
+if __name__ == '__main__':
+ main()
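The format_response helper above reduces each SDK VpnNatRuleMapping object to its address_space string so the returned facts contain plain CIDR lists. A tiny illustrative sketch of that flattening, using SimpleNamespace stand-ins for the SDK objects:

from types import SimpleNamespace as NS


def flatten_mappings(mappings):
    # Each mapping object only contributes its address_space value; a missing
    # list becomes an empty list in the returned facts.
    return [m.address_space for m in (mappings or [])]


print(flatten_mappings([NS(address_space='10.1.0.0/24')]))  # -> ['10.1.0.0/24']
print(flatten_mappings(None))                               # -> []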
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_vmsku_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_vmsku_info.py
new file mode 100644
index 000000000..a367bd692
--- /dev/null
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_vmsku_info.py
@@ -0,0 +1,311 @@
+#!/usr/bin/python
+#
+# Copyright (c) 2024
+# Nir Argaman <nargaman@redhat.com>
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: azure_rm_vmsku_info
+
+version_added: "2.4.0"
+
+short_description: Get compute-related SKUs list
+
+description:
+ - Get details for compute-related resource SKUs.
+
+options:
+ location:
+ description:
+ - A region supported by current subscription.
+ type: str
+ resource_type:
+ description:
+ - Resource types, for example "availabilitySets", "snapshots" or "disks".
+ type: str
+ size:
+ description:
+ - Size name, partial name is accepted.
+ type: str
+ zone:
+ description:
+ - Show only SKUs that support availability zones.
+ type: bool
+ default: False
+
+extends_documentation_fragment:
+ - azure.azcollection.azure
+
+author:
+ - Nir Argaman (@nirarg)
+
+'''
+
+EXAMPLES = '''
+- name: Gather Resource Group info
+ azure.azcollection.azure_rm_resourcegroup_info:
+ name: "{{ resource_group }}"
+ register: rg_info
+
+- name: List available VM SKUs
+ azure.azcollection.azure_rm_vmsku_info:
+ location: "{{ rg_info.resourcegroups.0.location }}"
+ resource_type: "virtualMachines"
+ size: "standard_B1"
+ zone: true
+ register: available_skus_result
+'''
+
+RETURN = '''
+available_skus:
+ description:
+ - List of compute-related resource SKUs.
+ returned: always
+ type: complex
+ contains:
+ resource_type:
+ description:
+ - The type of resource the SKU applies to.
+ returned: always
+ type: str
+ sample: "virtual_machine"
+ name:
+ description:
+ - The name of SKU.
+ returned: always
+ type: str
+ sample: "Standard_B1s"
+ tier:
+ description:
+ - Specifies the tier of virtual machines in a scale set.
+ returned: always
+ type: str
+ sample: "Standard"
+ size:
+ description:
+ - The Size of the SKU.
+ returned: always
+ type: str
+ sample: "B1s"
+ family:
+ description:
+ - The Family of this particular SKU.
+ returned: always
+ type: str
+ sample: "standardBSFamily"
+ locations:
+ description:
+ - The set of locations that the SKU is available.
+ returned: always
+ type: list
+ sample: ["eastus"]
+ location_info:
+ description:
+ - A list of locations and availability zones in those locations where the SKU is available.
+ returned: always
+ type: complex
+ contains:
+ location:
+ description:
+ - Location of the SKU.
+ type: str
+ returned: always
+ sample: "eastus"
+ zones:
+ description:
+ - List of availability zones where the SKU is supported.
+ type: list
+ returned: always
+ sample: ["1", "2", "3"]
+ zone_details:
+ description:
+ - Details of capabilities available to a SKU in specific zones.
+ returned: always
+ type: complex
+ contains:
+ capabilities:
+ description:
+ - A list of capabilities that are available for the SKU in the specified list of zones.
+ type: complex
+ returned: always
+ contains:
+ name:
+ description:
+ - An invariant to describe the feature.
+ type: str
+ returned: always
+ sample: "ultrassdavailable"
+ value:
+ description:
+ - An invariant if the feature is measured by quantity.
+ type: str
+ returned: always
+ sample: "True"
+ capabilities:
+ description:
+ - A name value pair to describe the capability.
+ returned: always
+ type: complex
+ contains:
+ name:
+ description:
+ - An invariant to describe the feature.
+ type: str
+ returned: always
+ sample: "ultrassdavailable"
+ value:
+ description:
+ - An invariant if the feature is measured by quantity.
+ type: str
+ returned: always
+ sample: "True"
+ restrictions:
+ description:
+ - The restrictions because of which SKU cannot be used. This is empty if there are no restrictions.
+ returned: always
+ type: complex
+ contains:
+ type:
+ description:
+ - The type of restrictions.
+ type: str
+ returned: always
+ sample: "location"
+ values:
+ description:
+ - The value of restrictions. If the restriction type is set to location, this would be the different locations where the SKU is restricted.
+ type: str
+ returned: always
+ sample: ["eastus"]
+ restriction_info:
+ description:
+ - The information about the restriction where the SKU cannot be used.
+ returned: always
+ type: complex
+ contains:
+ locations:
+ description:
+ - Locations where the SKU is restricted.
+ type: list
+ sample: ["location"]
+ zones:
+ description:
+ - List of availability zones where the SKU is restricted.
+ type: list
+ sample: ["1", "2"]
+ reason_code:
+ description:
+ - The reason for restriction.
+ type: str
+ sample: "QuotaId"
+'''
+
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+
+try:
+ from azure.mgmt.compute import ComputeManagementClient
+ from azure.core.exceptions import HttpResponseError
+except ImportError:
+ # This is handled in azure_rm_common
+ pass
+
+
+class AzureRMVmskuInfo(AzureRMModuleBase):
+ def __init__(self):
+
+ self.module_arg_spec = dict(
+ location=dict(type='str'),
+ resource_type=dict(type='str'),
+ size=dict(type='str'),
+ zone=dict(type='bool', default=False)
+ )
+
+ self.results = dict(
+ available_skus=[],
+ count=0
+ )
+ self.location = None
+ self.resource_type = None
+ self.size = None
+ self.zone = False
+
+ super(AzureRMVmskuInfo, self).__init__(derived_arg_spec=self.module_arg_spec,
+ supports_check_mode=True,
+ supports_tags=False)
+
+ def list_skus(self):
+ try:
+ compute_client = self.get_mgmt_svc_client(ComputeManagementClient,
+ base_url=self._cloud_environment.endpoints.resource_manager,
+ api_version='2021-07-01')
+ skus_result = compute_client.resource_skus.list()
+ available_skus = []
+ for sku_info in skus_result:
+ if self.location and not _match_location(self.location, sku_info.locations):
+ continue
+ if not _is_sku_available(sku_info, self.zone):
+ continue
+ if self.resource_type and not sku_info.resource_type.lower() == self.resource_type.lower():
+ continue
+ if self.size and not (sku_info.resource_type == 'virtualMachines' and self.size.lower() in sku_info.name.lower()):
+ continue
+ if self.zone and not (sku_info.location_info and sku_info.location_info[0].zones):
+ continue
+ available_skus.append(sku_info.as_dict())
+ return available_skus
+ except HttpResponseError as e:
+ # Handle exceptions
+ raise e
+
+ def exec_module(self, **kwargs):
+ for key in self.module_arg_spec:
+ setattr(self, key, kwargs[key])
+
+ available_skus = self.list_skus()
+ self.results['available_skus'] = available_skus
+ self.results['count'] = len(available_skus)
+ return self.results
+
+
+def _match_location(loc, locations):
+ return next((x for x in locations if x.lower() == loc.lower()), None)
+
+
+def _is_sku_available(sku_info, zone):
+ """
+ The SKU is treated as unavailable in the following cases:
+ 1. The SKU has a regional restriction and the requested region is restricted.
+ 2. The "zone" parameter is set (only SKUs with availability zones are shown) and a zonal restriction covers all of the SKU's zones.
+ """
+ is_available = True
+ is_restrict_zone = False
+ is_restrict_location = False
+ if not sku_info.restrictions:
+ return is_available
+ for restriction in sku_info.restrictions:
+ if restriction.reason_code == 'NotAvailableForSubscription':
+ if restriction.type == 'Zone' and not (
+ set(sku_info.location_info[0].zones or []) - set(restriction.restriction_info.zones or [])):
+ is_restrict_zone = True
+ if restriction.type == 'Location' and (
+ sku_info.location_info[0].location in (restriction.restriction_info.locations or [])):
+ is_restrict_location = True
+ if is_restrict_location or (is_restrict_zone and zone):
+ is_available = False
+ break
+ return is_available
+
+
+def main():
+ AzureRMVmskuInfo()
+
+
+if __name__ == '__main__':
+ main()
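For clarity on the restriction logic in _is_sku_available above: a location restriction always hides the SKU, while a zone restriction only hides it when zone=true was requested and every advertised zone is restricted. A self-contained sketch of the same check, with SimpleNamespace stand-ins for the SDK's ResourceSku objects (attribute names follow the code above):

from types import SimpleNamespace as NS


def is_sku_available(sku_info, zone):
    for restriction in (sku_info.restrictions or []):
        if restriction.reason_code != 'NotAvailableForSubscription':
            continue
        # Zones still usable after removing the restricted ones.
        zones_left = set(sku_info.location_info[0].zones or []) - set(restriction.restriction_info.zones or [])
        restrict_zone = restriction.type == 'Zone' and not zones_left
        restrict_location = (restriction.type == 'Location'
                             and sku_info.location_info[0].location in (restriction.restriction_info.locations or []))
        if restrict_location or (restrict_zone and zone):
            return False
    return True


sku = NS(location_info=[NS(location='eastus', zones=['1', '2', '3'])],
         restrictions=[NS(reason_code='NotAvailableForSubscription', type='Zone',
                          restriction_info=NS(zones=['1', '2', '3'], locations=None))])
print(is_sku_available(sku, zone=True))   # -> False: all zones restricted
print(is_sku_available(sku, zone=False))  # -> True: zone filtering not requested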
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp.py
index e58cbcd43..622c5dd5a 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp.py
@@ -213,7 +213,191 @@ options:
- stopped
- restarted
default: started
-
+ site_auth_settings:
+ description:
+ - Configuration settings for the Azure App Service Authentication / Authorization feature.
+ type: dict
+ suboptions:
+ kind:
+ description:
+ - Kind of resource.
+ type: str
+ enabled:
+ description:
+ - Whether enable or disable the Authentication / Authorization feature for the current app.
+ type: bool
+ runtime_version:
+ description:
+ - The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
+ type: str
+ unauthenticated_client_action:
+ description:
+ - The action to take when an unauthenticated client attempts to access the app.
+ type: str
+ choices:
+ - RedirectToLoginPage
+ - AllowAnonymous
+ token_store_enabled:
+ description:
+ - Whether to use App Service Token Store.
+ type: bool
+ allowed_external_redirect_urls:
+ description:
+ - External URLs that can be redirected to as part of logging in or logging out of the app.
+ - Note that the query string part of the URL is ignored.
+ type: list
+ elements: str
+ default_provider:
+ description:
+ - The default authentication provider to use when multiple providers are configured.
+ type: str
+ choices:
+ - AzureActiveDirectory
+ - Facebook
+ - Google
+ - MicrosoftAccount
+ - Twitter
+ - Github
+ token_refresh_extension_hours:
+ description:
+ - The number of hours after session token expiration that a session token can be used to call the token refresh API.
+ type: float
+ client_id:
+ description:
+ - The Client ID of this relying party application, known as the client_id.
+ type: str
+ client_secret:
+ description:
+ - The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
+ type: str
+ client_secret_setting_name:
+ description:
+ - The app setting name that contains the client secret of the relying party application.
+ type: str
+ client_secret_certificate_thumbprint:
+ description:
+ - An alternative to the client secret, that is the thumbprint of a certificate used for signing purposes.
+ - This property acts as a replacement for the Client Secret. It is also optional.
+ type: str
+ issuer:
+ description:
+ - The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
+ type: str
+ allowed_audiences:
+ description:
+ - Allowed audience values to consider when validating JWTs issued by Azure Active Directory.
+ type: list
+ elements: str
+ aad_claims_authorization:
+ description:
+ - Gets a JSON string containing the Azure AD Acl settings.
+ type: str
+ additional_login_params:
+ description:
+ - Login parameters to send to the OpenID Connect authorization endpoint when a user logs in.
+ - Each parameter must be in the form "key=value".
+ type: str
+ google_client_id:
+ description:
+ - The OpenID Connect Client ID for the Google web application.
+ type: str
+ google_client_secret:
+ description:
+ - The client secret associated with the Google web application.
+ type: str
+ google_client_secret_setting_name:
+ description:
+ - The app setting name that contains the client secret associated with the Google web application.
+ type: str
+ google_o_auth_scopes:
+ description:
+ - The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
+ - This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
+ type: list
+ elements: str
+ facebook_app_id:
+ description:
+ - The App ID of the Facebook app used for login.
+ type: str
+ facebook_app_secret:
+ description:
+ - The App Secret of the Facebook app used for Facebook Login.
+ type: str
+ facebook_app_secret_setting_name:
+ description:
+ - The app setting name that contains the app secret used for Facebook Login.
+ type: str
+ facebook_o_auth_scopes:
+ description:
+ - The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
+ type: list
+ elements: str
+ git_hub_client_id:
+ description:
+ - The Client Id of the GitHub app used for login.
+ type: str
+ git_hub_client_secret:
+ description:
+ - The Client Secret of the GitHub app used for Github Login.
+ type: str
+ git_hub_client_secret_setting_name:
+ description:
+ - The app setting name that contains the client secret of the Github app used for GitHub Login.
+ type: str
+ git_hub_o_auth_scopes:
+ description:
+ - The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
+ - This setting is optional.
+ type: list
+ elements: str
+ twitter_consumer_key:
+ description:
+ - The OAuth 1.0a consumer key of the Twitter application used for sign-in.
+ type: str
+ twitter_consumer_secret:
+ description:
+ - The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
+ - This setting is required for enabling Twitter Sign-In.
+ type: str
+ twitter_consumer_secret_setting_name:
+ description:
+ - The app setting name that contains the OAuth 1.0a consumer secret of the Twitter application used for sign-in.
+ type: str
+ microsoft_account_client_id:
+ description:
+ - The OAuth 2.0 client ID that was created for the app used for authentication.
+ - This setting is required for enabling Microsoft Account authentication.
+ type: str
+ microsoft_account_client_secret:
+ description:
+ - The OAuth 2.0 client secret that was created for the app used for authentication.
+ type: str
+ microsoft_account_client_secret_setting_name:
+ description:
+ - The app setting name containing the OAuth 2.0 client secret that was created for the app used for authentication.
+ type: str
+ microsoft_account_o_auth_scopes:
+ description:
+ - The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
+ type: list
+ elements: str
+ is_auth_from_file:
+ description:
+ - If I(is_auth_from_file=true), the auth config settings should be read from a file.
+ type: str
+ choices:
+ - 'true'
+ - 'false'
+ auth_file_path:
+ description:
+ - The path of the config file containing auth settings.
+ - If the path is relative, the base will be the site's root directory.
+ type: str
+ config_version:
+ description:
+ - The ConfigVersion of the Authentication / Authorization feature in use for the current app.
+ - The setting in this value can control the behavior of the control plane for Authentication / Authorization.
+ type: str
state:
description:
- State of the Web App.
@@ -337,6 +521,26 @@ EXAMPLES = '''
java_container: "Tomcat"
java_container_version: "8.5"
+- name: Create a windows web app with site_auth_settings
+ azure_rm_webapp:
+ resource_group: myResourceGroup
+ name: myWindowWebapp
+ site_auth_settings:
+ client_id: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
+ default_provider: 'MicrosoftAccount'
+ runtime_version: '-2'
+ token_refresh_extension_hours: 120
+ unauthenticated_client_action: 'RedirectToLoginPage'
+ client_secret: 'xxxxxxxx-xxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'
+ token_store_enabled: false
+ enabled: true
+ is_auth_from_file: false
+ plan:
+ resource_group: myResourceGroup
+ name: myLinuxwebapp
+ is_linux: false
+ sku: S1
+
- name: Create a linux web app with python framework
azure_rm_webapp:
resource_group: myResourceGroup
@@ -360,11 +564,12 @@ id:
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Web/sites/myWebApp"
'''
-from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBaseExt
try:
from azure.core.exceptions import ResourceNotFoundError
from azure.core.polling import LROPoller
+ from azure.core.exceptions import HttpResponseError
from azure.mgmt.web.models import Site, AppServicePlan, SkuDescription, NameValuePair, SiteSourceControl, StringDictionary
except ImportError:
# This is handled in azure_rm_common
@@ -399,6 +604,48 @@ framework_spec = dict(
)
+site_auth_settings_spec = dict(
+ kind=dict(type='str'),
+ enabled=dict(type='bool'),
+ runtime_version=dict(type='str'),
+ unauthenticated_client_action=dict(type='str', choices=["RedirectToLoginPage", "AllowAnonymous"]),
+ token_store_enabled=dict(type='bool'),
+ allowed_external_redirect_urls=dict(type='list', elements='str'),
+ default_provider=dict(type='str', choices=["Facebook", "Google", "MicrosoftAccount", "Twitter", "Github", "AzureActiveDirectory"]),
+ token_refresh_extension_hours=dict(type='float', no_log=True),
+ client_id=dict(type='str'),
+ client_secret=dict(type='str', no_log=True),
+ client_secret_setting_name=dict(type='str'),
+ client_secret_certificate_thumbprint=dict(type='str', no_log=True),
+ issuer=dict(type='str'),
+ allowed_audiences=dict(type='list', elements='str'),
+ additional_login_params=dict(type='str'),
+ aad_claims_authorization=dict(type='str'),
+ google_client_id=dict(type='str'),
+ google_client_secret=dict(type='str', no_log=True),
+ google_client_secret_setting_name=dict(type='str'),
+ google_o_auth_scopes=dict(type='list', elements='str'),
+ facebook_app_id=dict(type='str'),
+ facebook_app_secret=dict(type='str', no_log=True),
+ facebook_app_secret_setting_name=dict(type='str'),
+ facebook_o_auth_scopes=dict(type='list', elements='str'),
+ git_hub_client_id=dict(type='str'),
+ git_hub_client_secret=dict(type='str', no_log=True),
+ git_hub_client_secret_setting_name=dict(type='str'),
+ git_hub_o_auth_scopes=dict(type='list', elements='str'),
+ twitter_consumer_key=dict(type='str', no_log=True),
+ twitter_consumer_secret=dict(type='str', no_log=True),
+ twitter_consumer_secret_setting_name=dict(type='str'),
+ microsoft_account_client_id=dict(type='str'),
+ microsoft_account_client_secret=dict(type='str', no_log=True),
+ microsoft_account_client_secret_setting_name=dict(type='str'),
+ microsoft_account_o_auth_scopes=dict(type='list', elements='str'),
+ is_auth_from_file=dict(type='str', choices=['true', 'false']),
+ auth_file_path=dict(type='str'),
+ config_version=dict(type='str')
+)
+
+
def _normalize_sku(sku):
if sku is None:
return sku
@@ -462,10 +709,10 @@ def webapp_to_dict(webapp):
class Actions:
- CreateOrUpdate, UpdateAppSettings, Delete = range(3)
+ CreateOrUpdate, UpdateAppSettings, UpdateAuthSettings, Delete = range(4)
-class AzureRMWebApps(AzureRMModuleBase):
+class AzureRMWebApps(AzureRMModuleBaseExt):
"""Configuration class for an Azure RM Web App resource"""
def __init__(self):
@@ -536,6 +783,10 @@ class AzureRMWebApps(AzureRMModuleBase):
choices=['started', 'stopped', 'restarted'],
default='started'
),
+ site_auth_settings=dict(
+ type='dict',
+ options=site_auth_settings_spec
+ ),
state=dict(
type='str',
default='present',
@@ -559,6 +810,7 @@ class AzureRMWebApps(AzureRMModuleBase):
self.site_config = dict()
self.app_settings = dict()
self.app_settings_strDic = None
+ self.site_auth_settings = None
# app service plan
self.plan = None
@@ -631,6 +883,7 @@ class AzureRMWebApps(AzureRMModuleBase):
if old_response:
self.results['id'] = old_response['id']
+ self.results['site_auth_settings'] = self.get_auth_settings()
if self.state == 'present':
if not self.plan and not old_response:
@@ -723,6 +976,8 @@ class AzureRMWebApps(AzureRMModuleBase):
to_be_updated = True
self.to_do.append(Actions.CreateOrUpdate)
+ if self.site_auth_settings is not None:
+ self.to_do.append(Actions.UpdateAuthSettings)
self.site.tags = self.tags
# service plan is required for creation
@@ -764,6 +1019,7 @@ class AzureRMWebApps(AzureRMModuleBase):
if update_tags:
to_be_updated = True
+ self.to_do.append(Actions.CreateOrUpdate)
# check if root level property changed
if self.is_updatable_property_changed(old_response):
@@ -799,6 +1055,12 @@ class AzureRMWebApps(AzureRMModuleBase):
for key in self.app_settings.keys():
self.app_settings_strDic[key] = self.app_settings[key]
+ if self.site_auth_settings is not None:
+ result = dict(compare=[])
+ if not self.default_compare({}, self.site_auth_settings, self.results['site_auth_settings'], '', dict(compare=[])):
+ to_be_updated = True
+ self.to_do.append(Actions.UpdateAuthSettings)
+
elif self.state == 'absent':
if old_response:
self.log("Delete Web App instance")
@@ -830,6 +1092,12 @@ class AzureRMWebApps(AzureRMModuleBase):
update_response = self.update_app_settings()
self.results['id'] = update_response.id
+ if Actions.UpdateAuthSettings in self.to_do:
+ auth_settings = self.update_auth_settings(self.site_auth_settings)
+ self.results['site_auth_settings'] = auth_settings
+ else:
+ self.results['site_auth_settings'] = self.get_auth_settings()
+
webapp = None
if old_response:
webapp = old_response
@@ -1026,6 +1294,36 @@ class AzureRMWebApps(AzureRMModuleBase):
self.fail("Failed to list application settings for web app {0} in resource group {1}: {2}".format(
self.name, self.resource_group, str(ex)))
+ def update_auth_settings(self, body):
+ '''
+ Update web app auth settings
+ :return: deserialized updating response
+ '''
+ self.log("Update web app auth settings")
+
+ try:
+ response = self.web_client.web_apps.update_auth_settings(self.resource_group, self.name, body)
+ self.log("Response : {0}".format(response))
+ return response.as_dict()
+ except HttpResponseError as ex:
+ self.fail("Failed to update web app auth settings for web app {0} in resource group {1}: {2}".format(
+ self.name, self.resource_group, str(ex)))
+
+ def get_auth_settings(self):
+ '''
+ Get the web app auth settings
+ :return: deserialized updating response
+ '''
+ self.log("Get the web app auth settings")
+
+ try:
+ response = self.web_client.web_apps.get_auth_settings(self.resource_group, self.name)
+ self.log("Response : {0}".format(response))
+ return response.as_dict()
+ except HttpResponseError as ex:
+ self.log("Failed to Get web app auth settings for web app {0} in resource group {1}: {2}".format(
+ self.name, self.resource_group, str(ex)))
+
def update_app_settings(self):
'''
Update application settings
@@ -1040,9 +1338,8 @@ class AzureRMWebApps(AzureRMModuleBase):
response = self.web_client.web_apps.update_application_settings(
resource_group_name=self.resource_group, name=self.name, app_settings=settings)
self.log("Response : {0}".format(response))
-
return response
- except Exception as ex:
+ except HttpResponseError as ex:
self.fail("Failed to update application settings for web app {0} in resource group {1}: {2}".format(
self.name, self.resource_group, str(ex)))
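A note on the site_auth_settings flow added above: the module reads the app's current auth settings first, and only calls update_auth_settings when the supplied options differ from what is already configured (compared via the collection's default_compare helper). Below is a rough stand-in for that decision, not the collection's implementation, where only options the user actually supplied take part in the comparison:

def auth_settings_update_needed(desired, current):
    # Hypothetical simplification: keys left as None were not supplied and are
    # ignored; any supplied key whose value differs triggers an update.
    return any(value is not None and current.get(key) != value
               for key, value in desired.items())


print(auth_settings_update_needed({'enabled': True, 'client_id': None},
                                  {'enabled': False, 'client_id': 'abc'}))  # -> True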
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp_info.py
index c0ec6b42d..22a5fea73 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webapp_info.py
@@ -244,11 +244,18 @@ webapps:
returned: always
type: dict
sample: { tag1: abc }
+ site_auth_settings:
+ description:
+ - The Authentication / Authorization settings associated with web app.
+ type: dict
+ returned: always
+ sample: {}
'''
try:
from azure.core.exceptions import ResourceNotFoundError
from azure.core.polling import LROPoller
from azure.mgmt.web.models import CsmPublishingProfileOptions
+ from azure.core.exceptions import HttpResponseError
except Exception:
# This is handled in azure_rm_common
pass
@@ -390,6 +397,14 @@ class AzureRMWebAppInfo(AzureRMModuleBase):
self.fail('Error getting web app {0} publishing credentials - {1}'.format(request_id, str(ex)))
return response
+ def get_auth_settings(self, resource_group, name):
+ self.log('Get web app {0} auth settings'.format(name))
+ try:
+ response = self.web_client.web_apps.get_auth_settings(resource_group_name=resource_group, name=name)
+ return response.as_dict()
+ except HttpResponseError as ex:
+ self.log('Error getting web app {0} auth settings, exception as {1}'.format(name, str(ex)))
+
def get_webapp_ftp_publish_url(self, resource_group, name):
self.log('Get web app {0} app publish profile'.format(name))
@@ -430,6 +445,7 @@ class AzureRMWebAppInfo(AzureRMModuleBase):
app_settings = self.list_webapp_appsettings(resource_group, name)
publish_cred = self.get_publish_credentials(resource_group, name)
ftp_publish_url = self.get_webapp_ftp_publish_url(resource_group, name)
+ site_auth_settings = self.get_auth_settings(resource_group, name)
except Exception:
pass
return self.construct_curated_webapp(webapp=pip,
@@ -437,7 +453,8 @@ class AzureRMWebAppInfo(AzureRMModuleBase):
app_settings=app_settings,
deployment_slot=None,
ftp_publish_url=ftp_publish_url,
- publish_credentials=publish_cred)
+ publish_credentials=publish_cred,
+ site_auth_settings=site_auth_settings)
def construct_curated_webapp(self,
webapp,
@@ -445,7 +462,8 @@ class AzureRMWebAppInfo(AzureRMModuleBase):
app_settings=None,
deployment_slot=None,
ftp_publish_url=None,
- publish_credentials=None):
+ publish_credentials=None,
+ site_auth_settings=None):
curated_output = dict()
curated_output['id'] = webapp['id']
curated_output['name'] = webapp['name']
@@ -514,6 +532,9 @@ class AzureRMWebAppInfo(AzureRMModuleBase):
if publish_credentials and self.return_publish_profile:
curated_output['publishing_username'] = publish_credentials.publishing_user_name
curated_output['publishing_password'] = publish_credentials.publishing_password
+
+ # curated auth settings
+ curated_output['site_auth_settings'] = site_auth_settings if site_auth_settings is not None else {}
return curated_output
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction.py
index bfe2b7591..4f44067b1 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction.py
@@ -39,7 +39,6 @@ options:
- The web app's HTTP access restrictions.
type: list
elements: dict
- default: []
suboptions:
name:
description:
@@ -66,7 +65,28 @@ options:
description:
- IPv4 address (with subnet mask) of the access restriction.
type: str
- required: true
+ subnet_traffic_tag:
+ description:
+ - (internal) Subnet traffic tags.
+ type: int
+ vnet_traffic_tag:
+ description:
+ - (internal) Vnet traffic tag.
+ type: int
+ tag:
+ description:
+ - IP restriction rule description.
+ type: str
+ choices:
+ - Default
+ - XffProxy
+ - ServiceTag
+ vnet_subnet_resource_id:
+ description:
+ - The virtual network related subnet resource ID.
+ - Only one of I(ip_address) or I(vnet_subnet_resource_id) may be specified.
+ - Parameter I(vnet_subnet_resource_id) cannot be used with I(subnet_traffic_tag) or I(vnet_traffic_tag) at the same time.
+ type: str
scm_ip_security_restrictions:
description:
- >-
@@ -74,7 +94,6 @@ options:
the SCM restrictions will be configured but not used.
type: list
elements: dict
- default: []
suboptions:
name:
description:
@@ -101,7 +120,28 @@ options:
description:
- IPv4 address (with subnet mask) of the access restriction.
type: str
- required: true
+ subnet_traffic_tag:
+ description:
+ - (internal) Subnet traffic tags.
+ type: int
+ vnet_traffic_tag:
+ description:
+ - (internal) Vnet traffic tag.
+ type: int
+ tag:
+ description:
+ - IP restriction rule description.
+ type: str
+ choices:
+ - Default
+ - XffProxy
+ - ServiceTag
+ vnet_subnet_resource_id:
+ description:
+ - The virtual network related subnet resource ID.
+ - Only one of I(ip_address) or I(vnet_subnet_resource_id) may be specified.
+ - Parameter I(vnet_subnet_resource_id) cannot be used with I(subnet_traffic_tag) or I(vnet_traffic_tag) at the same time.
+ type: str
scm_ip_security_restrictions_use_main:
description:
- >-
@@ -131,6 +171,12 @@ EXAMPLES = '''
action: "Allow"
ip_address: "2.2.2.2/24"
priority: 2
+ - name: "Datacenter 3"
+ action: Allow
+ priority: 3
+ description: "for test 02"
+ tag: XffProxy
+ vnet_subnet_resource_id: "{{ subnet_output.state.id }}"
scm_ip_security_restrictions_use_main: true
- name: Delete web app network access restrictions.
@@ -178,6 +224,30 @@ ip_security_restrictions:
returned: always
type: str
sample: 1.1.1.1/32
+ subnet_traffic_tag:
+ description:
+ - (internal) Subnet traffic tags.
+ type: int
+ returned: always
+ sample: int
+ vnet_traffic_tag:
+ description:
+ - (internal) Vnet traffic tag.
+ type: int
+ returned: always
+ sample: 3
+ tag:
+ description:
+ - IP restriction rule description.
+ type: str
+ returned: always
+ sample: default
+ vnet_subnet_resource_id:
+ description:
+ - The virtual network related subnet resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworks/vnet01/subnets/subnet01"
scm_ip_security_restrictions:
description:
- The web app's SCM access restrictions.
@@ -215,6 +285,30 @@ scm_ip_security_restrictions:
returned: always
type: str
sample: 1.1.1.1/32
+ subnet_traffic_tag:
+ description:
+ - (internal) Subnet traffic tags.
+ type: int
+ returned: always
+ sample: int
+ vnet_traffic_tag:
+ description:
+ - (internal) Vnet traffic tag.
+ type: int
+ returned: always
+ sample: 3
+ tag:
+ description:
+ - IP restriction rule description.
+ type: str
+ returned: always
+ sample: default
+ vnet_subnet_resource_id:
+ description:
+ - The virtual network related subnet resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworks/vnet01/subnets/subnet01"
scm_ip_security_restrictions_use_main:
description:
- Whether the HTTP access restrictions are used for SCM access.
@@ -223,7 +317,7 @@ scm_ip_security_restrictions_use_main:
sample: false
'''
-from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common import AzureRMModuleBase
+from ansible_collections.azure.azcollection.plugins.module_utils.azure_rm_common_ext import AzureRMModuleBaseExt
try:
from azure.mgmt.web.models import IpSecurityRestriction
@@ -236,11 +330,15 @@ ip_restriction_spec = dict(
description=dict(type='str'),
action=dict(type='str', default='Allow', choices=['Allow', 'Deny']),
priority=dict(type='int', required=True),
- ip_address=dict(type='str', required=True),
+ ip_address=dict(type='str'),
+ vnet_subnet_resource_id=dict(type='str'),
+ vnet_traffic_tag=dict(type='int'),
+ subnet_traffic_tag=dict(type='int'),
+ tag=dict(type='str', choices=["Default", "XffProxy", "ServiceTag"]),
)
-class AzureRMWebAppAccessRestriction(AzureRMModuleBase):
+class AzureRMWebAppAccessRestriction(AzureRMModuleBaseExt):
def __init__(self):
@@ -248,8 +346,8 @@ class AzureRMWebAppAccessRestriction(AzureRMModuleBase):
name=dict(type='str', required=True),
resource_group=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['present', 'absent']),
- ip_security_restrictions=dict(type='list', default=[], elements='dict', options=ip_restriction_spec),
- scm_ip_security_restrictions=dict(type='list', default=[], elements='dict', options=ip_restriction_spec),
+ ip_security_restrictions=dict(type='list', elements='dict', options=ip_restriction_spec),
+ scm_ip_security_restrictions=dict(type='list', elements='dict', options=ip_restriction_spec),
scm_ip_security_restrictions_use_main=dict(type='bool', default=False),
)
@@ -263,8 +361,8 @@ class AzureRMWebAppAccessRestriction(AzureRMModuleBase):
self.state = None
self.name = None
self.resource_group = None
- self.ip_security_restrictions = []
- self.scm_ip_security_restrictions = []
+ self.ip_security_restrictions = None
+ self.scm_ip_security_restrictions = None
self.scm_ip_security_restrictions_use_main = False
super(AzureRMWebAppAccessRestriction, self).__init__(self.module_arg_spec,
@@ -318,9 +416,16 @@ class AzureRMWebAppAccessRestriction(AzureRMModuleBase):
return site_config
def has_updates(self, site_config):
- return (site_config.scm_ip_security_restrictions_use_main != self.scm_ip_security_restrictions_use_main or self.ip_security_restrictions and
- self.ip_security_restrictions != self.to_restriction_dict_list(site_config.ip_security_restrictions) or self.scm_ip_security_restrictions and
- self.scm_ip_security_restrictions != self.to_restriction_dict_list(site_config.scm_ip_security_restrictions))
+ changed = False
+ if site_config.scm_ip_security_restrictions_use_main != self.scm_ip_security_restrictions_use_main:
+ changed = True
+ elif not self.default_compare({}, self.ip_security_restrictions,
+ self.to_restriction_dict_list(site_config.ip_security_restrictions), '', dict(compare=[])):
+ changed = True
+ elif not self.default_compare({}, self.scm_ip_security_restrictions,
+ self.to_restriction_dict_list(site_config.scm_ip_security_restrictions), '', dict(compare=[])):
+ changed = True
+ return changed
def has_access_restrictions(self, site_config):
return site_config.ip_security_restrictions or site_config.scm_ip_security_restrictions
@@ -356,6 +461,10 @@ class AzureRMWebAppAccessRestriction(AzureRMModuleBase):
action=restriction_dict['action'],
priority=restriction_dict['priority'],
ip_address=restriction_dict['ip_address'],
+ vnet_subnet_resource_id=restriction_dict['vnet_subnet_resource_id'],
+ vnet_traffic_tag=restriction_dict['vnet_traffic_tag'],
+ subnet_traffic_tag=restriction_dict['subnet_traffic_tag'],
+ tag=restriction_dict['tag'],
)
def to_restriction_dict_list(self, restriction_obj_list):
@@ -379,6 +488,10 @@ class AzureRMWebAppAccessRestriction(AzureRMModuleBase):
action=restriction_obj.action,
priority=restriction_obj.priority,
ip_address=restriction_obj.ip_address,
+ vnet_subnet_resource_id=restriction_obj.vnet_subnet_resource_id,
+ vnet_traffic_tag=restriction_obj.vnet_traffic_tag,
+ subnet_traffic_tag=restriction_obj.subnet_traffic_tag,
+ tag=restriction_obj.tag,
)
diff --git a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction_info.py b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction_info.py
index 2d07bc2de..901d6b806 100644
--- a/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction_info.py
+++ b/ansible_collections/azure/azcollection/plugins/modules/azure_rm_webappaccessrestriction_info.py
@@ -79,6 +79,30 @@ ip_security_restrictions:
returned: always
type: str
sample: 1.1.1.1/32
+ subnet_traffic_tag:
+ description:
+ - (internal) Subnet traffic tags.
+ type: int
+ returned: always
+ sample: int
+ vnet_traffic_tag:
+ description:
+ - (internal) Vnet traffic tag.
+ type: int
+ returned: always
+ sample: 3
+ tag:
+ description:
+ - IP restriction rule description.
+ type: str
+ returned: always
+ sample: default
+ vnet_subnet_resource_id:
+ description:
+ - The virtual network related subnet resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworks/vnet01/subnets/subnet01"
scm_ip_security_restrictions:
description:
- The web app's SCM access restrictions.
@@ -116,6 +140,30 @@ scm_ip_security_restrictions:
returned: always
type: str
sample: 1.1.1.1/32
+ subnet_traffic_tag:
+ description:
+ - (internal) Subnet traffic tag.
+ type: int
+ returned: always
+ sample: 1
+ vnet_traffic_tag:
+ description:
+ - (internal) Vnet traffic tag.
+ type: int
+ returned: always
+ sample: 3
+ tag:
+ description:
+ - IP restriction rule description.
+ type: str
+ returned: always
+ sample: default
+ vnet_subnet_resource_id:
+ description:
+ - The virtual network related subnet resource ID.
+ type: str
+ returned: always
+ sample: "/subscriptions/xxx-xxx/resourceGroups/testRG/providers/Microsoft.Network/virtualNetworks/vnet01/subnets/subnet01"
scm_ip_security_restrictions_use_main:
description:
- Whether the HTTP access restrictions are used for SCM access.
@@ -196,6 +244,10 @@ class AzureRMWebAppAccessRestrictionInfo(AzureRMModuleBase):
action=restriction_obj.action,
priority=restriction_obj.priority,
ip_address=restriction_obj.ip_address,
+ vnet_subnet_resource_id=restriction_obj.vnet_subnet_resource_id,
+ vnet_traffic_tag=restriction_obj.vnet_traffic_tag,
+ subnet_traffic_tag=restriction_obj.subnet_traffic_tag,
+ tag=restriction_obj.tag,
)
diff --git a/ansible_collections/azure/azcollection/pr-pipelines.yml b/ansible_collections/azure/azcollection/pr-pipelines.yml
index 7ddcc40dc..744860d8d 100644
--- a/ansible_collections/azure/azcollection/pr-pipelines.yml
+++ b/ansible_collections/azure/azcollection/pr-pipelines.yml
@@ -53,7 +53,6 @@ parameters:
- "azure_rm_containerregistry"
- "azure_rm_containerregistrytag"
- "azure_rm_cosmosdbaccount"
- - "azure_rm_datalakestore"
- "azure_rm_ddosprotectionplan"
- "azure_rm_deployment"
- "azure_rm_diskencryptionset"
@@ -74,6 +73,7 @@ parameters:
- "azure_rm_keyvaultsecret"
- "azure_rm_loadbalancer"
- "azure_rm_loganalyticsworkspace"
+ - "azure_rm_localnetworkgateway"
- "azure_rm_manageddisk"
- "azure_rm_managementgroup"
- "azure_rm_mariadbserver"
@@ -109,6 +109,7 @@ parameters:
- "azure_rm_servicebus"
- "azure_rm_sqlserver"
- "azure_rm_storageaccount"
+ - "azure_rm_storageaccountmanagementpolicy"
- "azure_rm_storageblob"
- "azure_rm_storageshare"
- "azure_rm_subnet"
@@ -122,6 +123,7 @@ parameters:
- "azure_rm_virtualnetwork"
- "azure_rm_virtualnetworkgateway"
- "azure_rm_virtualnetworkpeering"
+ - "azure_rm_virtualnetworkgatewaynatrule"
- "azure_rm_virtualwan"
- "azure_rm_vpnsite"
- "azure_rm_virtualhub"
@@ -137,6 +139,9 @@ parameters:
- "azure_rm_bastionhost"
- "azure_rm_devtestlab"
- "azure_rm_sqlmanagedinstance"
+ - "azure_rm_sqlmidatabase"
+ - "azure_rm_sqlmidblongtermretentionpolicy"
+ - "azure_rm_sqlmidbshorttermretentionpolicy"
- "azure_rm_vmssnetworkinterface_info"
- "azure_rm_sshpublickey"
- "inventory_azure"
diff --git a/ansible_collections/azure/azcollection/requirements-azure.txt b/ansible_collections/azure/azcollection/requirements.txt
index 5fc0064aa..557c59e47 100644
--- a/ansible_collections/azure/azcollection/requirements-azure.txt
+++ b/ansible_collections/azure/azcollection/requirements.txt
@@ -9,13 +9,12 @@ azure-mgmt-authorization==2.0.0
azure-mgmt-apimanagement==3.0.0
azure-mgmt-batch==16.2.0
azure-mgmt-cdn==11.0.0
-azure-mgmt-compute==26.1.0
+azure-mgmt-compute==30.6.0
azure-mgmt-containerinstance==9.0.0
-azure-mgmt-core==1.3.0
+azure-mgmt-core==1.4.0
azure-mgmt-containerregistry==9.1.0
azure-containerregistry==1.1.0
azure-mgmt-containerservice==20.0.0
-azure-mgmt-datalake-store==1.0.0
azure-mgmt-datafactory==2.0.0
azure-mgmt-dns==8.0.0
azure-mgmt-marketplaceordering==1.1.0
@@ -27,7 +26,7 @@ azure-mgmt-nspkg==2.0.0
azure-mgmt-privatedns==1.0.0
azure-mgmt-redis==13.0.0
azure-mgmt-resource==21.1.0
-azure-mgmt-rdbms==10.0.0
+azure-mgmt-rdbms==10.2.0b12
azure-mgmt-search==8.0.0
azure-mgmt-servicebus==7.1.0
azure-mgmt-sql==3.0.1
diff --git a/ansible_collections/azure/azcollection/sanity-requirements-azure.txt b/ansible_collections/azure/azcollection/sanity-requirements.txt
index 65bf3ad16..65bf3ad16 100644
--- a/ansible_collections/azure/azcollection/sanity-requirements-azure.txt
+++ b/ansible_collections/azure/azcollection/sanity-requirements.txt
diff --git a/ansible_collections/azure/azcollection/tests/integration/requirements.txt b/ansible_collections/azure/azcollection/tests/integration/requirements.txt
new file mode 100644
index 000000000..557c59e47
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/requirements.txt
@@ -0,0 +1,51 @@
+packaging
+requests[security]
+xmltodict
+msgraph-sdk==1.0.0
+azure-cli-core==2.34.0
+azure-common==1.1.11
+azure-identity==1.14.0
+azure-mgmt-authorization==2.0.0
+azure-mgmt-apimanagement==3.0.0
+azure-mgmt-batch==16.2.0
+azure-mgmt-cdn==11.0.0
+azure-mgmt-compute==30.6.0
+azure-mgmt-containerinstance==9.0.0
+azure-mgmt-core==1.4.0
+azure-mgmt-containerregistry==9.1.0
+azure-containerregistry==1.1.0
+azure-mgmt-containerservice==20.0.0
+azure-mgmt-datafactory==2.0.0
+azure-mgmt-dns==8.0.0
+azure-mgmt-marketplaceordering==1.1.0
+azure-mgmt-monitor==3.0.0
+azure-mgmt-managedservices==6.0.0
+azure-mgmt-managementgroups==1.0.0
+azure-mgmt-network==19.1.0
+azure-mgmt-nspkg==2.0.0
+azure-mgmt-privatedns==1.0.0
+azure-mgmt-redis==13.0.0
+azure-mgmt-resource==21.1.0
+azure-mgmt-rdbms==10.2.0b12
+azure-mgmt-search==8.0.0
+azure-mgmt-servicebus==7.1.0
+azure-mgmt-sql==3.0.1
+azure-mgmt-storage==19.0.0
+azure-mgmt-trafficmanager==1.0.0b1
+azure-mgmt-web==6.1.0
+azure-nspkg==2.0.0
+azure-storage-blob==12.11.0
+azure-core==1.28.0
+azure-keyvault==4.2.0
+azure-mgmt-keyvault==10.0.0
+azure-mgmt-cosmosdb==6.4.0
+azure-mgmt-hdinsight==9.0.0
+azure-mgmt-devtestlabs==9.0.0
+azure-mgmt-loganalytics==12.0.0
+azure-mgmt-automation==1.0.0
+azure-mgmt-iothub==2.2.0
+azure-iot-hub==2.6.1
+azure-mgmt-recoveryservices==2.0.0
+azure-mgmt-recoveryservicesbackup==3.0.0
+azure-mgmt-notificationhubs==7.0.0
+azure-mgmt-eventhub==10.1.0
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_adapplication/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_adapplication/tasks/main.yml
index d19e0b8cd..86a06c675 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_adapplication/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_adapplication/tasks/main.yml
@@ -21,7 +21,7 @@
ansible.builtin.assert:
that: not output.changed
-- name: Create application with more parameter
+- name: Create application with more parameters
azure_rm_adapplication:
display_name: "{{ display_name }}-01"
sign_in_audience: AzureADandPersonalMicrosoftAccount
@@ -37,6 +37,16 @@
display_name: "{{ display_name }}_approle"
is_enabled: true
value: Password@0329
+ optional_claims:
+ access_token_claims:
+ - name: aud
+ essential: true
+ id_token_claims:
+ - name: acct
+ essential: true
+ saml2_token_claims:
+ - name: acct
+ essential: true
register: second_output
- name: Assert secondary resource create success
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aduser/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aduser/tasks/main.yml
index c02a263bb..24e686e52 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aduser/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aduser/tasks/main.yml
@@ -1,35 +1,50 @@
- name: Prepare facts
ansible.builtin.set_fact:
- user_id: "user{{ 999999999999999999994 | random | to_uuid }}@contoso.com"
- object_id: "{{ 999999999999999999994 | random | to_uuid }}"
- user_principal_name: "{{ 999999999999999999994 | random | to_uuid }}"
+ user_name: "test_user_{{ 999999999999999999994 | random | to_uuid }}"
+ on_premises_immutable_id: "{{ 999999999999999999994 | random | to_uuid }}"
+ password_profile: "{{ lookup('community.general.random_string', length=12, min_lower=1, min_upper=1, min_special=1, min_numeric=1) }}"
+ domain: change_me.com
run_once: true
- name: Create test user
azure_rm_aduser:
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
state: "present"
account_enabled: true
- display_name: "Test_{{ user_principal_name }}_Display_Name"
- password_profile: "password"
- mail_nickname: "Test_{{ user_principal_name }}_mail_nickname"
- immutable_id: "{{ object_id }}"
+ display_name: "{{ user_name }}_display_name"
+ password_profile: "{{ password_profile }}"
+ mail_nickname: "{{ user_name }}_mail_nickname"
+ on_premises_immutable_id: "{{ on_premises_immutable_id }}"
given_name: "First"
surname: "Last"
user_type: "Member"
usage_location: "US"
- mail: "{{ user_principal_name }}@contoso.com"
+ mail: "{{ user_name }}@{{ domain }}"
+ company_name: "Test Company"
+ on_premises_extension_attributes:
+ extension_attribute1: "test_extension_attribute1"
+ extension_attribute2: "test_extension_attribute2"
+ extension_attribute11: "test_extension_attribute11"
register: create_user_should_pass
- name: Try to update existing user - idempotent check
azure_rm_aduser:
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
state: "present"
- display_name: "Test_{{ user_principal_name }}_Display_Name"
- mail_nickname: "Test_{{ user_principal_name }}_mail_nickname"
+ account_enabled: true
+ display_name: "{{ user_name }}_display_name"
+ mail_nickname: "{{ user_name }}_mail_nickname"
+ on_premises_immutable_id: "{{ on_premises_immutable_id }}"
given_name: "First"
surname: "Last"
- mail: "{{ user_principal_name }}@contoso.com"
+ user_type: "Member"
+ usage_location: "US"
+ mail: "{{ user_name }}@{{ domain }}"
+ company_name: "Test Company"
+ on_premises_extension_attributes:
+ extension_attribute1: "test_extension_attribute1"
+ extension_attribute2: "test_extension_attribute2"
+ extension_attribute11: "test_extension_attribute11"
register: attempted_update_with_no_changes_should_pass
- name: Assert Nothing Changed
@@ -39,42 +54,49 @@
- name: User_principal_name Should Pass
azure_rm_aduser_info:
- user_principal_name: "{{ user_id }}"
- register: get_user_should_pass
+ user_principal_name: "{{ user_name }}@{{ domain }}"
+ register: get_user_by_upn_should_pass
+
+- name: Attribute_name mail Should Pass
+ azure_rm_aduser_info:
+ attribute_name: "mail"
+ attribute_value: "{{ user_name }}@{{ domain }}"
+ register: get_user_by_mail_should_pass
- name: Assert user was created and account is enabled
ansible.builtin.assert:
that:
- - "create_user_should_pass['ad_users'][0]['account_enabled'] == True"
- - "get_user_should_pass['ad_users'][0]['account_enabled'] == True"
+ - "create_user_should_pass['ad_user']['account_enabled'] == True"
+ - "get_user_by_upn_should_pass['ad_users'][0]['account_enabled'] == True"
+ - "get_user_by_mail_should_pass['ad_users'][0]['account_enabled'] == True"
- name: Update test user
azure_rm_aduser:
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
state: "present"
account_enabled: false
register: update_user_should_pass
- name: User_principal_name on updated user Should Pass
azure_rm_aduser_info:
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
register: get_updated_user_should_pass
- name: Assert user was updated and account is disabled
ansible.builtin.assert:
that:
- - "update_user_should_pass['ad_users'][0]['account_enabled'] == False"
+ - "update_user_should_pass['ad_user']['account_enabled'] == False"
- "get_updated_user_should_pass['ad_users'][0]['account_enabled'] == False"
- name: Delete test user
azure_rm_aduser:
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
state: "absent"
register: delete_user_should_pass
- name: User_principal_name Should Fail
azure_rm_aduser_info:
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
register: get_user_should_fail
ignore_errors: true
@@ -91,19 +113,19 @@
- name: Assert task failed
ansible.builtin.assert:
that:
- - "missing_any_identifiers is undefined"
+ - "missing_any_identifiers is defined"
- name: Too many identifiers Should Fail
azure_rm_aduser_info:
- user_principal_name: "{{ user_id }}"
- object_id: "{{ object_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
+ object_id: "{{ on_premises_immutable_id }}"
register: too_many_identifiers
ignore_errors: true
- name: Assert task failed
ansible.builtin.assert:
that:
- - "too_many_identifiers is undefined"
+ - "too_many_identifiers is defined"
- name: Missing attribute_value Should Fail
azure_rm_aduser_info:
@@ -114,27 +136,27 @@
- name: Assert task failed
ansible.builtin.assert:
that:
- - "missing_attribute_value is undefined"
+ - "missing_attribute_value is defined"
- name: Missing attribute_name Should Fail
azure_rm_aduser_info:
- attribute_value: SMTP:user@contoso.com
+ attribute_value: SMTP:user@stadtluzern.ch
register: missing_attribute_name
ignore_errors: true
- name: Assert task failed
ansible.builtin.assert:
that:
- - "missing_attribute_name is undefined"
+ - "missing_attribute_name is defined"
- name: Using all with principal name should fail
azure_rm_aduser_info:
all: true
- user_principal_name: "{{ user_id }}"
+ user_principal_name: "{{ user_name }}@{{ domain }}"
register: using_all_with_principal_name
ignore_errors: true
- name: Assert task failed
ansible.builtin.assert:
that:
- - "using_all_with_principal_name is undefined"
+ - "using_all_with_principal_name is defined"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/main.yml
index ff60ca681..d3377384c 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/main.yml
@@ -1,21 +1,46 @@
+- name: Gather Resource Group info
+ azure.azcollection.azure_rm_resourcegroup_info:
+ name: "{{ resource_group }}"
+ register: __rg_info
+
- name: Set varialbles
ansible.builtin.set_fact:
rpfx: "{{ resource_group | hash('md5') | truncate(8, True, '') }}"
noderpfx: "{{ resource_group | hash('md5') | truncate(4, True, '') }}"
+ location: "{{ __rg_info.resourcegroups.0.location }}"
+
+- name: Create User Managed Identity
+ azure_rm_resource:
+ resource_group: "{{ resource_group }}"
+ provider: ManagedIdentity
+ resource_type: userAssignedIdentities
+ resource_name: "{{ item }}"
+ api_version: "2023-01-31"
+ body:
+ location: "{{ location }}"
+ state: present
+ loop:
+ - "ansible-test-aks-identity"
+ - "ansible-test-aks-identity-2"
+
+- name: Set identity IDs to test. Identities ansible-test-aks-identity and ansible-test-aks-identity-2 are created in the previous task
+ ansible.builtin.set_fact:
+ user_identity: "/subscriptions/{{ azure_subscription_id }}/resourcegroups/{{ resource_group }}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/ansible-test-aks-identity"
+ user_identity_2: "/subscriptions/{{ azure_subscription_id }}/resourcegroups/{{ resource_group }}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/ansible-test-aks-identity-2"
- name: Include aks tasks
ansible.builtin.include_tasks: minimal-cluster.yml
- name: Find available k8s version
azure_rm_aksversion_info:
- location: eastus
+ location: "{{ location }}"
register: versions
- name: Create an AKS instance (check mode)
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
service_principal:
@@ -56,7 +81,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
service_principal:
@@ -107,7 +132,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
service_principal:
@@ -167,7 +192,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
service_principal:
@@ -198,7 +223,7 @@
- name: Get available version
azure_rm_aksversion_info:
- location: eastus
+ location: "{{ location }}"
version: "{{ versions.azure_aks_versions[0] }}"
register: version1
@@ -206,7 +231,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -246,7 +271,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -282,7 +307,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -323,7 +348,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -360,7 +385,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -408,7 +433,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -449,7 +474,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -497,7 +522,7 @@
azure_rm_aks:
name: "aks{{ rpfx }}"
resource_group: "{{ resource_group }}"
- location: eastus
+ location: "{{ location }}"
dns_prefix: "aks{{ rpfx }}"
kubernetes_version: "{{ version1.azure_aks_versions[0] }}"
service_principal:
@@ -576,3 +601,15 @@
ansible.builtin.assert:
that:
- "fact.aks | length == 0"
+
+- name: Destroy User Managed Identity
+ azure_rm_resource:
+ resource_group: "{{ resource_group }}"
+ provider: ManagedIdentity
+ resource_type: userAssignedIdentities
+ resource_name: "{{ item }}"
+ api_version: "2023-01-31"
+ state: absent
+ loop:
+ - "ansible-test-aks-identity"
+ - "ansible-test-aks-identity-2"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/minimal-cluster.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/minimal-cluster.yml
index 79362b384..3c12fe9b3 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/minimal-cluster.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aks/tasks/minimal-cluster.yml
@@ -4,13 +4,13 @@
- name: Find available k8s version
azure_rm_aksversion_info:
- location: eastus
+ location: "{{ location }}"
register: versions
- name: Use minimal parameters and system-assigned identity
azure_rm_aks:
name: "minimal{{ rpfx }}"
- location: eastus
+ location: "{{ location }}"
resource_group: "{{ resource_group }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
dns_prefix: "aks{{ rpfx }}"
@@ -55,11 +55,13 @@
- name: Use minimal parameters and system-assigned identity (idempotent)
azure_rm_aks:
name: "minimal{{ rpfx }}"
- location: eastus
+ location: "{{ location }}"
resource_group: "{{ resource_group }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
dns_prefix: "aks{{ rpfx }}"
enable_rbac: true
+ identity:
+ type: "SystemAssigned"
aad_profile:
managed: true
agent_pool_profiles:
@@ -83,10 +85,142 @@
that:
- not output.changed
+- name: Use minimal parameters and user-assigned identity
+ azure_rm_aks:
+ name: "minimal{{ rpfx }}"
+ location: "{{ location }}"
+ resource_group: "{{ resource_group }}"
+ kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
+ dns_prefix: "aks{{ rpfx }}"
+ enable_rbac: true
+ identity:
+ type: "UserAssigned"
+ user_assigned_identities: "{{ user_identity }}"
+ aad_profile:
+ managed: true
+ agent_pool_profiles:
+ - name: default
+ count: 1
+ vm_size: Standard_B2s
+ mode: System
+ api_server_access_profile:
+ authorized_ip_ranges:
+ - "192.0.2.0"
+ - "198.51.100.0"
+ - "203.0.113.0"
+ enable_private_cluster: false
+ network_profile:
+ load_balancer_sku: standard
+ outbound_type: loadBalancer
+ register: output
+
+- name: Assert the AKS instance is created successfully
+ ansible.builtin.assert:
+ that:
+ - output.changed
+ - output.provisioning_state == 'Succeeded'
+
+- name: Get AKS fact
+ azure_rm_aks_info:
+ name: "minimal{{ rpfx }}"
+ resource_group: "{{ resource_group }}"
+ register: fact
+
+- name: Assert fact returns the created one
+ ansible.builtin.assert:
+ that:
+ - "fact.aks | length == 1"
+ - fact.aks[0].id == output.id
+ - fact.aks[0].aad_profile.managed == true
+ - user_identity in fact.aks[0].identity.user_assigned_identities
+
+- name: Use minimal parameters and user-assigned identity (idempotent)
+ azure_rm_aks:
+ name: "minimal{{ rpfx }}"
+ location: "{{ location }}"
+ resource_group: "{{ resource_group }}"
+ kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
+ dns_prefix: "aks{{ rpfx }}"
+ enable_rbac: true
+ identity:
+ type: "UserAssigned"
+ user_assigned_identities: "{{ user_identity }}"
+ aad_profile:
+ managed: true
+ agent_pool_profiles:
+ - name: default
+ count: 1
+ vm_size: Standard_B2s
+ mode: System
+ api_server_access_profile:
+ authorized_ip_ranges:
+ - "192.0.2.0"
+ - "198.51.100.0"
+ - "203.0.113.0"
+ enable_private_cluster: false
+ network_profile:
+ load_balancer_sku: standard
+ outbound_type: loadBalancer
+ register: output
+
+- name: Assert idempotent
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+
+- name: Use minimal parameters and the second user-assigned identity
+ azure_rm_aks:
+ name: "minimal{{ rpfx }}"
+ location: "{{ location }}"
+ resource_group: "{{ resource_group }}"
+ kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
+ dns_prefix: "aks{{ rpfx }}"
+ enable_rbac: true
+ identity:
+ type: "UserAssigned"
+ user_assigned_identities: "{{ user_identity_2 }}"
+ aad_profile:
+ managed: true
+ agent_pool_profiles:
+ - name: default
+ count: 1
+ vm_size: Standard_B2s
+ mode: System
+ api_server_access_profile:
+ authorized_ip_ranges:
+ - "192.0.2.0"
+ - "198.51.100.0"
+ - "203.0.113.0"
+ enable_private_cluster: false
+ network_profile:
+ load_balancer_sku: standard
+ outbound_type: loadBalancer
+ register: output
+
+- name: Assert the AKS instance is created successfully
+ ansible.builtin.assert:
+ that:
+ - output.changed
+ - output.provisioning_state == 'Succeeded'
+
+- name: Get AKS fact
+ azure_rm_aks_info:
+ name: "minimal{{ rpfx }}"
+ resource_group: "{{ resource_group }}"
+ register: fact
+
+- name: Assert fact returns the created one
+ ansible.builtin.assert:
+ that:
+ - "fact.aks | length == 1"
+ - fact.aks[0].id == output.id
+ - fact.aks[0].aad_profile.managed == true
+ - user_identity_2 in fact.aks[0].identity.user_assigned_identities
+
- name: Update api_server_access_profile config
azure_rm_aks:
name: "minimal{{ rpfx }}"
- location: eastus
+ location: "{{ location }}"
resource_group: "{{ resource_group }}"
kubernetes_version: "{{ versions.azure_aks_versions[0] }}"
dns_prefix: "aks{{ rpfx }}"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aksagentpool/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aksagentpool/tasks/main.yml
index 7c255f9ad..20a5a8e76 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aksagentpool/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_aksagentpool/tasks/main.yml
@@ -7,6 +7,25 @@
location: eastus
register: versions
+- name: Create proximity placement group
+ azure_rm_proximityplacementgroup:
+ resource_group: "{{ resource_group }}"
+ name: "proxi{{ rpfx }}"
+ register: proxi_output
+
+- name: Create public ip prefix
+ azure_rm_publicipprefix:
+ resource_group: "{{ resource_group }}"
+ name: "pipprefix{{ rpfx }}"
+ zones:
+ - 1
+ public_ip_address_version: IPV4
+ prefix_length: 29
+ sku:
+ name: Standard
+ tier: Regional
+ register: pip_output
+
- name: Create a kubernet service with minimal parameters
azure_rm_aks:
name: "min{{ rpfx }}"
@@ -24,6 +43,7 @@
mode: System
api_server_access_profile:
authorized_ip_ranges:
+ - "{{ pip_output.state.ip_prefix }}"
- "192.0.2.0"
- "198.51.100.0"
- "203.0.113.0"
@@ -194,6 +214,97 @@
- output.aks_agent_pools[0].max_pods == 42
- output.aks_agent_pools[0].orchestrator_version == agentpool_version.azure_orchestrator_version[0]
+- name: Create a new agent pool with multiple parameters
+ azure_rm_aksagentpool:
+ resource_group: "{{ resource_group }}"
+ cluster_name: "min{{ rpfx }}"
+ name: default06
+ count: 1
+ vm_size: Standard_B2s
+ type_properties_type: VirtualMachineScaleSets
+ mode: System
+ node_labels: {"release":"stable"}
+ max_pods: 42
+ enable_auto_scaling: true
+ min_count: 1
+ max_count: 10
+ orchestrator_version: "{{ agentpool_version.azure_orchestrator_version[0] }}"
+ availability_zones:
+ - 1
+ kubelet_disk_type: OS
+ workload_runtime: OCIContainer
+ os_sku: Ubuntu
+ scale_down_mode: Delete
+ upgrade_settings:
+ max_surge: 50%
+ power_state:
+ code: Running
+ enable_node_public_ip: true
+ scale_set_priority: Regular
+ node_public_ip_prefix_id: "{{ pip_output.state.id }}"
+ spot_max_price: 85
+ proximity_placement_group_id: "{{ proxi_output.state.id }}"
+ enable_encryption_at_host: false
+ enable_ultra_ssd: false
+ enable_fips: true
+ tags:
+ key2: value2
+ register: output
+
+- name: Assert the node agent pool is created successfully
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Create a new agent pool with multiple parameters (idempotent test)
+ azure_rm_aksagentpool:
+ resource_group: "{{ resource_group }}"
+ cluster_name: "min{{ rpfx }}"
+ name: default06
+ count: 1
+ vm_size: Standard_B2s
+ type_properties_type: VirtualMachineScaleSets
+ mode: System
+ node_labels: {"release":"stable"}
+ max_pods: 42
+ enable_auto_scaling: true
+ min_count: 1
+ max_count: 10
+ orchestrator_version: "{{ agentpool_version.azure_orchestrator_version[0] }}"
+ availability_zones:
+ - 1
+ kubelet_disk_type: OS
+ workload_runtime: OCIContainer
+ os_sku: Ubuntu
+ scale_down_mode: Delete
+ upgrade_settings:
+ max_surge: 50%
+ power_state:
+ code: Running
+ enable_node_public_ip: true
+ scale_set_priority: Regular
+ node_public_ip_prefix_id: "{{ pip_output.state.id }}"
+ spot_max_price: 85
+ proximity_placement_group_id: "{{ proxi_output.state.id }}"
+ enable_encryption_at_host: false
+ enable_ultra_ssd: false
+ enable_fips: true
+ tags:
+ key2: value2
+ register: output
+
+- name: Assert the node agent pool is unchanged
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+
+- name: Delete node agent pool
+ azure_rm_aksagentpool:
+ resource_group: "{{ resource_group }}"
+ cluster_name: "min{{ rpfx }}"
+ name: default06
+ state: absent
+
- name: Delete node agent pool
azure_rm_aksagentpool:
resource_group: "{{ resource_group }}"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cognitivesearch/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cognitivesearch/tasks/main.yml
index bcc19ced0..0b8c3edfa 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cognitivesearch/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cognitivesearch/tasks/main.yml
@@ -139,7 +139,7 @@
hosting_mode: default
identity: SystemAssigned
network_rule_set:
- - 8.8.8.8/31
+ - 8.8.8.8
- 1.1.1.1
partition_count: 2
public_network_access: disabled
@@ -171,7 +171,7 @@
hosting_mode: default
identity: SystemAssigned
network_rule_set:
- - 8.8.8.8/31
+ - 8.8.8.8
- 1.1.1.1
partition_count: 2
public_network_access: disabled
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cosmosdbaccount/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cosmosdbaccount/tasks/main.yml
index 5e1f3f4d8..64aaef57c 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cosmosdbaccount/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_cosmosdbaccount/tasks/main.yml
@@ -29,9 +29,9 @@
azure_rm_cosmosdbaccount:
resource_group: "{{ resource_group }}"
name: "{{ dbname }}"
- location: eastasia
+ location: eastus
geo_rep_locations:
- - name: eastasia
+ - name: eastus
failover_priority: 0
database_account_offer_type: Standard
check_mode: true
@@ -45,10 +45,10 @@
azure_rm_cosmosdbaccount:
resource_group: "{{ resource_group }}"
name: "{{ dbname }}"
- location: eastasia
+ location: eastus
kind: global_document_db
geo_rep_locations:
- - name: eastasia
+ - name: eastus
failover_priority: 0
- name: westus
failover_priority: 1
@@ -70,10 +70,10 @@
azure_rm_cosmosdbaccount:
resource_group: "{{ resource_group }}"
name: "{{ dbname }}"
- location: eastasia
+ location: eastus
kind: global_document_db
geo_rep_locations:
- - name: eastasia
+ - name: eastus
failover_priority: 0
- name: westus
failover_priority: 1
@@ -95,10 +95,10 @@
azure_rm_cosmosdbaccount:
resource_group: "{{ resource_group }}"
name: "{{ dbname }}"
- location: eastasia
+ location: eastus
kind: global_document_db
geo_rep_locations:
- - name: eastasia
+ - name: eastus
failover_priority: 0
- name: westus
failover_priority: 1
@@ -121,10 +121,10 @@
azure_rm_cosmosdbaccount:
resource_group: "{{ resource_group_secondary }}"
name: "{{ db2name }}"
- location: eastasia
+ location: eastus
kind: global_document_db
geo_rep_locations:
- - name: eastasia
+ - name: eastus
failover_priority: 0
- name: westus
failover_priority: 1
@@ -155,7 +155,7 @@
- output.accounts[0]['id'] != None
- output.accounts[0]['resource_group'] == resource_group
- output.accounts[0]['name'] == dbname
- - output.accounts[0]['location'] == 'eastasia'
+ - output.accounts[0]['location'] == 'eastus'
- output.accounts[0]['kind'] != None
- output.accounts[0]['consistency_policy'] != None
- output.accounts[0]['failover_policies'] != None
@@ -221,7 +221,7 @@
- output.accounts[0]['id'] != None
- output.accounts[0]['resource_group'] == resource_group
- output.accounts[0]['name'] == dbname
- - output.accounts[0]['location'] == 'eastasia'
+ - output.accounts[0]['location'] == 'eastus'
- output.accounts[0]['kind'] != None
- output.accounts[0]['consistency_policy'] != None
- output.accounts[0]['failover_policies'] != None
@@ -258,7 +258,7 @@
azure_rm_cosmosdbaccount:
resource_group: "{{ resource_group }}"
name: "{{ dbname }}-free4"
- location: eastasia
+ location: eastus
kind: mongo_db
mongo_version: "4.0"
enable_free_tier: "{{ free_tier_supported }}"
@@ -267,7 +267,7 @@
- "1.1.1.1"
- "2.2.2.2/28"
geo_rep_locations:
- - name: eastasia
+ - name: eastus
failover_priority: 0
- name: westus
failover_priority: 1
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml
deleted file mode 100644
index 8dc08f8b7..000000000
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/tasks/main.yml
+++ /dev/null
@@ -1,199 +0,0 @@
-- name: Create data lake store name
- ansible.builtin.set_fact:
- adl_name: "adl{{ resource_group_datalake | hash('md5') | truncate(21, True, '') }}"
- vnet_name: "vnet{{ resource_group_datalake | hash('md5') | truncate(20, True, '') }}"
-
-- name: Create virtual network
- azure_rm_virtualnetwork:
- name: "{{ vnet_name }}"
- resource_group: "{{ resource_group_datalake }}"
- address_prefixes_cidr:
- - 10.1.0.0/16
- register: vnet_output
-
-- name: Create subnet
- azure_rm_subnet:
- name: foobar
- virtual_network_name: "{{ vnet_name }}"
- resource_group: "{{ resource_group_datalake }}"
- address_prefix_cidr: "10.1.1.0/24"
- service_endpoints:
- - service: Microsoft.AzureActiveDirectory
- register: subnet_output
-
-- name: Create minimal data lake store
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- register: output
-
-- name: Assert status succeeded and results
- ansible.builtin.assert:
- that:
- - output.changed
- - output.state.id is defined
- - output.state.account_id is defined
- - output.state.creation_time is defined
- - output.state.current_tier == "Consumption"
- - output.state.encryption_state == "Enabled"
- - output.state.endpoint == "{{ adl_name }}.azuredatalakestore.net"
- - output.state.firewall_allow_azure_ips == "Disabled"
- - output.state.firewall_rules | length == 0
- - output.state.firewall_state == "Disabled"
- - output.state.last_modified_time is defined
- - output.state.new_tier == "Consumption"
- - output.state.provisioning_state == "Succeeded"
- - output.state.trusted_id_provider_state == "Disabled"
-
-- name: Create minimal data lake store (Idempotence)
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- register: output
-
-- name: Assert that status has not changed
- ansible.builtin.assert:
- that:
- - not output.changed
-
-- name: Update data lake store to add virtual_network_rules
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- virtual_network_rules:
- - name: vnet_rule_1
- subnet_id: "{{ subnet_output.state.id }}"
- register: output
-
-- name: Assert status succeeded and results include virtual_network_rules
- ansible.builtin.assert:
- that:
- - output.changed
- - output.state.virtual_network_rules | length == 1
- - output.state.virtual_network_rules[0].name == "vnet_rule_1"
- - output.state.virtual_network_rules[0].subnet_id == subnet_output.state.id
-
-- name: Update data lake store to change encryption state that must fail
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- encryption_state: Disabled
- register: output
- ignore_errors: true
-
-- name: Assert that encryption state cannot change
- ansible.builtin.assert:
- that:
- - not output.changed
- - output.msg == 'Encryption type cannot be updated.'
-
-- name: Update data lake store to add new_tier
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- new_tier: Commitment_1TB
- register: output
-
-- name: Assert status succeeded and results include virtual_network_rules
- ansible.builtin.assert:
- that:
- - output.changed
- - output.state.current_tier == "Consumption"
- - output.state.new_tier == "Commitment_1TB"
-
-- name: Delete minimal data lake store
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- state: absent
- register: output
-
-- name: Create new data lake store
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- tags:
- P1: V1
- P2: V4
- P3: V3
- new_tier: Commitment_1TB
- default_group: default_group_test
- encryption_state: Enabled
- firewall_state: Enabled
- firewall_allow_azure_ips: Enabled
- firewall_rules:
- - name: test_rule_1
- start_ip_address: 192.168.1.1
- end_ip_address: 192.168.1.254
- - name: test_rule_2
- start_ip_address: 10.0.0.1
- end_ip_address: 10.1.0.1
- virtual_network_rules:
- - name: vnet_rule_1
- subnet_id: "{{ subnet_output.state.id }}"
- register: output
-
-- name: Assert status succeeded and results include an Id value
- ansible.builtin.assert:
- that:
- - output.changed
- - output.state.id is defined
- - output.state.account_id is defined
- - output.state.creation_time is defined
- - output.state.current_tier == "Commitment_1TB"
- - output.state.default_group == "default_group_test"
- - output.state.encryption_state == "Enabled"
- - output.state.endpoint == "{{ adl_name }}.azuredatalakestore.net"
- - output.state.firewall_allow_azure_ips == "Enabled"
- - output.state.firewall_rules | length == 2
- - output.state.firewall_state == "Enabled"
- - output.state.last_modified_time is defined
- - output.state.new_tier == "Commitment_1TB"
- - output.state.provisioning_state == "Succeeded"
- - output.state.tags | length == 3
- - output.state.trusted_id_provider_state == "Disabled"
- - output.state.virtual_network_rules | length == 1
- - output.state.virtual_network_rules[0].name == "vnet_rule_1"
- - output.state.virtual_network_rules[0].subnet_id == subnet_output.state.id
-
-- name: Create new data lake store (Idempotence)
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- tags:
- P1: V1
- P2: V4
- P3: V3
- new_tier: Commitment_1TB
- default_group: default_group_test
- encryption_state: Enabled
- firewall_state: Enabled
- firewall_allow_azure_ips: Enabled
- firewall_rules:
- - name: test_rule_1
- start_ip_address: 192.168.1.1
- end_ip_address: 192.168.1.254
- - name: test_rule_2
- start_ip_address: 10.0.0.1
- end_ip_address: 10.1.0.1
- virtual_network_rules:
- - name: vnet_rule_1
- subnet_id: "{{ subnet_output.state.id }}"
- register: output
-
-- name: Assert that status has not changed
- ansible.builtin.assert:
- that:
- - not output.changed
-
-- name: Delete virtual network
- azure_rm_virtualnetwork:
- name: "{{ vnet_name }}"
- resource_group: "{{ resource_group_datalake }}"
- state: absent
-
-- name: Delete Data Lake Store
- azure_rm_datalakestore:
- resource_group: "{{ resource_group_datalake }}"
- name: "{{ adl_name }}"
- state: absent
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_eventhub/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_eventhub/tasks/main.yml
index 62e60e95b..579d326d8 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_eventhub/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_eventhub/tasks/main.yml
@@ -76,10 +76,10 @@
namespace_name: "{{ namespace_name }}"
name: "{{ name_rpfx }}"
resource_group: "{{ resource_group }}"
- message_retention_in_days: 4
+ message_retention_in_days: 1
state: present
tags:
- test: modified
+ test1: modified1
register: results
- name: Assert the event hub updated
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_expressroute/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_expressroute/tasks/main.yml
index 3d70ddd82..58bda9cce 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_expressroute/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_expressroute/tasks/main.yml
@@ -14,9 +14,9 @@
authorizations:
- name: authorization_test
service_provider_properties:
- service_provider_name: Aryaka Networks
- peering_location: Seattle
- bandwidth_in_mbps: '200'
+ service_provider_name: Telstra Test
+ peering_location: Denver Test
+ bandwidth_in_mbps: 1000
sku:
tier: premium
family: metereddata
@@ -40,9 +40,9 @@
authorizations:
- name: authorization_test
service_provider_properties:
- service_provider_name: Aryaka Networks
- peering_location: Seattle
- bandwidth_in_mbps: '200'
+ service_provider_name: Telstra Test
+ peering_location: Denver Test
+ bandwidth_in_mbps: 1000
sku:
tier: premium
family: metereddata
@@ -65,9 +65,9 @@
authorizations:
- name: authorization_test
service_provider_properties:
- service_provider_name: Aryaka Networks
- peering_location: Seattle
- bandwidth_in_mbps: '200'
+ service_provider_name: Telstra Test
+ peering_location: Denver Test
+ bandwidth_in_mbps: 1000
sku:
tier: premium
family: metereddata
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_image/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_image/tasks/main.yml
index 2fc543091..3e9e32f47 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_image/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_image/tasks/main.yml
@@ -33,9 +33,12 @@
azure_rm_networkinterface:
resource_group: "{{ resource_group }}"
name: "{{ vm_name }}"
+ ip_configurations:
+ - name: default
+ public_ip_address_name: "{{ public_ip_name }}"
+ primary: true
virtual_network: "{{ vm_name }}"
subnet: "{{ vm_name }}"
- public_ip_name: "{{ public_ip_name }}"
security_group: "{{ security_group_name }}"
- name: Create virtual machine
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_loadbalancer/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_loadbalancer/tasks/main.yml
index 913e618d0..692590994 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_loadbalancer/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_loadbalancer/tasks/main.yml
@@ -32,7 +32,9 @@
azure_rm_loadbalancer:
resource_group: '{{ resource_group }}'
name: "{{ lbname_a }}"
- public_ip: "{{ pipaname }}"
+ frontend_ip_configurations:
+ - name: frontendip0
+ public_ip_address: "{{ pipaname }}"
check_mode: true
register: output
@@ -44,7 +46,9 @@
azure_rm_loadbalancer:
resource_group: '{{ resource_group }}'
name: "{{ lbname_a }}"
- public_ip: "{{ pipaname }}"
+ frontend_ip_configurations:
+ - name: frontendip0
+ public_ip_address: "{{ pipaname }}"
register: output
- name: Assert load balancer created
@@ -55,7 +59,9 @@
azure_rm_loadbalancer:
resource_group: '{{ resource_group }}'
name: "{{ lbname_a }}"
- public_ip: "{{ pipaname }}"
+ frontend_ip_configurations:
+ - name: frontendip0
+ public_ip_address: "{{ pipaname }}"
register: output
- name: Assert no change
@@ -90,20 +96,34 @@
resource_group: '{{ resource_group }}'
name: "{{ lbname_b }}"
sku: Standard
- public_ip_address: "{{ pipbname }}"
- probe_protocol: Tcp
- probe_port: 80
- probe_interval: 10
- probe_fail_count: 3
- protocol: Tcp
- load_distribution: Default
- frontend_port: 80
- backend_port: 8080
- idle_timeout: 4
- natpool_frontend_port_start: 30
- natpool_frontend_port_end: 40
- natpool_backend_port: 80
- natpool_protocol: Tcp
+ frontend_ip_configurations:
+ - name: frontendip0
+ public_ip_address: "{{ pipbname }}"
+ backend_address_pools:
+ - name: backendaddrp0
+ probes:
+ - name: prob0
+ port: 80
+ protocol: Tcp
+ interval: 10
+ fail_count: 3
+ inbound_nat_pools:
+ - name: inboundnatp0
+ frontend_ip_configuration_name: frontendip0
+ protocol: Tcp
+ frontend_port_range_start: 30
+ frontend_port_range_end: 40
+ backend_port: 80
+ load_balancing_rules:
+ - name: lbr
+ frontend_ip_configuration: frontendip0
+ backend_address_pool: backendaddrp0
+ probe: prob0
+ frontend_port: 80
+ backend_port: 8080
+ idle_timeout: 4
+ load_distribution: Default
+ protocol: Tcp
register: output
- name: Assert complex load balancer created
@@ -117,20 +137,34 @@
resource_group: '{{ resource_group }}'
name: "{{ lbname_b }}"
sku: Standard
- public_ip_address: "{{ pipbname }}"
- probe_protocol: Tcp
- probe_port: 80
- probe_interval: 10
- probe_fail_count: 3
- protocol: Tcp
- load_distribution: Default
- frontend_port: 80
- backend_port: 8080
- idle_timeout: 4
- natpool_frontend_port_start: 30
- natpool_frontend_port_end: 40
- natpool_backend_port: 80
- natpool_protocol: Tcp
+ frontend_ip_configurations:
+ - name: frontendip0
+ public_ip_address: "{{ pipbname }}"
+ backend_address_pools:
+ - name: backendaddrp0
+ probes:
+ - name: prob0
+ port: 80
+ protocol: Tcp
+ interval: 10
+ fail_count: 3
+ inbound_nat_pools:
+ - name: inboundnatp0
+ frontend_ip_configuration_name: frontendip0
+ protocol: Tcp
+ frontend_port_range_start: 30
+ frontend_port_range_end: 40
+ backend_port: 80
+ load_balancing_rules:
+ - name: lbr
+ frontend_ip_configuration: frontendip0
+ backend_address_pool: backendaddrp0
+ probe: prob0
+ frontend_port: 80
+ backend_port: 8080
+ idle_timeout: 4
+ load_distribution: Default
+ protocol: Tcp
register: output
- name: Assert that output has not changed
@@ -143,20 +177,34 @@
resource_group: '{{ resource_group }}'
name: "{{ lbname_b }}"
sku: Standard
- public_ip_address: "{{ pipbname }}"
- probe_protocol: Tcp
- probe_port: 80
- probe_interval: 10
- probe_fail_count: 3
- protocol: Tcp
- load_distribution: Default
- frontend_port: 81
- backend_port: 8080
- idle_timeout: 4
- natpool_frontend_port_start: 30
- natpool_frontend_port_end: 40
- natpool_backend_port: 80
- natpool_protocol: Tcp
+ frontend_ip_configurations:
+ - name: frontendip0
+ public_ip_address: "{{ pipbname }}"
+ backend_address_pools:
+ - name: backendaddrp0
+ probes:
+ - name: prob0
+ port: 80
+ protocol: Tcp
+ interval: 10
+ fail_count: 3
+ inbound_nat_pools:
+ - name: inboundnatp0
+ frontend_ip_configuration_name: frontendip0
+ protocol: Tcp
+ frontend_port_range_start: 30
+ frontend_port_range_end: 40
+ backend_port: 80
+ load_balancing_rules:
+ - name: lbr
+ frontend_ip_configuration: frontendip0
+ backend_address_pool: backendaddrp0
+ probe: prob0
+ frontend_port: 81
+ backend_port: 8080
+ idle_timeout: 4
+ load_distribution: Default
+ protocol: Tcp
register: output
- name: Assert that output has changed
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/aliases b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/aliases
new file mode 100644
index 000000000..aa77c071a
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/aliases
@@ -0,0 +1,3 @@
+cloud/azure
+shippable/azure/group2
+destructive
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/meta/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/meta/main.yml
index 95e1952f9..95e1952f9 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/meta/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/meta/main.yml
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/tasks/main.yml
new file mode 100644
index 000000000..d7447111c
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_localnetworkgateway/tasks/main.yml
@@ -0,0 +1,114 @@
+- name: Set local network gateway name prefix
+ ansible.builtin.set_fact:
+ rpfx: "{{ resource_group | hash('md5') | truncate(21, True, '') }}"
+
+- name: Create a new local network gateway (Check mode)
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "{{ rpfx }}"
+ local_network_address_space:
+ address_prefixes:
+ - 10.0.0.0/24
+ - 20.0.0.0/24
+ fqdn: testfqdn.com
+ bgp_settings:
+ asn: 8
+ bgp_peering_address: 10.3.0.1
+ peer_weight: 3
+ tags:
+ key1: value1
+ check_mode: true
+
+- name: Create a new local network gateway
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "{{ rpfx }}"
+ local_network_address_space:
+ address_prefixes:
+ - 10.0.0.0/24
+ - 20.0.0.0/24
+ fqdn: testfqdn.com
+ bgp_settings:
+ asn: 8
+ bgp_peering_address: 10.3.0.1
+ peer_weight: 3
+ tags:
+ key1: value1
+ register: output
+
+- name: Assert the local network gateway is created successfully
+ ansible.builtin.assert:
+ that:
+ - output.changed
+ - output.state.provisioning_state == 'Succeeded'
+
+- name: Create a new local network gateway (idempotent test)
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "{{ rpfx }}"
+ local_network_address_space:
+ address_prefixes:
+ - 10.0.0.0/24
+ - 20.0.0.0/24
+ fqdn: testfqdn.com
+ bgp_settings:
+ asn: 8
+ bgp_peering_address: 10.3.0.1
+ peer_weight: 3
+ tags:
+ key1: value1
+ register: output
+
+- name: Assert the local network gateway is unchanged
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+
+- name: Create a new local network gateway (update test)
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "{{ rpfx }}"
+ local_network_address_space:
+ address_prefixes:
+ - 10.0.0.0/24
+ - 30.0.0.0/24
+ fqdn: testfqdn1.com
+ bgp_settings:
+ asn: 10
+ bgp_peering_address: 20.3.0.1
+ peer_weight: 5
+ tags:
+ key1: value1
+ register: output
+
+- name: Assert the local network gateway updated
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Get a new local network gateway
+ azure_rm_localnetworkgateway_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ rpfx }}"
+ register: output
+
+- name: Assert the local network gateway facts
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+ - output.state[0].bgp_settings.asn == 10
+ - output.state[0].bgp_settings.peer_weight == 5
+ - "output.state[0].tags | length == 1"
+ - "output.state[0].local_network_address_space.address_prefixes | length == 3"
+
+- name: Delete the local network gateway
+ azure_rm_localnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "{{ rpfx }}"
+ state: absent
+ register: output
+
+- name: Assert the local network gateway is deleted
+ ansible.builtin.assert:
+ that:
+ - output.changed
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mariadbserver/aliases b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mariadbserver/aliases
index b586dc7c3..0cfc7d52b 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mariadbserver/aliases
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mariadbserver/aliases
@@ -6,3 +6,4 @@ azure_rm_mariadbdatabase
azure_rm_mariadbdatabase_facts
azure_rm_mariadbfirewallrule
azure_rm_mariadbfirewallrule_facts
+disabled
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mysqlserver/aliases b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mysqlserver/aliases
index 21e7a127b..98ddd39ff 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mysqlserver/aliases
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_mysqlserver/aliases
@@ -8,3 +8,4 @@ azure_rm_mysqlfirewallrule
azure_rm_mysqlfirewallrule_facts
azure_rm_mysqlconfiguration
azure_rm_mysqlconfiguration_facts
+disabled
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_networkinterface/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_networkinterface/tasks/main.yml
index e47906d72..4955230f2 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_networkinterface/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_networkinterface/tasks/main.yml
@@ -87,8 +87,7 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
- public_ip: false
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: false
register: output
check_mode: true
@@ -105,8 +104,7 @@
virtual_network:
name: "tn{{ rpfx }}"
resource_group: "{{ resource_group_secondary }}"
- subnet: "tn{{ rpfx }}"
- public_ip: false
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: false
register: output
@@ -146,7 +144,7 @@
private_ip_address: "{{ facts.networkinterfaces[0].ip_configurations[0].private_ip_address }}"
private_ip_allocation_method: "{{ facts.networkinterfaces[0].ip_configurations[0].private_ip_allocation_method }}"
primary: "{{ facts.networkinterfaces[0].ip_configurations[0].primary }}"
- subnet: "{{ facts.networkinterfaces[0].subnet }}"
+ subnet_name: "{{ facts.networkinterfaces[0].subnet }}"
register: output
- name: Assert the NIC created
@@ -159,9 +157,8 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: false
- public_ip: false
register: output
- name: Assert NIC created
@@ -174,8 +171,7 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
- public_ip: false
+ subnet_name: "tn{{ rpfx }}"
security_group: "tn{{ rpfx }}sg"
register: output
check_mode: true
@@ -190,8 +186,10 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
- public_ip_address_name: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
+ ip_configurations:
+ - name: default
+ public_ip_address_name: "tn{{ rpfx }}"
create_with_security_group: false
register: output
check_mode: true
@@ -206,10 +204,9 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
enable_accelerated_networking: true
create_with_security_group: false
- public_ip: false
register: output
check_mode: true
@@ -223,10 +220,9 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: false
enable_ip_forwarding: true
- public_ip: false
register: output
check_mode: true
@@ -240,9 +236,8 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: false
- public_ip: false
dns_servers:
- 8.9.10.11
- 7.8.9.10
@@ -259,7 +254,7 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
enable_accelerated_networking: true
enable_ip_forwarding: true
security_group: "tn{{ rpfx }}sg"
@@ -300,7 +295,7 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
enable_accelerated_networking: true
security_group: "tn{{ rpfx }}sg"
enable_ip_forwarding: true
@@ -355,7 +350,7 @@
resource_group: "{{ resource_group }}"
name: "tn{{ rpfx }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
security_group: "tn{{ rpfx }}sg"
enable_accelerated_networking: true
enable_ip_forwarding: true
@@ -462,9 +457,8 @@
resource_group: "{{ resource_group }}"
name: "{{ nic_name1 }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: true
- public_ip: false
ip_configurations:
- name: ipconfig1
application_security_groups:
@@ -483,9 +477,8 @@
resource_group: "{{ resource_group }}"
name: "{{ nic_name1 }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: true
- public_ip: false
ip_configurations:
- name: ipconfig1
application_security_groups:
@@ -504,9 +497,8 @@
resource_group: "{{ resource_group }}"
name: "{{ nic_name1 }}"
virtual_network: "{{ vn.state.id }}"
- subnet: "tn{{ rpfx }}"
+ subnet_name: "tn{{ rpfx }}"
create_with_security_group: true
- public_ip: false
ip_configurations:
- name: ipconfig1
application_security_groups:
@@ -598,7 +590,6 @@
virtual_network: "tn{{ rpfx }}"
subnet_name: "nic-appgw-subnet{{ rpfx }}"
create_with_security_group: false
- public_ip: false
ip_configurations:
- name: "default"
primary: true
@@ -621,7 +612,6 @@
virtual_network: "tn{{ rpfx }}"
subnet_name: "nic-appgw-subnet{{ rpfx }}"
create_with_security_group: false
- public_ip: false
ip_configurations:
- name: "default"
primary: true
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_postgresqlflexibleserver/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_postgresqlflexibleserver/tasks/main.yml
index 2add54e1e..9880cdc3c 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_postgresqlflexibleserver/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_postgresqlflexibleserver/tasks/main.yml
@@ -1,15 +1,43 @@
+- name: Gather Resource Group info
+ azure.azcollection.azure_rm_resourcegroup_info:
+ name: "{{ resource_group }}"
+ register: __rg_info
+
- name: Prepare random number
ansible.builtin.set_fact:
rpfx: "{{ resource_group | hash('md5') | truncate(8, True, '') }}"
new_resource_group: "{{ resource_group }}-02"
run_once: true
+- name: Set Azure Region based on resource group location
+ ansible.builtin.set_fact:
+ location: "{{ __rg_info.resourcegroups.0.location }}"
+
- name: Create a new resource group
azure_rm_resourcegroup:
name: "{{ new_resource_group }}"
- location: southeastasia
+ location: "{{ location }}"
+
+- name: Create User Managed Identities
+ azure_rm_resource:
+ resource_group: "{{ new_resource_group }}"
+ provider: ManagedIdentity
+ resource_type: userAssignedIdentities
+ resource_name: "{{ item }}"
+ api_version: "2023-01-31"
+ body:
+ location: "{{ location }}"
+ state: present
+ loop:
+ - "ansible-test-pgsql-identity"
+ - "ansible-test-pgsql-identity-2"
+
+- name: Set identity IDs used in the test. Identities ansible-test-pgsql-identity and ansible-test-pgsql-identity-2 are created above
+ ansible.builtin.set_fact:
+ user_identity_1: "/subscriptions/{{ azure_subscription_id }}/resourcegroups/{{ new_resource_group }}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/ansible-test-pgsql-identity"
+ user_identity_2: "/subscriptions/{{ azure_subscription_id }}/resourcegroups/{{ new_resource_group }}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/ansible-test-pgsql-identity-2"
-- name: Create post gresql flexible server (check mode)
+- name: Create postgresql flexible server (check mode)
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
@@ -34,9 +62,14 @@
day_of_week: 3
availability_zone: 2
create_mode: Create
+ identity:
+ type: UserAssigned
+ user_assigned_identities:
+ id:
+ - "{{ user_identity_1 }}"
check_mode: true
-- name: Create post gresql flexible server
+- name: Create postgresql flexible server
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
@@ -61,14 +94,25 @@
day_of_week: 3
availability_zone: 2
create_mode: Create
+ identity:
+ type: UserAssigned
+ user_assigned_identities:
+ id:
+ - "{{ user_identity_1 }}"
register: output
-- name: Assert the post grep sql server create success
+- name: Assert the postgresql flexible server create success
ansible.builtin.assert:
that:
- output.changed
-- name: Create post gresql flexible server (Idempotent Test)
+- name: Assert User identity assigned
+ ansible.builtin.assert:
+ that:
+ - output.state.identity.type == 'UserAssigned'
+ - user_identity_1 in output.state.identity.user_assigned_identities
+
+- name: Create postgresql flexible server (Idempotent Test)
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
@@ -93,14 +137,19 @@
day_of_week: 3
availability_zone: 2
create_mode: Create
+ identity:
+ type: UserAssigned
+ user_assigned_identities:
+ id:
+ - "{{ user_identity_1 }}"
register: output
-- name: Assert the post grep sql server create success
+- name: Assert the postgresql server create success
ansible.builtin.assert:
that:
- not output.changed
-- name: Update post gresql flexible server with multiple parameters
+- name: Update postgresql flexible server with multiple parameters
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
@@ -128,9 +177,15 @@
tags:
key1: value1
key2: value2
+ identity:
+ type: "UserAssigned"
+ user_assigned_identities:
+ id:
+ - "{{ user_identity_2 }}"
+ append: true
register: output
-- name: Assert the post grep sql server update success
+- name: Assert the postgresql server update success
ansible.builtin.assert:
that:
- output.changed
@@ -141,7 +196,7 @@
name: postflexible{{ rpfx }}
register: output
-- name: Assert the post gresql server is well created
+- name: Assert the postgresql server is well created
ansible.builtin.assert:
that:
- output.servers[0].tags | length == 2
@@ -150,8 +205,23 @@
- output.servers[0].maintenance_window.day_of_week == 6
- output.servers[0].maintenance_window.start_hour == 10
- output.servers[0].maintenance_window.start_minute == 6
+ - user_identity_1 in output.servers[0].identity.user_assigned_identities
+ - user_identity_2 in output.servers[0].identity.user_assigned_identities
+
+- name: Set the postgresql flexible server identity to None
+ azure_rm_postgresqlflexibleserver:
+ name: postflexible{{ rpfx }}
+ resource_group: "{{ new_resource_group }}"
+ identity:
+ type: "None"
+ register: output
+
+- name: Assert no managed identities
+ ansible.builtin.assert:
+ that:
+ - output.state.identity.type == 'None'
-- name: Create a post gresql flexible database(check mode)
+- name: Create a postgresql flexible database(check mode)
azure_rm_postgresqlflexibledatabase:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -160,7 +230,7 @@
charset: UTF8
check_mode: true
-- name: Create a post gresql flexible database
+- name: Create a postgresql flexible database
azure_rm_postgresqlflexibledatabase:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -169,12 +239,12 @@
charset: UTF8
register: output
-- name: Assert the post gresql flexible database created success
+- name: Assert the postgresql flexible database created success
ansible.builtin.assert:
that:
- output.changed
-- name: Create a post gresql flexible database(Idempotent test)
+- name: Create a postgresql flexible database(Idempotent test)
azure_rm_postgresqlflexibledatabase:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -183,25 +253,25 @@
charset: UTF8
register: output
-- name: Assert the post gresql flexible database no changed
+- name: Assert the postgresql flexible database is not changed
ansible.builtin.assert:
that:
- not output.changed
-- name: Get the post gresql flexibe database facts
+- name: Get the postgresql flexible database facts
azure_rm_postgresqlflexibledatabase_info:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
name: database{{ rpfx }}
register: output
-- name: Assert the post gresql flexible database facts
+- name: Assert the postgresql flexible database facts
ansible.builtin.assert:
that:
- output.databases[0].collation == 'en_US.utf8'
- output.databases[0].charset == 'UTF8'
-- name: Delete the post gresql flexibe database
+- name: Delete the postgresql flexible database
azure_rm_postgresqlflexibledatabase:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -209,12 +279,12 @@
state: absent
register: output
-- name: Assert the post gresql flexible database deleted
+- name: Assert the postgresql flexible database deleted
ansible.builtin.assert:
that:
- output.changed
-- name: Create a post gresql flexible firwall rule (Check mode)
+- name: Create a postgresql flexible firewall rule (Check mode)
azure_rm_postgresqlflexiblefirewallrule:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -223,7 +293,7 @@
end_ip_address: 10.0.0.20
check_mode: true
-- name: Create the post gresql flexible firwall rule
+- name: Create the postgresql flexible firewall rule
azure_rm_postgresqlflexiblefirewallrule:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -232,12 +302,12 @@
end_ip_address: 10.0.0.20
register: output
-- name: Assert the post grepsql flexible firewall rule created well
+- name: Assert the postgresql flexible firewall rule created well
ansible.builtin.assert:
that:
- output.changed
-- name: Create the post gresql flexible firwall rule (Idempotent test)
+- name: Create the postgresql flexible firewall rule (Idempotent test)
azure_rm_postgresqlflexiblefirewallrule:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -246,12 +316,12 @@
end_ip_address: 10.0.0.20
register: output
-- name: Assert the post grepsql flexible firewall rule support idempotent test
+- name: Assert the postgresql flexible firewall rule supports the idempotent test
ansible.builtin.assert:
that:
- not output.changed
-- name: Update the post gresql flexible firwall rule
+- name: Update the postgresql flexible firewall rule
azure_rm_postgresqlflexiblefirewallrule:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -260,25 +330,25 @@
end_ip_address: 10.0.0.18
register: output
-- name: Assert the post grepsql flexible server update well
+- name: Assert the postgresql flexible firewall rule updated well
ansible.builtin.assert:
that:
- output.changed
-- name: Get the post gresql flexible firwall rule facts
+- name: Get the postgresql flexible firwall rule facts
azure_rm_postgresqlflexiblefirewallrule_info:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
name: firewall{{ rpfx }}
register: output
-- name: Assert the post gresql flexible firewall rule facts
+- name: Assert the postgresql flexible firewall rule facts
ansible.builtin.assert:
that:
- output.firewall_rules[0].start_ip_address == '10.0.0.16'
- output.firewall_rules[0].end_ip_address == '10.0.0.18'
-- name: Delete the post gresql flexible firwall rule
+- name: Delete the postgresql flexible firwall rule
azure_rm_postgresqlflexiblefirewallrule:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
@@ -286,30 +356,30 @@
state: absent
register: output
-- name: Assert the post grepsql flexible server delete well
+- name: Assert the postgresql flexible firewall rule deleted well
ansible.builtin.assert:
that:
- output.changed
-- name: List the post gresql flexible config facts
+- name: List the postgresql flexible config facts
azure_rm_postgresqlflexibleconfiguration_info:
resource_group: "{{ new_resource_group }}"
server_name: postflexible{{ rpfx }}
register: output
-- name: Assert the post gresql flexible server configuration
+- name: Assert the postgresql flexible server configuration
ansible.builtin.assert:
that:
- output.settings | length > 0
-- name: Stop the post gresql flexible server
+- name: Stop the postgresql flexible server
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
is_stop: true
register: output
-- name: Assert the post grep sql server stop success
+- name: Assert the postgresql server stop success
ansible.builtin.assert:
that:
- output.changed
@@ -319,30 +389,42 @@
minutes: 10
changed_when: true
-- name: Restart post gresql flexible server
+- name: Restart postgresql flexible server
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
is_restart: true
register: output
-- name: Assert the post grep sql server restart success
+- name: Assert the postgresql server restart success
ansible.builtin.assert:
that:
- output.changed
-- name: Delete post gresql flexible server
+- name: Delete postgresql flexible server
azure_rm_postgresqlflexibleserver:
resource_group: "{{ new_resource_group }}"
name: postflexible{{ rpfx }}
state: absent
register: output
-- name: Assert the post gresql server is well deleted
+- name: Assert the postgresql server is well deleted
ansible.builtin.assert:
that:
- output.changed
+- name: Destroy User Managed Identities
+ azure_rm_resource:
+ resource_group: "{{ new_resource_group }}"
+ provider: ManagedIdentity
+ resource_type: userAssignedIdentities
+ resource_name: "{{ item }}"
+ api_version: "2023-01-31"
+ state: absent
+ loop:
+ - "ansible-test-psql-identity"
+ - "ansible-test-psql-identity-2"
+
- name: Delete the new resource group
azure_rm_resourcegroup:
name: "{{ new_resource_group }}"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_privatelinkservice/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_privatelinkservice/tasks/main.yml
index a3743b074..d42d65ecd 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_privatelinkservice/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_privatelinkservice/tasks/main.yml
@@ -1,7 +1,6 @@
- name: Set Private Link Service Names
ansible.builtin.set_fact:
rpfx: "{{ resource_group | hash('md5') | truncate(20, True, '') }}"
- azure_subscription_id: f64d4ee8-be94-457d-ba26-3fa6b6506cef
- name: Delete the private link service
azure_rm_privatelinkservice:
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/aliases b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/aliases
index 6feba04aa..e133dafcd 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_datalakestore/aliases
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/aliases
@@ -1,4 +1,4 @@
cloud/azure
-shippable/azure/group10
destructive
+shippable/azure/group9
disabled
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/meta/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/meta/main.yml
new file mode 100644
index 000000000..95e1952f9
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_azure
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/tasks/main.yml
new file mode 100644
index 000000000..ddbc1b881
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_sqlmanageddatabase/tasks/main.yml
@@ -0,0 +1,264 @@
+- name: Prepare random number
+ ansible.builtin.set_fact:
+ random_postfix: "sqlmi{{ 1000 | random }}{{ resource_group | hash('md5') | truncate(8, True, '') }}"
+
+- name: Create virtual network
+ azure_rm_virtualnetwork:
+ name: "{{ random_postfix }}"
+ address_prefixes_cidr:
+ - 10.1.0.0/16
+ - 172.100.0.0/16
+ - fdda:e69b:1587:495e::/64
+ dns_servers:
+ - 127.0.0.1
+ - 127.0.0.3
+ resource_group: "{{ resource_group }}"
+
+- name: Create a route table
+ azure_rm_routetable:
+ name: "{{ random_postfix }}"
+ resource_group: "{{ resource_group }}"
+ tags:
+ purpose: testing
+
+- name: Create security group
+ azure_rm_securitygroup:
+ resource_group: "{{ resource_group }}"
+ name: "{{ random_postfix }}"
+ tags:
+ testing: testing
+ delete: on-exit
+ foo: bar
+ purge_rules: true
+ rules:
+ - name: DenySSH
+ protocol: Tcp
+ destination_port_range: 22
+ access: Deny
+ priority: 100
+ direction: Inbound
+ - name: AllowSSH
+ protocol: Tcp
+ source_address_prefix: 174.109.158.0/24
+ destination_port_range: 22
+ access: Allow
+ priority: 101
+ direction: Inbound
+
+- name: Add the subnet back
+ azure_rm_subnet:
+ name: foobar
+ virtual_network_name: "{{ random_postfix }}"
+ resource_group: "{{ resource_group }}"
+ address_prefix_cidr: "10.1.0.0/24"
+ security_group:
+ resource_group: "{{ resource_group }}"
+ name: "{{ random_postfix }}"
+ route_table:
+ name: "{{ random_postfix }}"
+ resource_group: "{{ resource_group }}"
+ delegations:
+ - name: 'mysqlinstance'
+ serviceName: "Microsoft.Sql/managedInstances"
+ register: subnet_output
+
+- name: Create sql managed instance
+ azure_rm_sqlmanagedinstance:
+ resource_group: "{{ resource_group }}"
+ name: "{{ random_postfix }}"
+ subnet_id: "{{ subnet_output.state.id }}"
+ identity:
+ type: SystemAssigned
+ sku:
+ name: GP_Gen5
+ tier: GeneralPurpose
+ family: Gen5
+ capacity: 8
+ administrator_login: azureuser
+ administrator_login_password: Fredtest@password0329test
+ storage_size_in_gb: 256
+ v_cores: 8
+
+- name: Create a new sql managed instance database(checkmode test)
+ azure_rm_sqlmidatabase:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ collation: SQL_Latin1_General_CP1_CI_AS
+ location: eastus
+ tags:
+ key1: value1
+ check_mode: true
+
+- name: Create a new sql managed instance database
+ azure_rm_sqlmidatabase:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ collation: SQL_Latin1_General_CP1_CI_AS
+ location: eastus
+ tags:
+ key1: value1
+ register: output
+
+- name: Assert the database is well created
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Create a new sql managed instance database (Idempotent test)
+ azure_rm_sqlmidatabase:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ collation: SQL_Latin1_General_CP1_CI_AS
+ location: eastus
+ tags:
+ key1: value1
+ register: output
+
+- name: Assert the database has not changed
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+
+- name: Update the sql managed instance database tags
+ azure_rm_sqlmidatabase:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ collation: SQL_Latin1_General_CP1_CI_AS
+ location: eastus
+ tags:
+ key2: value2
+ register: output
+
+- name: Assert the database is updated
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Get the sql managed instance database facts
+ azure_rm_sqlmidatabase_info:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ register: output
+
+- name: Assert the database facts
+ ansible.builtin.assert:
+ that:
+ - output.database[0].tags |length == 2
+ - output.database[0].collation == "SQL_Latin1_General_CP1_CI_AS"
+
+- name: Get sql managed database long term retention policy by name
+ azure_rm_sqlmidblongtermretentionpolicy_info:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ policy_name: default
+ register: output
+
+- name: Assert the sql managed database long term retention policy facts
+ ansible.builtin.assert:
+ that:
+ - output.long_term_retention_policy[0].name == 'default'
+ - output.long_term_retention_policy[0].monthly_retention == "PT0S"
+ - output.long_term_retention_policy[0].yearly_retention == "PT0S"
+ - output.long_term_retention_policy[0].weekly_retention == "PT0S"
+ - output.long_term_retention_policy[0].week_of_year == 0
+
+- name: Update the database long term retention policy
+ azure_rm_sqlmidblongtermretentionpolicy:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ policy_name: default
+ monthly_retention: P3M
+ week_of_year: 13
+ weekly_retention: P13W
+ yearly_retention: P3Y
+ register: output
+
+- name: Assert the sql managed database long term retention policy updated
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Get sql managed database long term retention policy by name
+ azure_rm_sqlmidblongtermretentionpolicy_info:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ policy_name: default
+ register: output
+
+- name: Assert the sql managed database long term retention policy facts
+ ansible.builtin.assert:
+ that:
+ - output.long_term_retention_policy[0].name == 'default'
+ - output.long_term_retention_policy[0].monthly_retention == "P3M"
+ - output.long_term_retention_policy[0].yearly_retention == "P3Y"
+ - output.long_term_retention_policy[0].weekly_retention == "P13W"
+ - output.long_term_retention_policy[0].week_of_year == 13
+
+- name: Get the sql managed instance database short term retention policy facts
+ azure_rm_sqlmidbshorttermretentionpolicy_info:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ policy_name: default
+ register: output
+
+- name: Assert the sql managed database short term retention policy facts
+ ansible.builtin.assert:
+ that:
+ - output.short_term_retention_policy[0].name == 'default'
+ - output.short_term_retention_policy[0].retention_days == 7
+
+- name: Update the sql managed database short term retention policy
+ azure_rm_sqlmidbshorttermretentionpolicy:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ policy_name: default
+ retention_days: 10
+ register: output
+
+- name: Assert the sql managed database short term retention policy updated
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Get the sql managed instance database short term retention policy facts
+ azure_rm_sqlmidbshorttermretentionpolicy_info:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ policy_name: default
+ register: output
+
+- name: Assert the sql managed database short term retention policy facts
+ ansible.builtin.assert:
+ that:
+ - output.short_term_retention_policy[0].name == 'default'
+ - output.short_term_retention_policy[0].retention_days == 10
+
+- name: Delete the sql managed database
+ azure_rm_sqlmidatabase:
+ resource_group: "{{ resource_group }}"
+ managed_instance_name: "{{ random_postfix }}"
+ database_name: "database{{ random_postfix }}"
+ state: absent
+ register: output
+
+- name: Assert the sql managed database deleted
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Delete sql managed instance
+ azure_rm_sqlmanagedinstance:
+ resource_group: "{{ resource_group }}"
+ name: "{{ random_postfix }}"
+ state: absent
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/aliases b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/aliases
new file mode 100644
index 000000000..aa77c071a
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/aliases
@@ -0,0 +1,3 @@
+cloud/azure
+shippable/azure/group2
+destructive
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/meta/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/meta/main.yml
new file mode 100644
index 000000000..95e1952f9
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_azure
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/tasks/main.yml
new file mode 100644
index 000000000..96245a06a
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageaccountmanagementpolicy/tasks/main.yml
@@ -0,0 +1,242 @@
+- name: Set random value
+ ansible.builtin.set_fact:
+ rpfx: "{{ resource_group | hash('md5') | truncate(22, True, '') }}"
+
+- name: Create storage account with static website disabled
+ azure_rm_storageaccount:
+ resource_group: "{{ resource_group }}"
+ name: "st{{ rpfx }}"
+ account_type: Standard_LRS
+ kind: StorageV2
+ static_website:
+ enabled: false
+
+- name: Create storage account management policy rule (Check mode test)
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ rules:
+ - name: olcmtest
+ type: Lifecycle
+ enabled: false
+ definition:
+ actions:
+ base_blob:
+ enable_auto_tier_to_hot_from_cool: true
+ delete:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ tier_to_cool:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ tier_to_archive:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ snapshot:
+ tier_to_cool:
+ days_after_creation_greater_than: 22
+ tier_to_archive:
+ days_after_creation_greater_than: 22
+ delete:
+ days_after_creation_greater_than: 22
+ version:
+ tier_to_archive:
+ days_after_creation_greater_than: 22
+ tier_to_cool:
+ days_after_creation_greater_than: 22
+ delete:
+ days_after_creation_greater_than: 22
+ filters:
+ prefix_match:
+ - olcmtestcontainer
+ blob_types:
+ - blockBlob
+ - appendBlob
+ blob_index_match:
+ - name: tags6
+ op: '=='
+ value: value6
+ check_mode: true
+
+- name: Create storage account management policy rule
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ rules:
+ - name: olcmtest
+ type: Lifecycle
+ enabled: false
+ definition:
+ actions:
+ base_blob:
+ enable_auto_tier_to_hot_from_cool: true
+ delete:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ tier_to_cool:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ tier_to_archive:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ snapshot:
+ tier_to_cool:
+ days_after_creation_greater_than: 22
+ tier_to_archive:
+ days_after_creation_greater_than: 22
+ delete:
+ days_after_creation_greater_than: 22
+ version:
+ tier_to_archive:
+ days_after_creation_greater_than: 22
+ tier_to_cool:
+ days_after_creation_greater_than: 22
+ delete:
+ days_after_creation_greater_than: 22
+ filters:
+ prefix_match:
+ - olcmtestcontainer
+ blob_types:
+ - blockBlob
+ - appendBlob
+ blob_index_match:
+ - name: tags6
+ op: '=='
+ value: value6
+ register: output
+
+- name: Assert the Managed policy created
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Create storage account management policy rule(Idempotent test)
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ rules:
+ - name: olcmtest
+ type: Lifecycle
+ enabled: false
+ definition:
+ actions:
+ base_blob:
+ enable_auto_tier_to_hot_from_cool: true
+ delete:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ tier_to_cool:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ tier_to_archive:
+ days_after_modification_greater_than: 22
+ days_after_last_access_time_greater_than: 22
+ snapshot:
+ tier_to_cool:
+ days_after_creation_greater_than: 22
+ tier_to_archive:
+ days_after_creation_greater_than: 22
+ delete:
+ days_after_creation_greater_than: 22
+ version:
+ tier_to_archive:
+ days_after_creation_greater_than: 22
+ tier_to_cool:
+ days_after_creation_greater_than: 22
+ delete:
+ days_after_creation_greater_than: 22
+ filters:
+ prefix_match:
+ - olcmtestcontainer
+ blob_types:
+ - blockBlob
+ - appendBlob
+ blob_index_match:
+ - name: tags6
+ op: '=='
+ value: value6
+ register: output
+
+- name: Assert the Managed policy is not changed
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+
+- name: Create storage account management policy rule(Updating test)
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ rules:
+ - name: olcmtest
+ type: Lifecycle
+ enabled: false
+ definition:
+ actions:
+ base_blob:
+ enable_auto_tier_to_hot_from_cool: true
+ delete:
+ days_after_modification_greater_than: 33
+ days_after_last_access_time_greater_than: 33
+ tier_to_cool:
+ days_after_modification_greater_than: 33
+ days_after_last_access_time_greater_than: 33
+ tier_to_archive:
+ days_after_modification_greater_than: 33
+ days_after_last_access_time_greater_than: 33
+ snapshot:
+ tier_to_cool:
+ days_after_creation_greater_than: 33
+ tier_to_archive:
+ days_after_creation_greater_than: 33
+ delete:
+ days_after_creation_greater_than: 33
+ version:
+ tier_to_archive:
+ days_after_creation_greater_than: 33
+ tier_to_cool:
+ days_after_creation_greater_than: 33
+ delete:
+ days_after_creation_greater_than: 33
+ filters:
+ prefix_match:
+ - olcmtestcontainer
+ blob_types:
+ - blockBlob
+ - appendBlob
+ blob_index_match:
+ - name: tags6
+ op: '=='
+ value: value6
+ register: output
+
+- name: Assert the Managed policy updated
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Get storage account management policy
+ azure_rm_storageaccountmanagementpolicy_info:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ register: output
+
+- name: Assert the management policy rule facts
+ ansible.builtin.assert:
+ that:
+ - output.state.policy.rules[0].enabled is false
+ - output.state.policy.rules[0].name == 'olcmtest'
+ - output.state.policy.rules[0].type == 'Lifecycle'
+ - output.state.policy.rules[0].definition.actions.base_blob.enable_auto_tier_to_hot_from_cool is true
+ - output.state.policy.rules | length == 1
+
+- name: Delete the storage account management policy's rule
+ azure_rm_storageaccountmanagementpolicy:
+ resource_group: "{{ resource_group }}"
+ storage_account_name: "st{{ rpfx }}"
+ state: absent
+ register: output
+
+- name: Assert the Managed policy deleted
+ ansible.builtin.assert:
+ that:
+ - output.changed
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageblob/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageblob/tasks/main.yml
index 98cd3dc38..71abbbbf1 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageblob/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_storageblob/tasks/main.yml
@@ -8,6 +8,7 @@
resource_group: "{{ resource_group }}"
name: "{{ storage_account }}"
account_type: Standard_LRS
+ register: az_storageaccount
- name: Create container
azure_rm_storageblob:
@@ -16,6 +17,24 @@
account_name: "{{ storage_account }}"
container_name: my-blobs
+- name: Lookup service principal object id
+ ansible.builtin.set_fact:
+ object_id: "{{ lookup('azure.azcollection.azure_service_principal_attribute',
+ azure_client_id=azure_client_id,
+ azure_secret=azure_secret,
+ azure_tenant=azure_tenant) }}"
+ register: spn_facts
+
+- name: Create role assignment by scope (Storage Blob Data Contributor)
+ azure_rm_roleassignment:
+ scope: "{{ az_storageaccount.state.id }}/blobServices/default/containers/my-blobs"
+ assignee_object_id: "{{ spn_facts.ansible_facts.object_id }}"
+ role_definition_id: "/providers/Microsoft.Authorization/roleDefinitions/ba92f5b4-2d11-453d-a403-e96b0029c9fe"
+
+- name: Pause for 30 seconds
+ ansible.builtin.command: sleep 30
+ changed_when: true
+
- name: Force upload blob
azure_rm_storageblob:
auth_mode: login
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/inventory.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/inventory.yml
index 63ba77727..62525210f 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/inventory.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/inventory.yml
@@ -13,6 +13,10 @@ all:
network: 10.42.2.0/24
subnet: 10.42.2.0/28
+ azure_test_skus:
+ network: 10.42.3.0/24
+ subnet: 10.42.3.0/28
+
azure_test_minimal:
network: 10.42.3.0/24
subnet: 10.42.3.0/28
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_public_ip.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_public_ip.yml
index f24be88e2..0ef24fd82 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_public_ip.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_public_ip.yml
@@ -31,9 +31,12 @@
azure_rm_networkinterface:
resource_group: "{{ resource_group }}"
name: "{{ interface_name }}"
+ ip_configurations:
+ - name: default
+ primary: true
+ public_ip_address_name: "{{ public_ip_name }}"
virtual_network: "{{ network_name }}"
subnet: "{{ subnet_name }}"
- public_ip_name: "{{ public_ip_name }}"
security_group: "{{ security_group }}"
- name: Create virtual machine with a single NIC and no boot diagnostics
@@ -233,7 +236,7 @@
azure_rm_virtualmachine:
resource_group: "{{ resource_group }}"
name: "{{ vm_name }}"
- vm_size: Standard_A1
+ vm_size: Standard_A1_v2
storage_account: "{{ storage_account }}"
storage_container: "{{ vm_name }}"
storage_blob: "{{ vm_name }}.vhd"
@@ -256,7 +259,7 @@
ansible.builtin.assert:
that:
- resize_result is changed
- - resize_result.ansible_facts.azure_vm.hardware_profile.vm_size == "Standard_A1"
+ - resize_result.ansible_facts.azure_vm.hardware_profile.vm_size == "Standard_A1_v2"
- name: Delete VM
azure_rm_virtualmachine:
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_skus.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_skus.yml
new file mode 100644
index 000000000..f65c15869
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachine/tasks/azure_test_skus.yml
@@ -0,0 +1,136 @@
+- name: Set variables
+ ansible.builtin.include_tasks: setup.yml
+
+- name: Gather Resource Group info
+ azure.azcollection.azure_rm_resourcegroup_info:
+ name: "{{ resource_group }}"
+ register: rg_info
+
+- name: List available VM SKUs
+ azure.azcollection.azure_rm_vmsku_info:
+ location: "{{ rg_info.resourcegroups.0.location }}"
+ resource_type: "virtualMachines"
+ size: "standard_B1"
+ zone: true
+ register: available_skus_result
+
+- name: Create desired capabilities list
+ ansible.builtin.set_fact:
+ desired_capabilities: [
+ {
+ "name": "MaxResourceVolumeMB",
+ "value": "4096"
+ },
+ {
+ "name": "MemoryGB",
+ "value": "2"
+ }]
+
+- name: Filter available SKUs with desired capabilities
+ ansible.builtin.set_fact:
+ skus_result: |
+ {% set skus_result = [] %}
+ {% for item in available_skus_result.available_skus -%}
+ {% set ns = namespace(use_sku=True) %}
+ {% for capability in item.capabilities -%}
+ {% for desired in desired_capabilities -%}
+ {% if capability.name == desired.name and capability.value != desired.value -%}
+ {% set ns.use_sku = False %}
+ {%- endif %}
+ {%- endfor %}
+ {%- endfor %}
+ {% if ns.use_sku -%}
+ {{ skus_result.append(item.name) }}
+ {%- endif %}
+ {%- endfor %}
+ {{ skus_result }}
+ failed_when: skus_result[0] is not defined
+
+- name: Create VM with the first SKU in the available SKUs list
+ azure_rm_virtualmachine:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name }}"
+ admin_username: "testuser"
+ ssh_password_enabled: false
+ ssh_public_keys:
+ - path: /home/testuser/.ssh/authorized_keys
+ key_data: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDfoYlIV4lTPZTv7hXaVwQQuqBgGs4yeNRX0SPo2+HQt9u4X7IGwrtXc0nEUm6LfaCikMH58bOL8f20NTGz285kxdFHZRcBXtqmnMz2rXwhK9gwq5h1khc+GzHtdcJXsGA4y0xuaNcidcg04jxAlN/06fwb/VYwwWTVbypNC0gpGEpWckCNm8vlDlA55sU5et0SZ+J0RKVvEaweUOeNbFZqckGPA384imfeYlADppK/7eAxqfBVadVvZG8IJk4yvATgaIENIFj2cXxqu2mQ/Bp5Wr45uApvJsFXmi+v/nkiOEV1QpLOnEwAZo6EfFS4CCQtsymxJCl1PxdJ5LD4ZOtP xiuxi.sun@qq.com"
+ vm_size: "{{ skus_result[0] }}"
+ virtual_network: "{{ network_name }}"
+ image:
+ offer: 0001-com-ubuntu-server-focal
+ publisher: Canonical
+ sku: 20_04-lts
+ version: latest
+ register: vm_output
+
+- name: Query auto created security group before deleting
+ azure_rm_securitygroup_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name }}01"
+ register: nsg_result
+
+- name: Assert that the security group exists before deleting
+ ansible.builtin.assert:
+ that:
+ - nsg_result.securitygroups | length == 1
+ - nsg_result.securitygroups[0].network_interfaces | length == 1
+
+- name: Delete VM
+ azure_rm_virtualmachine:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name }}"
+ remove_on_absent: all_autocreated
+ state: absent
+
+- name: Query auto created NIC
+ azure_rm_networkinterface_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name }}01"
+ register: nic_result
+
+- name: Query auto created security group
+ azure_rm_securitygroup_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name }}01"
+ register: nsg_result
+
+- name: Query auto created public IP
+ azure_rm_publicipaddress_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name }}01"
+ register: pip_result
+
+- name: Assert that autocreated resources were deleted
+ ansible.builtin.assert:
+ that:
+ # what about the default storage group?
+ - nic_result.networkinterfaces | length == 0
+ - nsg_result.securitygroups | length == 0
+ - pip_result.publicipaddresses | length == 0
+
+- name: Destroy subnet
+ azure_rm_subnet:
+ resource_group: "{{ resource_group }}"
+ virtual_network: "{{ network_name }}"
+ name: "{{ subnet_name }}"
+ state: absent
+
+- name: Destroy virtual network
+ azure_rm_virtualnetwork:
+ resource_group: "{{ resource_group }}"
+ name: "{{ network_name }}"
+ state: absent
+
+- name: Destroy availability set
+ azure_rm_availabilityset:
+ resource_group: "{{ resource_group }}"
+ name: "{{ availability_set }}"
+ state: absent
+
+- name: Destroy storage account
+ azure_rm_storageaccount:
+ resource_group: "{{ resource_group }}"
+ name: "{{ storage_account }}"
+ force_delete_nonempty: true
+ state: absent
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachineextension/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachineextension/tasks/main.yml
index e053cf0b8..b544b4a71 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachineextension/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachineextension/tasks/main.yml
@@ -39,9 +39,12 @@
azure_rm_networkinterface:
resource_group: "{{ resource_group }}"
name: testNIC
+ ip_configurations:
+ - name: default
+ primary: true
+ public_ip_address_name: testPublicIP
virtual_network: testVnet1
subnet: testSubnet
- public_ip_name: testPublicIP
security_group_name: testNetworkSecurityGroup
- name: Create a storage account
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachinescaleset/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachinescaleset/tasks/main.yml
index c3c5336ae..dd429406c 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachinescaleset/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualmachinescaleset/tasks/main.yml
@@ -27,7 +27,11 @@
azure_rm_loadbalancer:
resource_group: "{{ resource_group }}"
name: testLB
- public_ip_address_name: testPublicIP
+ frontend_ip_configurations:
+ - name: frontendipconf0
+ public_ip_address: testPublicIP
+ backend_address_pools:
+ - name: backendaddrp0
sku: Standard
- name: Create public IP address 1
@@ -41,7 +45,11 @@
azure_rm_loadbalancer:
resource_group: "{{ resource_group }}"
name: testLB1
- public_ip_address_name: testPublicIP1
+ frontend_ip_configurations:
+ - name: frontendipconf0
+ public_ip_address: testPublicIP1
+ backend_address_pools:
+ - name: backendaddrp1
sku: Standard
- name: Create network security group within same resource group of VMSS.
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/aliases b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/aliases
new file mode 100644
index 000000000..aa77c071a
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/aliases
@@ -0,0 +1,3 @@
+cloud/azure
+shippable/azure/group2
+destructive
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/meta/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/meta/main.yml
new file mode 100644
index 000000000..95e1952f9
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/meta/main.yml
@@ -0,0 +1,2 @@
+dependencies:
+ - setup_azure
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/tasks/main.yml
new file mode 100644
index 000000000..9ca17e251
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_virtualnetworkgatewaynatrule/tasks/main.yml
@@ -0,0 +1,131 @@
+- name: Prepare random number
+ ansible.builtin.set_fact:
+ natrulename: "nat{{ resource_group | hash('md5') | truncate(7, True, '') }}{{ 1000 | random }}"
+ vnetname: "vnet{{ resource_group | hash('md5') | truncate(7, True, '') }}{{ 1000 | random }}"
+ vngname: "vng{{ resource_group | hash('md5') | truncate(7, True, '') }}{{ 1000 | random }}"
+ pubipname: "testPublicIP{{ resource_group | hash('md5') | truncate(7, True, '') }}{{ 1000 | random }}"
+
+- name: Create virtual network
+ azure_rm_virtualnetwork:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vnetname }}"
+ address_prefixes: "10.0.0.0/16"
+
+- name: Add subnet
+ azure_rm_subnet:
+ resource_group: "{{ resource_group }}"
+ name: GatewaySubnet
+ address_prefix: "10.0.2.0/24"
+ virtual_network: "{{ vnetname }}"
+
+- name: Create public IP address
+ azure_rm_publicipaddress:
+ resource_group: "{{ resource_group }}"
+ allocation_method: Dynamic
+ name: "{{ pubipname }}"
+
+- name: Create a virtual network gateway
+ azure_rm_virtualnetworkgateway:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vngname }}"
+ sku: VpnGw2
+ vpn_gateway_generation: Generation2
+ ip_configurations:
+ - name: testipconfig
+ private_ip_allocation_method: Dynamic
+ public_ip_address_name: "{{ pubipname }}"
+ virtual_network: "{{ vnetname }}"
+
+- name: Create a virtual network nat rule (check_mode test)
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ type_properties_type: Static
+ mode: EgressSnat
+ internal_mappings:
+ - 10.1.0.0/24
+ external_mappings:
+ - 192.168.1.0/24
+ check_mode: true
+
+- name: Create a virtual network nat rule
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ type_properties_type: Static
+ mode: EgressSnat
+ internal_mappings:
+ - 10.1.0.0/24
+ external_mappings:
+ - 192.168.1.0/24
+ register: output
+
+- name: Assert the virtual network nat rule is well created
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Create a virtual network nat rule (Idempotent test)
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ type_properties_type: Static
+ mode: EgressSnat
+ internal_mappings:
+ - 10.1.0.0/24
+ external_mappings:
+ - 192.168.1.0/24
+ register: output
+
+- name: Assert the virtual network nat rule is not changed
+ ansible.builtin.assert:
+ that:
+ - not output.changed
+
+- name: Create a new virtual network nat rule (Update test)
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ type_properties_type: Static
+ mode: EgressSnat
+ internal_mappings:
+ - 10.3.0.0/24
+ external_mappings:
+ - 192.168.2.0/24
+ register: output
+
+- name: Assert the virtual network nat rule is well updated
+ ansible.builtin.assert:
+ that:
+ - output.changed
+
+- name: Get the virtual network nat rule facts
+ azure_rm_virtualnetworkgatewaynatrule_info:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ register: output
+
+- name: Assert the virtual network nat rule facts
+ ansible.builtin.assert:
+ that:
+ - output.state[0].mode == 'EgressSnat'
+ - output.state[0].internal_mappings == ["10.3.0.0/24"]
+ - output.state[0].external_mappings == ["192.168.2.0/24"]
+
+- name: Delete the virtual network nat rule
+ azure_rm_virtualnetworkgatewaynatrule:
+ resource_group: "{{ resource_group }}"
+ virtual_network_gateway_name: "{{ vngname }}"
+ name: "{{ natrulename }}"
+ state: absent
+ register: output
+
+- name: Assert the virtual network nat rule deleted
+ ansible.builtin.assert:
+ that:
+ - output.changed
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webapp/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webapp/tasks/main.yml
index 6b8128eb3..e766953fc 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webapp/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webapp/tasks/main.yml
@@ -557,6 +557,74 @@
- facts.webapps[0].ftps_state == 'Disabled'
- not facts.webapps[0].http20_enabled
+- name: Create a windows web app with site_auth_settings
+ azure_rm_webapp:
+ resource_group: "{{ resource_group }}"
+ name: "{{ win_app_name }}-auth"
+ plan:
+ resource_group: "{{ resource_group }}"
+ name: "{{ win_plan_name }}-auth"
+ is_linux: false
+ sku: S1
+ site_auth_settings:
+ client_id: "{{ azure_client_id }}"
+ default_provider: 'MicrosoftAccount'
+ runtime_version: '-2'
+ token_refresh_extension_hours: 90
+ unauthenticated_client_action: 'RedirectToLoginPage'
+ client_secret: "{{ azure_secret }}"
+ token_store_enabled: true
+ is_auth_from_file: false
+ enabled: false
+ register: output
+
+- name: Assert the web app is well created
+ ansible.builtin.assert:
+ that:
+ output.changed
+
+- name: Update the web app with site_auth_settings
+ azure_rm_webapp:
+ resource_group: "{{ resource_group }}"
+ name: "{{ win_app_name }}-auth"
+ plan:
+ resource_group: "{{ resource_group }}"
+ name: "{{ win_plan_name }}-auth"
+ is_linux: false
+ sku: S1
+ site_auth_settings:
+ client_id: "{{ azure_client_id }}"
+ default_provider: 'MicrosoftAccount'
+ runtime_version: '-3'
+ token_refresh_extension_hours: 100
+ unauthenticated_client_action: 'RedirectToLoginPage'
+ client_secret: "{{ azure_secret }}"
+ token_store_enabled: false
+ is_auth_from_file: false
+ enabled: true
+ register: output
+
+- name: Assert the web app is well updated
+ ansible.builtin.assert:
+ that:
+ output.changed
+
+- name: Get the web app facts
+ azure_rm_webapp_info:
+ resource_group: "{{ resource_group }}"
+ name: "{{ win_app_name }}-auth"
+ register: output
+
+- name: Assert the web app facts
+ ansible.builtin.assert:
+ that:
+ - output.webapps[0].site_auth_settings.enabled is true
+ - output.webapps[0].site_auth_settings.is_auth_from_file == 'false'
+ - output.webapps[0].site_auth_settings.runtime_version == '-3'
+ - output.webapps[0].site_auth_settings.token_refresh_extension_hours == 100.0
+ - output.webapps[0].site_auth_settings.token_store_enabled is false
+ - output.webapps[0].site_auth_settings.unauthenticated_client_action == 'RedirectToLoginPage'
+
- name: Create a webapp slot (Check mode)
azure_rm_webappslot:
resource_group: "{{ resource_group }}"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webappaccessrestriction/tasks/main.yml b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webappaccessrestriction/tasks/main.yml
index 9850dd148..a9496c992 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webappaccessrestriction/tasks/main.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/azure_rm_webappaccessrestriction/tasks/main.yml
@@ -3,6 +3,20 @@
rpfx: "{{ resource_group | hash('md5') | truncate(7, True, '') }}{{ 1000 | random }}"
run_once: true
+- name: Create virtual network
+ azure_rm_virtualnetwork:
+ resource_group: "{{ resource_group }}"
+ name: "vnet-{{ rpfx }}"
+ address_prefixes: 10.42.0.0/24
+
+- name: Create subnet
+ azure_rm_subnet:
+ resource_group: "{{ resource_group }}"
+ name: "subnet-{{ rpfx }}"
+ address_prefix: 10.42.0.0/28
+ virtual_network: "vnet-{{ rpfx }}"
+ register: subnet_output
+
- name: Create a web app
azure_rm_webapp:
resource_group: "{{ resource_group }}"
@@ -26,9 +40,16 @@
action: "Allow"
ip_address: "2.2.2.2/24"
priority: 2
+ - name: "Datacenter 3"
+ action: Allow
+ priority: 3
+ description: "fred test 03"
+ tag: XffProxy
+ vnet_subnet_resource_id: "{{ subnet_output.state.id }}"
scm_ip_security_restrictions_use_main: true
register: output
check_mode: true
+
- name: Assert the resource is well created
ansible.builtin.assert:
that: output.changed
@@ -46,17 +67,26 @@
action: "Allow"
ip_address: "2.2.2.2/24"
priority: 2
+ - name: "Datacenter 3"
+ action: Allow
+ priority: 3
+ description: "fred test 03"
+ tag: XffProxy
+ vnet_subnet_resource_id: "{{ subnet_output.state.id }}"
scm_ip_security_restrictions_use_main: true
register: output
- name: Assert the resource is well created
ansible.builtin.assert:
that:
- output.changed
- - output.ip_security_restrictions | length == 2
+ - output.ip_security_restrictions | length == 3
- output.ip_security_restrictions[0].action == 'Allow'
- output.ip_security_restrictions[0].ip_address == '1.1.1.1/24'
- output.ip_security_restrictions[1].action == 'Allow'
- output.ip_security_restrictions[1].ip_address == '2.2.2.2/24'
+ - output.ip_security_restrictions[2].priority == 3
+ - output.ip_security_restrictions[2].tag == "XffProxy"
+ - output.ip_security_restrictions[2].vnet_subnet_resource_id == subnet_output.state.id
- output.scm_ip_security_restrictions_use_main == true
- name: "Check webapp access restriction facts 1"
@@ -68,11 +98,14 @@
ansible.builtin.assert:
that:
- not output.changed
- - output.ip_security_restrictions | length == 2
+ - output.ip_security_restrictions | length == 3
- output.ip_security_restrictions[0].action == 'Allow'
- output.ip_security_restrictions[0].ip_address == '1.1.1.1/24'
- output.ip_security_restrictions[1].action == 'Allow'
- output.ip_security_restrictions[1].ip_address == '2.2.2.2/24'
+ - output.ip_security_restrictions[2].priority == 3
+ - output.ip_security_restrictions[2].tag == "XffProxy"
+ - output.ip_security_restrictions[2].vnet_subnet_resource_id == subnet_output.state.id
- output.scm_ip_security_restrictions_use_main == true
- name: "Create webapp access restriction - idempotent"
@@ -88,8 +121,15 @@
action: "Allow"
ip_address: "2.2.2.2/24"
priority: 2
+ - name: "Datacenter 3"
+ action: Allow
+ priority: 3
+ description: "fred test 03"
+ tag: XffProxy
+ vnet_subnet_resource_id: "{{ subnet_output.state.id }}"
scm_ip_security_restrictions_use_main: true
register: output
+
- name: Assert the resource is not changed
ansible.builtin.assert:
that: not output.changed
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/setup.yml b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/setup.yml
index f162e9134..6e932d54b 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/setup.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/setup.yml
@@ -47,3 +47,24 @@
sku: 20_04-lts
version: latest
register: vm_output
+
+ - name: Create minimal VM 2 with defaults
+ azure_rm_virtualmachine:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name_2 }}"
+ admin_username: testuser
+ ssh_password_enabled: false
+ ssh_public_keys:
+ - path: /home/testuser/.ssh/authorized_keys
+ key_data: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDfoYlIV4lTPZTv7hXaVwQQuqBgGs4yeNRX0SPo2+HQt9u4X7IGwrtXc0nEUm6LfaCikMH58bOL8f20NTGz285kxdFHZRcBXtqmnMz2rXwhK9gwq5h1khc+GzHtdcJXsGA4y0xuaNcidcg04jxAlN/06fwb/VYwwWTVbypNC0gpGEpWckCNm8vlDlA55sU5et0SZ+J0RKVvEaweUOeNbFZqckGPA384imfeYlADppK/7eAxqfBVadVvZG8IJk4yvATgaIENIFj2cXxqu2mQ/Bp5Wr45uApvJsFXmi+v/nkiOEV1QpLOnEwAZo6EfFS4CCQtsymxJCl1PxdJ5LD4ZOtP xiuxi.sun@qq.com"
+ vm_size: Standard_B1ms
+ virtual_network: "{{ network_name }}"
+ image:
+ offer: 0001-com-ubuntu-server-focal
+ publisher: Canonical
+ sku: 20_04-lts
+ version: latest
+ tags:
+ Deployment-Method: Ansible
+ Automation-Method: Ansible
+ register: vm_output_2
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/teardown.yml b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/teardown.yml
index 2a131033a..51690d199 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/teardown.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/teardown.yml
@@ -15,6 +15,13 @@
remove_on_absent: all_autocreated
state: absent
+ - name: Delete VM 2
+ azure_rm_virtualmachine:
+ resource_group: "{{ resource_group }}"
+ name: "{{ vm_name_2 }}"
+ remove_on_absent: all_autocreated
+ state: absent
+
- name: Destroy subnet
azure_rm_subnet:
resource_group: "{{ resource_group }}"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/test_inventory_filter.yml b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/test_inventory_filter.yml
new file mode 100644
index 000000000..b3e715506
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/test_inventory_filter.yml
@@ -0,0 +1,21 @@
+---
+- name: Config hosts
+ hosts: localhost
+ connection: local
+ gather_facts: false
+ tasks:
+ - name: Set facts
+ ansible.builtin.include_vars: vars.yml
+
+ - name: Refresh inventory
+ ansible.builtin.meta: refresh_inventory
+
+ - name: Test vm_name_2 in Inventory
+ ansible.builtin.assert:
+ that:
+ - vm_name_2 in hostvars
+
+ - name: Test vm_name not in Inventory
+ ansible.builtin.assert:
+ that:
+ - vm_name not in hostvars
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/vars.yml b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/vars.yml
index dc6bbe080..87fa5608f 100644
--- a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/vars.yml
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/playbooks/vars.yml
@@ -5,6 +5,7 @@ uid_short: "{{ (resource_group ~ inventory_hostname) | hash('md5') | truncate(10
storage_account: "{{ 'stor' ~ uid }}"
availability_set: "{{ 'avbs' ~ uid_short }}"
vm_name: "{{ 'vm' ~ uid_short }}"
+vm_name_2: "{{ 'vm2' ~ uid_short }}"
network_name: "{{ 'vnet' ~ uid_short }}"
subnet_name: "{{ 'snet' ~ uid_short }}"
security_group: "{{ 'sg' ~ uid_short }}"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/runme.sh b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/runme.sh
index 6f381c0a5..30c86d9ca 100755
--- a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/runme.sh
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/runme.sh
@@ -19,6 +19,11 @@ ansible-playbook playbooks/empty_inventory_config.yml "$@"
ansible-playbook playbooks/create_inventory_config.yml "$@" --extra-vars "template=basic2.yml"
ansible-playbook playbooks/test_inventory.yml "$@"
+# using host filters
+ansible-playbook playbooks/empty_inventory_config.yml "$@"
+ansible-playbook playbooks/create_inventory_config.yml "$@" --extra-vars "template=filter.yml"
+ansible-playbook playbooks/test_inventory_filter.yml "$@"
+
# teardown
ansible-playbook playbooks/teardown.yml "$@"
diff --git a/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/templates/filter.yml b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/templates/filter.yml
new file mode 100644
index 000000000..a928de0da
--- /dev/null
+++ b/ansible_collections/azure/azcollection/tests/integration/targets/inventory_azure/templates/filter.yml
@@ -0,0 +1,14 @@
+---
+plugin: azure.azcollection.azure_rm
+conditional_groups:
+ azure: true
+exclude_host_filters:
+ - location not in ['eastus', 'northcentralus']
+ - powerstate != 'running'
+ - not (tags['Deployment-Method'] | default('Exclude') == 'Ansible' and tags['Automation-Method'] | default('Exclude') == 'Ansible')
+  # The customer tried to use the following filter, but dashes in variable names are not allowed.
+  # The workaround was to use the dictionary access method above with defaults.
+ #- not (tags.Deployment-Method == 'Ansible' and tags.Automation-Method == 'Ansible')
+# fail_on_template_errors should be enabled for debugging and possibly at all times.
+fail_on_template_errors: True
+plain_host_names: true
diff --git a/ansible_collections/azure/azcollection/tests/utils/ado/ado.sh b/ansible_collections/azure/azcollection/tests/utils/ado/ado.sh
index 4bae8b5d4..2258a81f2 100755
--- a/ansible_collections/azure/azcollection/tests/utils/ado/ado.sh
+++ b/ansible_collections/azure/azcollection/tests/utils/ado/ado.sh
@@ -64,8 +64,8 @@ cp -aT "${SHIPPABLE_BUILD_DIR}" "${TEST_DIR}"
cd "${TEST_DIR}"
mkdir -p shippable/testresults
-pip install -I -r "${TEST_DIR}/requirements-azure.txt"
-pip install -I -r "${TEST_DIR}/sanity-requirements-azure.txt"
+pip install -I -r "${TEST_DIR}/requirements.txt"
+pip install -I -r "${TEST_DIR}/sanity-requirements.txt"
pip install ansible-lint
diff --git a/ansible_collections/cisco/dnac/.github/workflows/docs.yml b/ansible_collections/cisco/dnac/.github/workflows/docs.yml
index 46ab0dbc2..daf83e452 100644
--- a/ansible_collections/cisco/dnac/.github/workflows/docs.yml
+++ b/ansible_collections/cisco/dnac/.github/workflows/docs.yml
@@ -6,21 +6,21 @@ on:
- v[0-9]+.[0-9]+.[0-9]+
branches:
- main
- workflow_dispatch:
+
jobs:
docs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v2
- name: configure git
run: |
git config user.name "${GITHUB_ACTOR}"
git config user.email "${GITHUB_ACTOR}@bots.github.com"
git fetch --no-tags --prune --depth=1 origin +refs/heads/*:refs/remotes/origin/*
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v2
with:
- python-version: "3.11"
+ python-version: "3.7"
- name: Install dependencies
run: make doc-setup
- name: Build docs
@@ -36,4 +36,4 @@ jobs:
git add $(basename ${GITHUB_REF}) index.html
git commit -m "update docs for $(basename ${GITHUB_REF})" || true
- name: push docs
- run: git push origin gh-pages \ No newline at end of file
+ run: git push origin gh-pages
diff --git a/ansible_collections/cisco/dnac/FILES.json b/ansible_collections/cisco/dnac/FILES.json
index 7286b9cd5..3141cbfe3 100644
--- a/ansible_collections/cisco/dnac/FILES.json
+++ b/ansible_collections/cisco/dnac/FILES.json
@@ -18,7 +18,7 @@
"name": "requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f4e9b0ecf5083105385ead22aeb6999d6a0bd98e7068c2bb91c646ba93a009e",
+ "chksum_sha256": "0056c3104117a810b70a80d3f97345dd2335ac1dbfc2fcd21a54963f01e6a28b",
"format": 1
},
{
@@ -39,7 +39,7 @@
"name": "plugins/doc_fragments/module_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d944d7fda9464eedfee62561448b79a05ccd5fb67d7aef28e64568c829db191a",
+ "chksum_sha256": "f7e15caa0f88093ecd71533efe25b4ead34e3852725c289e9bebba5a85e8de6f",
"format": 1
},
{
@@ -60,7 +60,7 @@
"name": "plugins/doc_fragments/module.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d36fd0e1b6bf11ebc07206a060f39df3ce4f4715334464a4a7b10b8878ebe115",
+ "chksum_sha256": "b3c51a933d2237cace5de4cbf7ef13451b02d7a84fc6ead095cdb6d0c89d136e",
"format": 1
},
{
@@ -92,10 +92,24 @@
"format": 1
},
{
+ "name": "plugins/action/maps_import_start.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "54309099d804bb3459641c7c3af3c0a58a9fcba09a0068f53dee95c6a67e7596",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/lan_automation_v2.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4df1e0ef3405d7740223e872792c51c340c25d3d205560e9d56ed97218ccce11",
+ "format": 1
+ },
+ {
"name": "plugins/action/tag_member.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "59607962abe1c2545059d5f932fc3a4a3c4ccd1de9b564da7343a85949ded82c",
+ "chksum_sha256": "7a22bb81dc118bea688e0bb15f9531f6005817b2b0009e6457720f8987dfbe5a",
"format": 1
},
{
@@ -158,7 +172,7 @@
"name": "plugins/action/buildings_planned_access_points_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "38c1e2d879a6abe346236147f60f263657c8dfae60f7e6e3a55b01df772c09bd",
+ "chksum_sha256": "248d24b35d5ee5ba32ff2cfa15444c24922dc4d0c870fd2c53d4f1804e03ec0d",
"format": 1
},
{
@@ -186,7 +200,7 @@
"name": "plugins/action/snmpv2_read_community_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04e5b74ac64d19bf858a84c4d5befda887a299c3ed56a413281531ebd746e0a7",
+ "chksum_sha256": "8a64434ed5c94403939ebe7b9a75bcc4880fad782f306b9eefcf1e52edcc89cc",
"format": 1
},
{
@@ -204,6 +218,13 @@
"format": 1
},
{
+ "name": "plugins/action/flexible_report_schedule_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8d9734b85e1c844201e9fed3890d515fcd7397bea4fea6d91fb9a50936bd273f",
+ "format": 1
+ },
+ {
"name": "plugins/action/network_device_config_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -214,7 +235,21 @@
"name": "plugins/action/tag_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2392e3239b03eaccb0ec2277b8702d3f0a1d9f5cd3f476d45af59a941c89881d",
+ "chksum_sha256": "05c24b33377244bc36e13693e2610f4e172d035df660e5cfdeabbcf5d394ec94",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/integration_settings_itsm_instances_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4e4f5c9614e4fd82249e25ffeab1630831a3eb4144a49ddea8c92f94bbde19ec",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "66993af792fee4309c4a382566501b16fe984d6590eaeedefc84dae0fe3c465b",
"format": 1
},
{
@@ -256,14 +291,14 @@
"name": "plugins/action/wireless_sensor_test_results_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a904e42c9f800997b73c8c8934aa94193bf8ed7f0eb6d97845ef4a7fa7a2c37b",
+ "chksum_sha256": "1b3cf73be67dd8ae5e5fb1749b9d0f1b174c9e51c1a5a72a19ceeb550630c9b1",
"format": 1
},
{
"name": "plugins/action/wireless_dynamic_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a4ee0fc872f5994b73cde2e13591dc98670496ce815720f2cd14f907a4b08ec",
+ "chksum_sha256": "5a6637b70da9130292694681f503709d662c18f61abed5915359b3d83e17bd6b",
"format": 1
},
{
@@ -284,14 +319,21 @@
"name": "plugins/action/tag_member_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de1e6a70d75ba1b1208f81fc1401897f16945bb702e0abeccc79d3b090b35267",
+ "chksum_sha256": "d39476e080f7c2ad7e5676c1628145fa903e857f46311f8fff8d1ea2126496b4",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_zones_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4bba4465bdac1ac7f79757055ec5ff0c3bd40ed0c8e4a9c856eff625ee231317",
"format": 1
},
{
"name": "plugins/action/lan_automation_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29a1e27b787d5d63acbe46e9b0a3ff14564ec95aff544481774f84410fbdfe11",
+ "chksum_sha256": "52d4980132245c4298db9dc8db4009bcb34c34704534008db0cfb786849630ac",
"format": 1
},
{
@@ -302,10 +344,17 @@
"format": 1
},
{
+ "name": "plugins/action/network_device_ip_address_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "de46dbf0d5176bf1f0d9e573ab829d9d3729450e5ef31b726ef401485f4c093f",
+ "format": 1
+ },
+ {
"name": "plugins/action/event_syslog_config_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2c0cd9c7f5160da33e3d3bf1bd79584ec01c9c1d4bccd9b15672fbd88288786",
+ "chksum_sha256": "04252979da8175ec966edc48a7b331dc05a839df34fe93dddbc4279c36373734",
"format": 1
},
{
@@ -319,7 +368,7 @@
"name": "plugins/action/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4ad8c0110b4a2ce7ac3b9a3f87789e17e787641e760bd29f6d6c511a64b7b34",
+ "chksum_sha256": "7dcef7d717a38fea5e1c0ce715df7bae62a641305000ae570f85dd29966b9815",
"format": 1
},
{
@@ -361,7 +410,7 @@
"name": "plugins/action/pnp_device_authorize.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4af732ced83cb878c6c45c866c59e34f958411447552cd9df3495f2b735684c",
+ "chksum_sha256": "df9eef750592edcb8ad9b40c1b7459fec0aaa915d96912143eeaa373f1dfc8a1",
"format": 1
},
{
@@ -403,7 +452,7 @@
"name": "plugins/action/topology_network_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "248750c93ab26cf710c212594b616128bba496a2f7ccc8a9f489cbf9472c7fdd",
+ "chksum_sha256": "7c0e8fb04e53ce05031222530e6bc043ebbed393b454cdd2592d0a16138b014f",
"format": 1
},
{
@@ -445,14 +494,21 @@
"name": "plugins/action/sensor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf4f5c9cbbad79b07e49256fe082e5d35f1b21096e9d0a0f63dcb64af315c364",
+ "chksum_sha256": "90d22e6b90dc5e7bf91c9e075204b30c72639a9b517722c9aea3a04f3c4570a2",
"format": 1
},
{
"name": "plugins/action/integration_settings_instances_itsm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8da509fa62ce2ebe58a0e46b27f6da0c71342ce317b0649e09ebdbec15d7f033",
+ "chksum_sha256": "a29c1fd09ddb9648ea3d3d77c9f3c95b066622f833d59f8697f362450259599e",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/applications_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fdb7f3c419aca4cd5bc553d4f93695ce7aa6d45a0be1264bb106dc67dded4a2d",
"format": 1
},
{
@@ -463,6 +519,13 @@
"format": 1
},
{
+ "name": "plugins/action/network_device_config_task_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e265c2f429472df1c3f4a9eb0036a25f4008face61436c2c4d21cb23659c0e7c",
+ "format": 1
+ },
+ {
"name": "plugins/action/global_pool.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -484,6 +547,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_anycast_gateways.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "975bbcf92dcdfb056d038c3e6af14e725c88ba786ef8ffcc99080df46068a4ae",
+ "format": 1
+ },
+ {
"name": "plugins/action/sda_fabric_authentication_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -494,7 +564,7 @@
"name": "plugins/action/client_detail_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fa28affa3883fe6a27022030e57804e9e99d057bdac4e13fe10d86a58d6dc5b5",
+ "chksum_sha256": "aa7433a3c12edcf81be8dccc313820c9b1fb97d0bddd83138a845db05ba3e204",
"format": 1
},
{
@@ -540,6 +610,13 @@
"format": 1
},
{
+ "name": "plugins/action/applications_v2.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cc66471d8b16a51159290f0a9a8e5b83bba2b3a057389695d30d1d69b5722b17",
+ "format": 1
+ },
+ {
"name": "plugins/action/execute_suggested_actions_commands.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -568,6 +645,20 @@
"format": 1
},
{
+ "name": "plugins/action/users_external_servers_aaa_attribute.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "75fb4f9db4517e86e91ebf67ddfc93565b6f2a77c3df827cc3791cc3e86887ec",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/authentication_policy_servers.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "20d5d767c5dae08f19a52283191860ec00cea72f7c7b9c697225fdccc1b36a63",
+ "format": 1
+ },
+ {
"name": "plugins/action/wireless_rf_profile.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -585,7 +676,7 @@
"name": "plugins/action/event_subscription_syslog_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10439c8dd6fa1616c3e7278f89971e3732582fd5e8b7da85e0702bf473d89b7d",
+ "chksum_sha256": "64ebe4a1a1c6e43fbab007fe7989cc87a464f78ac51b6b53a0a615e23893fba1",
"format": 1
},
{
@@ -620,14 +711,21 @@
"name": "plugins/action/site_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f094d2d29329904b57ae0d9a7a5166f26296b87cb598319f3128f51e17e5954",
+ "chksum_sha256": "6d6b8ab0404749a7bd094c6a16457e449e19bb3f9e31a2135b58639e2d8a9b8b",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_extranet_policies_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8b7cae2471f9d1b839c5a5de9174398c275358f136a66990230d0657ffbd0184",
"format": 1
},
{
"name": "plugins/action/event_series_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fd81a9f25f5c4a9c00244b74c3544b516a1c20779902e7ecee70ee6b82d5410",
+ "chksum_sha256": "18dc5d8ab0afa96aa19061412580f8824e1cf460661da3572f2eac4242bcdb08",
"format": 1
},
{
@@ -652,10 +750,24 @@
"format": 1
},
{
+ "name": "plugins/action/sda_port_assignments.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bb1c8924bdf2721e42fbbb904406a2e738b3985931e40d644a0f62fcf41f3bda",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/event_snmp_config.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d63d0c88e5b0fe2636d76f1868cc31db4fba25d02fdfa64d9b98390e9e1907b6",
+ "format": 1
+ },
+ {
"name": "plugins/action/system_performance_historical_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd6801599f5621a66538b6034cb8430a204e1aa43299bf2bca24e70e0c6a715d",
+ "chksum_sha256": "ddc6e4df2eee6fb8c356c35c89faf26e17717181512e6380a9f002ff6ca5622c",
"format": 1
},
{
@@ -669,7 +781,7 @@
"name": "plugins/action/eox_status_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e9ff33e5d0ea84ff053262f1484ad78ce0d6e3238cde30461266c956cc0d661",
+ "chksum_sha256": "edc100edb3177476b0d4265da29e2c1cd33c1109f0a1d9a3f5126bd4c9f0a979",
"format": 1
},
{
@@ -704,7 +816,7 @@
"name": "plugins/action/event_subscription_details_rest_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d201cb30c52241483abdd8f42cc208e15032def6b9c2f8a018f5ac17877955c",
+ "chksum_sha256": "e1c5dcb98fe0a444c35b0ea2ae7052c636ecfb6c7bfe28ad7c014b071ebe6c5c",
"format": 1
},
{
@@ -715,6 +827,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_provision_devices.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cf30339c74fb5c6e9f4587d48daf02c6681a5c775a6e6b6170ee3d6d60eb2dfe",
+ "format": 1
+ },
+ {
"name": "plugins/action/site_design_floormap_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -732,14 +851,14 @@
"name": "plugins/action/global_pool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1b4d6b1605c7d72a997fc3d89e98b047583db2c83ca69a1a4bafa95f054c469",
+ "chksum_sha256": "cbbc9c14b7b136375de99fa26c7f718080268ddbbb0fc80e56eb6c42771bd3e0",
"format": 1
},
{
"name": "plugins/action/pnp_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4a2c5d8a78ecbed92a5931a6b4f7a8e2ebc212a35add3fe323614314c727b190",
+ "chksum_sha256": "167726e8a9bd0e9518ad17b686a0bf3b9bfe6a9c3938aafc4514908461146487",
"format": 1
},
{
@@ -757,6 +876,20 @@
"format": 1
},
{
+ "name": "plugins/action/maps_import_perform.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5f4b3ca9a6363edbd820b4eaedab7f8597949482c85c9b60a808755f80b41f61",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "627bfa2b5f088888aab497fb26d5ba876cdfb1fd506a7e8488a7c61774373898",
+ "format": 1
+ },
+ {
"name": "plugins/action/wireless_dynamic_interface_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -774,7 +907,7 @@
"name": "plugins/action/wireless_accespoint_configuration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4453a97d54c4b93b1b3157a5af9f7d55e2a862c27362d8043022221e08e41d73",
+ "chksum_sha256": "f300f23f00d94dd9a3a04953fd6fea46c7efae6cf78914c0e645cbeb3371eb10",
"format": 1
},
{
@@ -785,6 +918,13 @@
"format": 1
},
{
+ "name": "plugins/action/roles.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dde02248d4c02dd2384b859a29c1d7a9b978941aadb97ce15f5f41db7a5e8066",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_workflow_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -872,7 +1012,14 @@
"name": "plugins/action/pnp_workflow.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "463a007fa834f97a436e6b645cf47f8ba4008528219dfb832c097dcabf51d83c",
+ "chksum_sha256": "8912a594a2760b4727f8f68690f981427683a1a265d73b2be6252895dc3ecc35",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/dna_event_snmp_config_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "94938ad767551e1af771cdb35b30075910ceff4e8bbf9625b2a2705db5f5489b",
"format": 1
},
{
@@ -883,6 +1030,20 @@
"format": 1
},
{
+ "name": "plugins/action/sda_provision_devices_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "17db234ab2f814e5bf1b04437ac3a3032d7787d2183a06dd7d69d01e5bb90063",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/site_count_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "99c3799243bb19a9e7fbcdb5e4f88aa0900801551d028815d866689a34eb8082",
+ "format": 1
+ },
+ {
"name": "plugins/action/topology_layer_3_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -900,7 +1061,28 @@
"name": "plugins/action/snmpv2_write_community_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c391e722933e237cd7567955e16a1285e7d0e77b3b6daf30f9f586cec053675",
+ "chksum_sha256": "6d4a6e1f1855ae52dd2b47447d5ba85388457b45cac85b0f4459f1268758565f",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_zones.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9f4a808ef07543426e91b70fc080fce232159a98c9180500c1e46243fa3ad522",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/auth_token_create.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7672b5a09fa2ea55dc41987d9fb94f574e37c4d38b2ff8103148670586bceb6f",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/flexible_report_executions_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0f88d1fda36e659979b6e6ede34448bb4f6b79279d161c3630d5c8d1d99a1021",
"format": 1
},
{
@@ -921,14 +1103,21 @@
"name": "plugins/action/applications_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71615f34ac265b0e9f50e3272025ac534c6118c3787dcd06f3f2c0f643994396",
+ "chksum_sha256": "db5aba07b73429f4849173e1ad41aef94e2bbd98f49a78fb67618003cb796e18",
"format": 1
},
{
"name": "plugins/action/sensor_test_template_edit.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bdd4bfd7d57a7f56c6594bfb1bc5c406d0d8762fb4ad252bf05242b25a167a1d",
+ "chksum_sha256": "fd0b09e90f0398837e4a472dc297fcd6b35d45659c4c949e37725909d6618224",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "17ccc69fabb97d340643029c93d2cd73027c1d080f5c264060003143c9630511",
"format": 1
},
{
@@ -970,7 +1159,14 @@
"name": "plugins/action/eox_status_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b1d1cf72b516b5a6377f80a121c87e188722e02c80afaab4bc659935a427e7a",
+ "chksum_sha256": "b2f8e12eb8bada2a5d43d0cf207a277247a4fecf0c783302abcaa3deffc3eb5a",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/network_device_user_defined_field_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7331619759c108f4892949e818ba94b6e26b116f0cd89b475472add4a5f51ec1",
"format": 1
},
{
@@ -981,10 +1177,24 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "128719b16dafce1a95916cdcb6ad000a7ee1ad2b7fcb650bc5c9b1e5d38d108b",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/network_device_insight_device_link_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "24451a6d94a7726c6a2760528d906fd9cf953eeb6a5c8df08e8db8dbce72e89e",
+ "format": 1
+ },
+ {
"name": "plugins/action/applications.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6543f4175307162813ec97875ac7a144f6348f8e31c593b426c5843f5207a105",
+ "chksum_sha256": "dc1555114c26ae844ee634476c21af4803414d53bb6ed630c2add8eb19ada986",
"format": 1
},
{
@@ -995,10 +1205,17 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_sites_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "15faa579d885c655a1b935da69bee84b954ef71878f72c5c9107fb7740bf9454",
+ "format": 1
+ },
+ {
"name": "plugins/action/network_device_user_defined_field.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "658ab360c8f31498c52c8891a67e22072ee0ac52a1ea0736b81b7d8d8cb5a91c",
+ "chksum_sha256": "b5d5d1644d5ad1b8a47057aaaca8bedd40efae86e69914f892a10edc92c79651",
"format": 1
},
{
@@ -1012,7 +1229,14 @@
"name": "plugins/action/reserve_ip_subpool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7ba29285b43ce2c85ef7d2b6efc21494a7fa332cf39d69373eda40cca7a99b07",
+ "chksum_sha256": "42f49148d6e5468feeb39e72d2e3880dea25f3a5ebc24067379d6623740fe195",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/network_device_user_defined_field_delete.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9bbd4992cee82d7dad98d30cd215903c6865c95f04d208f74c2f0d1079226d3f",
"format": 1
},
{
@@ -1040,7 +1264,14 @@
"name": "plugins/action/tag_membership.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5baeadf354e21ecb0710f33f1739dd74db8af6663597425e0715eeabc548db56",
+ "chksum_sha256": "6e6ba9e7e3e3b8be97720a2d6070eac65bd81d647a689d84fcaface75bb9f603",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/ise_integration_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d125d2f6dabad78b117b775ed26dc1c07b70d875d5bfeabed781504b3fd307a",
"format": 1
},
{
@@ -1072,10 +1303,24 @@
"format": 1
},
{
+ "name": "plugins/action/users_external_authentication.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "49e55be8cee1e8bf3f44e7f6234339538956d37cd709ee23d17c8fdada8968f9",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_server_profile_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71fa543e8ec9ce8aad48d65bbee0d979c007642fc2df7a19eba38d79e96a3aed",
+ "chksum_sha256": "9ee4fac3785dfa1fecec11f91bb39cc1b4b269c8334caa91357f5ac4794bb1df",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/planned_access_points.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "471d263847d0608fd321f52e2996f620f073076c715b1c769843f8326cc51b6f",
"format": 1
},
{
@@ -1093,17 +1338,38 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "810d819549f8094091c4b60000e2fd99d0b821f9e138e98ae59d6aff51ebaeb8",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/lan_automation_update_device.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5f0f00aa58a71de17e93223ef7245a4bea2d98e69bd30b02c1a6f529e0a2c05d",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/lan_automation_sessions_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7b893ff1702c06b82fb8a82dad1b5e350e75b887c1d08bc96caf666182feb18b",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_device_claim_to_site.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8825b8f2fc53aa0593a593862f62c60f139ef02c5ae29ae7bd179049e7b7b3fc",
+ "chksum_sha256": "85556476314b9eeebaa3ff5586e364bf5de5a030000be4ef3a5e4bf2e98bb2f8",
"format": 1
},
{
"name": "plugins/action/applications_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c856f54df96da2fbef0a5ff7ffb4f4a4a56cb4671007c297bc40bd566c9a25e",
+ "chksum_sha256": "e4a91e25fbf5d4616d266adb7919a3876bbfef3070fae72c9f50e2ea06d5a066",
"format": 1
},
{
@@ -1114,6 +1380,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_authentication_profiles_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "88004e0d71b33682574587fe2fb9c4569f464ce182d7704e22777b38ff9a404c",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_device_config_preview.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1124,7 +1397,7 @@
"name": "plugins/action/path_trace.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90cb774e86b721c1e9171178dbb245377c536d876ffa6baf1dc18f7034fea8ad",
+ "chksum_sha256": "239b482789c6d7229506881e930541e7495b3ab5c9f0a581f86e42e89221ef3c",
"format": 1
},
{
@@ -1138,7 +1411,7 @@
"name": "plugins/action/system_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4101703b248890baf3e71f2968ad965b7b350aa548aad35a745e4d42401b4a18",
+ "chksum_sha256": "a69690ae3bb51acd43bb40427d86c3424e6595dbc44051145c9ddf5c2359efec",
"format": 1
},
{
@@ -1159,14 +1432,14 @@
"name": "plugins/action/client_proximity_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "11a7dac1a69c44d5a0b56cce55ffc31ced8979707b0298d08ceba9e7e8b79e60",
+ "chksum_sha256": "005524318b138481b4eb0b2dfc90280749fcf8755886acf08474150071a0b230",
"format": 1
},
{
"name": "plugins/action/tag_member_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a230a1ba7a6b4a938c5231b7a13d71d6d8e835ba8a550550b49485c5f3e1794a",
+ "chksum_sha256": "749c6fea46890a13566a1a3da7cc93c38d97c38f05d67184053fa1bb63c65746",
"format": 1
},
{
@@ -1208,14 +1481,21 @@
"name": "plugins/action/planned_access_points_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2fda31186f7a55adc70dda8f7604e72835fe682d181a71bd3e6e6aabf450eb7",
+ "chksum_sha256": "168118e178f1355b7ed1725222750827979f3385499e85eac6b5b7b5ea5b05cd",
"format": 1
},
{
"name": "plugins/action/event_subscription.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "74ec91367e90c402d8af52429dd94d6956f707611684da181fbaba4b3f1c889a",
+ "chksum_sha256": "183443142f6d8df66a977e1db84b344ae9eeff01fe398ae2da2607c6fd0e326b",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c7121f6b22d044add86d069d22b8a2d1c3c6c4d11cef680f4493daff3926dca2",
"format": 1
},
{
@@ -1233,10 +1513,17 @@
"format": 1
},
{
+ "name": "plugins/action/applications_count_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aaaf5f0d557a32d50089c7ff011b5c28a2bfa49621ec264c3e8384f403b8ed45",
+ "format": 1
+ },
+ {
"name": "plugins/action/wireless_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4086cf3f615b844715afaa7efcb9280dafc15652e8f0fb6cc9867979f95f78ae",
+ "chksum_sha256": "6c01eae452f3f8f6d9b02ee810a42ffd40b475e57062e1327deca0a4a1318214",
"format": 1
},
{
@@ -1282,10 +1569,17 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_sites.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8aa753af9dbcf442007c6bf95b2c5f18abbe3f3969121b538811a280a45b35c2",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9a687957ad40a293835888e71527df0e975d14ae9643538eb6bb9a2e76f9ff94",
+ "chksum_sha256": "5d4125fd879026ebaadbf622455ef6d70b82464d9411e04d88b930a737e65ac5",
"format": 1
},
{
@@ -1338,10 +1632,17 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d84e5196e3e211b922db3e727fcdde6c204d2ad3f38ac8fab74d1bee0bbf5fc",
+ "format": 1
+ },
+ {
"name": "plugins/action/http_read_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea4512b9a8e6dfe8fed6788ec7c94c5c89ac2dca5d9fb291e88697c32dbd4bd8",
+ "chksum_sha256": "aca30deb8e5335cfd932fed68b30ef559ec2a66a3e9d8876cf86d533ef52856b",
"format": 1
},
{
@@ -1355,7 +1656,7 @@
"name": "plugins/action/swim_import_local.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd8697519e27bd68e4ea9c3e932ec126715606e5db5708019482f43926e0c56a",
+ "chksum_sha256": "8dd1c5d02ff6dc70dff6724e4fa62f657faf4654660e9e8f487eefe7ed725976",
"format": 1
},
{
@@ -1397,14 +1698,14 @@
"name": "plugins/action/event_subscription_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee183b977f3f13bf1a8af1dffafefaee95cceeebd40b4f0e36037c1f6cd9a26e",
+ "chksum_sha256": "a172c6876b7aebcb2dcde7702ffe72c340adb6d93247d1216fc9e76065e9e640",
"format": 1
},
{
"name": "plugins/action/sda_fabric_authentication_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2559d831b7dd7a6edad3eaae6d37d59af3760cfab9589d0f11efcf4fd5285856",
+ "chksum_sha256": "1b3554854ecd87a7a88cf426aaa784eca8ceab319c8754ac00465b8f45edf588",
"format": 1
},
{
@@ -1429,10 +1730,31 @@
"format": 1
},
{
+ "name": "plugins/action/users_external_authentication_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "39f0a4287ba253cb3557f38962ca6b3c71a8683ef12ed7ca8206fe3fa4239475",
+ "format": 1
+ },
+ {
"name": "plugins/action/event_syslog_config.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af47ad471d16527a552eb464e4e285a1dc3903d12bcdb53ac9f7b341683a0291",
+ "chksum_sha256": "996f8578c784909f25376af322d89ca54aa6df2295694d0cfe9b48d7d5af71fb",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/integration_settings_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ef4fcd6aef3f3943379427741e3aed8375631646ba3e480d98ce5663de166d07",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/site_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5a689e6a84425b28f2d673639111dac9572f09112952be615a8cdab88cb04e1d",
"format": 1
},
{
@@ -1443,6 +1765,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_zones_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f07cdd227d11906068e8cae3aa8d67e2796bf1afdd7202a88809b3e886110458",
+ "format": 1
+ },
+ {
"name": "plugins/action/interface_network_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1453,7 +1782,7 @@
"name": "plugins/action/reserve_ip_subpool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b715b6d7e5b72166c3f4548642454ba6067996a9fe5dc3f7b7645642585241cd",
+ "chksum_sha256": "78295da1cbf969cc874d580115e8599a66f7c964a7c78868344797ffb108fe64",
"format": 1
},
{
@@ -1481,21 +1810,21 @@
"name": "plugins/action/cli_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56d1d6cb86590b4d45b0e1b9fbc573065d1061b1a06e1da9bdad9096bfbb1df7",
+ "chksum_sha256": "76be1e7004db40898fde05224984a13b1401d0a1f987713c0aabeb57b6a814b0",
"format": 1
},
{
"name": "plugins/action/event_webhook_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "24de6e919f6d5f8a1c677d9d3db0048ee1420db9c4fa091aa7908e76194a6871",
+ "chksum_sha256": "ba24342e4950ef95d98df06574c38fd547bed4c691460a256f6625fc18b755b4",
"format": 1
},
{
"name": "plugins/action/license_device_license_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f885c0f39c6c9dc60a6166de19191182fa7fd6fe936c14a1740681c9a93729a",
+ "chksum_sha256": "c3291da9027f9b1afe7b8f45da8111be47aaf6454bc284bd684f432ad494d856",
"format": 1
},
{
@@ -1544,14 +1873,14 @@
"name": "plugins/action/pnp_device_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dde49434e1a52a27e6e47a73d42d438a7bbb1de50ab1aaa27f05133b611d88a4",
+ "chksum_sha256": "14a15704d9107affcbfb729c2ca27d46aed26c8579c0f32f95610fc2b69700a2",
"format": 1
},
{
"name": "plugins/action/event_series_audit_logs_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8320aa210b599d5e3bec0d58f940034af84ddf75db24f86a06e253741edf894d",
+ "chksum_sha256": "184d1a94fbeadb32da80b2dade77aae8069e0ae30ea58e2f53bbd7e9534dcda7",
"format": 1
},
{
@@ -1586,7 +1915,7 @@
"name": "plugins/action/tag_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb4e51af0d85a43ffad2384537df585b7e737d0d7dcd7b112de0877dec6e0671",
+ "chksum_sha256": "806d28f88f2e51c362a2685ab3ecdaac7ea3da034842e8323000dab24c53730c",
"format": 1
},
{
@@ -1604,10 +1933,31 @@
"format": 1
},
{
+ "name": "plugins/action/lan_automation_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bb2aa9acd57a690a7d53b82cd462d836c82d984015dfcc6fa48f608ba2e7db97",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/application_policy_application_set_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3488bd4678710cbe722ce6a1ddb8d7216d7ea4b2370b93393b405926de340944",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bd66e14da9bad72b6da4a2da612de7a8e076b8d481dac36adfcb32e2f0a42b2b",
+ "format": 1
+ },
+ {
"name": "plugins/action/network_device_equipment_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9b7b922148b75ca971390fe1978ad4c411b21f6afd2572dc67b3ce13218f578",
+ "chksum_sha256": "b2467e4ca06288e00274362406a4acf7ead96b112d1eb49e04dc3af7e1e55738",
"format": 1
},
{
@@ -1677,7 +2027,7 @@
"name": "plugins/action/event_artifact_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e842c6a103ac73287e9a971b7277f5a8c5eb4082688d9aa4850bacbff2306bdd",
+ "chksum_sha256": "90d740da7c5762a17cc809514143a7da4811566566457b7c991ee50fb44f1331",
"format": 1
},
{
@@ -1698,7 +2048,14 @@
"name": "plugins/action/wireless_psk_override.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0dcaa6b9471bba0cd0bb3a1334ab65dc41aff6b96627f5c2569b58ee967e8c45",
+ "chksum_sha256": "0802521a897bc581a08271b44fe122f07199b44c478c8ffd311afbb1722613bd",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d24147f8a24e99c77048c2dfeda86cf8341b57d3eb10e713e9966b06cc3e0308",
"format": 1
},
{
@@ -1712,7 +2069,7 @@
"name": "plugins/action/event_subscription_rest_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77abd5371f55d2761ecfdca768f810e23edf05a31808ff4e2d1f782ac1698ee7",
+ "chksum_sha256": "fd27369df552b5ed650fd2a22b88e1d514ed9597c08cc95c4a44f78685aa2544",
"format": 1
},
{
@@ -1723,6 +2080,13 @@
"format": 1
},
{
+ "name": "plugins/action/flexible_report_execute.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b7866d7180f49a0e3cdf7aa2f2430e71916325064a6f0bea863a9f05c495f960",
+ "format": 1
+ },
+ {
"name": "plugins/action/lan_automation_create.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1733,7 +2097,7 @@
"name": "plugins/action/http_write_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c91fe4cdf627f8dba64a72176fd4a76d8628db887fa5208a3ea5db9179c0fb10",
+ "chksum_sha256": "877aea365a574924a1d120ac3ab63fdb0795d35e7edce33b431b3f45be7e6a6c",
"format": 1
},
{
@@ -1782,14 +2146,14 @@
"name": "plugins/action/event_subscription_details_email_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "efdec21d7da566abd71ea9d01824ec4fdb6cf32e955e365204cef0f7a5e8503e",
+ "chksum_sha256": "7dc0bbe68655cc7f2a4ea9be2277cfe0afd28e5285e1ad1f4a09616ae2975356",
"format": 1
},
{
"name": "plugins/action/lan_automation_log_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12bd4814eabce317225300612d0581b3cdc4b6a1f4f9bb1acbd0891d5a189bed",
+ "chksum_sha256": "0aa4d15a314738c9ae684938c84653dcfe6b3e31516d855eacf4e66d391fe87f",
"format": 1
},
{
@@ -1824,7 +2188,7 @@
"name": "plugins/action/wireless_enterprise_ssid.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0816373a1a03a76cbccf6c1d7a0e0b6e6530e61850edf5a0dd7343b096fe15d2",
+ "chksum_sha256": "8efea76f65508f2c1f1c5068ad2554936efa7278501a67cf63279cfe9ea448e0",
"format": 1
},
{
@@ -1835,6 +2199,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4628acd14dfe59ffdaf909c5d375690047b0feaad0b84c490d0328ac3533eedd",
+ "format": 1
+ },
+ {
"name": "plugins/action/threat_summary.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1866,7 +2237,7 @@
"name": "plugins/action/configuration_template_project.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bcaead03b19afd4d38131060cb38169bf0d5867e269733a4d922ae04b824cb29",
+ "chksum_sha256": "adc174e5fb49c6395a04cd438c55922773a8b4b7a2c1287ddcae41317fd471af",
"format": 1
},
{
@@ -1880,7 +2251,7 @@
"name": "plugins/action/network_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "45a828448a214ec990b58ed3b83bc9f454feb766930359cd76f85c5f3c9fa2e6",
+ "chksum_sha256": "f281cc713e19bb0ed38af2ee03f6afa30269323cd596a7b954fefd34afa56330",
"format": 1
},
{
@@ -1891,17 +2262,24 @@
"format": 1
},
{
+ "name": "plugins/action/sda_extranet_policies.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2e103fed6da99c8875555ff95da9d4a7a08891e9b030e3c496c8658b50e67ac6",
+ "format": 1
+ },
+ {
"name": "plugins/action/event_subscription_email_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef01b120c8cebaee14e4cf67e46e8023e9a6ebed7cb2921d24f14b79abb0ee61",
+ "chksum_sha256": "bdc8e5f2a679ed5d0a25e609b18a3183ef474b89cab806987bc10bfb7c668b42",
"format": 1
},
{
"name": "plugins/action/event_webhook_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "17d8c59c616c36113f771979592d5ee0a3341a1be22dcff36ae097df88b9e765",
+ "chksum_sha256": "630e94c2145dc46258c1ace0beda5298001497bc364d9fbab139ce687333e494",
"format": 1
},
{
@@ -1926,10 +2304,17 @@
"format": 1
},
{
+ "name": "plugins/action/device_reboot_apreboot.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d96aeeeb9ff1917d1e5eed9208a1d9f59340a96e734b273eb0090809eca5d3ca",
+ "format": 1
+ },
+ {
"name": "plugins/action/reports.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "524f07a391c3579de6243907ba008ea53b66de0b3a9bfe12645326be5095a6a4",
+ "chksum_sha256": "5fc37092b020f9b8493c8589bae7d3b7ce538b75b66c2a1f26e01048a12550ae",
"format": 1
},
{
@@ -1943,7 +2328,7 @@
"name": "plugins/action/event_subscription_details_syslog_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cee3d0370cda496a3e94c0f4412e322c7746d8c233dc873375bd3d3d1e60ed75",
+ "chksum_sha256": "c4af63fc3daf5bc0e03995fa2dc24f67999386867b2ea04c6b87c39056829c14",
"format": 1
},
{
@@ -1957,7 +2342,7 @@
"name": "plugins/action/path_trace_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "554226ea2b636a0306c5183a4fb284df784f07b79c4193a150c58701f77c383b",
+ "chksum_sha256": "57c21fdc2e587ad63346ca3aa2a2542e0cce4f57b7e4a4576b94e35a8eff47c1",
"format": 1
},
{
@@ -1971,7 +2356,7 @@
"name": "plugins/action/site_membership_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b806daa0fce4f2dd8517796f0da7eba35fcbe3f0d449699b26ffb923efbea0dd",
+ "chksum_sha256": "280409abd25b69b94ad6bd498e9a67d5e688e43efa39ffadaa2c39165b99b7ee",
"format": 1
},
{
@@ -1999,7 +2384,7 @@
"name": "plugins/action/user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5381b94a7d702556e79ef19efd80addf139ea41fcb38394389a14197b763c085",
+ "chksum_sha256": "e9c21359963499264d5e7f7782672e4d800b9b36b19c8dace1a3056dd1c9eb44",
"format": 1
},
{
@@ -2020,14 +2405,21 @@
"name": "plugins/action/event_series_audit_logs_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46ce3fd077a4c0cb4f254bcfa1e713ce1900cf3965fb100c73058e7e22d51c4b",
+ "chksum_sha256": "e6518ff4c66ca26239d201fc3ec3a9939ad3e2af9fab2067c793c7da3c5c4dea",
"format": 1
},
{
"name": "plugins/action/sda_fabric_border_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e0ef157ac3b9ab621ab3e0501126903b798910f1227d5c2f46e6aaefee049ac",
+ "chksum_sha256": "d5d2e86fb850cfb389ac6d5e76ded2a33f340c4778eb35b39a7757f5a1089b49",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/maps_import.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4f8b40af7d536f096e3d9ebc6f37d0d74a3d1829270e522ca7df4da43d987923",
"format": 1
},
{
@@ -2041,7 +2433,7 @@
"name": "plugins/action/application_sets.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8f9473d1f81cde7ed3da76b2eb57b1b56e79cdea940b30121823bb86a6c9f963",
+ "chksum_sha256": "c5629f7794f39327d5c0e71e730e9d6e4eaad31a2d31d351dd7ae9ec6831a01b",
"format": 1
},
{
@@ -2052,17 +2444,31 @@
"format": 1
},
{
+ "name": "plugins/action/sda_extranet_policies_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8ab4342b450b6fe63684b0a005b6e200a0942448e8f245b8733548dc1e2aba70",
+ "format": 1
+ },
+ {
"name": "plugins/action/configuration_template_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "183830484f8d54d8f60591639aaa12c5da6e45809773c5000f1ae0b6b0e33a18",
+ "chksum_sha256": "c0822e5b9b5aa549439c04c7b51190cc269882a25771c725ef837da1a2215b8d",
"format": 1
},
{
"name": "plugins/action/application_sets_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f78bd9b437edaadb9e59bba0989baa2db39a5765da444e1f9945a33335857020",
+ "chksum_sha256": "dcf888a9dcb9c9203ee2ae772379f1d851d2e9de2a1c2624dd0eaba0302f22aa",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/maps_export.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5b04022db8e64bb87a2b02a6fc543fd49deb273506d65f82e614ad7f4ed51618",
"format": 1
},
{
@@ -2076,14 +2482,14 @@
"name": "plugins/action/compliance_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f19ad1c56edfa95068f61ee1db5d5068152ae117783767d78ea7d2e2e290d7a4",
+ "chksum_sha256": "82fafe3c0365b9db6eb9ff075369d3df8c219d8c7335d75a9042b1672483ad4e",
"format": 1
},
{
"name": "plugins/action/device_configurations_export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2559650703e54a103a0d758b20b6abf73effd4de9c41ce3b75bc6ffd7a0e7201",
+ "chksum_sha256": "10abf034afd3565803c6bd22f871987a51d2f874455ca6cd3dec3fcf7c4af850",
"format": 1
},
{
@@ -2101,6 +2507,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_port_assignments_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "114ce4726aaf0f9023962636e70435d07322a4ac3f806de6552192d91d455a41",
+ "format": 1
+ },
+ {
"name": "plugins/action/task_tree_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2150,6 +2563,13 @@
"format": 1
},
{
+ "name": "plugins/action/wireless_accesspoint_configuration_create.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "14d1df914bc60da02e0a2b68ccca0f2b6a884b0cd56325046679ae2d5ddd2681",
+ "format": 1
+ },
+ {
"name": "plugins/action/nfv_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2167,14 +2587,14 @@
"name": "plugins/action/discovery_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b54ae5d126151be73665f0174fe59ef8bf2084add786d8d548b139422555087",
+ "chksum_sha256": "e3a5de9f5d2595e8262da1ed0cb826bc5656b4af70150a83647e65a8e9647200",
"format": 1
},
{
"name": "plugins/action/reserve_ip_subpool_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e9a3bbd7c7ff26d4d018bf8dea5e9f8780bc5634bd17739bdbb8b49f89bee17e",
+ "chksum_sha256": "419cda917712d233ec0810f195c0fea088ab2de306a2a36b62aa6aa36e46fe96",
"format": 1
},
{
@@ -2188,7 +2608,7 @@
"name": "plugins/action/issues_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc6486505eee136bf1c8cc6a371316bebdc5afb197f3b743bebc96d7b41a6d18",
+ "chksum_sha256": "2f51f2e6e22e784e4a5297fcffb716610e901a83072bb043775e7b34651e5d89",
"format": 1
},
{
@@ -2202,7 +2622,7 @@
"name": "plugins/action/system_performance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5926d765044f104dd3c54ab3a428cc67c734e79f4220a61dfe1bbadd9f2c9ad1",
+ "chksum_sha256": "aca9978d76721e3cc801663352af0953b57bbd027d60c9cd6674b33b0fd38d4c",
"format": 1
},
{
@@ -2227,6 +2647,13 @@
"format": 1
},
{
+ "name": "plugins/action/network_device_management_address_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "54c2f2a19ebc792db3ab3e17b8995acbb099ba5e948072e9ad663f69f58b27fd",
+ "format": 1
+ },
+ {
"name": "plugins/action/platform_nodes_configuration_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2332,10 +2759,24 @@
"format": 1
},
{
+ "name": "plugins/action/flexible_report_schedules_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a45514e245fe2aa3b07dbd39b35396b2bdc2616f8a739ad1bca74842d809c261",
+ "format": 1
+ },
+ {
"name": "plugins/action/sda_virtual_network_ip_pool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1cde647d4403165ece8341c043617c3f402c0bbb76bac149094b7d8bec71c56f",
+ "chksum_sha256": "3b26ca5b07d92ecab4777afd092a190178a89476c07cfb0a3030ac73f39ee2df",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/maps_supported_access_points_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f5296ec5d21a9acb046185863fba648dd2d6b32c9c584e85ba7b00b101dc49a5",
"format": 1
},
{
@@ -2349,14 +2790,14 @@
"name": "plugins/action/snmpv3_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "631335eb762e3905203e5ad892b569e356374bd3444b121277f7c7aa4cf38ef7",
+ "chksum_sha256": "a066b234e198d4772b9c71b8c713c4eed01d1a977d6b7a11064d275eb46d4510",
"format": 1
},
{
"name": "plugins/action/configuration_template_deploy_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b06d448f4d109eb9401a2fc0af98a61298d1b967baa11238d8d5e41876a08fd",
+ "chksum_sha256": "27be488eeac1d42d452f034f881f3675da06764a1d7388532de40edb96818ea2",
"format": 1
},
{
@@ -2367,6 +2808,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_sites_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6b66f251ab61f913ae408588d70c60309b0cb34fde9e5b6f3b54a5e287f5e274",
+ "format": 1
+ },
+ {
"name": "plugins/action/sda_virtual_network.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2374,6 +2822,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_site_member_member_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8d6756ba0582df9a68719a59b69d9c0a7e67ff8756e23cbb3614d44cf8a52a4c",
+ "format": 1
+ },
+ {
"name": "plugins/action/interface_network_device_detail_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2381,10 +2836,17 @@
"format": 1
},
{
+ "name": "plugins/action/application_policy_application_set_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a95ec215dc938e50a9911d8309cd40795a84c628c66114f6ebdc71350b79921f",
+ "format": 1
+ },
+ {
"name": "plugins/action/security_advisories_ids_per_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b582766fd339f124044896e846019de0074fb73c4483722a2bed7e2f5cb6a845",
+ "chksum_sha256": "c1832384a3f9768d30885258b76dce5af01db4042a7f99bc216d1c197548026d",
"format": 1
},
{
@@ -2398,14 +2860,28 @@
"name": "plugins/action/network_device_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4323cd920063dc4e7a91a4a9477acb1899119ff08b34720ea103a302615775d1",
+ "chksum_sha256": "4c3e6cda8e36db094225c094c6c9a402076523441aa4f66a1c8d2d435b31dbdf",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "94eebe1421d1e901699003ae79390cdb7edfb3513e177bd80e1abbc6124b5aa2",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/maps_import_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9afea9f3b66cb0a9dfd404d1125784c1c7e49756704b8bea8e6c6f202ec551e1",
"format": 1
},
{
"name": "plugins/action/event_series_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c299e8c7292de04cb81550c5a339e42670f1ffafb43eade55f62eda077a29183",
+ "chksum_sha256": "4b602a2fe14340b4fee04f56213f43bf4bfa0f55a602261e0cabb18585cfab95",
"format": 1
},
{
@@ -2423,6 +2899,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3837ca3d8180bc4b665d162c982925e69115cd43a6494f76b9733743139753eb",
+ "format": 1
+ },
+ {
"name": "plugins/action/service_provider_create.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2430,6 +2913,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_anycast_gateways_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "492e6806dcb6ccadd0adbc87311b8707b9572c0dfd07638e75a3c91245f77234",
+ "format": 1
+ },
+ {
"name": "plugins/action/network_device_update_role.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2444,17 +2934,24 @@
"format": 1
},
{
+ "name": "plugins/action/lan_automation_update_v2.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f2e505e764d1346f6cd49347b402f3aab904c47b0fc9524914da846d388661e2",
+ "format": 1
+ },
+ {
"name": "plugins/action/transit_peer_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e8256436a2080853e062ba0d35e28c035e25890417f1206c0f73bb00edc5a15",
+ "chksum_sha256": "39865b0500024fe14be25d24f260145ac8b2e08dadeced9b8ca8faf957dc04cd",
"format": 1
},
{
"name": "plugins/action/device_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31e7395e48abedd68fe6088eb0e6124f8f8932f67d58efcb4e6be9ac39aa3838",
+ "chksum_sha256": "7738196d5339861f8691fc0e1d84dd8aac4fcef7042eb1a4362e7d44da2fdaff",
"format": 1
},
{
@@ -2482,21 +2979,21 @@
"name": "plugins/action/event_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0735d2346337a51383eeea16ed5b49d2030287e2c7ebe0a95f18b68f4e879f7",
+ "chksum_sha256": "755da95351c4d77be0328be65f769a16fb9e5fc1ab96c5216ecf8805466f958d",
"format": 1
},
{
"name": "plugins/action/device_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "703f95da4ac9774a5cd5f14ff1eb6f3c5debbffe646537bb5a773a1a7af3c081",
+ "chksum_sha256": "821cf19f2a7a04dbb143612b76a21d26f52e45f33e08a3bf5ef977f60517a93d",
"format": 1
},
{
"name": "plugins/action/netconf_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8653f7f42fd41f7456cd998782e490b36047ce87e184517d22490395717ddd49",
+ "chksum_sha256": "58cbb5426eb168279cdfab8633c296143d0ec0e673b9e6e84b489cab962ccba0",
"format": 1
},
{
@@ -2507,10 +3004,24 @@
"format": 1
},
{
+ "name": "plugins/action/sda_provision_devices_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e33de6953ed35f9fb7a03a303edacac7670d26ee3a8819ee1dc98630535d6483",
+ "format": 1
+ },
+ {
"name": "plugins/action/client_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a98020c492a68e8e21d5f59ab89e990c51461b408940d216523c57e2c919b433",
+ "chksum_sha256": "bceaee6198a6cfe5e4e1b65adf9f65323ad3d32a18f56655d37390b3e30824f8",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/sda_anycast_gateways_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0920c33b9cd5bcf005987d2c7b7ea3025e48019823b1cf10f7372a17dfbe513",
"format": 1
},
{
@@ -2521,10 +3032,17 @@
"format": 1
},
{
+ "name": "plugins/action/users_external_servers_aaa_attribute_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f67cd48d446bd2c7bb0d6b8ec48e4346a5a49e780ac108cf27ce7d2f8e0e0444",
+ "format": 1
+ },
+ {
"name": "plugins/action/compliance_device_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aeb1fde154feaefb1ca4f7bc13782505b2965627e62a256e4be77b407e5f7f27",
+ "chksum_sha256": "3e699f9201179179f6741e069655df0a72471b8351dbacbda3664b57e5f235f1",
"format": 1
},
{
@@ -2545,14 +3063,21 @@
"name": "plugins/action/network_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7d111b471e4947939e5642dd386177e3d58ffb8e1aa2fd4899a24c3bd9bbf33",
+ "chksum_sha256": "0e9db44ff8b36a0fbd54184293fa3ae14859f8117c4a18a849eb97faac11e6fe",
"format": 1
},
{
"name": "plugins/action/network_device_export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d04cc77305e6aa1927e2916f432cf00d3563dab2c9af932947fd73ed2ea4f3e8",
+ "chksum_sha256": "f3e85583029487fe0e9aa298faef51b446e8afef8ae8e707c419271f64dd167b",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/flexible_report_schedule.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "700e79e8a634da384ea6001245b1857a9d83e7c1e38ebda6c4ddd4b0239e9b9b",
"format": 1
},
{
@@ -2566,7 +3091,7 @@
"name": "plugins/action/event_series_audit_logs_parent_records_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87ad711cd866043b16fecb6e7c7f3b44d29b73ba803a93be3e0acd60563e3411",
+ "chksum_sha256": "8547a424c0b33d480bc03ea6674c79a2d3940b99ec30c5f2c9e9f8d70a890520",
"format": 1
},
{
@@ -2587,7 +3112,7 @@
"name": "plugins/action/compliance_device_by_id_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "57ef8a4bbae876ea5b13556242c28f888b9c87374b9631dd9c2230d4538dd7ca",
+ "chksum_sha256": "8413a0b04fe1d957b42f87570e446fb41fb69e7a62c3c68aa6534ba427c32188",
"format": 1
},
{
@@ -2661,6 +3186,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_port_assignments_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6e37a421bcd14f2d0358167fa6ab158edf7b632555024570de4b3f64bf94af11",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_smart_account_domains_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2699,7 +3231,7 @@
"name": "plugins/action/site_assign_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cecb277ea6c72eacbd0f376bcb0b27101cac13ec93ac8b2a74dd21016c4a9732",
+ "chksum_sha256": "486d749a320ff80bb761a69b499aeadb5b4335ee645c77f26873ff05fb7313c8",
"format": 1
},
{
@@ -2731,6 +3263,13 @@
"format": 1
},
{
+ "name": "plugins/action/flexible_report_content_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "37ec2913303c2fe4ebf9f94c81710d4e2bdffb1a8222f4eb7c2fdc8183488ef1",
+ "format": 1
+ },
+ {
"name": "plugins/action/qos_device_interface_info_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2745,6 +3284,13 @@
"format": 1
},
{
+ "name": "plugins/action/sda_authentication_profiles.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1442d6fe37e41273491e1e78b90fe599b826e60cc062f12608f0189c6fdec99f",
+ "format": 1
+ },
+ {
"name": "plugins/action/configuration_template_project_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2752,6 +3298,20 @@
"format": 1
},
{
+ "name": "plugins/action/event_webhook_read_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cc3a50dafb9d0c85198883c2701be48eb0835824cdf0f63408b9860955f58736",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/configuration_archive_details_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5b682f22387daff59abf9165c29928b8e8ff71789eb69d5f8adaee3d0f87f6d8",
+ "format": 1
+ },
+ {
"name": "plugins/action/pnp_global_settings.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2766,6 +3326,20 @@
"format": 1
},
{
+ "name": "plugins/action/network_device_config_write_memory.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "97abc3caa77a3201884ab4a579818ace14f4bc7744d5a4752d70d7421a871ae7",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/application_policy_application_set.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a129f8ffd40bc2966fc336ed301ed659ff4706d7d7c35e9e0b04199dccd328d8",
+ "format": 1
+ },
+ {
"name": "plugins/plugin_utils",
"ftype": "dir",
"chksum_type": null,
@@ -2790,7 +3364,7 @@
"name": "plugins/plugin_utils/dnac.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10337ed46f1639cf19961bef89742250412ea554529a33f67812b5264d29aaa3",
+ "chksum_sha256": "210615748cb65b6c1073ac26b95ca3a33d960150aad0aa7cd698b68938778abf",
"format": 1
},
{
@@ -2818,7 +3392,7 @@
"name": "plugins/module_utils/dnac.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f36fc397baecb8b3936be4de24ebb50c5fc2c7bc3b3ed76d5cde908b715fd0bd",
+ "chksum_sha256": "7327914e0074946b6e27bff680e5f70468b3d6f90d931849abbfa68fe0465c39",
"format": 1
},
{
@@ -2829,374 +3403,437 @@
"format": 1
},
{
+ "name": "plugins/modules/maps_import_start.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b5808512cb3b7f3c5478b336948ec97b2f39d9e18a6c658b0772b2d99aec3328",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/lan_automation_v2.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e5a1ec51037638d3420cce40b580d45834a5c996d271a59ed0687c07a1bedbdb",
+ "format": 1
+ },
+ {
"name": "plugins/modules/tag_member.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "34fdb2053f4dbe10ec14ed19fa2207895d307dba5ea319dff4ec631cd71c589c",
+ "chksum_sha256": "293ae8ee520fbb3ae6fc9f2f5f7f1320775fa45e8d4ea2abac3eee41f86ce7cc",
"format": 1
},
{
"name": "plugins/modules/disasterrecovery_system_operationstatus_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "338fc2d9fb90e5a71ac2d4c6f8cf7d28dc25869f5d2ef6fdf5757e23b38edb4b",
+ "chksum_sha256": "1291478b8d161c3ee38ffee077fbe9459734ebae64dab0ee8b72688f6faf2909",
"format": 1
},
{
"name": "plugins/modules/network_device_by_serial_number_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "333d4019c7c97edb935b5edc2dbf6f31ac01a9fb85edbcc4970cdf10838abf54",
+ "chksum_sha256": "68848c4427ca3971702335d27b842dccc9d4bc1ea3e1ffc5bd141099cc16b88b",
"format": 1
},
{
"name": "plugins/modules/wireless_enterprise_ssid_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77347253283a1530cd0fad6437990a9fc4324810cd8b32ec6f9eb938d3d9d156",
+ "chksum_sha256": "42d258afb7becdbc83e0149e21aca30061d722908236db98672dee32eb2c4a79",
"format": 1
},
{
"name": "plugins/modules/sda_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb754d9e70ec973e1a807c5725d33c0eda80eb8c5c92cede20dfa29865f90822",
+ "chksum_sha256": "570fa5599c95e962b3f7c34f70e8623530dbb6389180d75a699fa44682cb2b32",
"format": 1
},
{
"name": "plugins/modules/network_device_custom_prompt.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80d8f21b58d669ec05a6f5d3e9a0cffa4a9acb12497202d5d7f73fa6f178326a",
+ "chksum_sha256": "960c2a1102ffa82f61ad7a37f4cdfdaa437a98b5b2886d0633a92ffe45805d8d",
"format": 1
},
{
"name": "plugins/modules/configuration_template_deploy_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08013512def343c074bf7c61562eb19f7d032e068c9c7490a75ef4f056a745af",
+ "chksum_sha256": "eb940bf338251258122ef5ae9151cb77aad9cf61140dd148892e1a32b49fe908",
"format": 1
},
{
"name": "plugins/modules/file_namespaces_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f4321c28883bf4ef2f5c2e2138ba553eef2449a64af6fec22f70f942fd185a10",
+ "chksum_sha256": "1566073dd639ed45c385ddfddf7136bc3e0dd5b2839627c55182de825c19052a",
"format": 1
},
{
"name": "plugins/modules/syslog_config_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64fab4d7ca2e088eec6df77a7b34dad99413db65e0d1e2dc1bc1df087a03f958",
+ "chksum_sha256": "a1985f998376762b5bf288a7a3b5dfba467b4d43293e04cba95b0973bd2d97ca",
"format": 1
},
{
"name": "plugins/modules/buildings_planned_access_points_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "913eac273b931f533e656c155798c8cb6f6470cb162d11ee99b4bc8da3494970",
+ "chksum_sha256": "1b32c88dd22a43afb63b024f3007fde265d4781d965eb85715c4c68d665fddfb",
"format": 1
},
{
"name": "plugins/modules/reports_view_group_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "851bf33899e8784e2d86d5c1cfb2bdb5694001ee71acb73e0a8386720e2eb399",
+ "chksum_sha256": "094e78af82bd10b9ad7d0e1290ea7c1b43be4cf2a56ca21cbfb4db3d6dfc4ca9",
"format": 1
},
{
"name": "plugins/modules/wireless_provision_ssid_create_provision.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cded1c74ad3da8fc10d9ada27632cdd8cc36d904fb1fb8150754160d6539dbf3",
+ "chksum_sha256": "6bb0280f07b224ae4657b91e62716e5b72e26f87b3c15d31be067648c7472590",
"format": 1
},
{
"name": "plugins/modules/pnp_device_claim.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "602231c84933ddd1e3ee06b665a6732c14d2789ceed318ab2a45cc7b1d37c554",
+ "chksum_sha256": "c125210d0ac1cae328b3384c0009fce25b531cd327af4eca95a76dbe92514e49",
"format": 1
},
{
"name": "plugins/modules/snmpv2_read_community_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "233bb1488321b2efad2aac315fccb11a4ab4ff577638f6a13f368e776346960f",
+ "chksum_sha256": "b80861abab778ea620422c514c47b17e086a97c8740950807c6a81c9cb7b0269",
"format": 1
},
{
"name": "plugins/modules/snmp_properties.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "696f1758dcdb3a1d7baf721a1d3116f0d1708179ea191a11a3a156e4206234d1",
+ "chksum_sha256": "ac8ac8001dfa460202af11df2db28cf8d3b4d14e127c33a81b6549d063488961",
"format": 1
},
{
"name": "plugins/modules/wireless_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7b6af622e5a439f772afff88f0d4a08b643539de664e06e04236eadd4947fc8",
+ "chksum_sha256": "2324ae7d93ec4e9647b7fd74291f29183b40c1c3a852a89c96a99ccd97f4c027",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/flexible_report_schedule_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4b53084d42cb2873eedf4591231983c526797b9c83098133df48b24be3586bcb",
"format": 1
},
{
"name": "plugins/modules/network_device_config_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35f355bdab72a72b05cccd892b9f9746aca19892ae563b3d2ae7ba34801baf24",
+ "chksum_sha256": "1b3ef29a1890c55efa5c57303a6a722932a17af12d39d422bd5d747b4046c9a0",
"format": 1
},
{
"name": "plugins/modules/tag_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "572dbeaf9d5419df7af96b1d25e4acf37aafafc515b144eb5e2f1718e5e48a5d",
+ "chksum_sha256": "cef1ecb4906e50d53ec2bdae2d658865031af625718ee3240fa01ce2da9b0045",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/integration_settings_itsm_instances_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e52f159087478236697ed20f9e35a4a19992e0162af81d0f44608ca2ee331dfb",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "424c87eda238111102ae3dec2fbfdabd861d0c49e07949e6ddc67444a14165aa",
"format": 1
},
{
"name": "plugins/modules/site_assign_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a24bf8e37412386945e0a1285f305c3d00c3dfe546693451cd0eaaf09ea58fbf",
+ "chksum_sha256": "be882f776ed2c578b815808adb2c0dd7a6b9d7e7dadaabde2febe7a1aef74e08",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_control_plane_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e1fc3c6997610fb5eb58da1ff86cac1c2f12fe29f570b04c21f221d8eff08c0",
+ "chksum_sha256": "3af343400e5f1c3b823e78e146bf4a939cdf1e80f9efde102be6a94d6e064bfc",
"format": 1
},
{
"name": "plugins/modules/profiling_rules_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7ec7710c2e4481aa1380b07cffe76c8d5c5563c6e45b7592a6cad08d3fe71b8",
+ "chksum_sha256": "bd2e6ca9ea074ea2c843059c128b1fb540f3e1789b6854e7297b1a30263647b2",
"format": 1
},
{
"name": "plugins/modules/roles_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de94f18450380a4392ddf55fe23341d727d5c38401e9e4bb145768f1f549d312",
+ "chksum_sha256": "f8d391c0b0738b44b89aa4f6dc844281f7510ea70723978ea5aae9e13a8af343",
"format": 1
},
{
"name": "plugins/modules/task_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c939839ee83d08e2568d652d43cc2dd043f7f4bc45bdf6b643593990b318f5f9",
+ "chksum_sha256": "6093efc36be8b4f4343d6ef744b5de43e7ef88a4a200a4747b0856d2f3b9da10",
"format": 1
},
{
"name": "plugins/modules/wireless_sensor_test_results_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2aba1ead06b80720d76b8d2ab036fdda406b4c85bf12520413d3bdfc07b9401a",
+ "chksum_sha256": "46b64a7a64331898c10a5fd7fb89ca0fd7697764717022db292d08d857cbc89c",
"format": 1
},
{
"name": "plugins/modules/wireless_dynamic_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "060b815aa82c2113c3ce30c15d309e5ca171dc20bc8e03b451e758b5804f717a",
+ "chksum_sha256": "c3f9c8b55e010480550a22839e7f838d591c0003485266ddad53e1be3f531cb0",
"format": 1
},
{
"name": "plugins/modules/nfv_provision_detail_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f22479fd4fd40caef85c62bef81ee9f9d95ec8a38d7fb4d356d333ca9790e148",
+ "chksum_sha256": "c127a988c66b21c5a7247e1a8d979b9c36ef877a08fab185ff1e92b04404c79e",
"format": 1
},
{
"name": "plugins/modules/interface_operation_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "752c7f20ff323991e808abe801061cbb02fbc322ab7c1291707a5689c87c9bcc",
+ "chksum_sha256": "be37eea6420dc1a449f5ab1ab38f416839365dc6bb2e95de3f72423d8e834143",
"format": 1
},
{
"name": "plugins/modules/tag_member_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ed423053ce775e469f32b9082d8fd6a5b4c2a72e122bceca773e2196c5c3b80",
+ "chksum_sha256": "23d4f7a200eaa63d7dbfc4cf505b6faea460b860d7dd123e88234af555d525e8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_zones_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c8631aa9da6ac9e49214b9b79987f2322b7c0b05b3570e08574ed83d3cbbe57d",
"format": 1
},
{
"name": "plugins/modules/lan_automation_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca3459c865c3cfd1300abbbe00327ad89370e0171dc9db54ceea74bafc69649e",
+ "chksum_sha256": "8197fa6dc00c9fbead2d8bd7c082b8f256c7a940da35d5ea1d169c89cc48bf1a",
"format": 1
},
{
"name": "plugins/modules/endpoint_analytics_profiling_rules.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ee09af1955e780d336f71e53f5aade0fef05319a22c0a056a07dd5230c157a4",
+ "chksum_sha256": "45f09d17ce2db9afaa8b9cfcbc1cb38546dc9f4eb65fccdaea15e116e6532043",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_ip_address_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8629fade9e148d3fdfe9c25a4a3692556083be1f351c6b860c0777a2e24ff4bb",
"format": 1
},
{
"name": "plugins/modules/event_syslog_config_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "97fa31b07f0261b63564720aae6185620f35e7fc86a5a154a8580c497c35c7c6",
+ "chksum_sha256": "c05e5943ccd5c4d056e3f251e59307fb71348d9690764b777578b75233cd469b",
"format": 1
},
{
"name": "plugins/modules/business_sda_wireless_controller_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9005bf199793e794b60b820b8a3c35e37d460facfbb78eeac939899ebeb5953d",
+ "chksum_sha256": "c1e8d9804320ff936449280c448b92174b73d545d65cec956cc96467c5eb5aea",
"format": 1
},
{
"name": "plugins/modules/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc5b1bd717058796844b4c90dbe087fe9b5161b3addd2b73d52bd52f98e29766",
+ "chksum_sha256": "7db33186c799a0eab740c24e8dea373286ecf06ad8089ca3390a4fe759b92bad",
"format": 1
},
{
"name": "plugins/modules/associate_site_to_network_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "602abf01696377306e1a74e83b6fc749a3438adeb6531133a2a57ecfa7325e70",
+ "chksum_sha256": "6e6f2d156694ca2983b2a2510002e430666b25e505853201e06cd7b6a919ac92",
"format": 1
},
{
"name": "plugins/modules/compliance_check_run.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72ff23d614916219c240716a8201b6db09a5948defa6ca3f84b90bcc74c619d8",
+ "chksum_sha256": "22959aa730832a31a14c1febc5aa1b62b1d0a5b73de92e55c7f4eab09adc1dee",
"format": 1
},
{
"name": "plugins/modules/pnp_device_unclaim.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4bff9d9b35533a014fa68451e2d3ebe6ab2c7298b1c629ab49f78214e0169bc",
+ "chksum_sha256": "54b65858115f65b0e82993060dfba6a886ba1f59e02d473249b0b421eb6c64a7",
"format": 1
},
{
"name": "plugins/modules/interface_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d154bfc41b0268be60fc3bfe934d162d6d3c7889f4212c138238fdb59beec62",
+ "chksum_sha256": "d408922129c4a562c8d0af15cd17224b901526797f22cce4da5417b220f95bcb",
"format": 1
},
{
"name": "plugins/modules/lan_automation_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "01ce3ce65cc63c09db01ed03b67ec8c6b3a617e9cc47aa6c1cf6315ae3ac7a60",
+ "chksum_sha256": "db330f1b086f3274f08a1128b06e3e3d253f021fbfadd9d0a00d95f2ce9b4e03",
"format": 1
},
{
"name": "plugins/modules/pnp_device_authorize.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48371d790ad877e32f1397e82d42245bc66da1558dade583eca97e303d6f4f2b",
+ "chksum_sha256": "79333c9e79527c632dcbdc915a9fdd531163a42979c5b8d155f2f98758a9ceaa",
"format": 1
},
{
"name": "plugins/modules/task_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96121154401e39130b0faf0da4714179d91fac8c54ac7b7e0e742a2e4f3bb3c7",
+ "chksum_sha256": "6e6ee28e7fc7e7d94b81b9401e3e5a9fb1cce551c12582264213a743657c572c",
"format": 1
},
{
"name": "plugins/modules/app_policy_intent_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c62d8cfc4d50b9dad0c73184b3e10acfab20ef9267cbde21dd20ade5f913e98a",
+ "chksum_sha256": "b92253ad46ab4377bb115d13e9779c2752a92f4bb8808d49f1c577c036da9bab",
"format": 1
},
{
"name": "plugins/modules/pnp_device_import.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "60538898d31429714400e1bee616715662a6dc0868c10fe642aaf2c4d8804411",
+ "chksum_sha256": "2251ce5c40cb4991d9ddbee8490a5f86cb07c6c05ba10c2a06f62bd826ba4a56",
"format": 1
},
{
"name": "plugins/modules/authentication_import_certificate_p12.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cddda97a5a8bb76dfb9913c6ced565b770c5c95b33375e626eeab2f1b588be94",
+ "chksum_sha256": "7d5e988ba88abc1dd743e4ae2fd2b2fcf74103bfa2f43c4feab9ba9bc4fab326",
"format": 1
},
{
"name": "plugins/modules/sp_profile_delete_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0111f60a0dd6a58af385bf585064db9679166cf1f2636cb820308791ed2797d1",
+ "chksum_sha256": "f5733f949fc83bc8beb42d3e53fcabcd77f1fd8c629d44c2d1b68eebae89f90b",
"format": 1
},
{
"name": "plugins/modules/topology_network_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85f8527871c7e42a00295658f92bb184d284a5755b1e5ee4f21c4d9a3f8b449d",
+ "chksum_sha256": "254f45dd294fb349f6a43fb507209e7335724137c64c77cd0b0bd4fb66e8a118",
"format": 1
},
{
"name": "plugins/modules/wireless_provision_device_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "097a942722519fd081a35577b59dd618719156d3cae7b1020e4c089d7836c191",
+ "chksum_sha256": "e333a3937388363748bd24069ba686d4fa7ca79a53900ff3d188d6ec108f72a1",
"format": 1
},
{
"name": "plugins/modules/discovery_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd2feb16fbcfe5032e7e1728e28bf8b350ee9db828d94c5aaf55c4e9a92b4e57",
+ "chksum_sha256": "f7493e81adec79cd0ef648b6e98afd7532b6edf60a84d2581f85e3a7a670fea8",
"format": 1
},
{
"name": "plugins/modules/device_reboot_apreboot_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e22d725706aba70caefdc1734ffd4fc84f98dd4b8f06813147ae00589bd973e1",
+ "chksum_sha256": "2d9b8f8ca8dd69004d4e2f831c3ea3d86f9f516eb6ea6d103560fe9b239be4f1",
"format": 1
},
{
"name": "plugins/modules/discovery_device_range_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5849703f79a8c63a95508fd8cc76c37fdbd7ddd554f2d446077f17f06f80ea03",
+ "chksum_sha256": "48632d2e8fee67e70dfd0afa383ee2c66f272042267b89050f3385344d9d8a1e",
"format": 1
},
{
"name": "plugins/modules/event_subscription_email.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b20e8d01d362ea518177479564b41475bb22390529bd9e146a43d0b3574cc7f3",
+ "chksum_sha256": "f50cd89d0ed3d65a0d8419830451aa73f6110c5ffea5a78cd30612882be5b2ca",
"format": 1
},
{
"name": "plugins/modules/sensor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61a1f62c0323db3cd354dab37154431035575c4d30f56de9526fdfa0e8e95282",
+ "chksum_sha256": "bfb3b4ff87c0782809ea30631235593328520f61e6fc27fff32a25c4fd00d870",
"format": 1
},
{
"name": "plugins/modules/integration_settings_instances_itsm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4447c1484dad5ad8b0c6e581a5a79e8a00cd25d5c919960c83a7bc8ec73f0c70",
+ "chksum_sha256": "4d6e08d55682264f52da7cdb446bf67b77baaf3fee6cd848f568f0d1e9176756",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/applications_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2f0e1e14dc340a3fdf34fffc8ed2a0d76e0cf10bf604205b10cbd7239c8ea348",
"format": 1
},
{
"name": "plugins/modules/site_design_floormap.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d602258e6a2793627b1288a73251242d9ffd1ad8e50a0a61414295ea4c77a53b",
+ "chksum_sha256": "53837387ceb3fd1d816eea59960c5ff2cf8f0e7c0e83ecfefd10313b85d9d6c1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_config_task_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1f8e99c041c2a7ba2ebc561fa117b3b46cce4704b71bd077ca7338c1a82a3eed",
"format": 1
},
{
@@ -3210,77 +3847,84 @@
"name": "plugins/modules/global_pool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b5942ca9610d638861c899a28b7edff86176ef90ffb6aa84c74d07398cc12c",
+ "chksum_sha256": "f5fcb933c699b6b7072faad57c8970e710b593a3bab3d48142172e9a8714a40b",
"format": 1
},
{
"name": "plugins/modules/device_interface_by_ip_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "247b37667e6600267377a09c95d468f263cf393c9269cc3fe7352a86391d7b96",
+ "chksum_sha256": "e01b6df227a787d44f6b23079d80b50c9d1cb673d41a4cd3fc0f7cda98456ef7",
"format": 1
},
{
"name": "plugins/modules/network_device_chassis_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61e6ae2c6c39475d4e508591966135ac0715bf438868fa6f2421738e3d460899",
+ "chksum_sha256": "fb25bc3ae6d3cae81d227521ea9e11e3fbfc61651b97ca4586cc64b693196bc5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_anycast_gateways.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "57728887a2f596ecb47baea194c5a24673f35821cb098f621d6fd0eaebb23620",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_authentication_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba207e06e273caafae8ee2031bf163b960f08dfbde7e5211489ea78dafd1d1f5",
+ "chksum_sha256": "d3a1d38b2b209fc2a14352a7f4d31b69fc3810e3fc8b7ae9d5820ae3fb1cc137",
"format": 1
},
{
"name": "plugins/modules/client_detail_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50d9012553e45a39231c185123ed81d2bc273ff0440f9231f88578efe4ab301a",
+ "chksum_sha256": "e2553b22055b2e4c9be6ee332bc5512f951ecb3531dd00a834692e86386ff6a0",
"format": 1
},
{
"name": "plugins/modules/license_term_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "93698c9894a43757eef2431a0db6c00ad611440dc70aec7169ebca0cf5875f61",
+ "chksum_sha256": "2c6f6b31148e8bf227b5a1abdb737e6f5ba93d17a78112496c55082393150611",
"format": 1
},
{
"name": "plugins/modules/license_device_deregistration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ddeed833b64145c5d88b04421115f39d30effab218656bdedeb828c869c50764",
+ "chksum_sha256": "c1871c4b62e628e6705434d49f598092d9e17c23bc7a23f84d69f5e35dbc6b72",
"format": 1
},
{
"name": "plugins/modules/device_interface_ospf_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed30d8d2a7ee30d254452ca157bad1f22117a286d37fe9bef525dbd5582c35f9",
+ "chksum_sha256": "ebeda8983b328305c25005d580140b192e32361e4786ad15c521e548a26d4a60",
"format": 1
},
{
"name": "plugins/modules/sda_virtual_network_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35c830cba1f0a91954ae39b856aecd01ad51dc0d5c1d10d891671af5ed6e4c6e",
+ "chksum_sha256": "cd9f4dee572b01fd922d4b3f3caca47c9f804a95faf340214ed3055313e91703",
"format": 1
},
{
"name": "plugins/modules/discovery_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9503c3fbb9a27e54e0b290ef82ac1255de261218561abdf11ff0204bd7f4517f",
+ "chksum_sha256": "8d40ae97e00af4fcfad9bb6e9bfa3bbce863cb745d3517ad089206c8de7041c0",
"format": 1
},
{
"name": "plugins/modules/business_sda_hostonboarding_ssid_ippool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "02000c3c68ecb37e768d5938da1bb59a84df7a7e252684dc80f5adb287df972c",
+ "chksum_sha256": "d6dce40ab4c8397397f4cfa5389bb7fa927d079c423cfaad696d0d4eedfb26b0",
"format": 1
},
{
@@ -3291,122 +3935,164 @@
"format": 1
},
{
+ "name": "plugins/modules/applications_v2.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c635f18bd1ea53283e81747c97df1bf5c83031ed62c7a5d28bfd2547f590c6e7",
+ "format": 1
+ },
+ {
"name": "plugins/modules/execute_suggested_actions_commands.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f66085b17d99d7819eda3961cf0cfcb7b9dbbc317908c0f01dd61898236f203f",
+ "chksum_sha256": "c3bfad45ac4505b8b5261b81bcd554706d82934d4ed2d2012364eafcb73031ea",
"format": 1
},
{
"name": "plugins/modules/business_sda_wireless_controller_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b08d11477f17e38c0b2227c8ce36413c494d9a50a6a076ae12859cc504d4abee",
+ "chksum_sha256": "4c7bd7a8b0356175fdec06a2c5827b11caf054a36b082e7e17d82e4f5d2fa9c1",
"format": 1
},
{
"name": "plugins/modules/network_settings_workflow_manager.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e72eded9f5e28624682f928bc3791f830e6964f28d4bb57b76fe8671d44f5fe0",
+ "chksum_sha256": "d0b434d8c20f88f34e3b79d3db80e571502852f6660c7a9ed1612e45ab1ebca0",
"format": 1
},
{
"name": "plugins/modules/wireless_provision_device_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9db7025f8f65a52d3f0752e767c2c1c9613a80f8007b4e78315fa70e945c6d28",
+ "chksum_sha256": "188adb83a2c3830d0e89a34c48d282727ccca03ff903a3192cdd12d8356402c5",
"format": 1
},
{
"name": "plugins/modules/network_device_sync.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78f9ceb7e7b4371e85c3cd52f71c6e6b307ffd23cdfd37111b544928422757ab",
+ "chksum_sha256": "ebd4639b7cd1bdc8986e4b5ad36adccce96116ec60ffb9a4be39958cfa468559",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/users_external_servers_aaa_attribute.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a627387baeed5b9e6d887baa969797119c7162caafefbd2418f6b8ee5f78c7b9",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/authentication_policy_servers.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "10a70fab94380f4483bb332c9d3221660fd7c1ec41b90c39a70b47c903546c20",
"format": 1
},
{
"name": "plugins/modules/wireless_rf_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5472514c55110035ba8c47f60313201f8b49b0bf9c2b2bbac47c7c5dc08c5cbf",
+ "chksum_sha256": "8a8fa3f6d5fbda776349caa70c6457b97f290ca53d86fd486e15c9dc6b43a48b",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_site_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "508c83e7ef73c711338d199b4b05eda7e94bfc4831958ca8f1fcfd62a77f8b5f",
+ "chksum_sha256": "caba487dd7c297eb00d8b298cd5171054c409814ed3fc3b78738648cba07777d",
"format": 1
},
{
"name": "plugins/modules/event_subscription_syslog_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "37591b77c22a26b1ce79bba87f94dc5e1c96a94da9ac5dd797a74960a646bb05",
+ "chksum_sha256": "1d1e6714dc48721b64b4c391ba1231768a3bbceff959091e0da0781a7a72a7aa",
"format": 1
},
{
"name": "plugins/modules/configuration_template_import_project.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99a2b7ba0b8cff781db383abcd667cd23d02d1991b27deada9c2802f56870bd7",
+ "chksum_sha256": "798862d2f4b8f0f5e27bbf6986691a86d162d17bf4c5ed440f52adf2e1708a81",
"format": 1
},
{
"name": "plugins/modules/topology_vlan_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70aaf59cc3a489a2f4aa5d0b052d7aeb89c09fc4435c0513beeafffeaa0e99de",
+ "chksum_sha256": "686d9c676cfa7becd0bbbd45528d492d56d9aa3907f14347b820255b12b1ef6b",
"format": 1
},
{
"name": "plugins/modules/lan_automation_log_by_serial_number_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "205f22f0ec447a840cafa8b219e22b88636967ae405420cbfc99a99d9c64bbd5",
+ "chksum_sha256": "19dc77958757a2ca44c902143cb56eb0e7650446b8aabc2f133daf51d7300f2b",
"format": 1
},
{
"name": "plugins/modules/service_provider_v2_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7887050a78e4710122fa71fa4f758063a83b3ea50b6286541dc4c7cef6bed68a",
+ "chksum_sha256": "d606f20174aa7a9087fdcdcc8512332a8267e24b40c08ab8bde66f8da52e44ac",
"format": 1
},
{
"name": "plugins/modules/site_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b20f00dd0a3fdd8e623c90781e001de3488e082ff7f78d3d0c2a78c2e8cb7dc",
+ "chksum_sha256": "3126044095570bb15849ccfd605fff6fff82ce14b57ebda0d2d445fd63399cae",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_extranet_policies_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4429abe08ef560bdbc9111394a4acfe5d251de44a95c5d187d79a1e5f064f3cd",
"format": 1
},
{
"name": "plugins/modules/event_series_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8ccee65b615913c707e66532d126931f8bcac3fbdea044d97151642c57d5423",
+ "chksum_sha256": "0efe7c68b1e1fef38467413ed99a474c8edd5cee8d3df1b29cab2c1c290a6f02",
"format": 1
},
{
"name": "plugins/modules/configuration_template_clone.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d717330707d9fe6eea2482a6108756b83227fc12c89d0be6133015f5cda28d9",
+ "chksum_sha256": "4d1d01f173ad0a4cf5d1f30f00054a50f730c2636056253cca0704746d85ac78",
"format": 1
},
{
"name": "plugins/modules/swim_image_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bdb0d985b3d8e3e87099c574b5c06770159c6b937f86d6aeda7e7d1b6b80cb94",
+ "chksum_sha256": "411c0f081ef6c15f649b8acd4bf580e9cfe1978d4d577cd88a9eeb17374a5552",
"format": 1
},
{
"name": "plugins/modules/sda_provision_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c368e9b1b79763ca9c1c0ac900b575b13b40463165301e4a50d2505c46cfa4b8",
+ "chksum_sha256": "bedfe37d8e28e83f5b4a061d641479b3669d6a45b7bde4394379f055ed62ee14",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_port_assignments.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dc03f7e86d0816e6f2b74a1aeff6df9725909998b3f150de439befb032bc3cac",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/event_snmp_config.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "074a5b79c3924e4a05e0ce90d770f513331d1b126b8de1e6d67620702179af22",
"format": 1
},
{
@@ -3420,133 +4106,161 @@
"name": "plugins/modules/system_performance_historical_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db43d9616769582ea8ac005948a39665f274157f4f8e165361f2541a9635de43",
+ "chksum_sha256": "69f70e09e377b99ae08e624c38938ffd19f6f2f8225b5b80dd5663a7248ef434",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_control_plane_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a01c251bfa7e2011ccd7d311d4dd0c85428790ea78cb3180f878004d22551241",
+ "chksum_sha256": "3d10a1b157b3d8d9c2305390fc0f9972048640759b28ef8834548e156cfc7757",
"format": 1
},
{
"name": "plugins/modules/eox_status_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64980b0ccb39275f2b8dd625f90fdd001625be11fb2d4342f60e2fe420af8df9",
+ "chksum_sha256": "5ebf0483037e1f4b44f7ed04dd827527f73beb1775045964a41ee0ce2eee2bd7",
"format": 1
},
{
"name": "plugins/modules/threat_detail.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "865c8e83860d3a399ae0420597636dc67ae83c826d1ca2f2d40eddcc4c130def",
+ "chksum_sha256": "9372754b0c98e8668e712b1566f41b628036edf529649a04ac151038126a541d",
"format": 1
},
{
"name": "plugins/modules/configuration_template_version_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1b7aa1f6a2ec4ed5ade524168e4c6804d0694e29f5cbf9acab3502ee4e7ad0c",
+ "chksum_sha256": "84830e089cede3627965c2e2c3432d71ade4ca505b768bf67a10db317ba2c85f",
"format": 1
},
{
"name": "plugins/modules/syslog_config_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26816bb5140d94d96173d69a16b49a0132e6febbb3ff3eb1e856746a77eca271",
+ "chksum_sha256": "31f034158c9299818d576b876fa061cd18a2d851c8dfb73320eae70649df5525",
"format": 1
},
{
"name": "plugins/modules/nfv_provision_details.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d59bcb7f9b05d8a5268472ee86786c1dfe55d168c22f05bb08eb66fb32ceb53",
+ "chksum_sha256": "bea75903a6d37c513b62d3c02e5ab82dcc7b6950a9d9e76b40248ac489dc83ea",
"format": 1
},
{
"name": "plugins/modules/event_subscription_details_rest_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39f3ded1e5b9d76cf7e0ea7a54e73bea15bcc76a90ebdfbb78c5295ad86c19d5",
+ "chksum_sha256": "23176aab3b140c79189c5b34f8efd0d38d5c80da393b01915e90591372f00760",
"format": 1
},
{
"name": "plugins/modules/discovery_range_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "584f18f492f252b82331acf732253ea92bb96fd989cfe2545a73884106fedc4b",
+ "chksum_sha256": "8c1a4ca2f54a53c1530358d0dc094d39b8c1e6238aaf667149cf2313e4e489ab",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_provision_devices.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "86474a14141864441273135ed1b8cb5c11aea9c726e22e9286fc601525fe2cd9",
"format": 1
},
{
"name": "plugins/modules/site_design_floormap_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "22f432894c23601c08b42da3f77b42ca341e3a8075c249c83a046ab97e999404",
+ "chksum_sha256": "33a35685e349b459ef23ede17ee6b3545625d56f36222cadd883bc5f84f00ff3",
"format": 1
},
{
"name": "plugins/modules/discovery_job_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f623ef881ce3592e32ea25604dbd075d837b9a5c1e8c1997178bd0474c425cb2",
+ "chksum_sha256": "329b0e18edd10fe5c8195f1cacfefdab6968126282a6fc02f974c8134131bb23",
"format": 1
},
{
"name": "plugins/modules/global_pool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b377a33bab9641cb9756c14252cef50419eb6b49f131cf7cb57195a40a1d096a",
+ "chksum_sha256": "c9742585ee6e2afa9760802e605c9a2083674772ba85377b6da840397780021b",
"format": 1
},
{
"name": "plugins/modules/pnp_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "69fd7b9a578ff1b2aa1a3832a5b15caa0f53794a28881e2dd210cb8d2a54544e",
+ "chksum_sha256": "8bc632b159532c0d8af23b76ff5e72c8ffd8960fc0215aecb9bd197be33ffeca",
"format": 1
},
{
"name": "plugins/modules/device_replacement.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76619f7b6f994527b0048eb03b6f197872ae889bb692fd6c5a67ce373610d405",
+ "chksum_sha256": "ee1db7de94d2c25d135e02258279fe4689850eb8fe1716b698521ed95acdbd46",
"format": 1
},
{
"name": "plugins/modules/sensor_test_template_duplicate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bdcbee1705c8ac319ba71af6725dad6164a78605bdb0da4ebe833539701f196c",
+ "chksum_sha256": "66a483793dd384170a73e0084696c871d81009a2099d3b9db1421e59ac9117e8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/maps_import_perform.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ebc2e0bb625d2ed3b393842ee27bcdd3c94259b003e346dfd2c9b5f47b06fa8a",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "17b50b5d0aeade050716a0013adf29ed2fa259e94c68268561031550c7d51db9",
"format": 1
},
{
"name": "plugins/modules/wireless_dynamic_interface_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "47d74bea911de6d0409ee28fdc3a86dd205102460acf1dbe0dc057ccdf09babc",
+ "chksum_sha256": "debb2c6f6916493013baec1c86b8d38c904d5490b94665e362184523d1d2f0cc",
"format": 1
},
{
"name": "plugins/modules/discovery.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "831619571aa2f0c113400c8fa930b69f305fda6fbb8621dcb8dbcd871cd0a279",
+ "chksum_sha256": "0f52074c2e395f5323246dad0e693458029fe7a829395afcde3216563831ac61",
"format": 1
},
{
"name": "plugins/modules/wireless_accespoint_configuration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "141d3a2c603c8b4f62466e0e6db45672bae260ae1a12f487cfe2c20026dff748",
+ "chksum_sha256": "6ea9a71ea0f785d0e211ac0349c1e22fabda190f225faa283798ae83ae68d3e6",
"format": 1
},
{
"name": "plugins/modules/qos_device_interface_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3a84c0cb694ca3c097b9ff30bc976aa135a2ac91181afe2e38d5b187cb8530d",
+ "chksum_sha256": "cdecf64e33ddbc2161210bfc5cf77c83f4ab15e19813267076ab3e5df4eddaca",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/roles.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dd450e7e97e4da64ae002be1121415494e1e0ca8b4016a0404fbac50028548fb",
"format": 1
},
{
@@ -3560,357 +4274,490 @@
"name": "plugins/modules/pnp_workflow_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40f26611c60bf2ec8533a82785eee65bb2a247be209105a002f1cb1dcf7081b4",
+ "chksum_sha256": "96bbd0188ac4345eb0e52f593a17f3a64564a8042f87be10b453f76ae656bb8b",
"format": 1
},
{
"name": "plugins/modules/license_smart_account_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd3bffa15d967017ca8313b2e75fa3c467b9282da5baaaac7360b5be9319a5c5",
+ "chksum_sha256": "1a7e1644b0293f7de5d9d88c7aa43d24365b61c0e718df8d632bce6fdbbacb1f",
"format": 1
},
{
"name": "plugins/modules/network_device_module_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d99ee768d2564d3283b7693b1a9f9ffb30bba6647022d1d4cb41daf1b37cfac0",
+ "chksum_sha256": "6561a3f14e87b44436746e0c6f3a18b648c29f3e39bb7058ab443b35e9ab9a99",
"format": 1
},
{
"name": "plugins/modules/assign_device_to_site.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96e597a643befd67693dba086d91ae2fbae6986a98d2956ed112b8dc47ad6378",
+ "chksum_sha256": "356aab737627fc509ef09743c4a99ba39f2e429caf4c8887645935bcfac8342b",
"format": 1
},
{
"name": "plugins/modules/event_config_connector_types_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0cddf97005b6d88a1880b132aecb16ea2e11cd52e1a6fb198f39028a3b961aa2",
+ "chksum_sha256": "dfd2711864e7900a01c49da81b83a49c9847b8d9a55c7157da5dc590a5208b21",
"format": 1
},
{
"name": "plugins/modules/network_device_inventory_insight_link_mismatch_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d9fc23291299a4966e404b79d9df1bf8c30b038637111640b565c854a486d7c",
+ "chksum_sha256": "d9c3b066a8820caa6ccdc4e6b6c1d8b5732dac1b6ae426bccf96c21a2d3593c1",
"format": 1
},
{
"name": "plugins/modules/task_operation_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ada3350d24c3f06eb24f7878a6d47145d565b9d87dfb5474da61a7c5e592828",
+ "chksum_sha256": "6a48adcbc6916b2be2656cd5cf9ff7393480ba87c58335411d7711d3d975f71a",
"format": 1
},
{
"name": "plugins/modules/site_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a8389054876e4e0d52709b43e47a559882e808cfae93b3ece582d2854d7ceaa9",
+ "chksum_sha256": "bfc405e79e54558afdac379bbc1a1095155e1b7c9fda7c04f569e248428f7fa7",
"format": 1
},
{
"name": "plugins/modules/credential_to_site_by_siteid_create_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fd1eb16206ca007172456bf66824edf8cc86a7a9c21cd40a4497f2ee15347ca",
+ "chksum_sha256": "15414564d4e48a96f9bf0597782dbd0d33e97839c58f3320892335cb03111f9a",
"format": 1
},
{
"name": "plugins/modules/sda_virtual_network_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de5463a3e5a8c767a4620f2b0d6959f03e615c1662d9d1277e421df577515ffa",
+ "chksum_sha256": "5d5ea6c7eb8b6b474ea9ad89017a7070764b886f137090bb49e7a43785d70f13",
"format": 1
},
{
"name": "plugins/modules/network_device_by_ip_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "753ed26c0934c1cca81a58079b45b8a57e3987f8f7428c722ee906b8ffc35d76",
+ "chksum_sha256": "90b9be576438cf7378dcdf3e418224bb49939d4b94e1e65fc6815f2ae5277bf7",
"format": 1
},
{
"name": "plugins/modules/network_device_lexicographically_sorted_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ffa1d43fdac74ed390659d347cd2914d48e622ad97edf07136caf763d9bdeb3",
+ "chksum_sha256": "b9cf6ef22f58fc9616da2a296931356e2a7ba5b6977bba08edf809fe80746f1a",
"format": 1
},
{
"name": "plugins/modules/pnp_workflow.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c927bafdeaeabcee4b03392cb8f833cee992c06df87392bc9850424a9a36f23",
+ "chksum_sha256": "b6df76e37828d532445f4b480e8fb4cdc8ed7e9e395fc04cedc87a4d47822ebe",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/dna_event_snmp_config_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6439fc3b4521d53d58287cdb2ed9846eedb6cedd61b77f64fae25b58fb978289",
"format": 1
},
{
"name": "plugins/modules/event_subscription_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3726450f73d3d9a369f6e13df3dfef8da2e6d13aad9070a8bf8101f3353f5ba",
+ "chksum_sha256": "7c86ce322e75ac47f1e48aca37ec455353e7685c8b58c13fef038465c742091d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_provision_devices_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db0352fd812ff11c38a6475a132c62b4da87dd17de0579d2ca1d6f4dc4f32f1c",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/site_count_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aea2aafe7007b01340212c8190329b27fc04e247783c450810f61c88c1b73284",
"format": 1
},
{
"name": "plugins/modules/topology_layer_3_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "763bc8e38e403d4bf06b8168c4eb875928478eba61089dc829de0e0d91160895",
+ "chksum_sha256": "432d3d5cc30a1bfe40aeab059f4dea1cfcd6e771f17f4142f47f37d79a53c8a0",
"format": 1
},
{
"name": "plugins/modules/device_credential_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d82073fb51ff17af9d0e94ae606ca0a20cad4bad8d0ccdd1fa308a4b4cbfbd8",
+ "chksum_sha256": "bbf906d70b1c55823af6632ca00786dfc21cf8e6f8827318419c6c390692bc7d",
"format": 1
},
{
"name": "plugins/modules/snmpv2_write_community_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd7292c2440f6f5053ef12fbf76b0a1b27989ec6263c20f2664504f88d867582",
+ "chksum_sha256": "b5c508a5bb5e5859a88c6ca1394960cf4a1a64697194b1bc39454ef5c261be5c",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_zones.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1a50f1bb52b8f4037fd7cb1f2c350bec571f42ae089ad84c625a90753c9b6fdd",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/auth_token_create.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "22ef061588ed01e92f063ae933e0c62c2edb8e4c3a1ae00d4841adae61a289e6",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/flexible_report_executions_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d9196d5540b750d15fbec607e8bdcb9a9042d47a602ecd40b2164cf3992ea127",
"format": 1
},
{
"name": "plugins/modules/global_credential_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7eefe1e65619a4de2b2075074b53053b0c99dfa795c1142b28069d4fa71826d2",
+ "chksum_sha256": "6052bca1a12d5d9102bf554cfd6b33e697ce167a483a5b13d5229a875493ddea",
"format": 1
},
{
"name": "plugins/modules/wireless_rf_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6ca9ea355c4df2052d03af05e398cf2d13b971aa7cb492c9fe69069d5fa16db",
+ "chksum_sha256": "a9e3c64f4b5f4da51d7bd5f424f39f5e1fbf5bfb31153d2f6b8a2c53a9553108",
"format": 1
},
{
"name": "plugins/modules/applications_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "463b0623e0e944947016aef514f897154ab1eb1848c1b91f9a9a73c122757c50",
+ "chksum_sha256": "9634154beb900243b5e4a915009b58f4fab14e10d5cb8032d49e967d306ed4bf",
"format": 1
},
{
"name": "plugins/modules/sensor_test_template_edit.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4274322757ed2a99ba127d28c92e6cda2f49f4cf031af6e97422a3a2783170d8",
+ "chksum_sha256": "dd0ccf996abf02d446d4a64cdcf49937aa22066bf2c435dc920866c4887323c8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "70d92488bd22c4ebb2663144fadd9f3233312d145676bd679ef37691c51df925",
"format": 1
},
{
"name": "plugins/modules/network_device_stack_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c55fd06561816f1bf354e19c0f8ef97fc2de5d21fe57d30e9953b7caac320df6",
+ "chksum_sha256": "5b987f8de84e834362ac938a1e573961e20086185969e218aa3537f83a8dbe34",
"format": 1
},
{
"name": "plugins/modules/site_workflow_manager.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c0530303d930f81006f318ad9e9df5569163153476b53ba4c0a0fcffacb2b1f",
+ "chksum_sha256": "081a808358032c7df21b67d357676405e3150728f067dd7f260b428a11828ec5",
"format": 1
},
{
"name": "plugins/modules/global_credential_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87ca0d16efa2b308878513ca6be96c9d13bfc99c9f5218ca3742ddbb8b83fb60",
+ "chksum_sha256": "d0a57d88313209e35f9f4379a27a192bb71f1902d1e0da9736eea979fcb44874",
"format": 1
},
{
"name": "plugins/modules/sda_multicast.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "399e17ea2b5769b4096df6b0a13a2cc0ffc7de4a4f2f75afd7f3240a3750884d",
+ "chksum_sha256": "11970baa7e05613c0f1fc03a0d603b2caf1b7eed2b26689a167bfe5cdc439b3c",
"format": 1
},
{
"name": "plugins/modules/sda_device_role_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2060f3f260e3c7fca652b67872a6232ab819ba5d773c8a6c4d7036aac7de39eb",
+ "chksum_sha256": "5264e4d478d5f6b48ade448280552b77fbf889f303a02c8d0bc19c52b69e8767",
"format": 1
},
{
"name": "plugins/modules/pnp_virtual_account_devices_sync.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1c8fed58d9c4d513773b7948e03b27765aeceff9a45a74759c07b14af8c4bc1",
+ "chksum_sha256": "fee5b3c30c248d746dca9fe3e466542ff77d2b32f852acbf05cfce13c910005c",
"format": 1
},
{
"name": "plugins/modules/eox_status_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a28ed446010d97b1f21516a3253120c0f0099619c7d033ca97071b20acd7988",
+ "chksum_sha256": "9ae1abdc875885302014254939ef5dc5547ab1b86e47b5918f93d0aead3444b2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_user_defined_field_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "754e3b5d6432d2f49d9ff8a24f0fbd1681522a4e4bfaca817e576d7186b74987",
"format": 1
},
{
"name": "plugins/modules/accesspoint_configuration_details_by_task_id_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5d2c060f6664ccbf75f82150a414c8a6b18a6eb801346041b3233bffe6f97f6",
+ "chksum_sha256": "ab68a978d3fd15364db08f48ff123a1766e8f4cebe893e6bf3df5abe20da1da4",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "65bab8e0f899ab4c74db775172ed66e9f198974905808d1bddf384671840546b",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_insight_device_link_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fac428e755d90868d9f31e015d84f76392c853015ebcdb18514df1cef32db25d",
"format": 1
},
{
"name": "plugins/modules/applications.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "426447a41c87ed84cfbd9c3ae0830ddafcdaa68a3c6aa732c74a27026e936daf",
+ "chksum_sha256": "62045e7d535c7962f0d6335caf442cc4203f62775b650f3a65800280ffc86a15",
"format": 1
},
{
"name": "plugins/modules/itsm_integration_events_failed_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "407fc6cb6189f1403844e652bc64ff03cbcea66f0eadd3775185b7487f3f986e",
+ "chksum_sha256": "2d441c7c52465abb36f7fc6c51217c29ab31f63db378e76260426b46d1cffa46",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_sites_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3bca05d649ae901cec6b3be618968619d9c68470f2c7d82f502f11383cdd33d9",
"format": 1
},
{
"name": "plugins/modules/network_device_user_defined_field.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a3357526777edc63c0663320959ec9c1d087e5602793111abdcb2d0b8602b4b1",
+ "chksum_sha256": "21754f98911c59361197c88925ab1e48f47e3b7a7129fb122ccd677e15d299a4",
"format": 1
},
{
"name": "plugins/modules/configuration_template_export_project.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58a0750f67b1831b04ad09fb602f4c47c264b817ff995c0c8ca468f3dfe1e2b9",
+ "chksum_sha256": "be82087ed8db35ca5423d9c3c97fc3cfb1764eb184872de682de172b055110ac",
"format": 1
},
{
"name": "plugins/modules/reserve_ip_subpool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b11c7cc628c5b50c93abca9d675e14acc91074183dd68e6eb4ecedcc093d1747",
+ "chksum_sha256": "8c11851c06491aaba3565af44e75d8aa5bcc9922adc1ff6d4a7a87d9f440ab5f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_user_defined_field_delete.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "78378dd1f27e4589e214e188cb5a00164edddc9f0254249c3608886850b17f25",
"format": 1
},
{
"name": "plugins/modules/network_device_interface_neighbor_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53b55d54fc692e05378ff615c491c2a4c25fa58520ad616af7fa18831011036c",
+ "chksum_sha256": "0bccf2641d20c842c8d68e885b97ae4ba5066845b8157e7cf5daa49313947ae2",
"format": 1
},
{
"name": "plugins/modules/device_credential_workflow_manager.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f9eab5ed7aa33debf4b7652aa0b117b7b6786d538c550bb2999878cdd85d29c",
+ "chksum_sha256": "5948a0188b16f7e1e2f25d121305d518b6bcb61ed55165ef8753f8e9d826dbc0",
"format": 1
},
{
"name": "plugins/modules/network_device_polling_interval_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a8f3d47966abc700d31c6ca0834ec60f94cad60422f3fcbc8d43e7e13ae620c",
+ "chksum_sha256": "9ccc4d90a3da897bc366abc3ee5154a3fe95121e08679cc3dd3ca4b6456f6fe4",
"format": 1
},
{
"name": "plugins/modules/network_v2_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a831b1065c921dc1a98ab5fff748a83b6322e1ff1d7bf51f4adddd921ea9440",
+ "chksum_sha256": "72d50189cf29c165b0c040e883537d0ef119bc79b0bb0ffb9b8e5236fc6078d9",
"format": 1
},
{
"name": "plugins/modules/tag_membership.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85e2d7164b26e761166d4f04343289d331610d2bc62665b1c2521354415a7893",
+ "chksum_sha256": "4aa479f854784bf5b3ea59160a2a07101d741e467c1bf0c3fa4a96e84b94bcee",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ise_integration_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8c6ee89839a3e654d7530eedcc365ce2b0150de3b2bba9f6ab3df9b453c17a5b",
"format": 1
},
{
"name": "plugins/modules/profiling_rules_in_bulk_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e79aa1f0409bfdd17aa57ed69a7626daa52c510ebee3a500f760983a4723e25",
+ "chksum_sha256": "3eff918962db43db58a8ccf1253428374954c6e39b3cab22069ba637524e9868",
"format": 1
},
{
"name": "plugins/modules/event_artifact_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6121fe9593a6fef7f9870a196e4a201cd138337b90c1726e20a0056cc82552ba",
+ "chksum_sha256": "60cfe0c5983aa74f30910da3e36cce7771a8299315d89eebf292461fa385fa20",
"format": 1
},
{
"name": "plugins/modules/network_device_module_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "988dd211b76c628305d21acdbb3a4f1a59e716d819d3ea5773f1994b79823859",
+ "chksum_sha256": "b1ef5c568a04a195c411516d6f4afed46af06491b4a4e339fba56efd9248d478",
"format": 1
},
{
"name": "plugins/modules/global_credential_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bbeb591b1502b1f63b91404301e266e4b66bd157ed4b03934ea77ba4ee096d66",
+ "chksum_sha256": "cc4a948a848fc7f8a4fe3ceec4363655f248ee7d1c8affb1c0c1e69d975a7f75",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/users_external_authentication.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8ee8684e9431d58eefc6722b1961d3e076257a58a789ef5bd5eb061186a9e183",
"format": 1
},
{
"name": "plugins/modules/pnp_server_profile_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e88323ee47995da579c9263e3442833b08fe47c68de3e86b8d66e3d3e38b2f1",
+ "chksum_sha256": "c4291fa99e3f4a416ae039a0c9021e3814ae336174fa5bfea6f646c604f6bfe6",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/planned_access_points.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6847314575523c9e43edd2581f659e2507c3913ec7a270828d763e2a686aa33b",
"format": 1
},
{
"name": "plugins/modules/projects_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8291f865748dab5754228e60da5e2321ca6a1fe8cfb14463b51df1e16427f3b6",
+ "chksum_sha256": "3bd7eaec0bebb440dbc688c7df1a29a8789715b778c854c63dc992e511014f82",
"format": 1
},
{
"name": "plugins/modules/authentication_policy_servers_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1551391c2a66ef1bbad5408128dca9846f792337a583af80269237b310933101",
+ "chksum_sha256": "6edad1686a79c5a3411b9c0daa8ec04ee52aa3754a219270e30d658d4b7765bd",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1a06ec0f9641dc771c05cfa17dc08bc4e0a9f25f294eab3bfbe34b84bf935812",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/lan_automation_update_device.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f03cb025da8218da0c68affd3e1dff4acc722286f927179b477ec0c1edef8a19",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/lan_automation_sessions_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2edf5ba1f6857285d1a8d96dd8ac4ca7a35f0e9ebabe4dad68c77382a26d3dc5",
"format": 1
},
{
"name": "plugins/modules/pnp_device_claim_to_site.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1362012e0e6134286e284fdbd89fd8493b4fcf6271089de7c40a307340131789",
+ "chksum_sha256": "d3bdf0bffb7c85b30b97842e6e63b19482e9d4754132dbfcb80ffe314d9d4389",
"format": 1
},
{
"name": "plugins/modules/applications_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a821bb44a947903868c0d3fa718b64e09d87825e856aa8512bea8295d55b6b20",
+ "chksum_sha256": "dc8995f5fe282e3d9f1e93e993c7a4bbdb3d81b682ecb37ab17870c7aca46a03",
"format": 1
},
{
"name": "plugins/modules/network_device_wireless_lan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a5b00d3249e3bf2691cb50f8e6edf843fad6e3a0cdae63a57bf5cd72f33d15b",
+ "chksum_sha256": "8963f14773ade34372b51817b15847705bb530ab3b94030bdb54391f38347ffe",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_authentication_profiles_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db5881f90721fa69a5cf71025f30b6871cbd8abb28a0a489d7e0deeac649eec9",
"format": 1
},
{
"name": "plugins/modules/pnp_device_config_preview.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "019be448c8f8e4a283e68e46cd79aa30a826bd9b83eef9280c60ae617696cf76",
+ "chksum_sha256": "ccf1e982e7c17c6946d19fd4360e6fca1f350fddb92dc1747910282b7402a6a4",
"format": 1
},
{
"name": "plugins/modules/path_trace.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99068699100cf4e24c2dfaf1caa2d8d6fe150a9762a6bf318af532574ac56148",
+ "chksum_sha256": "0398cf9ffd1d0ae0132e2d2e7bc8d2d45c54e92929ec4d7a751c45daa2e7c1cc",
"format": 1
},
{
@@ -3924,294 +4771,322 @@
"name": "plugins/modules/device_replacement_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecdce508fbb0473d0784e3c159cd76ff6b07cdeb999d54c825ed2b25cea9a0d7",
+ "chksum_sha256": "874ea53cf495920180ac0eeffb3465013fc880aea2c21a817608ce8edc0198bc",
"format": 1
},
{
"name": "plugins/modules/system_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f325dcf5afc01848e3d7b1619a670d01c12d0624c7a1ca8db4c2e4a535748ce",
+ "chksum_sha256": "9f6791af8d179919be5f4bd182d85f5dd1fd25394893bccfa715826ce49982dd",
"format": 1
},
{
"name": "plugins/modules/app_policy_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b625c39fb2a224fe7dd3b91b30646ec3d12b2bd697083fbe05e43271c7081ca",
+ "chksum_sha256": "98fef95300310e66fb5cfa5cadcd9e58da82fcd1b06ec52c03cf40d5522c8dde",
"format": 1
},
{
"name": "plugins/modules/event_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e3591a1fb3519b884c967564907eb4af42118c1f6c8c91afd311afde2165c41",
+ "chksum_sha256": "4dfddd02f14511e45113487fa642be0abbc1bb8ca9a035b7199728760181a650",
"format": 1
},
{
"name": "plugins/modules/client_proximity_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9bbbdb160591c895f91d752f917ba887c17f298a6c3f4248af59cd5b09cc8696",
+ "chksum_sha256": "50fd1a9d340c21bd56cf4289bd754d336bd0b39efad79410d57504a4867fe764",
"format": 1
},
{
"name": "plugins/modules/tag_member_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "411390492f0e57f70f66d25bdcf722016ff30f769f6afdd849ed2cfbeb67d6d4",
+ "chksum_sha256": "eeba1278a5eb0799b0028e36407450ff5da50a2c3132dc9c667d76de3daa5f52",
"format": 1
},
{
"name": "plugins/modules/network_device_global_polling_interval_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67732485524ac9a2b11fb8cfa118cfb4b66fcb191fc69d3f372b5c5c49161593",
+ "chksum_sha256": "5c149a51bfa8ff378d7e76e888edddf8b92f18f082c12a4175bda63a75c133c0",
"format": 1
},
{
"name": "plugins/modules/application_sets_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "829d68acb8ef33ae1eaad98e5391fdb71696b9785f5800779e603a1e4e0c7680",
+ "chksum_sha256": "133e15cdb0cd33452d80da1826b9cf2804570d5646e939727679732083709cc5",
"format": 1
},
{
"name": "plugins/modules/wireless_provision_access_point.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ad1add88dd3f16ca410cde9be6c97e4f62053ef92fb5b0743fa31227d9b6a473",
+ "chksum_sha256": "7c65e1ea63460473bb0abdad74ff13639f2213bdf35c21c1116c33425cbd3da0",
"format": 1
},
{
"name": "plugins/modules/site_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a51b7f98adbe7d53bb7ca70b73b059ed10c1f1dca1b01677375b506be41a18f1",
+ "chksum_sha256": "98b11632deb36f370afd534e58385e284f3aaa627fb385d123e68c2ff1fe67e6",
"format": 1
},
{
"name": "plugins/modules/pnp_global_settings_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "724999fb1b00383d5ade1f29a5ee65c66f9462a26afc04636bf17689b4be6699",
+ "chksum_sha256": "497f4340992dd389fad1b4c05292fe26a29d3b8c5fc5001fcb2abaafbb32abb0",
"format": 1
},
{
"name": "plugins/modules/planned_access_points_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5425befb5c57fc61d2a57040fb4328b4cc9f208704a1d53d336d4dde18158138",
+ "chksum_sha256": "7db1c3978281909660e6659b31f03b4898d7ec8626d4282d12f4c4e2f7ddd7d3",
"format": 1
},
{
"name": "plugins/modules/event_subscription.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8f94beb2a6d694ecceb446365140c0a98b53532947f417eae5a506ca772c9ffb",
+ "chksum_sha256": "03363fe9906a11893fac21f3a29caee7a9a7cb6a63993309438774b9369e79f8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "027d9dc56a4c8350bd3336826102b74994d5e10037a721d0261392966adcb755",
"format": 1
},
{
"name": "plugins/modules/disassociate_site_to_network_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e719471dd45977103c51c3adea0b01296b0952f8229abccb9da232af87fb5c7",
+ "chksum_sha256": "1028c8f1caea7f640af9f41ae1694332f0dd1a24f66cf0084f79fc310be08334",
"format": 1
},
{
"name": "plugins/modules/security_advisories_per_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b8e497ab8f7a2c4ffc741c01c97b24294c0368afab2ea6a497a1c733f8f5aad",
+ "chksum_sha256": "19fedad04c8ded9f65f438b496024484a5e8f8f72bcf1dd6db58b73a125a61e7",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/applications_count_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "715e02cf65bb853af2559be377959c4d7c6880de4e982efcd9b2850f2a08a6f9",
"format": 1
},
{
"name": "plugins/modules/wireless_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9a3286b939cee7d41e926459c389371eb71712704f2cc80cc62d5d4c030e12c",
+ "chksum_sha256": "35de26e305ba400711d1ddfb0e8d109f8d47e1c2b3982ac1377b71697e6495d5",
"format": 1
},
{
"name": "plugins/modules/authentication_import_certificate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f25ff63970622b3ca833b8d9417934f11ff1a3ce0c67ab48c094f8679c440d2c",
+ "chksum_sha256": "269782e557e768f90a60571ee4fa2f873ef7b6cb58503d1b08f50fd2a28ca45c",
"format": 1
},
{
"name": "plugins/modules/sda_port_assignment_for_user_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c5b6bb1caa7a59337c0d2cf2f7189dd89d6d75af96aedda38735d4a57addc17",
+ "chksum_sha256": "751c42200d35b13e008a931123944db8c3340d756b34dcdddafac309e41c6ec1",
"format": 1
},
{
"name": "plugins/modules/site_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "136244729cba0e4ee3c153b5f21b303450f51775a19424b9f4ae5e698b827f56",
+ "chksum_sha256": "d1b8cb4e61be19259f6b33e97038b9577227afc5846ba69f0de87a68a2e907ae",
"format": 1
},
{
"name": "plugins/modules/nfv_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a925b77fe717b8b23909239509a9afc244f7df072c5c010207d2a83d8b7bee65",
+ "chksum_sha256": "6726a6b03ce388c2b599a8655b2780d922e1156dc9e458275a833713bd0d1195",
"format": 1
},
{
"name": "plugins/modules/network_device_range_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ea25e0bc3e101c707b72dc2caf814118f067175f3dd28c8dd7c630daf834f23",
+ "chksum_sha256": "906b40b2044a627baacf31d757caa2aadd86613ea886e113593ab558f7b4b657",
"format": 1
},
{
"name": "plugins/modules/network_device_custom_prompt_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca6368c4d89bdf14c64cb4e719cd206d0758038e3ff50db8ac29c7e7ea037cde",
+ "chksum_sha256": "e965177738d1a993a705f6c9da8cc5c3a0dd2fd7eaf231d46ed762a88c62ddb3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_sites.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "83b6428ed53441293a6a4263d7f6afe12d4cae26d3afc08d31ecc6ed104a07a6",
"format": 1
},
{
"name": "plugins/modules/pnp_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87b4e7b1a8100d3b3165455e59e9a80bb82790ed58167253d8e893abda8292a0",
+ "chksum_sha256": "1251cc269546b5bff9411fa14bf947e05f33a402f19e3b3132df6dbb0d6e9c0a",
"format": 1
},
{
"name": "plugins/modules/license_virtual_account_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0fd065702a56e2130a33816c8e4fc626cdce32c14785b1c06a20605498c21bd6",
+ "chksum_sha256": "8738b65ff851c592427885e55b676fda6f0a2f118207431fa32a12eb879e4cf6",
"format": 1
},
{
"name": "plugins/modules/device_credential_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73611279350c75b42fb90582f76fefff1ab74f85656f49b715c412cd9eacbeb4",
+ "chksum_sha256": "4112d52673839a84191e4c8877fdafa167b7976ffc331f5a57e201e33e0652b4",
"format": 1
},
{
"name": "plugins/modules/topology_layer_2_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9dea8f1c6b0b8319a453be363cc0162ca525538b313d05c03e5cb8139d4b5ba4",
+ "chksum_sha256": "f19f751b20c26c2db14053a02dc3d55cea548c3177212c0a42fcf2d82483e0bb",
"format": 1
},
{
"name": "plugins/modules/threat_detail_count.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42588fff16510897afde5d58916f13e4201dc8dbfa0918b01004b96785d6fd95",
+ "chksum_sha256": "3f6c021fc116c4802c29113b492c2f5cda1addf1ebe6f624d2722cacef316266",
"format": 1
},
{
"name": "plugins/modules/device_credential_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "277c1c63c75d3ed29a244a143e5aca494b04346a27e56692c180f810d4fe43ae",
+ "chksum_sha256": "6fe37ce35ca1dad9e0f7189326bcd3289e3cb789a4aeacbc6ae31b551831bb19",
"format": 1
},
{
"name": "plugins/modules/device_credential_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c7b1a9f78eac8b641fb6360e573adbd30e4703f93f0d79a994429d425076c7b",
+ "chksum_sha256": "93570c5849133d7c230cc124f011cab40bd44c66d7472c0d72de6d196a881544",
"format": 1
},
{
"name": "plugins/modules/site_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1faa8cc81d9795cc1f818b0af82b399b096d6e1271af4152f9c83ac7ea6e98f",
+ "chksum_sha256": "ae87700c81ddd83673b6b28486851c1a298120156897be3e91bfd4aefa426b93",
"format": 1
},
{
"name": "plugins/modules/site_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73ab43253acb3ae2b8e09d59724d762048c28945898d998f69df3d73610fc6c3",
+ "chksum_sha256": "dd5185b81ecba1c164c423d63cb84e9615ea433c75461e9750783047680378c3",
"format": 1
},
{
"name": "plugins/modules/network_device_user_defined_field_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8510f70069c4aea3bb52a4aaa1a5ca8c1aa55d4e31efdb93a9f5c9e12853c36",
+ "chksum_sha256": "cfc476abd629865452204e693eff555a2aeddc87eefdb49720a59196b669f381",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3ea2e338edce4221ca5c2e84562129b1ed97da7eb8b381a611267c22217edc60",
"format": 1
},
{
"name": "plugins/modules/http_read_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a9a2195ab6cdb6dc40a885de01e3601ac7553a503a6cf118bd4d0dc1e8d81ba7",
+ "chksum_sha256": "57356560f1b4aca092c4d00edee2525bfd1c304a09030faff1e6723c0b32b69e",
"format": 1
},
{
"name": "plugins/modules/compliance_device_details_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94fd67bee2ce423dd2be0429769caa55a9ce461541108767f66b44e76a81b4ef",
+ "chksum_sha256": "227fbbfb88c0bc80c77bcd75c3457a8dfd63c8cb5ba81410bf9e4dc76da8bec0",
"format": 1
},
{
"name": "plugins/modules/swim_import_local.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe5f9c42b91c809caad535b1e7070e531d246018b17bd37122ac2f165d547712",
+ "chksum_sha256": "6c1e39bafb26afb06ae6cc5f676be528fc3afdf2255ce1a8aad1e71591c2b362",
"format": 1
},
{
"name": "plugins/modules/dnac_packages_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78752569cf03879b371c78c9c16803553171fdcc3deaef98505cc77752414ade",
+ "chksum_sha256": "cfe5b83a56bc385e4ad220e6164172737ed83fe7df20a938db25614cbb3724fb",
"format": 1
},
{
"name": "plugins/modules/configuration_template_export_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ede848070ed4d186baac3dda19d2a89dbe84221bb4d44d3dee857e1c718034f",
+ "chksum_sha256": "e833efd18d52f601407ddde749138f54d1d0523b8113dde4571e7c2f8bc30b64",
"format": 1
},
{
"name": "plugins/modules/transit_peer_network_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "563a9bdf5c6166c9e5abf1a8433376eca11180c07f544b302928b3be534dddd9",
+ "chksum_sha256": "1685b33498e51b11d76bf6264341f47ec2a7680bf33c4ebdc8bf59d3f954dbf6",
"format": 1
},
{
"name": "plugins/modules/wireless_provision_ssid_delete_reprovision.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c3750918a721cc0820f4a20d127c944952a770968ecbe3dbc64a00d5b559832",
+ "chksum_sha256": "96d9f83134188bd3af69891f64093c26518adfb143714e3a76ba35aaca1de47c",
"format": 1
},
{
"name": "plugins/modules/sda_virtual_network_v2_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c50256d12da60d7e78fd69bedbdcb5997616ad8b47f5d326124af10b9d49947f",
+ "chksum_sha256": "9f8becba9bebb52d877a879e43aeb196047244cbcd97213168b8a74a91280ad2",
"format": 1
},
{
"name": "plugins/modules/event_subscription_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d716dceb1475a191a63d5ccb8d6897102a5bd5d3f345c1fab109fd7056f478be",
+ "chksum_sha256": "e9bb2ba2b0cd4ad9a8dbb7d19b063ec561eecfcfbaa7df1d74fbfb3bb53e8a9d",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_authentication_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f400cdcf47e1f8a7767554424e50fd5573b8d58fe3c218b54d27ae3f93eecb2",
+ "chksum_sha256": "73f60c4bcc97359393b258e435ff4ad851fe86d10e96c072fd94d88a41efd5f7",
"format": 1
},
{
@@ -4225,21 +5100,21 @@
"name": "plugins/modules/command_runner_run_command.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5eb5992001e9431eb08775d3845169a2062b842aff1b3735a41dcaf470c201aa",
+ "chksum_sha256": "e8de49ce0a4dbec2bb5fe97e7d9d3cd8cdea20167f0df79b878700e2f29d9343",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_edge_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "997a913eae617e9210e6db7efe98cd81284a81bf23f5c3909297c7f389f7c955",
+ "chksum_sha256": "d88c3f14ff267d35e12cd54373029a6fc7ca7eedc666ea8e1f400070f5b91605",
"format": 1
},
{
"name": "plugins/modules/client_enrichment_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dcfc9666186dd28a0758aecce6e8ef4c8261f85f4d312b7d9874df8b70ab181",
+ "chksum_sha256": "b306980fd05aa039cd1cf466626250ad0cee2d6f50d9f9133ece98daa0e3594c",
"format": 1
},
{
@@ -4250,885 +5125,1025 @@
"format": 1
},
{
+ "name": "plugins/modules/users_external_authentication_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a5220c74990c64d812fcc3d182519199b547e18cb59d9068d1e6758d52e4f009",
+ "format": 1
+ },
+ {
"name": "plugins/modules/event_syslog_config.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0bde247c5caa064449e0633732bbd312497e3e0f69aaede6f9c15f155016e214",
+ "chksum_sha256": "ec80b230363fa669a35f042e973505bb94974b62b719e07c691e8be80f6de73e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/integration_settings_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "64cdf8230da29604895cda20d0ec63fcafdbc423255bef6f7f33e8aab591352b",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/site_v2_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "721185562997c08f62a54aaa0080580cb1a0d07c1457a5895cd089199b1767dd",
"format": 1
},
{
"name": "plugins/modules/sda_provision_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "16c231a9de2ccbc861b11a5ac41c8e438669a06cd0988eb9db8a406c870d7fe7",
+ "chksum_sha256": "a9b6af12738bdef271dbea181c89857a805f1a0098a5d42102614785784721a9",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_zones_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7b0541716906297b540cf60a8ec2e6ba6367b75566851aeba7fa15b5fd78da4e",
"format": 1
},
{
"name": "plugins/modules/interface_network_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7151fb0da471791cf5ce401fcb5c7779376f045a1e77415752c75f6281b7e700",
+ "chksum_sha256": "e26d7f4e8bb601b22e8ec6b74f48f74b42529833ed36c402ce63116a588e7685",
"format": 1
},
{
"name": "plugins/modules/reserve_ip_subpool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e23957ae461ad94694930f6dbcee49f15a66e33a5482b7260c77f0f548a72e2",
+ "chksum_sha256": "d73fde51aa68da44889ec7c0a508214d9731af3f3d688e8da80e7a13c50e73cf",
"format": 1
},
{
"name": "plugins/modules/configuration_template_version_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6db9c81a5e9cd3a66121eecc8e5b87b4265cb9a410bfafa294cd080d338ae6a6",
+ "chksum_sha256": "7556391e44a084b4d67c61bdd512f7e3181718f660bd269fddccce0fcf810700",
"format": 1
},
{
"name": "plugins/modules/dna_command_runner_keywords_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65e79488afa4ef3820aab5e915a0aab9e578bbf7a31a2bbcf908772bbde354f1",
+ "chksum_sha256": "f24148bc4293a48c710736fda4390e9b0def0d3be07e72356b52f081b9965ffe",
"format": 1
},
{
"name": "plugins/modules/tag_member_type_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e03ef4a4e4b6b6c4eee059129aeb0a87d675065c9f3289923b3b9db6d8888142",
+ "chksum_sha256": "65419222da937c7106f4989b52d1c70b32e391113b15ecd3180fc71afb813dfb",
"format": 1
},
{
"name": "plugins/modules/cli_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ae8078d2bc7f17fa3946400f9d9f91cbac2122cb0af50814581524065edb4983",
+ "chksum_sha256": "1231dd7d9ea06f5aa06ea5dee5680311c63cab7e67de10a432657b8099e03d2f",
"format": 1
},
{
"name": "plugins/modules/event_webhook_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3327f4d0facc8deca05dcf652187bce2e0b853d251febe9a45a6532204d3b54c",
+ "chksum_sha256": "9113e643eaf981efed5842fd26507c5a14b8d2fb8b08664397d6e64c581b1ee6",
"format": 1
},
{
"name": "plugins/modules/license_device_license_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "03459e80c875ce7f99aed01d53b079418711565ff526edf4a4aaaac686a8641e",
+ "chksum_sha256": "4666e49ae3fa3762b0eacfde25b58714c81b07a1cd4285ddf063ed90b4bcc761",
"format": 1
},
{
"name": "plugins/modules/role_permissions_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53d012b05cc0d04388db377e9f987708362162becba7af1dbb233502fe8147e2",
+ "chksum_sha256": "8b6d3d071dfc3a204f8b53ae1fffe096d6f63aeff0785b05d4167ff6960e074b",
"format": 1
},
{
"name": "plugins/modules/device_interface_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90eacd9d08fd01b5d978f44fcda0d499c6921292a159447b81800c5d17eec4fc",
+ "chksum_sha256": "3223612193382e7df15ed352003c9c00e4572c89e9caca4a45425bfb9a90854e",
"format": 1
},
{
"name": "plugins/modules/disasterrecovery_system_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79a90ecec047649cac72b3f47f3311c4c9f31967bb50d8719e2f995377fcd2fd",
+ "chksum_sha256": "7888d8cd980a54179bf85c04a6b8438083da220db1f2b9a26190241e4638e7ca",
"format": 1
},
{
"name": "plugins/modules/device_replacement_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72ffc7e99647b8d75827d26f188dcf5ac046d25205bdfa4f378dc948362de367",
+ "chksum_sha256": "ca6ae6ed646a4cd94ef761b8bd1c3919d928806bd88e86ec27a678fd0cb3db1b",
"format": 1
},
{
"name": "plugins/modules/pnp_virtual_account_deregister.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc1ec82c0edaa6b470c3b047c52521e927b7b8ce2127802656ccb5cf7a056ff3",
+ "chksum_sha256": "f1105383b5a10f1629ed21aeb22a750f53f061aa46c1ee7cb76d81e8e6df692d",
"format": 1
},
{
"name": "plugins/modules/configuration_template_import_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f3afd88bfc842a3254fd93bd5c5ac2f77bb3d42e36ced80809454885c54c290",
+ "chksum_sha256": "aece2abc5b276a698ffba30b210e2e0b193abbb1326f54d763b52a2b0c7681b4",
"format": 1
},
{
"name": "plugins/modules/pnp_device_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05e9933ab8d49d305e4eb8cd975eda7201d0a7907a22fad8c41aa1792e0ec3b0",
+ "chksum_sha256": "f46fcc2cf081db65dae014fe1b6c4b6430556c5d74cc843cc5493cfb77c9c63f",
"format": 1
},
{
"name": "plugins/modules/event_series_audit_logs_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83db2fba5b867f93003eef05e77dc06cae596077d8031cb8e4786412835b09ab",
+ "chksum_sha256": "a1857cc752144f7a56e22c91c04f086529de19a4e0b7703183493d3aba18ffa5",
"format": 1
},
{
"name": "plugins/modules/service_provider_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d17180dc4cd643a09611991cf2043425189cb9a73c6f59065981d8281b3ae87",
+ "chksum_sha256": "4097152c24ba947b15d215e151380d859eda2feffe354e698fba080f5b6ebf9f",
"format": 1
},
{
"name": "plugins/modules/event_email_config_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2abdfc66a3d67f00d17e810ee2d13570ca3539a9ba9a9720bd472194317d31d",
+ "chksum_sha256": "47b49496c0f389be8e4e732c3bd1e7c30d6ad20d21a63e5a79e5e8b928e02310",
"format": 1
},
{
"name": "plugins/modules/endpoint_analytics_profiling_rules_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1cf4ab1ba574bb96b930fa6d10bfee4a8144bb410ea1411a57a009907e7c273e",
+ "chksum_sha256": "feed95ed6e4b2ddabec1d112d81814ab0d76a7bf281aa47457097e9e24b7f6b3",
"format": 1
},
{
"name": "plugins/modules/system_health_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bc062d3bcd58caca91751be0e30e482f8922ab7b740a95dfabcb3617abc3eb7b",
+ "chksum_sha256": "d6cc40d040f79c2677c32632a363a588fb9c2931ccda69819495e914ab26f413",
"format": 1
},
{
"name": "plugins/modules/tag_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "84b58cb1ff6b7051130a080d0cc358d184ee463049814ebecdf4b87f0d13c3a8",
+ "chksum_sha256": "341a5b06c0625aa91f4ebc035d79af61c686bca093b62f43eca6d319ed187381",
"format": 1
},
{
"name": "plugins/modules/template_preview.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0767e4f700a883074575115d80e90a55eca61ecd684f7a581f6a3e449892ecd3",
+ "chksum_sha256": "c04b320999ab29dfb5cd2aae675a9d4777e5f3023ddba425940dd884b409e483",
"format": 1
},
{
"name": "plugins/modules/sda_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ceca94ff68b9cce8a259c9a202302eccabcfb1a6624406cb19d669a8134eaf1",
+ "chksum_sha256": "3834104f14cd38b91e0064389aacd902177aed0638e50f033ee7b32128e974cd",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/lan_automation_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f886bd12117d6e0e76dc7e51f1ee1df114178b89b12e6c50746c1439444aa3e1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/application_policy_application_set_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "487e39d49b010daf40ceccbcdf72f0e75e85f693ad9e0bbfb5095bf6f17aa733",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0b965b7b058d9ade842a1dedb951ad51e644df437c71dd2dbbcad1cc00ce3283",
"format": 1
},
{
"name": "plugins/modules/network_device_equipment_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a894f8ad27d154d6bfbb8457a9925bd20efca3313133dce4a6a4f50d3e692a23",
+ "chksum_sha256": "09fb086061e70770a28f451d8df535946630c540815535840dc4700386edd606",
"format": 1
},
{
"name": "plugins/modules/sda_virtual_network_ip_pool_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a8ef7391bb0d784b2959a69bad9a6415fee524d07b7088f218d11d9e61ae386e",
+ "chksum_sha256": "a4b60630e13ae5d44d6637eba121f9b3df37c22b9748fc3e545bb2400c258d52",
"format": 1
},
{
"name": "plugins/modules/lan_automation_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbeb1f2b815cc579dcc630713ccfb5b1683195d17c0002303d9929e73a5d3aa3",
+ "chksum_sha256": "92f56de07a8c9a9c6d64d979b1bb0ea6a328165f86c8d04deb7d5d34411cca35",
"format": 1
},
{
"name": "plugins/modules/app_policy_default_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "046a8cc036a2f26035e3f993e5f576648e1bfcb59f651a2f74e724c7bf292395",
+ "chksum_sha256": "d50975551fbf464abb74a10b5fb2be936549bdd3295c46a62665f02e3875a306",
"format": 1
},
{
"name": "plugins/modules/swim_import_via_url.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d9113ffafc2a9fcf72f4aa671f970b5b86383e6e582eee1d02c00ccd4d2518c",
+ "chksum_sha256": "4a1a75d0931618a00dc98c74a579b9d4ab8bf6f416881da473506a52e6f3d9b1",
"format": 1
},
{
"name": "plugins/modules/business_sda_hostonboarding_ssid_ippool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c0381440528dbfc720d6914c6f68260a7ebf3d5aabd832fec9baf3bf4a048fa",
+ "chksum_sha256": "67b36b235636dc5a5d32a2738d2717e18c96e80146d0963e9f39e3459068bdb6",
"format": 1
},
{
"name": "plugins/modules/event_snmp_config_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8526121f3e913a218cd02fa7c4e1b03933c00ddf54695823e555db7e4790f15c",
+ "chksum_sha256": "f691edb4e24578a9e9fc9d2a468603a1eefc64f5ba75b7f9b9e5b6c3b34ca7ac",
"format": 1
},
{
"name": "plugins/modules/snmp_properties_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dca676e7508f5ecbe4760a4d25fd75a388cb7088ea113a4368e19af0807cfb16",
+ "chksum_sha256": "4a027d7b72e792d15bcde9a9fb207e9613899779a76440e68502e386fe53f943",
"format": 1
},
{
"name": "plugins/modules/event_email_config_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5857874a63efdf544c3bd4cfb5a23506e4da8080c05b18ce624086f3286d3e10",
+ "chksum_sha256": "a792961122848831d159a19f8e072d0027b08bde193926cbeea377207f0269df",
"format": 1
},
{
"name": "plugins/modules/swim_trigger_activation.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fe55c5f2ba712ff8405ac65db4eebabfe1fbf783eb2384878a93ab9803e2c9a",
+ "chksum_sha256": "b2aff0bbc4cdadc073d154a8522163f1b814b852b6c5a2549faf2a61754e1e15",
"format": 1
},
{
"name": "plugins/modules/event_artifact_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0add74d4a9abc68435cfd8f2b2b0a2b12e1e41083734982e74e697c8b10e272e",
+ "chksum_sha256": "5fe1203b3a8b898598d839055f1b1fa741203c90470f65364a06ccacd2014e97",
"format": 1
},
{
"name": "plugins/modules/template_workflow_manager.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1b8ec891e44e539d660034f5f3edd7caf85de970139b74f93addf8e82031f99",
+ "chksum_sha256": "9968fa10335ad6782039b36718daee5da613bf03d1540f043fb99d97adc7d107",
"format": 1
},
{
"name": "plugins/modules/discovery_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db50b48430f478eeefcb50eaf426a91bc350069fd637a6fabcca8aaab30970f2",
+ "chksum_sha256": "37615f0da5f5febcc838310d51346ee2036079cc61fff574e2463db45d2604b2",
"format": 1
},
{
"name": "plugins/modules/nfv_provision.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e161b58a0aae21985735aa16f1c51aaca68993bd7cac8496ccce7caf119bf6a",
+ "chksum_sha256": "4004ef8a1f6af8e5b2f9cb4765d9805b20e0a36730da5fed6e556a7b976df0a3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_compliance_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0f6109f268da5b2d4b3b8eeaa1f3b381e78c45ccfaa6b6d580e0d5025c8e7c6e",
"format": 1
},
{
"name": "plugins/modules/wireless_psk_override.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "699f9ea5227ec33456bdb9d0f1f87394f8b59962d2668134a4bb69e6088abf20",
+ "chksum_sha256": "2afc3435ad7b2cfb8e62969fefc71b64a2681b36e28c43ad69260b648855d31e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2c15a92f06765c127de972c2cb3da97615141532d3ef56fcb6568ab09b5feb20",
"format": 1
},
{
"name": "plugins/modules/network_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d84c316a8dceab645376ca2a12c460fc08c131fbe8cd7a09c08442c1e6b707a7",
+ "chksum_sha256": "ab51ceea4286084203e4d63430e8ba450990841124b795e39d8b44fc88abd012",
"format": 1
},
{
"name": "plugins/modules/event_subscription_rest_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c183256389b8cd164b134c25172db6fdcd71e87da6e0793fe08c6d7b87afa656",
+ "chksum_sha256": "804ce3683c6cf7bc749d71b3fa2ee859eaba28d265d2922c3fd24efb86b05158",
"format": 1
},
{
"name": "plugins/modules/reports_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b8e90dcac98a407cf1f544cd4d204b9f8bb2f6716124f66e915734f3fd90803",
+ "chksum_sha256": "5dc7c95a87f3a16070b3f38fba2e039eb4a9272b37a97ef394b7caa162a83d3e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/flexible_report_execute.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1a6819efdcd2dc8c68a1815aa5e5ad39000d2a8fae3a77645ca32b58c6c916c8",
"format": 1
},
{
"name": "plugins/modules/lan_automation_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef5c835136ea31887795cb4ae057f13b305d34d5cee5fc83e29a4d77d6305e31",
+ "chksum_sha256": "bbb2e1d862bf442f9d8b139959d0f0bc821545598696a8612d2b4884efb66a15",
"format": 1
},
{
"name": "plugins/modules/http_write_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a8a0f2c951bf025923aec39680fa8617abac532db668ad2627bed2937dc8084c",
+ "chksum_sha256": "b88826b37c4de10ef2e0f40d60e43621836bd4998d36ddcc64e3285816731100",
"format": 1
},
{
"name": "plugins/modules/device_interface_isis_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "297b2c6a01e75055f148b270fef9ec1e3811b1caa4db452a069a1090bb3f7683",
+ "chksum_sha256": "2da3241237e365bf059bbd50b044b68abca6c20bdc31cb0c5e0ad81b011a90d9",
"format": 1
},
{
"name": "plugins/modules/sda_multicast_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8625b54c14c546a997b89d8b19b760f53b56acf941c7283786220ea0cf790e3b",
+ "chksum_sha256": "15e62e56669cb4c3872e4f2b8d994abfc874d34027ff3d3f22112e1028758b1d",
"format": 1
},
{
"name": "plugins/modules/topology_physical_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21ea3828b12100cb2d63cca9a180288f62a1bbd7c5dd4bef18317f7f04136210",
+ "chksum_sha256": "2efaa588d48215f547ffa3cd19cc701bb84d913b51afd275c966dabe07dc55ce",
"format": 1
},
{
"name": "plugins/modules/users_external_servers_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "188dbd8a8d337ac460723c1a0d54b4771acf9185668793df472e3ae123662584",
+ "chksum_sha256": "84071627899f6cbb99b103b3c5b79d54572563f5fff9ec30e047cf0e07b3fe98",
"format": 1
},
{
"name": "plugins/modules/security_advisories_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40b7ac64401d1b2501ee2520c04da1cddbca62562bb6a1be4c4495a092fd417c",
+ "chksum_sha256": "87c2ad3e232500ce910896d06e829f607b2236674140517221132c62e13a5554",
"format": 1
},
{
"name": "plugins/modules/configuration_template_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cbedabf444a6db5481f4cba8e0f9c15f7785437bcdccd5119364dd2fad886879",
+ "chksum_sha256": "767ca248b6f188fcbe1bd0f9073de994f8496b182c5b47dab1d657aa550c37ae",
"format": 1
},
{
"name": "plugins/modules/event_subscription_details_email_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "405d6770ef140baba4df7d621a67ba686df209a008a7d3b4e8684749f4cb9168",
+ "chksum_sha256": "b052ac784bc4acf3aeeb0b0ac45bcd316d19d1ee3fc75faffef1a06885a8c272",
"format": 1
},
{
"name": "plugins/modules/lan_automation_log_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b3f1532b6effb7aef33e95b0700dbe67df967bb3bd20805410c3854a17269bb",
+ "chksum_sha256": "4eafddbd339a227e476c02c1a5d11ce40ef7e758cd9921e784e906915a512ec9",
"format": 1
},
{
"name": "plugins/modules/network_device_supervisor_card_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c04d35500ec069592a814f98f6d2d3866b2bb3451e5d466981d76e2cc8d763e0",
+ "chksum_sha256": "3bc98989b6f34d32303e3beae37f9ad7d3201091b6d21e623b25da03b1f42278",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ise_radius_integration_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1b1250d663701773f6e243f87458e6f2e5cd107730131ba84d202b9b9e45d45",
"format": 1
},
{
"name": "plugins/modules/global_credential_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f72234ee753cfe8541ceb94f386cf5b1660541161115a36be6c8d966b9fb1c2",
+ "chksum_sha256": "5a04f97a82cf2c62694dc43a62e082390a1510f527aaff8bda8f5e2decf43cc2",
"format": 1
},
{
"name": "plugins/modules/event_email_config.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f757d98a432913e6ffceacb593eb9677f0fa028d7cee487f98dd59fc95734308",
+ "chksum_sha256": "907a304e7036d88c2a4ed7738c1d30aca1a77f016e3f31c9371039293da8da75",
"format": 1
},
{
"name": "plugins/modules/network_device_interface_poe_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "390f24f6873f8f88d0a84fd61f5f804523293b5c9d5a427f1116219e5b676080",
+ "chksum_sha256": "db63b1366c5baf2549dcbd8802c70c0205be47da31307e06e7761f68986e675c",
"format": 1
},
{
"name": "plugins/modules/wireless_enterprise_ssid.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "89ae9237888c63505ba6494775d57957c92fe6975112aa4920181be6b4c83812",
+ "chksum_sha256": "4477840f8927f396a10abda9d1011fb0592ec267a0ce3a4346df40623a3e5986",
"format": 1
},
{
"name": "plugins/modules/event_email_config_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90fc2f8a4e52bac735eeebd13f2c441389eee28567aaeddccdd3aa0b2d0911fd",
+ "chksum_sha256": "0d4035e18d48e246a2b823071bdb0bbc836d0f1b2ee56e837f7d860741edea6d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b46957078e45746c87ffb7fe0d56bd6233e087bff58a3d9e20ed5ed768db5cdf",
"format": 1
},
{
"name": "plugins/modules/threat_summary.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af2181ffaf29afff92bda27e6ac26918f6996a709335ccdf452ef247c444061a",
+ "chksum_sha256": "08794baf63996c39c9a42375a90dcc918cbdcef8183d660f6daff4d60629a6ce",
"format": 1
},
{
"name": "plugins/modules/network_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c2c7db40bb76890878a5bbb8eaae4a0bd086ae05440c308e8a2b1ee27db775d",
+ "chksum_sha256": "9dad8916f75a31b6eb1fc97ef38f45d280cf66114795e8a6ae992f75526c47b3",
"format": 1
},
{
"name": "plugins/modules/network_device_poe_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b3cd9983e422fb5201601e8aa3df0534dea843b7ef390a7ed369afc9ac2aa62",
+ "chksum_sha256": "efff5f497a7606042801b696cb820a4ea2360107fe4e33b43b46bf92c3497d1c",
"format": 1
},
{
"name": "plugins/modules/pnp_virtual_account_sync_result_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6ceb61016327b35bc8fdb49515026520d78fb2c4cd559d8967c2bb95a287fc91",
+ "chksum_sha256": "5730a9942ab2587aeeea91481325663e4e8a99f60105e016af7bfae19dfee046",
"format": 1
},
{
"name": "plugins/modules/configuration_template_project.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "027056a2cf1ecd0bb2e91caec83465584c0b360bd4921d195f4c0078dc3c2815",
+ "chksum_sha256": "7f806fecf5ef5434019b04eb6ccb27fe8fb6db4d8893456fbb68ec10c441418d",
"format": 1
},
{
"name": "plugins/modules/sda_fabric.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "01d1c96f6b7ec3b4fc82def191d4bb3ad80cd0d0db695e942f7976843b07db11",
+ "chksum_sha256": "1ccbc65a2e62e297096596f975d9123a849e517752b18c02437f16fd0024d4e2",
"format": 1
},
{
"name": "plugins/modules/network_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ccc7f8825f7e7dc5447051acbd4a2897d07d0a0c3ea34481d2dc18b31ae8f48d",
+ "chksum_sha256": "d5b260f2b234feca443644e57d7a2e408431468c9a930054ede6ba8589f70c0c",
"format": 1
},
{
"name": "plugins/modules/network_device_meraki_organization_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "501b32fad7f0ccc8d12dd4a4ddd19f62e5f5f45414d2c7a4ef9bab388c7936a2",
+ "chksum_sha256": "ec8248727b2c518bd03a13f0c408108059eef96d040146d5e3782ccebcf5dbf0",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_extranet_policies.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0ecc25aa7a684dfb6d7715ece04a8a1a2b5f8dc4753357b018371f954cd3cae8",
"format": 1
},
{
"name": "plugins/modules/event_subscription_email_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d10e11f72b43f0bdd6f6e0db5bae48d207d43c64822e9270447cdf270415012",
+ "chksum_sha256": "9c0c0487ec02bc5302411aacb70e46e2cf55e06fefa5ef520848eb86ba49e00d",
"format": 1
},
{
"name": "plugins/modules/event_webhook_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb5eb2a7db3bd583bc200f3665fdacc2eaa7307463d0727602e06e4bc1e357ec",
+ "chksum_sha256": "188a121e008cc50dafc977ba4f0b16f05b9a8d834c6ec571e7027f4d33ab8dee",
"format": 1
},
{
"name": "plugins/modules/discovery_device_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc143f6f80d9acc2dc9a2f6bbe88b6dc63e37037584752a9c6b97e2d24b469a7",
+ "chksum_sha256": "0f50a0b2edefad84692957adb3e1ea46ab863373a7bd6e2dd4b758524bb153c6",
"format": 1
},
{
"name": "plugins/modules/dnacaap_management_execution_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ec0a31b09ac3740c66e541cfe4b596db90132248cb0b10e99da48468227bc17",
+ "chksum_sha256": "d411f9c33688ab31a689975eca4df951a34c8f8f077a9f755f6fa22bf9ebaeb6",
"format": 1
},
{
"name": "plugins/modules/network_device_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b6c01df03b984a409eca09ac1507a3f670e44b244ee049bafe485247dd22430",
+ "chksum_sha256": "327e254ca96f8b28e28448b335354388c988310e9ed129f182581f07da34087f",
"format": 1
},
{
"name": "plugins/modules/device_reboot_apreboot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ad5748789d0ab4ec27188c2303b649696853c831c85bf41a31964a4880b037",
+ "chksum_sha256": "cff9ba2430d8976b4faee460e753a29d94be5210c73a7b521134986c4094c8fb",
"format": 1
},
{
"name": "plugins/modules/reports.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2546d138a874a33e91ed9f70e87f2236c28fa3c9161daf31fbc472876e4c294",
+ "chksum_sha256": "90fab8c549539e483c9cabf8e814ea063acb07451c8ae829b8d20441772806cd",
"format": 1
},
{
"name": "plugins/modules/reports_view_group_view_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f89203d7479c24ccea06961c58fa87ca9e0e8d39aa61016aad069fb040d93d3a",
+ "chksum_sha256": "41bf6f730aaab85942e0c51d6244ebccbadd1b336d833b6ffb380052c61edd0f",
"format": 1
},
{
"name": "plugins/modules/event_subscription_details_syslog_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d5bf1e319f86e92fbb96e9787aa7ecd6ca539f42c43d24bb09c1915f46d580c",
+ "chksum_sha256": "0d2d2cb1aede9c3e840198356e2eea6d69c674824bc22619fcc9cb4e44d3b8dc",
"format": 1
},
{
"name": "plugins/modules/license_device_license_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "edfae10160a1ff5072ca86e0a10e0043ff22031507bd78f680b5f52537d30470",
+ "chksum_sha256": "c09c1885d0e9c5b5b0622b18fcd504676d4bbdec0f9169f5cce64ed0edc802cf",
"format": 1
},
{
"name": "plugins/modules/path_trace_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "505ef505a81ef65b7e3317bc4598f429f39bbdc26cf396f24b7f4a8368307397",
+ "chksum_sha256": "989da58f7d741d504c9929979a606694b601dfdef6bf071d64bd00c3dfd4009c",
"format": 1
},
{
"name": "plugins/modules/site_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a159cab1c0c05979e19a2204e3f8f4f80b28b73b10e58f5ad016fed8eff40858",
+ "chksum_sha256": "9df7dca9e3ab4b0c99aba5086047acab1a77e7cd173c222b5c23a0f0a79532a5",
"format": 1
},
{
"name": "plugins/modules/site_membership_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9a7691fb7b4e10cd29df359bc551dab79fc88f78395ccaabf2a0aa5413d5d3b",
+ "chksum_sha256": "3a7432b2b23a59524c63b09b22bf15cefb17194cdf01298384615fcf7d843edb",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_border_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c9ee7371b672a4b05e84d8fdad023c028bd7fda42c9c291e8c8c0b8468cbbcc",
+ "chksum_sha256": "a0d642d6299b2527f7e03296df89f16098def038888ab114a769e4151ab6a549",
"format": 1
},
{
"name": "plugins/modules/inventory_workflow_manager.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f2eac3c3da7c44477646bb5d283b84381c68b7ddff0f5374e505dd8d5f0506d",
+ "chksum_sha256": "f08a3b5801060af5569f4527b8b5b555a5fa57a36e15c15ec244f86427b94460",
"format": 1
},
{
"name": "plugins/modules/global_credential_v2_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4370b9bece6cec8f15fbd34a42a2b06d059f5dd227876d462dce87b10243e0df",
+ "chksum_sha256": "71dc7a180167a66c6189977dda447b8a9fdb9280ba66107586ccad73c5a2e348",
"format": 1
},
{
"name": "plugins/modules/event_subscription_syslog.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d296e1664e4008d17b54f7e56909c76ec480fafeaa47987956b0b4b7a9205a3b",
+ "chksum_sha256": "2c3d9bb091a94924c393a1942edefe83f311b15e8fa80377f4937dee134b9f57",
"format": 1
},
{
"name": "plugins/modules/user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "14550e1a3958db5cffe0926f05ff81490e3744077015d5adb31d80af9ede4334",
+ "chksum_sha256": "3cee2f6c07f18cfaa8e18930fa76b7c5b5669cb55f217d40bc7e483b722b2168",
"format": 1
},
{
"name": "plugins/modules/device_interface_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b87a231ea5513907e7ac5a11ae6689f63443851999b5ae73a07536863065f52",
+ "chksum_sha256": "3b80a3b27f378d1f6cf37fd95df48a44b164ed6c0106dd0e1deaedb71c5cc5d4",
"format": 1
},
{
"name": "plugins/modules/event_api_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cbec81ca521399d416024b473f3eeb1fdfcaef7823790529d0fd59e58a3c38d4",
+ "chksum_sha256": "353cea570ebf74d8b584ba794e64b0176ae91a715b64a6fe1545bf18c78cf0f4",
"format": 1
},
{
"name": "plugins/modules/event_series_audit_logs_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "789ee39e43d4f396e82c434df8e2dcb2d35d0490a0fc270a198fc4743816be8f",
+ "chksum_sha256": "f0b237decdf37fd9bb5d6b3f5d08cde9ac7cdc581f856c88531112a8b1bba01d",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_border_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "514a9818da32a1b17c218013dfec643b731adf5f9c6b43f503841d906ad9f8c3",
+ "chksum_sha256": "97aa1dfe53c2e5bd67ab9f44dc18a57d0b23bb98b0307482c1d45dd036dc2fea",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/maps_import.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4be6faff4dbda6aa88994791e3865bbe8d12fbefc9d9b25cabb15f7f60d4f7e3",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_site.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4a8ed49cebc8ff4bc66d42e6fffec8ed468a4946f3b0e27810fb6b8f1b356bff",
+ "chksum_sha256": "7fae44e5b7e685e42d8cd70c31d1deddac1002956f30bb38b51817b44b7ba1c3",
"format": 1
},
{
"name": "plugins/modules/application_sets.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "192769a8af1b5c7f3f336ec63211d092a919039b1654abb98cc0182f83d89e32",
+ "chksum_sha256": "9540e0f4030acd7b899e86df7f635ba26e5765016a35ee779caf7abdec79031b",
"format": 1
},
{
"name": "plugins/modules/interface_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a17f013992aab461cc87547571bc824dcbaedf5a535005bb60f9ff8d17376821",
+ "chksum_sha256": "a652b927a9ef50a5ed43c8eed2dacfcd3b16302753fd083cb5c31a90f1102ff2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_extranet_policies_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dd818d844091f0970f3305005c3a014736276d17e56c633f30c33f888288d59f",
"format": 1
},
{
"name": "plugins/modules/configuration_template_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25716a8304f6d3d2cf71687dc7fa67a970476462dbd150b197869dd8d79252ce",
+ "chksum_sha256": "d4d2892eb0058e15bb283f65a6a792e5fa845461ca45d624a078d088c3e9d7da",
"format": 1
},
{
"name": "plugins/modules/application_sets_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dd82f908f43a7a9c18e60a76a2e5fff93c5f0f7f2e88dc33580ffc1ab22589d",
+ "chksum_sha256": "6690416a6e6566885b4dfcc0fecc24b57a917a9720262b10e39d1d82f404fabb",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/maps_export.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f06274aafea6ed98ad33020df38a14e31df8f946088bfbc1a827c9732b251a95",
"format": 1
},
{
"name": "plugins/modules/device_family_identifiers_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bea4d4ef7a733abe83abcadcd8f3f638aa38f83ebcc53d8c69abf61a0a699144",
+ "chksum_sha256": "fefd74c1c8a0e408f38c9fc4b61b0c0267b248104c82e6f21ac83c7bb7ae2e8f",
"format": 1
},
{
"name": "plugins/modules/compliance_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "68d11400c2a1460502bf973e8f34d63e8cf668b3158ee0d18332926623fbe908",
+ "chksum_sha256": "814046b645a7d2b4561f59ec98179ae03e27443333c4b057458d5524eec7cfc1",
"format": 1
},
{
"name": "plugins/modules/device_configurations_export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b1bea127f011c75e212a406bec66e6027e9091338f02e4ed63655cb45009953",
+ "chksum_sha256": "548d46718c2b4e692c90f743f0f438825ac58077bce41da86c15e32320d4c6aa",
"format": 1
},
{
"name": "plugins/modules/pnp_device_reset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18f72f928ff064c8be1ffe100036b6e511c6cf4d61341795a5ae44fd25273ce2",
+ "chksum_sha256": "1eb0625227b7e27d306b86f7aefe9b37e302f5260482a85dc1cfcaefd8325c89",
"format": 1
},
{
"name": "plugins/modules/pnp_device_history_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "732bf747504e5438618fc356c4c0304a630241f2d66c60e4525c713ff3066725",
+ "chksum_sha256": "e547193733a8e24874aea14e5c9fc96e2bc3f779ca5a211f97ea56bece292333",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_port_assignments_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "496aacd96319febb20142a3c395e94ff3c342cc2a2c0b4a7e9d9d2edc7b2d6d5",
"format": 1
},
{
"name": "plugins/modules/task_tree_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "59893d9359af8cc27e151b5ed20f645cf7a8b6b638db999ae6fac7c3f48d9a97",
+ "chksum_sha256": "9d6f81339a365c2e28f642a63d338e4e07bd0b5688fceda94a3567839a47af77",
"format": 1
},
{
"name": "plugins/modules/network_device_vlan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5930cc396ca55b4d45bd55be12ff4217ce39a7a128cf57086e81a08fe3e4911e",
+ "chksum_sha256": "898a090362940e6212da59d43f8255edc95f7374652fa56ea912a2b834b6fc75",
"format": 1
},
{
"name": "plugins/modules/itsm_cmdb_sync_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2bdc11fb3544154af2c2cc7c6b4a9440c148cbd83dd71b0406cc8b473f7d81cf",
+ "chksum_sha256": "18cc8c58e7312ba3cb42ac7d9f8bb5754f5586f7a1c39dbc94ff34c25b6e712f",
"format": 1
},
{
"name": "plugins/modules/file_import.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b50500ed9705cc9aab6d449087985ae9a15b58912610d7d43031171dde180380",
+ "chksum_sha256": "b6f8d46134e9a2b95135eb3834a55c2567b04c2055792750e16aea7a94f44f28",
"format": 1
},
{
"name": "plugins/modules/app_policy_queuing_profile_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "764f46f16e0b00ff41d2fadd5b330dd93da4c7b973be4dcedd0403139d94ebcf",
+ "chksum_sha256": "ac0859aff09942c9a4f06625b4e48082a745f323abe482a16f3b33eca333f773",
"format": 1
},
{
"name": "plugins/modules/license_device_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73b123592b5032b2bf01a1a3d07309c3bd72e00babea6a57cec23e4d88770a73",
+ "chksum_sha256": "f9c87ea86150be39b6e644bbae5230e77c26a9372d3b85cf1411b808405a66d1",
"format": 1
},
{
"name": "plugins/modules/sda_port_assignment_for_access_point.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0083dc63ec58196ea6e35b8090c3f0e6129f32a822d22444230b8c908954ea74",
+ "chksum_sha256": "2a350de1b483f27e27c95c502d366aec0292f7d348f70c49cee86f3bd4f5d7a2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/wireless_accesspoint_configuration_create.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6e7f770cfeaeb1f16d7e3566fb45fd3d88c1876961f771328de1e1653be219c0",
"format": 1
},
{
"name": "plugins/modules/nfv_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c221b4a09bd70a6967d2d8112dc9a2b686e06096015ca32f87ff5d946cd0efbc",
+ "chksum_sha256": "091c20e2f5090af820b3bcb6223cc042e9d63be9faa012b9222865747d8c7414",
"format": 1
},
{
"name": "plugins/modules/sensor_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86bf44ebb2ecaeb88dc8c0f38b1aa579fab91f4b4c2d0befbbe0ad25c2fa8456",
+ "chksum_sha256": "51cacd94083186b3519a78ccd193af5cd60586fe4e40f99da39dd9730dfebb53",
"format": 1
},
{
"name": "plugins/modules/discovery_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e427e86af18d037d23dbe7ba001fdc804209faa0a3db0d95a66306cac9703c47",
+ "chksum_sha256": "f59eb2e5693058695191eca9f955e5eef6039c3246201d0569095523fbda403b",
"format": 1
},
{
"name": "plugins/modules/reserve_ip_subpool_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c496cdf70d115f1a5eb9aa9d27ca58a914b4e1afdce1e9ed4039c167c5c96c5",
+ "chksum_sha256": "a9b996c9d9cadc7ac7bf4253e7b601b37a93aa7fcbd24d4a7e9bcde633347ea9",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_edge_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da4b7c0c2189805b12502ffee36f981e43f66b64c11d72e9114b44e3dc36ea32",
+ "chksum_sha256": "4e1adde9984977580f01b633abf820dd4dd8111b0baac8e23b24b0b0d2a0a33b",
"format": 1
},
{
"name": "plugins/modules/issues_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3233a808e2a38a5dcff1c5ac5dae05c3c234ef3867ea579b4325c6308644fee4",
+ "chksum_sha256": "83ff7d0d98ebf0b89a08948606efc586045f2fc481486ba14ca11076da48a560",
"format": 1
},
{
"name": "plugins/modules/reserve_ip_subpool_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be838faad1e1cc2132456eef16a45989490e303cf472101662e899ff746d3471",
+ "chksum_sha256": "821c8f30274d1d5b1a233f42aff1787161e813406e2d282a6f0d384bda11a044",
"format": 1
},
{
"name": "plugins/modules/system_performance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94b84d09630e63de3e78fb401d831c1b575f2b67ab203a9630e73a249e414c23",
+ "chksum_sha256": "75d9d1240f8e62a5348588fd3df8321c7e30c10cf5de2845a8f5f8e1816d865c",
"format": 1
},
{
"name": "plugins/modules/pnp_virtual_accounts_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4da458d6f6900281ce72fb7d35efdafce5726c2a7b1ed1fceb96013e39df2a2c",
+ "chksum_sha256": "3583367dd2102d84d40504cb785879885f729604f6721b5fa524cb8a0defc9aa",
"format": 1
},
{
"name": "plugins/modules/qos_device_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "345a0c8ba2460447514eaadd9a4c6c06437a41dd523ac0ee242b438bc5b6fa7d",
+ "chksum_sha256": "e9e4c1ff267466b876dad679335a8e671e533cf08f6d82d00683441833a62470",
"format": 1
},
{
"name": "plugins/modules/wireless_accesspoint_configuration_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "476d76a1dae5fb65ec437614086f4330345337c4a8016143bb4c0e35e70f5394",
+ "chksum_sha256": "935e1925c42c1424ce6fc2f87263bb2f157b5ff08ae4fbf6fa4f38103b2d82a3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_management_address_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "57102b9d8576c41e1b68a7088de90ea7955876864c9471616ac9900bd6a09156",
"format": 1
},
{
"name": "plugins/modules/platform_nodes_configuration_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "425dc1675cc14df2e1a1f106ba82b015efea4dad660efd30e90a85513544fe6e",
+ "chksum_sha256": "a0b5e997f6b1c260762ccdad7907a78ff3a4ea253123197fcbd9f1a8ba9d8410",
"format": 1
},
{
"name": "plugins/modules/license_usage_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4cd3c33a245ad781ca91ed701162be7f153a784fc5074510e6653ffd37eea58",
+ "chksum_sha256": "ac10eaabfd7fcb77185bb64d3472a8ef1c237f73b3d75f595f19652b3919f6ae",
"format": 1
},
{
"name": "plugins/modules/swim_trigger_distribution.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05301b58c6bd53db105787507b8d95f9146e876c04517da24a2ed7ce93e5ca9e",
+ "chksum_sha256": "f218209dd75253af2e5a8e0dbfc027346accca12d986bc3bc6ed4c1074f434ea",
"format": 1
},
{
"name": "plugins/modules/network_device_config_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fdcbd52016db5111340cbe50b352689233b74841792b2ac6e26baee1962dec7e",
+ "chksum_sha256": "8ea22a160edb3e80749b67c2d1e946993a8160ec42cbdf347722e5a503aa318e",
"format": 1
},
{
"name": "plugins/modules/golden_tag_image_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fa5190a562a76348af1a487b0fedfeebcdf38420faac0273a0f4c4d7c2d6cb9f",
+ "chksum_sha256": "b904b518a8193d4bc70947a17a2294ef87ba64a77a340e73cf3f25cc220ef1fa",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/device_configs_backup_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6d1679cdcad88b99307dfddbad1dca460c6f4b7ac0bc4ac0f29278d0a6414520",
"format": 1
},
{
"name": "plugins/modules/app_policy_queuing_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7901bb6d68d20beacf33d98102b5f4c9a60d04363823ad08eed7199ce413a49c",
+ "chksum_sha256": "72a1227fc26f917b0a299830d8fbdfd201b48a864672c818632afa977b80319a",
"format": 1
},
{
"name": "plugins/modules/device_enrichment_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "573bbc37a473a66dd6917c421cb88b41893400becba54c7445e1c74141854444",
+ "chksum_sha256": "09dfec0101c847340ec0860bfc9b9857422517ae16812899db27343f46c07206",
"format": 1
},
{
"name": "plugins/modules/file_namespace_files_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4b92d8ffd4657da74f40eb67135e126835805a45ccf56b10a734f913dcb14f1",
+ "chksum_sha256": "f8a959b47e443402cfd8dfce0a67ab7915a1dd8682fb3ac09e84a112277bec80",
"format": 1
},
{
"name": "plugins/modules/service_provider_update.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "01cdb05b3612adcf7492d4c05b7bf341129e6bf945ca8d6e1a7d0c98df1c70e6",
+ "chksum_sha256": "3bfcefde227b00be39eedfe7a6dbf9927233d3dd8a3a02ea723485852f61aaac",
"format": 1
},
{
@@ -5142,322 +6157,427 @@
"name": "plugins/modules/license_device_registration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8911294bbab15816cf933fa3c99fb99fcc5d59a70a98353e993a5df1fa61913a",
+ "chksum_sha256": "7e820d4ebcee73f58bdf948739ddb8d7cf9b713a52ce2b48694ff5e7a297d4b9",
"format": 1
},
{
"name": "plugins/modules/configuration_template_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4329e504a1f1ae60ea74b216f842bc3b1f7425e666f6eab3643f3a01499c275",
+ "chksum_sha256": "c5bf39108fcd452b195b440f5754651e9db5f979030781d54c0b5ddb30699018",
"format": 1
},
{
"name": "plugins/modules/topology_site_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8013a0732ee984bd7779b6e2fc10eac4a1dd833e8d39bb37161857ae0a9191c5",
+ "chksum_sha256": "4cd10c0f6a21b4f2c062566198dcf114e510a7fe615be487f05e783bb07a7fe5",
"format": 1
},
{
"name": "plugins/modules/network_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f84233f884fb03d3a49de190b30e1dc254fecce2ce839143fc2b21c259ff3141",
+ "chksum_sha256": "2f6a589e1b9ef0628bae6ca871ebd3f20143e597c4f3c59acf44706c3fd11798",
"format": 1
},
{
"name": "plugins/modules/network_device_functional_capability_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c970dd0ad71174f2f57047aa0361be3a6cad21743b0d15e1ea944944d5d492e",
+ "chksum_sha256": "7e0413afda4b62bb45b04203b75f870bf4d9f2427227e340baa1aba35cdc8d64",
"format": 1
},
{
"name": "plugins/modules/business_sda_virtual_network_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0961a8523eec413b3ea8e505b823d54dc47eee05900cd1b5e2aacc04fab8161",
+ "chksum_sha256": "0c97538098ee629297f5aafaffdb0aba70985714c701a43a11ce8c0d74c8b967",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/flexible_report_schedules_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "82a2e61e81a1b7bda19949e0fe9823399dff8ae1c85f0e6e873e60be9f55100c",
"format": 1
},
{
"name": "plugins/modules/sda_virtual_network_ip_pool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30c2f462463ccca7ebb154d7112289b6e4b5dc7b84dbb16184c05946ef545f48",
+ "chksum_sha256": "8041b5f3ebccd483c05a426ba2e7fa2ea4796533514d185a3134c42e89a9300f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/maps_supported_access_points_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0347ce0ed67446e425adfff589b95b120da60576ebbdbcb768a8f32a6ff85cd7",
"format": 1
},
{
"name": "plugins/modules/sda_port_assignment_for_access_point_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "36b20fa02a78d3856fc637637ef790a4c6d9fde70df05ba64b6d507833e70033",
+ "chksum_sha256": "2817c5a894e6680ca1abffedf64dd5d11c0f6b263c440612a1e23aefb86ee9d3",
"format": 1
},
{
"name": "plugins/modules/snmpv3_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4986e2f6eb85ab186ec1dba19172866c46c914d89a53fa703e4dcaef2a2002b0",
+ "chksum_sha256": "4e9699bff4ae0ae815af39782f7ec2c77a9d28ead5580446c09fde73df3c69ae",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/events_and_notifications_workflow_manager.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cd86ca2d94a314a8f8610d9ef401872616bea42758141bb5885417a6679e55ae",
"format": 1
},
{
"name": "plugins/modules/configuration_template_deploy_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd81da364978b6b07c8b740866b2f0ec5158ec7c121405cdda89b9ccf9827304",
+ "chksum_sha256": "49d186fde5018dab78dfbe6c3f23abf121d02e183cf2a81afdc9fbdb28a784bb",
"format": 1
},
{
"name": "plugins/modules/applications_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95631698e329c64d12a016b29dbcf4503e15142a399d35c865f4a67af9ebb6e1",
+ "chksum_sha256": "3e341cba89fb50acc235f592777a7356a78b53a00eee5ea20018bebb918b0c95",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_sites_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7ac4168723db276042f1fea32a03575abbfb83bbf65a2bf5a10ea1e8f625dee8",
"format": 1
},
{
"name": "plugins/modules/sda_virtual_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a9db2f68811fa69d9bc436fd4e2f811d72046610895ab77077c1b3cd40317ff",
+ "chksum_sha256": "3a8367c0f2c13ece68ec44321f14b8b48299bd088c223ead66523309a9fbe664",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_site_member_member_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "813a4f8a1b3b032854c37eb94d66d1135c485a1d2d24f8606ddd6c4a086f7b2a",
"format": 1
},
{
"name": "plugins/modules/interface_network_device_detail_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5180c047f70c067118628a41c0b39cb35a8809aa144d39d69e5fa26f93bde66b",
+ "chksum_sha256": "dc267a54b0309f42fb4382ebf8e3e8edd531a92f40ba66a4b8b8e063471968ce",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/application_policy_application_set_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c127ea25203a77f692d9cddd16b338dfc6fe392f32b2027a65e53cc102f05fad",
"format": 1
},
{
"name": "plugins/modules/security_advisories_ids_per_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec65ad9dc89bf2383064373c81a8eb380176512b855df4af9a30e37abed9d3b2",
+ "chksum_sha256": "27b08cfb0abeff84e94c64066f030708e1c4b9c8cd1050963783e8d0fd99a2dc",
"format": 1
},
{
"name": "plugins/modules/platform_release_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d8908b473417711a0b340aa89c684182c453e0c992780c67d4054ea006404b2",
+ "chksum_sha256": "5d282cd29ef8f0e767fafd918446c32544959e3321a22288333af90bcdaa3dfb",
"format": 1
},
{
"name": "plugins/modules/network_device_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c5d2866eb86e0b1b66454231d5f4f97051b69503a9396c5f239e964a08771c0",
+ "chksum_sha256": "77e5c9bd406c90b7ea716e820a5041b01f5c07978d934eb70710782adef6144e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a477e3abb013a5a5202a5fd63d0d7c19e39fb9988bf97e9fc269f49f38801048",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/maps_import_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ef0363e554dff74d80a3d2aa0edae26e44c6a5455028d98af62f63877f209299",
"format": 1
},
{
"name": "plugins/modules/event_series_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6a358d02719db2e816a63eceb647f47065f4bc850417ce3be6cbcd251b52199",
+ "chksum_sha256": "a68e307c325dffe3e4c26df6d78f243b84dbb0a1686820f9487e7e613c668968",
"format": 1
},
{
"name": "plugins/modules/app_policy_queuing_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a2f9c2eec211c62f7e7e3805c7d79a1bf0487a7c5af795c084366806812ff2b",
+ "chksum_sha256": "d903ec2ccdb399585be3e25e83910a3748647cc520ade5946515e30f6fd5d360",
"format": 1
},
{
"name": "plugins/modules/service_provider_profile_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ad7c00fc5747b64f421bb56bf49d69e2a7c602bcde375da4903581fdd9f4f733",
+ "chksum_sha256": "c32a7528a85d4f112295a71ccf090f9207bd45e6125058fe3496f6643d0bdb64",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f0bb3abf1d3c7c0be45b87031985f1c5b86e65a082ad35eb97a633b47fe5df60",
"format": 1
},
{
"name": "plugins/modules/service_provider_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fcf85ee32ff93d2d080a05983bf9ad5c84e9208186b49cfd2e99fd960e7d6071",
+ "chksum_sha256": "dec2053d54eb2cf4f35ad7ea04168286f4f8e2fe4a12b887730c7780e59ba3db",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_anycast_gateways_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d8a204497679bb8ee4e5bbe7a00cf07822d17b014477c3bad627b9c07c4690ec",
"format": 1
},
{
"name": "plugins/modules/network_device_update_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5527bd9efd7c373b2f2bef0c7d9769178dd03da2214d9ec0d8327c75605c9b5a",
+ "chksum_sha256": "aacf97863838a4df6d68c083c4d6e76d144b44b70a1c78e1a3398e2e56a07aaf",
"format": 1
},
{
"name": "plugins/modules/sda_port_assignment_for_user_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3dae9e1a13f2ef7c64599173440a257c981aa1973e521b2a21ecb19777a4f420",
+ "chksum_sha256": "68b30813011c5a79aab3ddcabe2a76fe9b3a4dc32b3449d694e89ee0c310a182",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/lan_automation_update_v2.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "375a0656b1054f12231be571440a4934ed28c435f5e14bc28b244c0e6815f34d",
"format": 1
},
{
"name": "plugins/modules/transit_peer_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "753a85064c256636c3d572a0d95745615caf59d3467d23640c0dfa67609e8508",
+ "chksum_sha256": "38c9e643509d7a6d05dad85d1db2ea3001d4c3c45a6ed0b17c764c9fde1b11c4",
"format": 1
},
{
"name": "plugins/modules/device_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6edf275ba5c5101305b745ab1da31394f4921ddc7273390dd6d7d9ba68443f71",
+ "chksum_sha256": "c069b26f7a4aa8533d95c99d3c178c8b79ac6e86de4ff81bfa42880380644879",
"format": 1
},
{
"name": "plugins/modules/device_replacement_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec372d22e552b584051564f4b2350201d6f100dd15d9d3b8ceec092d7f3662e3",
+ "chksum_sha256": "f7fbd3b3f70490b3625455bea6b62d63398a2ed8974898090487881133d919d6",
"format": 1
},
{
"name": "plugins/modules/service_provider_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e08a7b59aad648e9080fceae08654318c57de7b3103c49f832938dad61f1e4b",
+ "chksum_sha256": "81b2c8885ec0d584f8c2e872c1316a8afcb35d18621a3cef91902f0ee321f3cb",
"format": 1
},
{
"name": "plugins/modules/sensor_test_run.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fdc7503849a10f9f5250efe5aff1762230f36c307dbb2b9525269dff88741562",
+ "chksum_sha256": "d7206a628f3d822080d385d1bdb40d604ff2b5685a2c324b8fd62f0506a679b3",
"format": 1
},
{
"name": "plugins/modules/event_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d9e1da13a438a4aa0e9789bd0b8a8dd2a62d744a74219d52bdffcf54babe7a90",
+ "chksum_sha256": "0bda6fdc2d7dbc9b7a416241bcfd16d6b309d7817a37f13e57e1e2b962bc4120",
"format": 1
},
{
"name": "plugins/modules/device_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9dd5499ad6dbdec5e399ccbde4b4a341d487e2c25abeef9db15c8a822764f38",
+ "chksum_sha256": "3fb092769d764e1f55ee7765ea6d78884136d3eeee4f987c35b024b79ee06aee",
"format": 1
},
{
"name": "plugins/modules/netconf_credential.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "042123c962929ad1cd28b34d332c32c3762f34b6da174aa9016a5d5076026a1e",
+ "chksum_sha256": "af1aaa8f57e70f5936e0467ac7ef3418d2468ce3a58a826511e9ede6a903ffff",
"format": 1
},
{
"name": "plugins/modules/network_device_register_for_wsa_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90f6f71069aa7cc1ae69ce29872ac0de4abd18f00b3938451440a10e2f9aaf56",
+ "chksum_sha256": "c0a8470c882628ee199dd5b74139ecc891bfaad227017fccf60cd1f0534dba38",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_provision_devices_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "76bedfd906eb88758e2630c350e4118020d729f0006e5975e24b38061261c693",
"format": 1
},
{
"name": "plugins/modules/client_health_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f19acdcc18536cc0d1481d2f1e599470b2452f535e9a7875fcc561cbd24a3af8",
+ "chksum_sha256": "5f5bc7d34d762c9601cd25baaa2728d61840ae4f3927d2834b100f0b4933a02b",
"format": 1
},
{
"name": "plugins/modules/provision_workflow_manager.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4022bae7d5a73a4f04189450ed124ec797c1d074188ae2448f781838cca65cf",
+ "chksum_sha256": "37f9004e64e1da6f71c0fc5067f577637bede3748f8c89f385c8ec198b08eded",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_anycast_gateways_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fa53f3fd8857249fc9377e2d212dc32e92619fd02dc63dc4a66626dde1a0a2c5",
"format": 1
},
{
"name": "plugins/modules/interface_network_device_range_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c0d0e2edb7feb9baae0fea938aa582bb5e12fb5d43ae1e8b073fdfb2f8a3a55",
+ "chksum_sha256": "6bea9266a3fba2f7a92bac9d684af90bb05da02d166533d8923a91f3d45d3435",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/users_external_servers_aaa_attribute_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9922f23aa814fc3208538b90302ea3d1b11deb942db81b9e06b0c6867041cc18",
"format": 1
},
{
"name": "plugins/modules/inventory_intent.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ece57506e288379d5617d21514aa1511a81e33a5797c1965ed32edb945c46829",
+ "chksum_sha256": "695da7d552f48e08581497672165a3b16d53eb8f7318972e7fe7d38f124c009c",
"format": 1
},
{
"name": "plugins/modules/compliance_device_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd1ff0a6119295b29da46673213a0a532d43b69fa99dd2350e379aa4211a710f",
+ "chksum_sha256": "fc4cc857751619a335d15ac529eb37d4b12ba5f5992e9f56ed088d4b3952e37c",
"format": 1
},
{
"name": "plugins/modules/discovery_range_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "57dcb52ea8cb6564b792c1858e50f4c101fd7c200c8e93848963f6d07ed5ba57",
+ "chksum_sha256": "f1797657eb7c68fcfa27cb0d32e8d654f3363305fe9531dbf828eee3b90eceab",
"format": 1
},
{
"name": "plugins/modules/pnp_workflow_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b510f57488ce208d9b3d50a4228efe565ed55013b2e2fa4a439cdf217a51dd28",
+ "chksum_sha256": "7ec9095234bee61c2cd23a829bf0b35a752bdf2b7f8098744ca3d174f5d507f9",
"format": 1
},
{
"name": "plugins/modules/network_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5745ffb093995318488363a512a318b47e8f5fe848eb29735e01593d4ea4c4d9",
+ "chksum_sha256": "ddbe644478cba9fbcde27305ee7eb0f55a18108d96b4a2c97220863c73124721",
"format": 1
},
{
"name": "plugins/modules/network_device_export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9fdb9952b2c82440a6d7621254e07990602c70579bbd4c49a9a234e83233a933",
+ "chksum_sha256": "294dca67968195b97f8e9fd88b8c1c26b838bb6115b36faa1529860832c46cef",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/flexible_report_schedule.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1960e3669a6c225b788846b49e5a2b368d59bb62c1436a6cb326b5bd28f7288a",
"format": 1
},
{
"name": "plugins/modules/templates_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99c2b3a5172cb9fe26133dca84865f6dba2fb2f663ecc97dd9530141931574e0",
+ "chksum_sha256": "09901609f1f945a180193493ee0df66e93ee734738a1ba510ed7e37afc94017a",
"format": 1
},
{
"name": "plugins/modules/event_series_audit_logs_parent_records_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc6b6acb562cb233d3073e4d8d9a8fd5947c05feebe45d447270626c887f263d",
+ "chksum_sha256": "a87c7f9255ae0d79fa0d5906786bbe125f83176a9ba898129499c961955c9b31",
"format": 1
},
{
"name": "plugins/modules/network_device_with_snmp_v3_des_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9680badfaa131c3e8402ed585e1f9f078b24fb0188c87565a3ee5f94c4134587",
+ "chksum_sha256": "fc983c3dcde7dac9bef787802acff9d33fd067d55e9028e7ecfcd5aa87841988",
"format": 1
},
{
"name": "plugins/modules/license_virtual_account_change.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c50583679b1731041148b0ba9d5426b6d9f6d22de366ee1d81617be686bb8394",
+ "chksum_sha256": "3813f3471e823971e579070f290d7630d85a97a40c176594906484c5114b8dca",
"format": 1
},
{
"name": "plugins/modules/compliance_device_by_id_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b7d3287a9ebdec1a9e0a3247bb0d26645c810db6188be6a6e019816096fd099",
+ "chksum_sha256": "7209ce25e2a94576d4363ef6d2c7a810c5e49dbe6422e2089de8ac4e749c5df9",
"format": 1
},
{
"name": "plugins/modules/pnp_virtual_account_add.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f4310f4e30db9d98de59b39284f4399711f959d5f9f5a7ed91170b3e22e9abd1",
+ "chksum_sha256": "8eb1d1f76022552139a6908ad928f300bc5812969650de5b0a93bb2734767461",
"format": 1
},
{
@@ -5471,84 +6591,91 @@
"name": "plugins/modules/tag.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "599cb4070ae8d04cdb5c7aa40230cabd02434dd1781fe0cb30edd53f96c91aa7",
+ "chksum_sha256": "0cc37d73464adcaef9cca8db08fec2924ff9c2b068a012c36cf3a7ce84526fc1",
"format": 1
},
{
"name": "plugins/modules/reports_executions_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d8d4e2cc1aa4e7eb5cfde98efa1d0c0fd815e0732e25c88052832fec71bad4e",
+ "chksum_sha256": "be7cbc92d443b2757ba9793629bc33a9f85e02c763243b24d2deabc8289a2a9c",
"format": 1
},
{
"name": "plugins/modules/sda_fabric_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21d8b6a152f6cf176f988135c026a332e0207dbf527c2d8ce8e4f91fb3a29ce6",
+ "chksum_sha256": "07898d0802118fae5e3d7c509e1e56ecd0f3f59c34f3d75cd3846e27512a36d5",
"format": 1
},
{
"name": "plugins/modules/device_credential_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32c77e4f8bbce95b669e7b633cb634c0ce90f8bc90aa5ba05d88378c1c72b7b4",
+ "chksum_sha256": "cb118bdc76ccb630bcdae5ba61fc315584df36a7f9055bc240e23ff2e275445f",
"format": 1
},
{
"name": "plugins/modules/file_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be00fb30acf461ef13d6f53e64d6566e3373854c4f4b814a068705795ed1466e",
+ "chksum_sha256": "b0d36a1e23b5ed6a1cd0d0647dae477c09ed220a8d9b5d9ccfca040e0d6fd214",
"format": 1
},
{
"name": "plugins/modules/event_subscription_rest.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "781ca61ab98e39bc744c37e1a49c67cb4145ef0d2ab0ea5d5b72f29053166a8a",
+ "chksum_sha256": "265145dd3ac5f8d5e0e8b52fe33f5706f3befb833102984907f11243c7f16141",
"format": 1
},
{
"name": "plugins/modules/security_advisories_summary_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2427fda49247b452d0a814c785608c30798a6411ab8556a9af5c8fcff2d501d",
+ "chksum_sha256": "9853b25828e1e46b0b0192061209c491ce070329de53ae78753d225a2cf3a8ab",
"format": 1
},
{
"name": "plugins/modules/network_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "230805a1eb0dd3ee97a3310492d877cd6782682c785487f62b05d0c392fa3617",
+ "chksum_sha256": "31242334f7ba49fafea8154b3f4a47a76ab19443f8aa594de209e9aff08e2c06",
"format": 1
},
{
"name": "plugins/modules/integration_settings_instances_itsm_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d62bb9050400c5eb96b1086e303eafea81a553acd984f837a7f37fa807c078b2",
+ "chksum_sha256": "e2e79c47e304a91dded84b7333f0ff9967aa2181e897a0a95deeb2bc5300a0c9",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_port_assignments_count_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f03e30bca00313116e05227091ffdc8c59baf1dcedabc8941e96351c81c6f0dd",
"format": 1
},
{
"name": "plugins/modules/pnp_smart_account_domains_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1d5cc0723495f018e760b2307f0a562cb82784ec0a94f9fcdc698570ea2cb48",
+ "chksum_sha256": "ebc0f4c7f1dfd398c6b3f8fdc8cae34fa8191bb2bde4bb1d4d5944c9eb775411",
"format": 1
},
{
"name": "plugins/modules/security_advisories_devices_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c46d4671ba23a245e1e394231f7be2aee9431c7b1075beaae8c61b87974bf3a3",
+ "chksum_sha256": "aadbbf92814a9768ceb61a52df8c6b04a3c6459fbf7e2095084200a7069f9e0b",
"format": 1
},
{
"name": "plugins/modules/reserve_ip_subpool_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a196f4b903c9f46fe96bcb9f413e781b3ff54e6804e498b253f3a4e72059098",
+ "chksum_sha256": "56d3ad7649fafaaf797fbe6e95475ec7cd02aa577679e9ab924154fdc22e4154",
"format": 1
},
{
@@ -5562,84 +6689,126 @@
"name": "plugins/modules/itsm_integration_events_retry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5fff2a26a1bff3739f193a1c8d97bbcfa6b858b0e6724c485decb12848ecade",
+ "chksum_sha256": "81c5cd5cc2ec68991f0dd7980e5e0e9dbdac6c07939018b7069c7923fc4eb8fe",
"format": 1
},
{
"name": "plugins/modules/user_enrichment_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e9e53d0be84ff2313ade25053b67ef7aff8e10b0494d3a588d2b0bcd3376745",
+ "chksum_sha256": "6e6a170452b36e55637aa713625080716f0eb69d2f8c0bad007368f5de5662d4",
"format": 1
},
{
"name": "plugins/modules/site_assign_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6ea9d0392351b4c14adf24902b22fb610b12eab49398a314a6dcc73fb30641c1",
+ "chksum_sha256": "0735017c42b78fca7fc250341fdd7683107047289f694a5c845034fb6e39618a",
"format": 1
},
{
"name": "plugins/modules/golden_tag_image_delete.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9cc5242bf78cac68ab06a508aa7046d6d20de4c3e1d0658f9893e21ddb6a39fc",
+ "chksum_sha256": "3f2b819bf047d2976da2e109bcc0080452b63d846e2ffb814269f5b14e7509b1",
"format": 1
},
{
"name": "plugins/modules/network_device_linecard_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec2124d0f33ed574f0135698679be01794cab7c5610052899579266dc7994860",
+ "chksum_sha256": "d1a19ee13de26e1b241f0bf5048810452f65fdb030ad2c9cad01ba44d762b3aa",
"format": 1
},
{
"name": "plugins/modules/issues_enrichment_details_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ac8965247e64c864374eb781b66398c38deb4e24f366c232c4a04e52dc2e59f",
+ "chksum_sha256": "da899f79685885c132bfcff04d79fb59d40a61da61066ae0e3a37f0aa4e6da73",
"format": 1
},
{
"name": "plugins/modules/golden_image_create.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7a030e84fb8bfdae750f3b26e44a3ad5a69e6f977ce205ae2c3647065ad70b8",
+ "chksum_sha256": "6e1aa1234d413ba575b20db665ce2ab250e27a1c05af0445b63fa04e05870c55",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/flexible_report_content_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e4ece717189eecd927735147df57f3eb6d1ddfccdc61999070b0f471abca2384",
"format": 1
},
{
"name": "plugins/modules/qos_device_interface_info_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "367f38405567a693e4948efb26b11bcf2c4cf8cc17141c848d96b32f0c39c4fb",
+ "chksum_sha256": "61b768a5cc261e3b4c02d85e447d2619b3d2064aecdc0e6cf5580fbf607a434f",
"format": 1
},
{
"name": "plugins/modules/compliance_device_status_count_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc118928a2bf1982387666dbc9f518b089e12d01af303100583072a0da87b383",
+ "chksum_sha256": "98956e263aaaf50bd43233d9d7075959290cdaf9ee689fb2b086ad3d8be84610",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sda_authentication_profiles.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d1e7d0a75cee9bfb73bc670b495caac5a0c286785d37279cf79694a13d60016d",
"format": 1
},
{
"name": "plugins/modules/configuration_template_project_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0628463b729978f221dad948cdf77316e9564dd2f9cdaa7a299a2970658c312e",
+ "chksum_sha256": "8a617630768d682814c984239d91027debc77a9d0c9869bcfebe36d0d1bcbe6f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/event_webhook_read_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0c1ca90d57c0f8ef0bc7bfdada9a293f20599900f36e28075a070492f88e7eaa",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/configuration_archive_details_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1c51cae2c8541d38d5de099c92e6e8eaf9a8b54b95f60f024f40931c3c20ece7",
"format": 1
},
{
"name": "plugins/modules/pnp_global_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42bee25376c6dfcb9043b0e59fc9b104bcb2cfa23a2e150a10dd900c08df0153",
+ "chksum_sha256": "fb2a7e845d6e8796528e534d35373ebcfe779acbcde4cadc6ad15c58de1780ed",
"format": 1
},
{
"name": "plugins/modules/configuration_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f4ea58c333f46cfbbc93da9bdd06331d9497625009c9d82faf6d384516212adc",
+ "chksum_sha256": "5bbee6ef5c40a7648bf89233e494121a33ffede117a3397ab3bd74c7adb16962",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_device_config_write_memory.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d7a3963d37a8e7a0006a12085af7f2e74218d435faff9db3bc85834b7c1b01e2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/application_policy_application_set.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bcbf79cb0dfd846a05be16b581b546fa7a11851c63f8214b900265a9886240fb",
"format": 1
},
{
@@ -5772,49 +6941,49 @@
"name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e77127e949b5b006b39726f5131bab9f0155a94a51c63b0f4f75d46194abf63",
+ "chksum_sha256": "3470dec7246d7c9795f3fd8d19481f0230aae651ea8c0733ddb1514ecd259c1f",
"format": 1
},
{
"name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e77127e949b5b006b39726f5131bab9f0155a94a51c63b0f4f75d46194abf63",
+ "chksum_sha256": "3470dec7246d7c9795f3fd8d19481f0230aae651ea8c0733ddb1514ecd259c1f",
"format": 1
},
{
"name": "tests/sanity/ignore-2.11.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fcac0d5107e0b7315afdb8987dcee501dc86636551b08ae1a8c4ba5bc63d39b",
+ "chksum_sha256": "67470f85cce874b703824b046969393dc88a316722d2d35acf4ae6a0ee77770b",
"format": 1
},
{
"name": "tests/sanity/ignore-2.10.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c4651e8b4cdfa9af71c2fc40a99ea7ab83fc4b90a5b353f5f4bd6315ca77b49",
+ "chksum_sha256": "119f5ed9010713e065d71d44b32b52d0f77f520c0fd3ac7208abfff7ccd2e300",
"format": 1
},
{
"name": "tests/sanity/ignore-2.12.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29d8543143d22a474e2851734aaa32b46770150b47be01d0d4e5fa2807006b68",
+ "chksum_sha256": "4c4e59d2d9b1d8c78b9fa55c54899513a9f34eebd1d10d9be7ce131971d56b8e",
"format": 1
},
{
"name": "tests/sanity/ignore-2.13.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e77127e949b5b006b39726f5131bab9f0155a94a51c63b0f4f75d46194abf63",
+ "chksum_sha256": "3470dec7246d7c9795f3fd8d19481f0230aae651ea8c0733ddb1514ecd259c1f",
"format": 1
},
{
"name": "tests/sanity/ignore-2.9.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c4651e8b4cdfa9af71c2fc40a99ea7ab83fc4b90a5b353f5f4bd6315ca77b49",
+ "chksum_sha256": "119f5ed9010713e065d71d44b32b52d0f77f520c0fd3ac7208abfff7ccd2e300",
"format": 1
},
{
@@ -5867,6 +7036,20 @@
"format": 1
},
{
+ "name": "playbooks/network_compliance_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1566dafa07fd4dce41f9f416ec54a78166f3690c9a279ed78251cb88f2383664",
+ "format": 1
+ },
+ {
+ "name": "playbooks/input_events_and_notification.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aaee4ed114bc67fba514482da03dbc7d9da269e0846d8e35a43828125a867cb1",
+ "format": 1
+ },
+ {
"name": "playbooks/template.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -5891,7 +7074,7 @@
"name": "playbooks/device_provision_workflow.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7824efea5e33a458e49665dbe08e87acc2978da4925d834592808c495b9e3fc",
+ "chksum_sha256": "3a2298eba569bcf2485375bde0539f90081e7ee4ae1ba420048e14a424c40986",
"format": 1
},
{
@@ -6007,6 +7190,13 @@
"format": 1
},
{
+ "name": "playbooks/configs_backup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aa598b07049b1530319a83b2b1caa3f60ff48689c6bebe5f75015936c7fbc8d8",
+ "format": 1
+ },
+ {
"name": "playbooks/network_device_update_role.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -6063,6 +7253,13 @@
"format": 1
},
{
+ "name": "playbooks/ise_radius_integration_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "10663d636127a58ebfa3e0ef65868e20718b79b8cd731c26423ea3c043500d01",
+ "format": 1
+ },
+ {
"name": "playbooks/swim_import_local.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -6077,6 +7274,13 @@
"format": 1
},
{
+ "name": "playbooks/events_and_notifications_workflow_manager.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "df8dc37a5412caf8cb97d4a596363d1cc08e697332d87b2f4279cfd9be1930df",
+ "format": 1
+ },
+ {
"name": "playbooks/device_provision.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -6115,7 +7319,14 @@
"name": "playbooks/network_settings_workflow_manager.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8ae63ded3e85bf002f0bb9d0561f1c58ed22d034d72271ee97642425ca07406",
+ "chksum_sha256": "316737626797b0346bc8c9c80898e7a32954582fc56f2d04905aebd05de249ea",
+ "format": 1
+ },
+ {
+ "name": "playbooks/dnac.log",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
@@ -6129,7 +7340,7 @@
"name": "playbooks/template_workflow_manager.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dee57171b4a8bfd5f27f90921febf88baa6ee52f34bf9e88290da2b496ba87cb",
+ "chksum_sha256": "eb6b1ea951c015f1aefc5e871ce984f9a3a5a75473c77597f1cdefaf77692ee4",
"format": 1
},
{
@@ -6185,7 +7396,7 @@
"name": "playbooks/device_credential_workflow_manager.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da77ff1743c952a728532bfc40984e7b3a156f06889b257fd91e8cd203f7c06b",
+ "chksum_sha256": "df08696b0c39e1f73a1cd7527b53e283337a027a4eb0d5ed70cb095be1d88156",
"format": 1
},
{
@@ -6206,7 +7417,7 @@
"name": "playbooks/credentials.template",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1331022feae8b2d74bd4ae2ccc65f98a7e5e4fdb0fd6286ed3a626de3a26603",
+ "chksum_sha256": "a2029ad882304142ae0da41574cc1706d2e529b032161497cc60c832acb24c59",
"format": 1
},
{
@@ -6283,14 +7494,21 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0471f143c1a2248a79e6252e9ab2b902aa90ce8c49bade17d94c7840c109a871",
+ "chksum_sha256": "af04ee8af8787968f32c935b4fcb72ee53222fabaccc351ee78a53faa5e25b36",
"format": 1
},
{
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a22c72a870f94ee00f2b75ea7c907e3bc93d43c3479c8d56841b2bba927ce5db",
+ "chksum_sha256": "b2f03d2a625fc991fe40132f4d78cb0fd4b300e0b18f69a86ab3bb01fda44639",
+ "format": 1
+ },
+ {
+ "name": "Pipfile",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "44f2fbe8515874d21e0669f6dd3562c7071e9a866289d73e6d86561ac535918f",
"format": 1
},
{
@@ -6332,7 +7550,7 @@
"name": ".github/workflows/docs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0a547c256a584df7a0a12f1f14604f37775b81f01cca623667a811c9d6ed6af",
+ "chksum_sha256": "5b70edc3aaf93c6e502d9c973b310f490fbde8fac6518aa7cf496935918cd786",
"format": 1
},
{
diff --git a/ansible_collections/cisco/dnac/MANIFEST.json b/ansible_collections/cisco/dnac/MANIFEST.json
index 7c31dde5d..31e06d140 100644
--- a/ansible_collections/cisco/dnac/MANIFEST.json
+++ b/ansible_collections/cisco/dnac/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "cisco",
"name": "dnac",
- "version": "6.13.3",
+ "version": "6.16.0",
"authors": [
"Rafael Campos <rcampos@altus.cr>",
"William Astorga <wastorga@altus.cr>",
@@ -42,7 +42,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5808375079300aef903ebc6a1308b5535b15fc6d726ea535ee8ff83f05aa969",
+ "chksum_sha256": "ef9ba55628b61b9782901afdf604ae4a4bdf9270a1874975d976ebac1bc240a6",
"format": 1
},
"format": 1
diff --git a/ansible_collections/cisco/dnac/Pipfile b/ansible_collections/cisco/dnac/Pipfile
new file mode 100644
index 000000000..fbe2bde8d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/Pipfile
@@ -0,0 +1,12 @@
+[[source]]
+url = "https://pypi.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[packages]
+dnacentersdk = ">=2.7.0"
+
+[dev-packages]
+
+[requires]
+python_version = "3.12"
diff --git a/ansible_collections/cisco/dnac/README.md b/ansible_collections/cisco/dnac/README.md
index 4d2835f33..96e39ff82 100644
--- a/ansible_collections/cisco/dnac/README.md
+++ b/ansible_collections/cisco/dnac/README.md
@@ -4,7 +4,7 @@
The dnacenter-ansible project provides an Ansible collection for managing and automating your Cisco DNA Center environment. It consists of a set of modules and roles for performing tasks related to DNA Center.
-This collection has been tested and supports Cisco DNA Center 2.3.5.3.
+This collection has been tested and supports Cisco DNA Center 2.3.7.6.
*Note: This collection is not compatible with versions of Ansible before v2.8.*
@@ -15,11 +15,11 @@ The following table shows the supported versions.
| Cisco DNA Center version | Ansible "cisco.dnac" version | Python "dnacentersdk" version |
|--------------------------|------------------------------|-------------------------------|
-| 2.1.1 | 3.0.0 | 2.2.5 |
| 2.2.2.3 | 3.3.1 | 2.3.3 |
| 2.2.3.3 | 6.4.0 | 2.4.11 |
| 2.3.3.0 | 6.6.4 | 2.5.5 |
-| 2.3.5.3 | ^6.13.0 | ^2.6.0 |
+| 2.3.5.3 | 6.13.3 | 2.6.11 |
+| 2.3.7.6 | ^6.15.0 | ^2.7.1 |
If your Ansible collection is older please consider updating it first.
@@ -45,7 +45,7 @@ ansible-galaxy collection install cisco.dnac:3.3.1
## Requirements
- Ansible >= 2.15
-- [Python DNA Center SDK](https://github.com/cisco-en-programmability/dnacentersdk) v2.6.0 or newer
+- [Python DNA Center SDK](https://github.com/cisco-en-programmability/dnacentersdk) v2.7.0 or newer
- Python >= 3.9, as the DNA Center SDK doesn't support Python version 2.x
## Install
@@ -76,7 +76,7 @@ export DNAC_HOST=<A.B.C.D>
export DNAC_PORT=443 # optional, defaults to 443
export DNAC_USERNAME=<username>
export DNAC_PASSWORD=<password>
-export DNAC_VERSION=2.3.5.3 # optional, defaults to 2.3.5.3. See the Compatibility matrix
+export DNAC_VERSION=2.3.7.6 # optional, defaults to 2.3.7.6. See the Compatibility matrix
export DNAC_VERIFY=False # optional, defaults to True
export DNAC_DEBUG=False # optional, defaults to False
```
@@ -114,7 +114,7 @@ dnac_host: <A.B.C.D>
dnac_port: 443 # optional, defaults to 443
dnac_username: <username>
dnac_password: <password>
-dnac_version: 2.3.5.3 # optional, defaults to 2.3.5.3. See the Compatibility matrix
+dnac_version: 2.3.7.6 # optional, defaults to 2.3.7.6. See the Compatibility matrix
dnac_verify: False # optional, defaults to True
dnac_debug: False # optional, defaults to False
```
diff --git a/ansible_collections/cisco/dnac/changelogs/changelog.yaml b/ansible_collections/cisco/dnac/changelogs/changelog.yaml
index 81e4f4348..7f341b035 100644
--- a/ansible_collections/cisco/dnac/changelogs/changelog.yaml
+++ b/ansible_collections/cisco/dnac/changelogs/changelog.yaml
@@ -865,4 +865,70 @@ releases:
minor_changes:
- Adding support to importing a template using JSON file
- Changes in discovery workflow manager modules relating to different states of the discovery job
+ 6.14.0:
+ release_date: "2024-05-31"
+ changes:
+ release_summary: New DNA Center API version 2.3.7.6, and addition of Workflow Manager modules supporting device configuration backups, events and notifications, ISE and RADIUS server integrations, and network compliance.
+ minor_changes:
+ - device_configs_backup_workflow_manager - New workflow manager module for device configuration backup functions.
+ - events_and_notifications_workflow_manager - New workflow manager for configuring various types of destinations (Webhook, Email, Syslog, SNMP, ITSM) to deliver event notifications.
+ - ise_radius_integration_workflow_manager - New workflow manager for Authentication and Policy Servers (ISE/AAA).
+ - network_compliance_workflow_manager - New workflow manager for Network Compliance module for managing network compliance tasks on reachable device(s).
+ - device_credential_workflow_manager - Updated the log messages.
+ - inventory_workflow_manager - Updated changes related to provisioning devices.
+ - provision_workflow_manager - Updated changes related to error handling.
+ - site_workflow_manager - Updated changes related to site updates.
+ - network_settings_workflow_manager - Added attribute 'ipv4_global_pool_name'.
+ - template_workflow_manager - Removed attributes 'create_time', 'failure_policy', 'last_update_time', 'latest_version_time', 'parent_template_id', 'project_id', 'validation_errors', 'rollback_template_params' and 'rollback_template_content'.
+ - ise_radius_integration_workflow_manager - Removed the attributes 'port' and 'subscriber_name'. Added the attribute 'ise_integration_wait_time'.
- Changes in inventory and swim workflow manager modules.
+ - application_policy_application_set_count_info - new module
+ - application_policy_application_set_info - new module
+ - application_policy_application_set - new module
+ - applications_count_v2_info - new module
+ - applications_v2_info - new module
+ - applications_v2 - new module
+ - auth_token_create - new module
+ - authentication_policy_servers - new module
+ - device_reboot_apreboot - new module
+ - dna_event_snmp_config_info - new module
+ - event_snmp_config - new module
+ - event_webhook_read_info - new module
+ - flexible_report_content_info - new module
+ - flexible_report_execute - new module
+ - flexible_report_executions_info - new module
+ - flexible_report_schedule_info - new module
+ - flexible_report_schedule - new module
+ - integration_settings_itsm_instances_info - new module
+ - integration_settings_status_info - new module
+ - ise_integration_status_info - new module
+ - lan_automation_sessions_info - new module
+ - lan_automation_update_device - new module
+ - lan_automation_update_v2 - new module
+ - lan_automation_update - new module
+ - lan_automation_v2 - new module
+ - network_device_user_defined_field_delete - new module
+ - users_external_authentication - new module
+ - users_external_servers_aaa_attribute - new module
+ 6.15.0:
+ release_date: "2024-06-03"
+ changes:
+ release_summary: Fix module name.
+ minor_changes:
+ - Fix module name from network_device_config__info to configuration_archive_details_info.
+ 6.16.0:
+ release_date: "2024-06-07"
+ changes:
+ release_summary: Code changes in workflow manager modules.
+ minor_changes:
+ - Added example playbooks in device_provision_workflow.yml
+ - Added API to validate the server address
+ - Minor bug fixes in device_credential_workflow_manager.py module
+ - Checking SNMP versions in events_and_notifications_workflow_manager.py module
+ - Added new attribute 'ise_integration_wait_time' in ise_radius_integration_workflow_manager.py
+ - Added example playbooks in network_compliance_workflow_manager.py
+ - Added detailed documentation in network_settings_workflow_manager.py
+ - Added code for creating, updating, and deleting event subscription notifications with a specified destination, and added the playbook and documentation with examples
+ - provision_workflow_manager.py - Added attribute 'provisioning'
+ - template_workflow_manager.py - Added attributes 'choices', 'failure_policy'
+ - events_and_notifications_workflow_manager.py - Added attributes 'webhook_event_notification', 'email_event_notification', 'syslog_event_notification'
diff --git a/ansible_collections/cisco/dnac/playbooks/configs_backup.yml b/ansible_collections/cisco/dnac/playbooks/configs_backup.yml
new file mode 100644
index 000000000..696e02184
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/configs_backup.yml
@@ -0,0 +1,39 @@
+---
+- name: Take running config backup of devices
+ hosts: localhost
+ connection: local
+ gather_facts: no
+
+ vars_files:
+ - "{{ CLUSTERFILE }}"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log_level: "DEBUG"
+
+ tasks:
+ - name: Take backup of a wired 9500 switch
+ cisco.dnac.device_configs_backup_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - hostname: NY-BN-9500.cisco.local
+ file_path: /home/admin/madhan_ansible/collections/ansible_collections/cisco/dnac/playbooks/new_tmp
+
+ - name: Take backup of all the switches
+ cisco.dnac.device_configs_backup_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - family: Switches and Hubs \ No newline at end of file
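The new configs_backup.yml playbook above loads its Catalyst Center connection variables from a file passed in at run time through the CLUSTERFILE variable. A minimal sketch of such a vars file, assuming the same dnac_* variables shown in playbooks/credentials.template (all values here are placeholders):

---
dnac_host: 10.0.0.1        # placeholder Catalyst Center address
dnac_port: 443
dnac_username: admin       # placeholder credentials
dnac_password: changeme
dnac_version: 2.3.7.6
dnac_verify: False
dnac_debug: False

The file name is supplied as an extra variable when the playbook runs (for example, -e CLUSTERFILE=cluster_vars.yml), and Ansible loads it through the vars_files entry.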
diff --git a/ansible_collections/cisco/dnac/playbooks/credentials.template b/ansible_collections/cisco/dnac/playbooks/credentials.template
index 5270c0816..4e2878d25 100644
--- a/ansible_collections/cisco/dnac/playbooks/credentials.template
+++ b/ansible_collections/cisco/dnac/playbooks/credentials.template
@@ -3,7 +3,7 @@ dnac_host: <A.B.C.D>
dnac_port: 443
dnac_username: <username>
dnac_password: <password>
-dnac_version: 2.3.5.3
+dnac_version: 2.3.7.6
dnac_verify: False
dnac_debug: False
dnac_log_level: [CRITICAL, ERROR, WARNING, INFO, DEBUG]
diff --git a/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml
index 3d77584f0..ffdc84110 100644
--- a/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml
+++ b/ansible_collections/cisco/dnac/playbooks/device_credential_workflow_manager.yml
@@ -4,7 +4,7 @@
gather_facts: no
connection: local
tasks:
- - name: Create Credentials and assign it to a site.
+ - name: Create global device credentials.
cisco.dnac.device_credential_workflow_manager:
dnac_host: "{{ dnac_host }}"
dnac_port: "{{ dnac_port }}"
@@ -14,6 +14,7 @@
dnac_debug: "{{ dnac_debug }}"
dnac_log: True
state: merged
+ config_verify: True
config:
- global_credential_details:
cli_credential:
@@ -54,7 +55,20 @@
port: 443
# old_description:
# old_username:
- assign_credentials_to_site:
+
+ - name: Assign global device credentials to a site.
+ cisco.dnac.device_credential_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - assign_credentials_to_site:
cli_credential:
description: CLI
username: cli
@@ -74,7 +88,7 @@
- Global/Chennai/Trill
- Global/Chennai/Tidel
- - name: Delete Credentials
+ - name: Delete credentials
cisco.dnac.device_credential_workflow_manager:
dnac_host: "{{ dnac_host }}"
dnac_port: "{{ dnac_port }}"
@@ -84,6 +98,7 @@
dnac_debug: "{{ dnac_debug }}"
dnac_log: True
state: deleted
+ config_verify: True
config:
- global_credential_details:
cli_credential:
diff --git a/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml b/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml
index acb3249a5..61c23937a 100644
--- a/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml
+++ b/ansible_collections/cisco/dnac/playbooks/device_provision_workflow.yml
@@ -16,18 +16,31 @@
dnac_port: "{{ dnac_port }}"
dnac_version: "{{ dnac_version }}"
dnac_debug: "{{ dnac_debug }}"
+ dnac_log_level: "{{ dnac_log_level }}"
tasks:
- - name: Provision a wired device to a site
+ - name: Assign a wired device to a site
cisco.dnac.provision_workflow_manager:
<<: *dnac_login
dnac_log: True
state: merged
config_verify: True
config:
- - site_name_hierarchy: Global/USA/San Francisco/BGL_18
- management_ip_address: 204.1.2.2
+ - site_name_hierarchy: Global/USA/New York/NY_BLD1
+
+ management_ip_address: 204.192.3.40
+ provisioning: false
+ - name: Provision a wired device to a site
+ cisco.dnac.provision_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ dnac_log_level: DEBUG
+ state: merged
+ config_verify: True
+ config:
+ - site_name_hierarchy: Global/USA/SAN JOSE/BLD23/BLD20_FLOOR2
+ management_ip_address: 204.192.3.40
- name: Unprovision a wired device from a site
cisco.dnac.provision_workflow_manager:
@@ -36,3 +49,16 @@
state: deleted
config:
- management_ip_address: 204.1.2.2
+
+ - name: Provision a wireless device to a site
+ cisco.dnac.provision_workflow_manager:
+ <<: *dnac_login
+ dnac_log: True
+ dnac_log_level: DEBUG
+ state: merged
+ config_verify: True
+ config:
+ - site_name_hierarchy: Global/USA/RTP/BLD11
+ management_ip_address: 204.192.12.201
+ managed_ap_locations:
+ - Global/USA/RTP/BLD11/BLD11_FLOOR1
diff --git a/ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/__init__.py b/ansible_collections/cisco/dnac/playbooks/dnac.log
index e69de29bb..e69de29bb 100644
--- a/ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/__init__.py
+++ b/ansible_collections/cisco/dnac/playbooks/dnac.log
diff --git a/ansible_collections/cisco/dnac/playbooks/events_and_notifications_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/events_and_notifications_workflow_manager.yml
new file mode 100644
index 000000000..74874dd52
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/events_and_notifications_workflow_manager.yml
@@ -0,0 +1,88 @@
+---
+- name: Configure channels and create events in Cisco Catalyst Center
+ hosts: localhost
+ connection: local
+ gather_facts: no
+ vars_files:
+ - "input_events_and_notification.yml"
+ - "credentials.yml"
+ tasks:
+ - name: Add/update channels with destination and create/update events in Cisco Catalyst Center.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: DEBUG
+ dnac_log: true
+ config_verify: true
+ state: merged
+ config:
+ - webhook_destination:
+ name: "{{item.webhook_destination.name}}"
+ description: "{{item.webhook_destination.description}}"
+ url: "{{item.webhook_destination.url}}"
+ method: "{{item.webhook_destination.method}}"
+ trust_cert: "{{item.webhook_destination.trust_cert}}"
+ email_destination:
+ from_email: "{{item.email_destination.from_email}}"
+ to_email: "{{item.email_destination.to_email}}"
+ subject: "{{item.email_destination.subject}}"
+ primary_smtp_config:
+ server_address: "{{item.email_destination.primary_smtp_config.server_address}}"
+ port: "{{item.email_destination.primary_smtp_config.port}}"
+ username: "{{item.email_destination.primary_smtp_config.username}}"
+ syslog_destination:
+ name: "{{item.syslog_destination.name}}"
+ description: "{{item.syslog_destination.description}}"
+ server_address: "{{item.syslog_destination.server_address}}"
+ protocol: "{{item.syslog_destination.protocol}}"
+ port: "{{item.syslog_destination.port}}"
+ snmp_destination:
+ name: "{{item.snmp_destination.name}}"
+ description: "{{item.snmp_destination.description}}"
+ server_address: "{{item.snmp_destination.server_address}}"
+ port: "{{item.snmp_destination.port}}"
+ snmp_version: "{{item.snmp_destination.snmp_version}}"
+ username: "{{item.snmp_destination.username}}"
+ mode: "{{item.snmp_destination.mode}}"
+ auth_type: "{{item.snmp_destination.auth_type}}"
+ auth_password: "{{item.snmp_destination.auth_password}}"
+ privacy_type: "{{item.snmp_destination.privacy_type}}"
+ privacy_password: "{{item.snmp_destination.privacy_password}}"
+ itsm_setting:
+ instance_name: "{{item.itsm_setting.instance_name}}"
+ description: "{{item.itsm_setting.description}}"
+ connection_settings:
+ url: "{{item.itsm_setting.connection_settings.url}}"
+ username: "{{item.itsm_setting.connection_settings.username}}"
+ password: "{{item.itsm_setting.connection_settings.password}}"
+ webhook_event_notification:
+ name: "{{item.webhook_event_notification.name}}"
+ description: "{{item.webhook_event_notification.description}}"
+ sites: "{{item.webhook_event_notification.sites}}"
+ events: "{{item.webhook_event_notification.events}}"
+ destination: "{{item.webhook_event_notification.destination}}"
+ email_event_notification:
+ name: "{{item.email_event_notification.name}}"
+ description: "{{item.email_event_notification.description}}"
+ sites: "{{item.email_event_notification.sites}}"
+ events: "{{item.email_event_notification.events}}"
+ sender_email: "{{item.email_event_notification.sender_email}}"
+ recipient_emails: "{{item.email_event_notification.recipient_emails}}"
+ subject: "{{item.email_event_notification.subject}}"
+ instance: "{{item.email_event_notification.instance}}"
+ instance_description: "{{item.email_event_notification.instance_description}}"
+ syslog_event_notification:
+ name: "{{item.syslog_event_notification.name}}"
+ description: "{{item.syslog_event_notification.description}}"
+ sites: "{{item.syslog_event_notification.sites}}"
+ events: "{{item.syslog_event_notification.events}}"
+ destination: "{{item.syslog_event_notification.destination}}"
+
+ with_items: "{{ events_notification }}"
+ tags:
+ - events_testing
diff --git a/ansible_collections/cisco/dnac/playbooks/input_events_and_notification.yml b/ansible_collections/cisco/dnac/playbooks/input_events_and_notification.yml
new file mode 100644
index 000000000..3642c4ce0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/input_events_and_notification.yml
@@ -0,0 +1,40 @@
+---
+events_notification:
+ - syslog_destination:
+ name: Syslog Demo test
+ description: "Adding syslog destination for testing"
+ server_address: "10.30.0.90"
+ protocol: "TCP"
+ port: 652
+ webhook_destination:
+ name: "webhook demo 19"
+ description: "webhook description for testing"
+ url: "https://10.195.227.14/dna"
+ method: "POST"
+ trust_cert: False
+ email_destination:
+ from_email: "test@cisco.com"
+ to_email: "abmahesh@cisco.com"
+ subject: "Ansible testing"
+ primary_smtp_config:
+ server_address: "outbound.cisco.com"
+ port: '25'
+ snmp_destination:
+ name: Snmp test
+ description: "Adding snmp destination for testing for update"
+ server_address: "10.30.0.91"
+ port: '265'
+ snmp_version: "V3"
+ username: cisco123
+ mode: AUTH_PRIVACY
+ auth_type: SHA
+ auth_password: authpass123
+ privacy_type: AES128
+ privacy_password: privacy123
+ itsm_setting:
+ instance_name: "Playbook itsm demo"
+ description: "ITSM description for testing"
+ connection_settings:
+ url: "https://catalystcente1.com"
+ username: "catalyst"
+ password: "catalyst@123"
diff --git a/ansible_collections/cisco/dnac/playbooks/ise_radius_integration_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/ise_radius_integration_workflow_manager.yml
new file mode 100644
index 000000000..9ca9a9181
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/ise_radius_integration_workflow_manager.yml
@@ -0,0 +1,109 @@
+- hosts: dnac_servers
+ vars_files:
+ - credentials.yml
+ gather_facts: no
+ connection: local
+ tasks:
+ - name: Create an Authentication and Policy Server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_append: True
+ dnac_log_file_path: "{{ dnac_log_file_path }}"
+ state: merged
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_type: AAA # [ISE, AAA]
+ server_ip_address: 10.0.0.20
+ shared_secret: cisco
+ protocol: RADIUS_TACACS # [TACACS, RADIUS, RADIUS_TACACS]
+ encryption_scheme: KEYWRAP # KEYWRAP or RADSEC
+ encryption_key: dnacsolutions123 # For KEYWRAP, must be 16 char long
+ message_authenticator_code_key: dnacisesolutions1234 # For KEYWRAP, must be 20 char long
+ authentication_port: 1800
+ accounting_port: 1700
+ retries: 3 # Range from 1 to 3
+ timeout: 4 # Range from 2 to 20
+ role: secondary
+
+ - name: Delete Authentication and Policy Server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_append: True
+ dnac_log_file_path: "{{ dnac_log_file_path }}"
+ state: deleted
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_ip_address: 10.0.0.20
+
+ - name: Create ISE Server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_append: True
+ dnac_log_file_path: "{{ dnac_log_file_path }}"
+ state: merged
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_type: ISE # [ISE, AAA]
+ server_ip_address: 10.195.243.59
+ shared_secret: cisco
+ protocol: RADIUS_TACACS # [TACACS, RADIUS, RADIUS_TACACS]
+ encryption_scheme: KEYWRAP # KEYWRAP or RADSEC
+ encryption_key: dnacsolutions123 # For KEYWRAP, must be 16 char long
+ message_authenticator_code_key: dnacisesolutions1234 # For KEYWRAP, must be 20 char long
+ authentication_port: 1800
+ accounting_port: 1700
+ retries: 3 # Range from 1 to 3
+ timeout: 4 # Range from 2 to 20
+ role: primary
+ pxgrid_enabled: False # Available for Cisco ISE only
+ use_dnac_cert_for_pxgrid: False
+ cisco_ise_dtos: # use this for creating the Cisco ISE Server
+ - user_name: admin
+ password: abcd
+ fqdn: abc.cisco.com
+ ip_address: 10.195.243.59
+ description: CISCO ISE
+ trusted_server: True
+ ise_integration_wait_time: 20
+
+ - name: Delete an ISE Server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{ dnac_host }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_append: True
+ dnac_log_file_path: "{{ dnac_log_file_path }}"
+ state: deleted
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_ip_address: 10.195.243.59
diff --git a/ansible_collections/cisco/dnac/playbooks/network_compliance_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/network_compliance_workflow_manager.yml
new file mode 100644
index 000000000..7bbe82f6c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/playbooks/network_compliance_workflow_manager.yml
@@ -0,0 +1,151 @@
+---
+- name: Testing
+ hosts: dnac_servers
+ gather_facts: no
+
+ vars_files:
+ - "credentials.yml"
+
+ vars:
+ dnac_login: &dnac_login
+ dnac_host: "{{ dnac_host }}"
+ dnac_username: "{{ dnac_username }}"
+ dnac_password: "{{ dnac_password }}"
+ dnac_verify: "{{ dnac_verify }}"
+ dnac_port: "{{ dnac_port }}"
+ dnac_version: "{{ dnac_version }}"
+ dnac_debug: "{{ dnac_debug }}"
+ dnac_log: true
+ dnac_log_level: INFO
+ dnac_log_append: False
+ config_verify: true
+
+ tasks:
+ - name: Run Compliance check using IP address list (run_compliance by default is True)
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+
+ - name: Run Compliance check using IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ run_compliance: True
+
+
+ - name: Run Compliance check using Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - site: "Global"
+ run_compliance: True
+
+
+ - name: Run Compliance check using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+
+
+ - name: Run Compliance check with specific categories using IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ run_compliance_categories: ['INTENT', 'RUNNING_CONFIG', 'IMAGE', 'PSIRT']
+
+
+ - name: Run Compliance check with specific categories using Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - site_name: "Global"
+ run_compliance_categories: ['INTENT', 'RUNNING_CONFIG', 'IMAGE', 'PSIRT']
+
+
+ - name: Run Compliance check with specific categories using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance_categories: ['INTENT', 'RUNNING_CONFIG', 'IMAGE', 'PSIRT']
+
+
+ - name: Sync Device Configuration using IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ run_compliance: False
+ sync_device_config: True
+
+
+ - name: Sync Device Configuration using Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: False
+ sync_device_config: True
+
+
+ - name: Sync Device Configuration using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: False
+ sync_device_config: True
+
+
+ - name: Run Compliance and Sync Device Configuration with IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ run_compliance: True
+ sync_device_config: True
+
+
+ - name: Run Compliance and Sync Device Configuration with Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+ sync_device_config: True
+
+
+ - name: Run Compliance with specific categories and Sync Device Configuration with IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ run_compliance_categories: ['INTENT', 'RUNNING_CONFIG', 'IMAGE', 'PSIRT']
+ sync_device_config: True
+
+
+ - name: Run Compliance with specific categories and Sync Device Configuration with Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance_categories: ['INTENT', 'RUNNING_CONFIG', 'IMAGE', 'PSIRT']
+ sync_device_config: True
+
+
+ - name: Run Compliance and Sync Device Configuration using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ <<: *dnac_login
+ config:
+ - ip_address_list: ['204.1.2.2', '204.1.2.5', '204.1.2.4']
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+ sync_device_config: True
diff --git a/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml
index 36b88ac2d..11ddde205 100644
--- a/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml
+++ b/ansible_collections/cisco/dnac/playbooks/network_settings_workflow_manager.yml
@@ -13,7 +13,7 @@
dnac_verify: "{{ dnac_verify }}"
dnac_debug: "{{ dnac_debug }}"
dnac_log: True
- dnac_log_level: "{{ dnac_log_level }}"
+ dnac_log_level: DEBUG
dnac_log_append: True
dnac_log_file_path: "{{ dnac_log_file_path }}"
state: merged
@@ -23,58 +23,70 @@
settings:
ip_pool:
- name: Global_Pool2
- gateway: '' #use this for updating
+ pool_type: Generic
ip_address_space: IPv6 #required when we are creating
cidr: 2001:db8::/64 #required when we are creating
+ gateway: '' #use this for updating
+ dhcp_server_ips: [] #use this for updating
+ dns_server_ips: [] #use this for updating
+ # prev_name: Global_Pool2
+ - name: Global_Pool3
pool_type: Generic
+ ip_address_space: IPv4 #required when we are creating
+ cidr: 10.0.0.0/8 #required when we are creating
+ gateway: '' #use this for updating
dhcp_server_ips: [] #use this for updating
dns_server_ips: [] #use this for updating
# prev_name: Global_Pool2
reserve_pool_details:
+ - site_name: Global/Chennai/Trill
+ name: IP_Pool_3
+ pool_type: LAN
ipv6_address_space: True
- ipv4_global_pool: 100.0.0.0/8
+ # ipv4_global_pool: 100.0.0.0/8
+ ipv4_global_pool_name: Global_Pool1
ipv4_prefix: True
ipv4_prefix_length: 9
ipv4_subnet: 100.128.0.0
ipv4_gateway: 100.128.0.1
- # ipv4_dns_servers: [100.128.0.1]
- name: IP_Pool_3
+ ipv4_dns_servers: []
ipv6_prefix: True
ipv6_prefix_length: 64
ipv6_global_pool: 2001:db8::/64
+ # ipv6_global_pool_name: Global_Pool2
ipv6_subnet: '2001:db8::'
- site_name: Global/Chennai/Trill
slaac_support: True
# prev_name: IP_Pool_4
- pool_type: LAN
network_management_details:
+ site_name: Global/Chennai
settings:
+ network_aaa: # works only if the system settings are set
+ primary_server_address: 10.0.0.20 #Mandatory for AAA and ISE
+ secondary_server_address: 10.0.0.21
+ protocol: TACACS
+ server_type: AAA
+ # shared_secret: string #ISE
+ client_and_endpoint_aaa: # works only if the system settings are set
+ primary_server_address: 10.197.156.42 #Mandatory for AAA and ISE
+ secondary_server_address: 10.0.0.21
+ protocol: RADIUS
+ server_type: AAA
+ # shared_secret: string #ISE
dhcp_server:
- 10.0.0.1
dns_server:
domain_name: cisco.com
primary_ip_address: 10.0.0.2
secondary_ip_address: 10.0.0.3
- client_and_endpoint_aaa: #works only if we system settigns is set
- ip_address: 10.197.156.42 #Mandatory for ISE, sec ip for AAA
- network: 10.0.0.20
- protocol: RADIUS
- servers: AAA
- # shared_secret: string #ISE
+ ntp_server:
+ - 10.0.0.5
+ timezone: GMT
message_of_the_day:
banner_message: hello
- retain_existing_banner: 'true'
+ retain_existing_banner: True
netflow_collector:
ip_address: 10.0.0.4
port: 443
- network_aaa: #works only if we system settigns is set
- ip_address: 10.0.0.21 #Mandatory for ISE, sec ip for AAA
- network: 10.0.0.20
- protocol: TACACS
- servers: AAA
- # shared_secret: string #ISE
- ntp_server:
- - 10.0.0.5
snmp_server:
configure_dnac_ip: false
# ip_addresses:
@@ -83,8 +95,6 @@
configure_dnac_ip: false
# ip_addresses:
# - 10.0.0.7
- timezone: GMT
- site_name: Global/Chennai
- name: Delete Global Pool and Release Pool Reservation
cisco.dnac.network_settings_workflow_manager:
@@ -104,5 +114,5 @@
ip_pool:
- name: Global_Pool2
reserve_pool_details:
+ - site_name: Global/Chennai/Trill
name: IP_Pool_3
- site_name: Global/Chennai/Trill
diff --git a/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml b/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml
index 3cfed07e7..be296ff2e 100644
--- a/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml
+++ b/ansible_collections/cisco/dnac/playbooks/template_workflow_manager.yml
@@ -29,7 +29,6 @@
version_description: "{{ item.description }}"
language: "{{ item.language }}"
software_type: "{{ item.type }}"
- software_variant: "{{ item.variant }}"
device_types:
- product_family: "{{ item.family }}"
export:
diff --git a/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set.py b/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set.py
new file mode 100644
index 000000000..171175d4a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["id"], True),
+ ("state", "present", ["payload"], True),
+ ("state", "absent", ["id"], True),
+ ("state", "absent", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ApplicationPolicyApplicationSet(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['attributes'] = self.new_object.get('attributes')
+ new_object_params['name'] = name or self.new_object.get('name')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method, using get all
+ try:
+ items = self.dnac.exec(
+ family="application_policy",
+ function="get_application_sets2",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ return result
+
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ o_id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("name", "name"),
+ ("scalableGroupType", "scalableGroupType"),
+ ("defaultBusinessRelevance", "defaultBusinessRelevance"),
+ ("namespace", "namespace"),
+ ("qualifier", "qualifier"),
+ ("type", "type"),
+ ("scalableGroupExternalHandle", "scalableGroupExternalHandle"),
+ ("id", "id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="application_policy",
+ function="create_application_sets",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="application_policy",
+ function="delete_application_set",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = ApplicationPolicyApplicationSet(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = prev_obj
+ dnac.object_present_and_different()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
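
For orientation, a task driving this new action plugin could look like the sketch below; the dnac_host/dnac_username/dnac_password/dnac_verify connection options are the shared cisco.dnac ones pulled in by dnac_argument_spec(), and the payload values are purely illustrative placeholders rather than values taken from this change.

- name: Ensure an application set exists (illustrative sketch only)
  cisco.dnac.application_policy_application_set:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    dnac_verify: false
    state: present
    payload:
      - name: Example_App_Set                  # hypothetical set name
        scalableGroupType: APPLICATION_GROUP   # assumed value

With state: absent the same task removes the set, resolving the id through get_application_sets2 when only a name is supplied.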
diff --git a/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_count_info.py b/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_count_info.py
new file mode 100644
index 000000000..3a0a27f67
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_count_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ scalableGroupType=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ scalable_group_type=params.get("scalableGroupType"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="application_policy",
+ function='get_application_set_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_info.py b/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_info.py
new file mode 100644
index 000000000..360ffb79e
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/application_policy_application_set_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ attributes=dict(type="str"),
+ name=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ attributes=params.get("attributes"),
+ name=params.get("name"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="application_policy",
+ function='get_application_sets2',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
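
A matching read-only query can be expressed as the sketch below; offset and limit are numeric because the argument spec above declares them as float, and the name value is a placeholder.

- name: Fetch application sets by name (illustrative sketch only)
  cisco.dnac.application_policy_application_set_info:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    name: Example_App_Set   # hypothetical name filter
    offset: 1
    limit: 500
  register: app_set_info

The API output is returned under dnac_response, as set in run() above.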
diff --git a/ansible_collections/cisco/dnac/plugins/action/application_sets.py b/ansible_collections/cisco/dnac/plugins/action/application_sets.py
index b1bc7f48b..f6877a331 100644
--- a/ansible_collections/cisco/dnac/plugins/action/application_sets.py
+++ b/ansible_collections/cisco/dnac/plugins/action/application_sets.py
@@ -37,6 +37,7 @@ argument_spec.update(dict(
required_if = [
("state", "present", ["payload"], True),
+ ("state", "absent", ["payload"], True),
]
required_one_of = []
mutually_exclusive = []
@@ -151,12 +152,15 @@ class ApplicationSets(object):
return result
def delete(self):
- id = self.new_object.get("id")
- name = self.new_object.get("name")
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
result = None
result = self.dnac.exec(
family="application_policy",
- function="delete_application_set",
+ function="delete_application_set2",
params=self.delete_all_params(),
)
return result
diff --git a/ansible_collections/cisco/dnac/plugins/action/application_sets_info.py b/ansible_collections/cisco/dnac/plugins/action/application_sets_info.py
index 9e388f1ac..870d0e3c1 100644
--- a/ansible_collections/cisco/dnac/plugins/action/application_sets_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/application_sets_info.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
name=dict(type="str"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/applications.py b/ansible_collections/cisco/dnac/plugins/action/applications.py
index 81c404e88..cc378dc1b 100644
--- a/ansible_collections/cisco/dnac/plugins/action/applications.py
+++ b/ansible_collections/cisco/dnac/plugins/action/applications.py
@@ -37,6 +37,7 @@ argument_spec.update(dict(
required_if = [
("state", "present", ["payload"], True),
+ ("state", "absent", ["payload"], True),
]
required_one_of = []
mutually_exclusive = []
@@ -79,7 +80,7 @@ class Applications(object):
try:
items = self.dnac.exec(
family="application_policy",
- function="get_applications",
+ function="get_applications2",
params=self.get_all_params(name=name),
)
if isinstance(items, dict):
@@ -96,7 +97,7 @@ class Applications(object):
try:
items = self.dnac.exec(
family="application_policy",
- function="get_applications",
+ function="get_applications2",
params=self.get_all_params(id=id),
)
if isinstance(items, dict):
@@ -175,12 +176,15 @@ class Applications(object):
return result
def delete(self):
- id = self.new_object.get("id")
- name = self.new_object.get("name")
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
result = None
result = self.dnac.exec(
family="application_policy",
- function="delete_application",
+ function="delete_application2",
params=self.delete_all_params(),
)
return result
diff --git a/ansible_collections/cisco/dnac/plugins/action/applications_count_v2_info.py b/ansible_collections/cisco/dnac/plugins/action/applications_count_v2_info.py
new file mode 100644
index 000000000..f8d236569
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/applications_count_v2_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ scalableGroupType=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ scalable_group_type=params.get("scalableGroupType"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="application_policy",
+ function='get_application_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/applications_health_info.py b/ansible_collections/cisco/dnac/plugins/action/applications_health_info.py
index 31d101b24..dcb74b44d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/applications_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/applications_health_info.py
@@ -28,11 +28,11 @@ argument_spec.update(dict(
siteId=dict(type="str"),
deviceId=dict(type="str"),
macAddress=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
applicationHealth=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
applicationName=dict(type="str"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/applications_info.py b/ansible_collections/cisco/dnac/plugins/action/applications_info.py
index 8d1275175..5a1ccaf65 100644
--- a/ansible_collections/cisco/dnac/plugins/action/applications_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/applications_info.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
name=dict(type="str"),
headers=dict(type="dict"),
))
@@ -85,7 +85,7 @@ class ActionModule(ActionBase):
response = dnac.exec(
family="application_policy",
- function='get_applications',
+ function='get_applications2',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/applications_v2.py b/ansible_collections/cisco/dnac/plugins/action/applications_v2.py
new file mode 100644
index 000000000..3771bb004
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/applications_v2.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["id"], True),
+ ("state", "present", ["payload"], True),
+ ("state", "absent", ["id"], True),
+ ("state", "absent", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ApplicationsV2(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['attributes'] = self.new_object.get('attributes')
+ new_object_params['name'] = name or self.new_object.get('name')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="application_policy",
+ function="get_applications",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="application_policy",
+ function="get_applications",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self):
+ id_exists = False
+ name_exists = False
+ prev_obj = None
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ o_id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("instanceId", "instanceId"),
+ ("displayName", "displayName"),
+ ("instanceVersion", "instanceVersion"),
+ ("indicativeNetworkIdentity", "indicativeNetworkIdentity"),
+ ("name", "name"),
+ ("namespace", "namespace"),
+ ("networkApplications", "networkApplications"),
+ ("networkIdentity", "networkIdentity"),
+ ("parentScalableGroup", "parentScalableGroup"),
+ ("qualifier", "qualifier"),
+ ("scalableGroupExternalHandle", "scalableGroupExternalHandle"),
+ ("scalableGroupType", "scalableGroupType"),
+ ("type", "type"),
+ ("id", "id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="application_policy",
+ function="create_applications",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="application_policy",
+ function="edit_applications",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="application_policy",
+ function="delete_application",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = ApplicationsV2(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
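
Since applications_v2 chooses between create_applications and edit_applications by comparing the requested payload against what get_applications returns, a present-state task such as the hedged sketch below behaves idempotently; every field value here is a placeholder, and only keys listed in obj_params above are used.

- name: Ensure an application exists (illustrative sketch only)
  cisco.dnac.applications_v2:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    state: present
    payload:
      - name: example_app                     # hypothetical application name
        scalableGroupType: APPLICATION        # assumed value
        parentScalableGroup:
          idRef: "{{ parent_app_set_id }}"    # assumed to be looked up beforehand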
diff --git a/ansible_collections/cisco/dnac/plugins/action/applications_v2_info.py b/ansible_collections/cisco/dnac/plugins/action/applications_v2_info.py
new file mode 100644
index 000000000..394363c01
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/applications_v2_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ attributes=dict(type="str"),
+ name=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ attributes=params.get("attributes"),
+ name=params.get("name"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="application_policy",
+ function='get_applications',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/auth_token_create.py b/ansible_collections/cisco/dnac/plugins/action/auth_token_create.py
new file mode 100644
index 000000000..64e868966
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/auth_token_create.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="authentication",
+ function='authentication_api',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
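
Because auth_token_create adds no module-specific options beyond the shared connection arguments, invoking it is as simple as the sketch below; the register name is just an example.

- name: Obtain an auth token (illustrative sketch only)
  cisco.dnac.auth_token_create:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
  register: auth_token_result

The token itself comes back inside dnac_response from the authentication_api call.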
diff --git a/ansible_collections/cisco/dnac/plugins/action/authentication_policy_servers.py b/ansible_collections/cisco/dnac/plugins/action/authentication_policy_servers.py
new file mode 100644
index 000000000..ea97f45b3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/authentication_policy_servers.py
@@ -0,0 +1,338 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ authenticationPort=dict(type="int"),
+ accountingPort=dict(type="int"),
+ ciscoIseDtos=dict(type="list"),
+ ipAddress=dict(type="str"),
+ pxgridEnabled=dict(type="bool"),
+ useDnacCertForPxgrid=dict(type="bool"),
+ isIseEnabled=dict(type="bool"),
+ port=dict(type="int"),
+ protocol=dict(type="str"),
+ retries=dict(type="str"),
+ role=dict(type="str"),
+ sharedSecret=dict(type="str"),
+ timeoutSeconds=dict(type="str"),
+ encryptionScheme=dict(type="str"),
+ messageKey=dict(type="str"),
+ encryptionKey=dict(type="str"),
+ externalCiscoIseIpAddrDtos=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["id", "role"], True),
+ ("state", "absent", ["id", "role"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class AuthenticationPolicyServers(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ authenticationPort=params.get("authenticationPort"),
+ accountingPort=params.get("accountingPort"),
+ ciscoIseDtos=params.get("ciscoIseDtos"),
+ ipAddress=params.get("ipAddress"),
+ pxgridEnabled=params.get("pxgridEnabled"),
+ useDnacCertForPxgrid=params.get("useDnacCertForPxgrid"),
+ isIseEnabled=params.get("isIseEnabled"),
+ port=params.get("port"),
+ protocol=params.get("protocol"),
+ retries=params.get("retries"),
+ role=params.get("role"),
+ sharedSecret=params.get("sharedSecret"),
+ timeoutSeconds=params.get("timeoutSeconds"),
+ encryptionScheme=params.get("encryptionScheme"),
+ messageKey=params.get("messageKey"),
+ encryptionKey=params.get("encryptionKey"),
+ externalCiscoIseIpAddrDtos=params.get("externalCiscoIseIpAddrDtos"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['is_ise_enabled'] = self.new_object.get('isIseEnabled') or \
+ self.new_object.get('is_ise_enabled')
+ new_object_params['state'] = self.new_object.get('state_') or \
+ self.new_object.get('state')
+ new_object_params['role'] = self.new_object.get('role')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['authenticationPort'] = self.new_object.get('authenticationPort')
+ new_object_params['accountingPort'] = self.new_object.get('accountingPort')
+ new_object_params['ciscoIseDtos'] = self.new_object.get('ciscoIseDtos')
+ new_object_params['ipAddress'] = self.new_object.get('ipAddress')
+ new_object_params['pxgridEnabled'] = self.new_object.get('pxgridEnabled')
+ new_object_params['useDnacCertForPxgrid'] = self.new_object.get('useDnacCertForPxgrid')
+ new_object_params['isIseEnabled'] = self.new_object.get('isIseEnabled')
+ new_object_params['port'] = self.new_object.get('port')
+ new_object_params['protocol'] = self.new_object.get('protocol')
+ new_object_params['retries'] = self.new_object.get('retries')
+ new_object_params['role'] = self.new_object.get('role')
+ new_object_params['sharedSecret'] = self.new_object.get('sharedSecret')
+ new_object_params['timeoutSeconds'] = self.new_object.get('timeoutSeconds')
+ new_object_params['encryptionScheme'] = self.new_object.get('encryptionScheme')
+ new_object_params['messageKey'] = self.new_object.get('messageKey')
+ new_object_params['encryptionKey'] = self.new_object.get('encryptionKey')
+ new_object_params['externalCiscoIseIpAddrDtos'] = self.new_object.get('externalCiscoIseIpAddrDtos')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_by_id_params(self):
+ new_object_params = {}
+ new_object_params['authenticationPort'] = self.new_object.get('authenticationPort')
+ new_object_params['accountingPort'] = self.new_object.get('accountingPort')
+ new_object_params['ciscoIseDtos'] = self.new_object.get('ciscoIseDtos')
+ new_object_params['ipAddress'] = self.new_object.get('ipAddress')
+ new_object_params['pxgridEnabled'] = self.new_object.get('pxgridEnabled')
+ new_object_params['useDnacCertForPxgrid'] = self.new_object.get('useDnacCertForPxgrid')
+ new_object_params['isIseEnabled'] = self.new_object.get('isIseEnabled')
+ new_object_params['port'] = self.new_object.get('port')
+ new_object_params['protocol'] = self.new_object.get('protocol')
+ new_object_params['retries'] = self.new_object.get('retries')
+ new_object_params['role'] = self.new_object.get('role')
+ new_object_params['sharedSecret'] = self.new_object.get('sharedSecret')
+ new_object_params['timeoutSeconds'] = self.new_object.get('timeoutSeconds')
+ new_object_params['encryptionScheme'] = self.new_object.get('encryptionScheme')
+ new_object_params['messageKey'] = self.new_object.get('messageKey')
+ new_object_params['encryptionKey'] = self.new_object.get('encryptionKey')
+ new_object_params['externalCiscoIseIpAddrDtos'] = self.new_object.get('externalCiscoIseIpAddrDtos')
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="system_settings",
+ function="get_authentication_and_policy_servers",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="system_settings",
+ function="get_authentication_and_policy_servers",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self):
+ id_exists = False
+ name_exists = False
+ prev_obj = None
+ o_id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("authenticationPort", "authenticationPort"),
+ ("accountingPort", "accountingPort"),
+ ("ciscoIseDtos", "ciscoIseDtos"),
+ ("ipAddress", "ipAddress"),
+ ("pxgridEnabled", "pxgridEnabled"),
+ ("useDnacCertForPxgrid", "useDnacCertForPxgrid"),
+ ("isIseEnabled", "isIseEnabled"),
+ ("port", "port"),
+ ("protocol", "protocol"),
+ ("retries", "retries"),
+ ("role", "role"),
+ ("sharedSecret", "sharedSecret"),
+ ("timeoutSeconds", "timeoutSeconds"),
+ ("encryptionScheme", "encryptionScheme"),
+ ("messageKey", "messageKey"),
+ ("encryptionKey", "encryptionKey"),
+ ("externalCiscoIseIpAddrDtos", "externalCiscoIseIpAddrDtos"),
+ ("id", "id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="system_settings",
+ function="add_authentication_and_policy_server_access_configuration",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def update(self):
+ id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="system_settings",
+ function="edit_authentication_and_policy_server_access_configuration",
+ params=self.update_by_id_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="system_settings",
+ function="delete_authentication_and_policy_server_access_configuration",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = AuthenticationPolicyServers(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
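
To exercise the new authentication_policy_servers plugin, a task along the lines of the sketch below would add an external AAA server; the address, role, ports, and secret shown are placeholders, and the accepted values for role and protocol come from the DNA Center API rather than this diff.

- name: Add an external RADIUS server (illustrative sketch only)
  cisco.dnac.authentication_policy_servers:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    state: present
    ipAddress: 10.0.0.21                      # placeholder address
    protocol: RADIUS
    role: secondary                           # assumed role value
    authenticationPort: 1812
    accountingPort: 1813
    port: 1812
    sharedSecret: "{{ aaa_shared_secret }}"   # kept out of plain text in practice
    isIseEnabled: false
    pxgridEnabled: false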
diff --git a/ansible_collections/cisco/dnac/plugins/action/buildings_planned_access_points_info.py b/ansible_collections/cisco/dnac/plugins/action/buildings_planned_access_points_info.py
index 166887325..3c8647ac1 100644
--- a/ansible_collections/cisco/dnac/plugins/action/buildings_planned_access_points_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/buildings_planned_access_points_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
buildingId=dict(type="str"),
- limit=dict(type="int"),
- offset=dict(type="int"),
+ limit=dict(type="float"),
+ offset=dict(type="float"),
radios=dict(type="bool"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/cli_credential.py b/ansible_collections/cisco/dnac/plugins/action/cli_credential.py
index 2b42190fa..d10c90049 100644
--- a/ansible_collections/cisco/dnac/plugins/action/cli_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/cli_credential.py
@@ -127,7 +127,7 @@ class CliCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/client_detail_info.py b/ansible_collections/cisco/dnac/plugins/action/client_detail_info.py
index e09db53c8..59b7fd8bb 100644
--- a/ansible_collections/cisco/dnac/plugins/action/client_detail_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/client_detail_info.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- timestamp=dict(type="str"),
macAddress=dict(type="str"),
+ timestamp=dict(type="float"),
headers=dict(type="dict"),
))
@@ -65,8 +65,8 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
- timestamp=params.get("timestamp"),
mac_address=params.get("macAddress"),
+ timestamp=params.get("timestamp"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/client_health_info.py b/ansible_collections/cisco/dnac/plugins/action/client_health_info.py
index 87275b036..1f8a652b1 100644
--- a/ansible_collections/cisco/dnac/plugins/action/client_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/client_health_info.py
@@ -25,7 +25,7 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- timestamp=dict(type="str"),
+ timestamp=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/client_proximity_info.py b/ansible_collections/cisco/dnac/plugins/action/client_proximity_info.py
index b9a411856..af505d52a 100644
--- a/ansible_collections/cisco/dnac/plugins/action/client_proximity_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/client_proximity_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
username=dict(type="str"),
- number_days=dict(type="int"),
- time_resolution=dict(type="int"),
+ number_days=dict(type="float"),
+ time_resolution=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/compliance_device_by_id_info.py b/ansible_collections/cisco/dnac/plugins/action/compliance_device_by_id_info.py
index e2c684bfc..e9e9397ee 100644
--- a/ansible_collections/cisco/dnac/plugins/action/compliance_device_by_id_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/compliance_device_by_id_info.py
@@ -29,8 +29,6 @@ argument_spec.update(dict(
category=dict(type="str"),
complianceType=dict(type="str"),
diffList=dict(type="bool"),
- key=dict(type="str"),
- value=dict(type="str"),
headers=dict(type="dict"),
))
@@ -73,8 +71,6 @@ class ActionModule(ActionBase):
category=params.get("category"),
compliance_type=params.get("complianceType"),
diff_list=params.get("diffList"),
- key=params.get("key"),
- value=params.get("value"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/compliance_device_details_info.py b/ansible_collections/cisco/dnac/plugins/action/compliance_device_details_info.py
index 4fedf1aab..44fbabc27 100644
--- a/ansible_collections/cisco/dnac/plugins/action/compliance_device_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/compliance_device_details_info.py
@@ -28,8 +28,8 @@ argument_spec.update(dict(
complianceType=dict(type="str"),
complianceStatus=dict(type="str"),
deviceUuid=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/compliance_device_info.py b/ansible_collections/cisco/dnac/plugins/action/compliance_device_info.py
index 25d2db9cb..553523dd2 100644
--- a/ansible_collections/cisco/dnac/plugins/action/compliance_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/compliance_device_info.py
@@ -27,8 +27,6 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
complianceStatus=dict(type="str"),
deviceUuid=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
headers=dict(type="dict"),
))
@@ -69,8 +67,6 @@ class ActionModule(ActionBase):
new_object = dict(
compliance_status=params.get("complianceStatus"),
device_uuid=params.get("deviceUuid"),
- offset=params.get("offset"),
- limit=params.get("limit"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/configuration_archive_details_info.py b/ansible_collections/cisco/dnac/plugins/action/configuration_archive_details_info.py
new file mode 100644
index 000000000..52a1b3253
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/configuration_archive_details_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ deviceId=dict(type="str"),
+ fileType=dict(type="str"),
+ createdTime=dict(type="str"),
+ createdBy=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ device_id=params.get("deviceId"),
+ file_type=params.get("fileType"),
+ created_time=params.get("createdTime"),
+ created_by=params.get("createdBy"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="configuration_archive",
+ function='get_configuration_archive_details',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
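
The generated info plugins added in this diff all follow the same shape: validate the camelCase task arguments, remap them to the snake_case parameters the DNA Center SDK expects, and hand them to dnac.exec. A minimal, self-contained sketch of that remapping (not part of the diff; the sample task_args dict below is hypothetical):

# Sketch only: mirrors the get_object mapping in the plugin above.
def get_object(params):
    return dict(
        device_id=params.get("deviceId"),
        file_type=params.get("fileType"),
        created_time=params.get("createdTime"),
        created_by=params.get("createdBy"),
        offset=params.get("offset"),
        limit=params.get("limit"),
        headers=params.get("headers"),
    )

task_args = {"deviceId": "abc-123", "offset": 1, "limit": 20}
print(get_object(task_args))
# Prints a dict keyed by the snake_case SDK parameter names; task args that
# were not supplied map to None.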
diff --git a/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy.py b/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy.py
index 55b033a76..48ccdcf3a 100644
--- a/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy.py
+++ b/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy.py
@@ -28,7 +28,7 @@ argument_spec.update(dict(
forcePushTemplate=dict(type="bool"),
isComposite=dict(type="bool"),
mainTemplateId=dict(type="str"),
- memberTemplateDeploymentInfo=dict(type="str"),
+ memberTemplateDeploymentInfo=dict(type="list"),
targetInfo=dict(type="list"),
templateId=dict(type="str"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy_v2.py b/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy_v2.py
index 802c84d98..fe1ec9b76 100644
--- a/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/action/configuration_template_deploy_v2.py
@@ -28,7 +28,7 @@ argument_spec.update(dict(
forcePushTemplate=dict(type="bool"),
isComposite=dict(type="bool"),
mainTemplateId=dict(type="str"),
- memberTemplateDeploymentInfo=dict(type="str"),
+ memberTemplateDeploymentInfo=dict(type="list"),
targetInfo=dict(type="list"),
templateId=dict(type="str"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/configuration_template_project.py b/ansible_collections/cisco/dnac/plugins/action/configuration_template_project.py
index e6e356aa0..65017e94d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/configuration_template_project.py
+++ b/ansible_collections/cisco/dnac/plugins/action/configuration_template_project.py
@@ -37,7 +37,7 @@ argument_spec.update(dict(
id=dict(type="str"),
lastUpdateTime=dict(type="int"),
name=dict(type="str"),
- templates=dict(type="dict"),
+ templates=dict(type="list"),
projectId=dict(type="str"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/device_configurations_export.py b/ansible_collections/cisco/dnac/plugins/action/device_configurations_export.py
index 20c129b99..665bcbf41 100644
--- a/ansible_collections/cisco/dnac/plugins/action/device_configurations_export.py
+++ b/ansible_collections/cisco/dnac/plugins/action/device_configurations_export.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- deviceId=dict(type="list"),
password=dict(type="str", no_log=True),
+ deviceId=dict(type="str"),
))
required_if = []
@@ -64,8 +64,8 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
- deviceId=params.get("deviceId"),
password=params.get("password"),
+ deviceId=params.get("deviceId"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/device_details_info.py b/ansible_collections/cisco/dnac/plugins/action/device_details_info.py
index 7a743804c..4c64f60f9 100644
--- a/ansible_collections/cisco/dnac/plugins/action/device_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/device_details_info.py
@@ -25,9 +25,9 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- timestamp=dict(type="str"),
- searchBy=dict(type="str"),
+ timestamp=dict(type="float"),
identifier=dict(type="str"),
+ searchBy=dict(type="str"),
headers=dict(type="dict"),
))
@@ -67,8 +67,8 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
timestamp=params.get("timestamp"),
- search_by=params.get("searchBy"),
identifier=params.get("identifier"),
+ search_by=params.get("searchBy"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/device_health_info.py b/ansible_collections/cisco/dnac/plugins/action/device_health_info.py
index 68bd65bfe..5bae85ad7 100644
--- a/ansible_collections/cisco/dnac/plugins/action/device_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/device_health_info.py
@@ -28,10 +28,10 @@ argument_spec.update(dict(
deviceRole=dict(type="str"),
siteId=dict(type="str"),
health=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
- limit=dict(type="int"),
- offset=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
+ limit=dict(type="float"),
+ offset=dict(type="float"),
headers=dict(type="dict"),
))
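
The recurring int-to-float widening for offset, limit, startTime and endTime is backward-compatible for playbooks that already pass whole numbers, since float coercion accepts integer (and numeric-string) input. A plain-Python illustration of that coercion, not Ansible's validator itself:

# Sketch only: shows why existing integer values keep validating after the
# argspec type changes from "int" to "float".
for value in (10, "10", 10.5):
    print(value, "->", float(value))
# 10 -> 10.0
# 10 -> 10.0   (numeric string input)
# 10.5 -> 10.5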
diff --git a/ansible_collections/cisco/dnac/plugins/action/device_reboot_apreboot.py b/ansible_collections/cisco/dnac/plugins/action/device_reboot_apreboot.py
new file mode 100644
index 000000000..70a7b3e62
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/device_reboot_apreboot.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present"]),
+ apMacAddresses=dict(type="list"),
+))
+
+required_if = [
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class DeviceRebootApreboot(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ apMacAddresses=params.get("apMacAddresses"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['parent_task_id'] = self.new_object.get('parentTaskId') or \
+ self.new_object.get('parent_task_id')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['apMacAddresses'] = self.new_object.get('apMacAddresses')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method, using get all
+ try:
+ items = self.dnac.exec(
+ family="wireless",
+ function="get_access_point_reboot_task_result",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ return result
+
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ o_id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("apMacAddresses", "apMacAddresses"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="wireless",
+ function="reboot_access_points",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = DeviceRebootApreboot(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = prev_obj
+ dnac.object_present_and_different()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
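
The stateful plugins introduced in this diff (device_reboot_apreboot, event_snmp_config, flexible_report_schedule) share an exists -> requires_update -> create/update flow. A simplified, self-contained sketch of that flow in plain Python; the lookup/create stubs are hypothetical stand-ins for the DNACSDK calls, not the collection's API:

# Sketch only: the present-state decision the generated run() methods make.
def handle_present(requested, lookup, create, compare_equal):
    prev = lookup(requested)                 # get_object_by_id / get_object_by_name
    if prev is None:
        return "created", create(requested)  # maps to dnac.object_created()
    if any(not compare_equal(prev.get(k), v) for k, v in requested.items()):
        return "present_and_different", prev  # this object has no update call
    return "already_present", prev            # maps to dnac.object_already_present()

# Hypothetical usage with in-memory stubs:
store = {}
result = handle_present(
    {"apMacAddresses": ["aa:bb:cc:dd:ee:ff"]},
    lookup=lambda req: store.get("reboot-task"),
    create=lambda req: store.setdefault("reboot-task", dict(req)),
    compare_equal=lambda a, b: a == b,
)
print(result)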
diff --git a/ansible_collections/cisco/dnac/plugins/action/discovery_summary_info.py b/ansible_collections/cisco/dnac/plugins/action/discovery_summary_info.py
index 9b64187b2..4e4b9741a 100644
--- a/ansible_collections/cisco/dnac/plugins/action/discovery_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/discovery_summary_info.py
@@ -80,7 +80,7 @@ class ActionModule(ActionBase):
ip_address=params.get("ipAddress"),
ping_status=params.get("pingStatus"),
snmp_status=params.get("snmpStatus"),
- cli_status=params.get("cliStatus"),
+ clistatus=params.get("cliStatus"),
netconf_status=params.get("netconfStatus"),
http_status=params.get("httpStatus"),
headers=params.get("headers"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/dna_event_snmp_config_info.py b/ansible_collections/cisco/dnac/plugins/action/dna_event_snmp_config_info.py
new file mode 100644
index 000000000..19ad20c2d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/dna_event_snmp_config_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ configId=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ sortBy=dict(type="str"),
+ order=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ config_id=params.get("configId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ sort_by=params.get("sortBy"),
+ order=params.get("order"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="event_management",
+ function='get_snmp_destination',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/eox_status_device_info.py b/ansible_collections/cisco/dnac/plugins/action/eox_status_device_info.py
index 06ef71e8b..40d675cce 100644
--- a/ansible_collections/cisco/dnac/plugins/action/eox_status_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/eox_status_device_info.py
@@ -82,8 +82,8 @@ class ActionModule(ActionBase):
id = self._task.args.get("deviceId")
if id:
response = dnac.exec(
- family="eo_x",
- function='get_eo_x_details_per_device',
+ family="eox",
+ function='get_eox_details_per_device',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
@@ -91,8 +91,8 @@ class ActionModule(ActionBase):
return self._result
if not id:
response = dnac.exec(
- family="eo_x",
- function='get_eo_x_status_for_all_devices',
+ family="eox",
+ function='get_eox_status_for_all_devices',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/eox_status_summary_info.py b/ansible_collections/cisco/dnac/plugins/action/eox_status_summary_info.py
index 5b449d662..db8934816 100644
--- a/ansible_collections/cisco/dnac/plugins/action/eox_status_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/eox_status_summary_info.py
@@ -78,8 +78,8 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="eo_x",
- function='get_eo_x_summary',
+ family="eox",
+ function='get_eox_summary',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_artifact_info.py b/ansible_collections/cisco/dnac/plugins/action/event_artifact_info.py
index 4376fd4f0..beef1e6a9 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_artifact_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_artifact_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
eventIds=dict(type="str"),
tags=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
search=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_info.py b/ansible_collections/cisco/dnac/plugins/action/event_info.py
index d22c13f36..f7bcd07c7 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
eventId=dict(type="str"),
tags=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_info.py b/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_info.py
index a2f85bacd..6bd1bd2f3 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_info.py
@@ -41,10 +41,10 @@ argument_spec.update(dict(
deviceId=dict(type="str"),
isSystemEvents=dict(type="bool"),
description=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_parent_records_info.py b/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_parent_records_info.py
index b9839c20f..ffd36b1fe 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_parent_records_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_parent_records_info.py
@@ -40,10 +40,10 @@ argument_spec.update(dict(
deviceId=dict(type="str"),
isSystemEvents=dict(type="bool"),
description=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_summary_info.py b/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_summary_info.py
index 42d15ac74..47e4576a1 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_series_audit_logs_summary_info.py
@@ -42,8 +42,8 @@ argument_spec.update(dict(
deviceId=dict(type="str"),
isSystemEvents=dict(type="bool"),
description=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_series_count_info.py b/ansible_collections/cisco/dnac/plugins/action/event_series_count_info.py
index 8e6372c92..5951fd119 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_series_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_series_count_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
eventIds=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
category=dict(type="str"),
type=dict(type="str"),
severity=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_series_info.py b/ansible_collections/cisco/dnac/plugins/action/event_series_info.py
index 8722bc05a..31d06b5bb 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_series_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_series_info.py
@@ -26,16 +26,16 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
eventIds=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
category=dict(type="str"),
type=dict(type="str"),
severity=dict(type="str"),
domain=dict(type="str"),
subDomain=dict(type="str"),
source=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
tags=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_snmp_config.py b/ansible_collections/cisco/dnac/plugins/action/event_snmp_config.py
new file mode 100644
index 000000000..8c0683ce8
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/event_snmp_config.py
@@ -0,0 +1,239 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present"]),
+ name=dict(type="str"),
+ description=dict(type="str"),
+ ipAddress=dict(type="str"),
+ port=dict(type="str"),
+ snmpVersion=dict(type="str"),
+ community=dict(type="str"),
+ userName=dict(type="str"),
+ snmpMode=dict(type="str"),
+ snmpAuthType=dict(type="str"),
+ authPassword=dict(type="str", no_log=True),
+ snmpPrivacyType=dict(type="str"),
+ privacyPassword=dict(type="str", no_log=True),
+ configId=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["name"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class EventSnmpConfig(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ name=params.get("name"),
+ description=params.get("description"),
+ ipAddress=params.get("ipAddress"),
+ port=params.get("port"),
+ snmpVersion=params.get("snmpVersion"),
+ community=params.get("community"),
+ userName=params.get("userName"),
+ snmpMode=params.get("snmpMode"),
+ snmpAuthType=params.get("snmpAuthType"),
+ authPassword=params.get("authPassword"),
+ snmpPrivacyType=params.get("snmpPrivacyType"),
+ privacyPassword=params.get("privacyPassword"),
+ configId=params.get("configId"),
+ )
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['name'] = self.new_object.get('name')
+ new_object_params['description'] = self.new_object.get('description')
+ new_object_params['ipAddress'] = self.new_object.get('ipAddress')
+ new_object_params['port'] = self.new_object.get('port')
+ new_object_params['snmpVersion'] = self.new_object.get('snmpVersion')
+ new_object_params['community'] = self.new_object.get('community')
+ new_object_params['userName'] = self.new_object.get('userName')
+ new_object_params['snmpMode'] = self.new_object.get('snmpMode')
+ new_object_params['snmpAuthType'] = self.new_object.get('snmpAuthType')
+ new_object_params['authPassword'] = self.new_object.get('authPassword')
+ new_object_params['snmpPrivacyType'] = self.new_object.get('snmpPrivacyType')
+ new_object_params['privacyPassword'] = self.new_object.get('privacyPassword')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['configId'] = self.new_object.get('configId')
+ new_object_params['name'] = self.new_object.get('name')
+ new_object_params['description'] = self.new_object.get('description')
+ new_object_params['ipAddress'] = self.new_object.get('ipAddress')
+ new_object_params['port'] = self.new_object.get('port')
+ new_object_params['snmpVersion'] = self.new_object.get('snmpVersion')
+ new_object_params['community'] = self.new_object.get('community')
+ new_object_params['userName'] = self.new_object.get('userName')
+ new_object_params['snmpMode'] = self.new_object.get('snmpMode')
+ new_object_params['snmpAuthType'] = self.new_object.get('snmpAuthType')
+ new_object_params['authPassword'] = self.new_object.get('authPassword')
+ new_object_params['snmpPrivacyType'] = self.new_object.get('snmpPrivacyType')
+ new_object_params['privacyPassword'] = self.new_object.get('privacyPassword')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name and get all
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ return result
+
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ o_id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("name", "name"),
+ ("description", "description"),
+ ("ipAddress", "ipAddress"),
+ ("port", "port"),
+ ("snmpVersion", "snmpVersion"),
+ ("community", "community"),
+ ("userName", "userName"),
+ ("snmpMode", "snmpMode"),
+ ("snmpAuthType", "snmpAuthType"),
+ ("authPassword", "authPassword"),
+ ("snmpPrivacyType", "snmpPrivacyType"),
+ ("privacyPassword", "privacyPassword"),
+ ("configId", "configId"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="event_management",
+ function="create_snmp_destination",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def update(self):
+ id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="event_management",
+ function="update_snmp_destination",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = EventSnmpConfig(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription.py
index 52db4e8ad..bb444e268 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription.py
@@ -37,6 +37,7 @@ argument_spec.update(dict(
required_if = [
("state", "present", ["payload"], True),
+ ("state", "absent", ["payload"], True),
]
required_one_of = []
mutually_exclusive = []
@@ -183,8 +184,11 @@ class EventSubscription(object):
return result
def delete(self):
- id = self.new_object.get("id")
- name = self.new_object.get("name")
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
result = None
result = self.dnac.exec(
family="event_management",
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_email_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_email_info.py
index 2ece6f956..9681e5362 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_email_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_email_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
name=dict(type="str"),
instanceId=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_rest_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_rest_info.py
index 9c7321317..147aaaa8d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_rest_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_rest_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
name=dict(type="str"),
instanceId=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_syslog_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_syslog_info.py
index fbec9c6ad..27b88b23d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_syslog_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_details_syslog_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
name=dict(type="str"),
instanceId=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_email_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_email_info.py
index 9e5f52866..93b4762e2 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_email_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_email_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
eventIds=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
domain=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_info.py
index c5dc8d6f6..747008720 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
eventIds=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_rest_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_rest_info.py
index 046639260..f8c69c168 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_rest_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_rest_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
eventIds=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
domain=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_subscription_syslog_info.py b/ansible_collections/cisco/dnac/plugins/action/event_subscription_syslog_info.py
index d79db0953..c8338dffb 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_subscription_syslog_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_subscription_syslog_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
eventIds=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
domain=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_syslog_config.py b/ansible_collections/cisco/dnac/plugins/action/event_syslog_config.py
index e4aae36bb..366d56718 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_syslog_config.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_syslog_config.py
@@ -36,7 +36,7 @@ argument_spec.update(dict(
description=dict(type="str"),
host=dict(type="str"),
protocol=dict(type="str"),
- port=dict(type="str"),
+ port=dict(type="int"),
))
required_if = [
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_syslog_config_info.py b/ansible_collections/cisco/dnac/plugins/action/event_syslog_config_info.py
index bcef02393..02c304e9b 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_syslog_config_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_syslog_config_info.py
@@ -28,8 +28,8 @@ argument_spec.update(dict(
configId=dict(type="str"),
name=dict(type="str"),
protocol=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
sortBy=dict(type="str"),
order=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_webhook_create.py b/ansible_collections/cisco/dnac/plugins/action/event_webhook_create.py
index de2b92514..ddbadcae4 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_webhook_create.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_webhook_create.py
@@ -32,6 +32,7 @@ argument_spec.update(dict(
method=dict(type="str"),
trustCert=dict(type="bool"),
headers=dict(type="list"),
+ isProxyRoute=dict(type="bool"),
))
required_if = []
@@ -76,6 +77,7 @@ class ActionModule(ActionBase):
method=params.get("method"),
trustCert=params.get("trustCert"),
headers=params.get("headers"),
+ isProxyRoute=params.get("isProxyRoute"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_webhook_read_info.py b/ansible_collections/cisco/dnac/plugins/action/event_webhook_read_info.py
new file mode 100644
index 000000000..9bb609a07
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/event_webhook_read_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ webhookIds=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ sortBy=dict(type="str"),
+ order=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ webhook_ids=params.get("webhookIds"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ sort_by=params.get("sortBy"),
+ order=params.get("order"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="event_management",
+ function='get_webhook_destination',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/event_webhook_update.py b/ansible_collections/cisco/dnac/plugins/action/event_webhook_update.py
index d11bc98a0..772b5e263 100644
--- a/ansible_collections/cisco/dnac/plugins/action/event_webhook_update.py
+++ b/ansible_collections/cisco/dnac/plugins/action/event_webhook_update.py
@@ -32,6 +32,7 @@ argument_spec.update(dict(
method=dict(type="str"),
trustCert=dict(type="bool"),
headers=dict(type="list"),
+ isProxyRoute=dict(type="bool"),
))
required_if = []
@@ -76,6 +77,7 @@ class ActionModule(ActionBase):
method=params.get("method"),
trustCert=params.get("trustCert"),
headers=params.get("headers"),
+ isProxyRoute=params.get("isProxyRoute"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/flexible_report_content_info.py b/ansible_collections/cisco/dnac/plugins/action/flexible_report_content_info.py
new file mode 100644
index 000000000..ec95a02b7
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/flexible_report_content_info.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ reportId=dict(type="str"),
+ executionId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ report_id=params.get("reportId"),
+ execution_id=params.get("executionId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ id = self._task.args.get("reportId")
+ if id:
+ response = dnac.exec(
+ family="reports",
+ function='download_flexible_report',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
+ if not id:
+ # NOTE: Does not have a get all method or it is in another action
+ response = None
+ dnac.object_modify_result(changed=False, result="Module does not have get all, check arguments of module")
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/flexible_report_execute.py b/ansible_collections/cisco/dnac/plugins/action/flexible_report_execute.py
new file mode 100644
index 000000000..b2c481e10
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/flexible_report_execute.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ reportId=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ report_id=params.get("reportId"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="reports",
+ function='executing_the_flexible_report',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/flexible_report_executions_info.py b/ansible_collections/cisco/dnac/plugins/action/flexible_report_executions_info.py
new file mode 100644
index 000000000..7340ddcd9
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/flexible_report_executions_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ reportId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ report_id=params.get("reportId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="reports",
+ function='get_execution_id_by_report_id',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule.py b/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule.py
new file mode 100644
index 000000000..f328936eb
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present"]),
+ schedule=dict(type="dict"),
+ reportId=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["reportId"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class FlexibleReportSchedule(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ schedule=params.get("schedule"),
+ report_id=params.get("reportId"),
+ )
+
+ def update_by_id_params(self):
+ new_object_params = {}
+ new_object_params['schedule'] = self.new_object.get('schedule')
+ new_object_params['reportId'] = self.new_object.get('reportId')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name and get all
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ try:
+ items = self.dnac.exec(
+ family="reports",
+ function="get_flexible_report_schedule_by_report_id",
+ params={"report_id": id}
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'report_id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ o_id = self.new_object.get("id")
+ o_id = o_id or self.new_object.get("report_id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ _id = _id or prev_obj.get("reportId")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ self.new_object.update(dict(report_id=_id))
+ if _id:
+ prev_obj = self.get_object_by_id(_id)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("schedule", "schedule"),
+ ("reportId", "report_id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def update(self):
+ id = self.new_object.get("id")
+ id = id or self.new_object.get("report_id")
+ name = self.new_object.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ id_ = id_ or prev_obj_name.get("reportId")
+ if id_:
+ self.new_object.update(dict(report_id=id_))
+ result = self.dnac.exec(
+ family="reports",
+ function="update_schedule_of_flexible_report",
+ params=self.update_by_id_params(),
+ op_modifies=True,
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = FlexibleReportSchedule(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ dnac.fail_json("Object does not exist, this plugin only supports update")
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
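The sketch below is illustrative only and is not part of the diff: it restates, with hypothetical stand-ins (SchedulerStub, dnac_compare_equality_stub) for the cisco.dnac plugin utilities, the state=present decision flow that ActionModule.run() above implements: look up the existing schedule, update it only when a compared field differs, and fail when the object does not exist, because this plugin supports update only.

    # Hypothetical stand-ins; not the cisco.dnac plugin API.
    def dnac_compare_equality_stub(current, requested):
        # An omitted (None) requested value means "no change requested".
        return requested is None or current == requested

    class SchedulerStub:
        def __init__(self, current, requested):
            self.current = current        # object as it exists in DNA Center
            self.requested = requested    # object described by the task arguments

        def exists(self):
            return self.current is not None, self.current

        def requires_update(self):
            return any(
                not dnac_compare_equality_stub(self.current.get(k), self.requested.get(k))
                for k in ("schedule", "reportId")
            )

    current = {"reportId": "r-1", "schedule": {"type": "SCHEDULE_NOW"}}
    requested = {"reportId": "r-1", "schedule": {"type": "SCHEDULE_LATER"}}
    obj = SchedulerStub(current, requested)
    it_exists, prev_obj = obj.exists()
    if not it_exists:
        print("fail: object does not exist and this plugin only supports update")
    elif obj.requires_update():
        print("call update_schedule_of_flexible_report and report changed")
    else:
        print("object already present, nothing to do")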
diff --git a/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule_info.py b/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule_info.py
new file mode 100644
index 000000000..cf11f213c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedule_info.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ reportId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ report_id=params.get("reportId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ id = self._task.args.get("reportId")
+ if id:
+ response = dnac.exec(
+ family="reports",
+ function='get_flexible_report_schedule_by_report_id',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
+ if not id:
+ # NOTE: This resource does not have a get-all method, or it is exposed through another action plugin
+ response = None
+ dnac.object_modify_result(changed=False, result="Module does not have a get-all function, check the module arguments")
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
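As a side note (illustrative, not part of the patch): the get_object() helper in the info plugin above, and in the similar *_info plugins added below, only renames the camelCase task arguments into the snake_case parameters expected by the SDK call. A minimal standalone illustration, using a made-up header value:

    def get_object(params):
        # camelCase Ansible argument -> snake_case SDK parameter
        return {
            "report_id": params.get("reportId"),
            "headers": params.get("headers"),
        }

    task_args = {"reportId": "r-1", "headers": {"X-Example-Header": "value"}}
    print(get_object(task_args))
    # {'report_id': 'r-1', 'headers': {'X-Example-Header': 'value'}}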
diff --git a/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedules_info.py b/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedules_info.py
new file mode 100644
index 000000000..15565a03a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/flexible_report_schedules_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="reports",
+ function='get_all_flexible_report_schedules',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/global_pool_info.py b/ansible_collections/cisco/dnac/plugins/action/global_pool_info.py
index 56c0161a0..304594281 100644
--- a/ansible_collections/cisco/dnac/plugins/action/global_pool_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/global_pool_info.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py b/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py
index c73050be8..1d1ac0acd 100644
--- a/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/http_read_credential.py
@@ -130,7 +130,7 @@ class HttpReadCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py b/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py
index 7ae396a1b..5a63c0d5e 100644
--- a/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/http_write_credential.py
@@ -130,7 +130,7 @@ class HttpWriteCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/integration_settings_instances_itsm.py b/ansible_collections/cisco/dnac/plugins/action/integration_settings_instances_itsm.py
index 1b12af772..0575dc457 100644
--- a/ansible_collections/cisco/dnac/plugins/action/integration_settings_instances_itsm.py
+++ b/ansible_collections/cisco/dnac/plugins/action/integration_settings_instances_itsm.py
@@ -77,6 +77,7 @@ class IntegrationSettingsInstancesItsm(object):
new_object_params['description'] = self.new_object.get('description')
new_object_params['data'] = self.new_object.get('data')
new_object_params['dypName'] = self.new_object.get('dypName')
+ new_object_params['instanceId'] = self.new_object.get('instanceId')
return new_object_params
def get_object_by_name(self, name):
diff --git a/ansible_collections/cisco/dnac/plugins/action/integration_settings_itsm_instances_info.py b/ansible_collections/cisco/dnac/plugins/action/integration_settings_itsm_instances_info.py
new file mode 100644
index 000000000..f8360f9c7
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/integration_settings_itsm_instances_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="itsm_integration",
+ function='get_all_itsm_integration_settings',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/integration_settings_status_info.py b/ansible_collections/cisco/dnac/plugins/action/integration_settings_status_info.py
new file mode 100644
index 000000000..2673531b9
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/integration_settings_status_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="itsm_integration",
+ function='get_itsm_integration_status',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/ise_integration_status_info.py b/ansible_collections/cisco/dnac/plugins/action/ise_integration_status_info.py
new file mode 100644
index 000000000..40bb9e2f7
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/ise_integration_status_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="system_settings",
+ function='cisco_ise_server_integration_status',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/issues_info.py b/ansible_collections/cisco/dnac/plugins/action/issues_info.py
index ab158f9e7..288681100 100644
--- a/ansible_collections/cisco/dnac/plugins/action/issues_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/issues_info.py
@@ -25,14 +25,14 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
siteId=dict(type="str"),
deviceId=dict(type="str"),
macAddress=dict(type="str"),
priority=dict(type="str"),
- aiDriven=dict(type="str"),
issueStatus=dict(type="str"),
+ aiDriven=dict(type="str"),
headers=dict(type="dict"),
))
@@ -77,8 +77,8 @@ class ActionModule(ActionBase):
device_id=params.get("deviceId"),
mac_address=params.get("macAddress"),
priority=params.get("priority"),
- ai_driven=params.get("aiDriven"),
issue_status=params.get("issueStatus"),
+ ai_driven=params.get("aiDriven"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_log_info.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_log_info.py
index dfa702581..dc9caf604 100644
--- a/ansible_collections/cisco/dnac/plugins/action/lan_automation_log_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_log_info.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
id=dict(type="str"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_sessions_info.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_sessions_info.py
new file mode 100644
index 000000000..5a9a7119d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_sessions_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="lan_automation",
+ function='lan_automation_active_sessions',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_status_info.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_status_info.py
index 073cfb275..e380bd5fb 100644
--- a/ansible_collections/cisco/dnac/plugins/action/lan_automation_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_status_info.py
@@ -25,8 +25,8 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
id=dict(type="str"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_update.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_update.py
new file mode 100644
index 000000000..0a30414e1
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_update.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="lan_automation",
+ function='lan_automation_stop_and_update_devices',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
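For orientation (illustrative, not part of the patch): lan_automation_update above, and the lan_automation_update_device, lan_automation_update_v2 and lan_automation_v2 plugins that follow, share one command-style shape: no check mode, a single SDK call flagged with op_modifies=True, and the raw response returned as dnac_response. A rough sketch with a fake SDK object standing in for DNACSDK and a placeholder payload:

    class FakeSDK:
        # Stand-in for DNACSDK; a real call would reach the DNA Center API.
        def exec(self, family, function, params, op_modifies=False):
            return {"family": family, "function": function,
                    "params": params, "modifies": op_modifies}

    def run_command_style(task_args):
        dnac = FakeSDK()
        response = dnac.exec(
            family="lan_automation",
            function="lan_automation_stop_and_update_devices",
            params={"payload": task_args.get("payload"), "id": task_args.get("id")},
            op_modifies=True,
        )
        return {"dnac_response": response}

    print(run_command_style({"payload": [{"example": "value"}], "id": "session-1"}))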
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_update_device.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_update_device.py
new file mode 100644
index 000000000..30ec6ef88
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_update_device.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ loopbackUpdateDeviceList=dict(type="list"),
+ linkUpdate=dict(type="dict"),
+ hostnameUpdateDevices=dict(type="list"),
+ feature=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ loopbackUpdateDeviceList=params.get("loopbackUpdateDeviceList"),
+ linkUpdate=params.get("linkUpdate"),
+ hostnameUpdateDevices=params.get("hostnameUpdateDevices"),
+ feature=params.get("feature"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="lan_automation",
+ function='lan_automation_device_update',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_update_v2.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_update_v2.py
new file mode 100644
index 000000000..acd4c9812
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_update_v2.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="lan_automation",
+ function='lan_automation_stop_and_update_devices_v2',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/lan_automation_v2.py b/ansible_collections/cisco/dnac/plugins/action/lan_automation_v2.py
new file mode 100644
index 000000000..2c538a873
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/lan_automation_v2.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ payload=dict(type="list"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ payload=params.get("payload"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="lan_automation",
+ function='lan_automation_start_v2',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/license_device_license_summary_info.py b/ansible_collections/cisco/dnac/plugins/action/license_device_license_summary_info.py
index af82708ca..03544a2bf 100644
--- a/ansible_collections/cisco/dnac/plugins/action/license_device_license_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/license_device_license_summary_info.py
@@ -25,15 +25,15 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- page_number=dict(type="int"),
+ page_number=dict(type="float"),
order=dict(type="str"),
sort_by=dict(type="str"),
dna_level=dict(type="str"),
device_type=dict(type="str"),
- limit=dict(type="int"),
+ limit=dict(type="float"),
registration_status=dict(type="str"),
virtual_account_name=dict(type="str"),
- smart_account_id=dict(type="int"),
+ smart_account_id=dict(type="str"),
device_uuid=dict(type="str"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/maps_export.py b/ansible_collections/cisco/dnac/plugins/action/maps_export.py
new file mode 100644
index 000000000..108cfd503
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/maps_export.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ siteHierarchyUuid=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ site_hierarchy_uuid=params.get("siteHierarchyUuid"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='export_map_archive',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/maps_import.py b/ansible_collections/cisco/dnac/plugins/action/maps_import.py
new file mode 100644
index 000000000..de9e4dc2c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/maps_import.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ importContextUuid=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ import_context_uuid=params.get("importContextUuid"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function="import_map_archive_cancel_an_import",
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/maps_import_perform.py b/ansible_collections/cisco/dnac/plugins/action/maps_import_perform.py
new file mode 100644
index 000000000..e6ab782a0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/maps_import_perform.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ importContextUuid=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ import_context_uuid=params.get("importContextUuid"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='import_map_archive_perform_import',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/maps_import_start.py b/ansible_collections/cisco/dnac/plugins/action/maps_import_start.py
new file mode 100644
index 000000000..5bc2a1c6c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/maps_import_start.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='import_map_archive_start_import',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/maps_import_status_info.py b/ansible_collections/cisco/dnac/plugins/action/maps_import_status_info.py
new file mode 100644
index 000000000..bfb88e7d3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/maps_import_status_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ importContextUuid=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ import_context_uuid=params.get("importContextUuid"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='import_map_archive_import_status',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/maps_supported_access_points_info.py b/ansible_collections/cisco/dnac/plugins/action/maps_supported_access_points_info.py
new file mode 100644
index 000000000..d4d991387
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/maps_supported_access_points_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='maps_supported_access_points',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py b/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py
index 5ea30373d..4312a5834 100644
--- a/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/netconf_credential.py
@@ -95,7 +95,7 @@ class NetconfCredential(object):
if isinstance(items, dict):
if 'response' in items:
items = items.get('response')
- result = get_dict_result(items, 'description', name)
+ result = get_dict_result(items, 'description', name) or get_dict_result(items, 'username', name)
except Exception:
result = None
return result
@@ -121,7 +121,7 @@ class NetconfCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device.py b/ansible_collections/cisco/dnac/plugins/action/network_device.py
index 650f0cc82..38a99c6b1 100644
--- a/ansible_collections/cisco/dnac/plugins/action/network_device.py
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device.py
@@ -56,9 +56,9 @@ argument_spec.update(dict(
snmpUserName=dict(type="str"),
snmpVersion=dict(type="str"),
type=dict(type="str"),
- updateMgmtIPaddressList=dict(type="list"),
userName=dict(type="str"),
id=dict(type="str"),
+ updateMgmtIPaddressList=dict(type="list"),
cleanConfig=dict(type="bool"),
))
@@ -100,9 +100,9 @@ class NetworkDevice(object):
snmpUserName=params.get("snmpUserName"),
snmpVersion=params.get("snmpVersion"),
type=params.get("type"),
- updateMgmtIPaddressList=params.get("updateMgmtIPaddressList"),
userName=params.get("userName"),
id=params.get("id"),
+ updateMgmtIPaddressList=params.get("updateMgmtIPaddressList"),
clean_config=params.get("cleanConfig"),
managementIpAddress=params.get("managementIpAddress"),
)
@@ -188,7 +188,6 @@ class NetworkDevice(object):
new_object_params['snmpUserName'] = self.new_object.get('snmpUserName')
new_object_params['snmpVersion'] = self.new_object.get('snmpVersion')
new_object_params['type'] = self.new_object.get('type')
- new_object_params['updateMgmtIPaddressList'] = self.new_object.get('updateMgmtIPaddressList')
new_object_params['userName'] = self.new_object.get('userName')
return new_object_params
@@ -316,9 +315,9 @@ class NetworkDevice(object):
("snmpUserName", "snmpUserName"),
("snmpVersion", "snmpVersion"),
("type", "type"),
- ("updateMgmtIPaddressList", "updateMgmtIPaddressList"),
("userName", "userName"),
("id", "id"),
+ ("updateMgmtIPaddressList", "updateMgmtIPaddressList"),
("cleanConfig", "clean_config"),
]
# Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_config_task_info.py b/ansible_collections/cisco/dnac/plugins/action/network_device_config_task_info.py
new file mode 100644
index 000000000..d9231ba35
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_config_task_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ parentTaskId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ parent_task_id=params.get("parentTaskId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="configuration_archive",
+ function='get_config_task_details',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
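A minimal task sketch for the new action, assuming it is exposed as the module cisco.dnac.network_device_config_task_info (following the collection's plugin naming) and that the standard dnac_* connection options from dnac_argument_spec are supplied; the parentTaskId value is a placeholder:

    # Fetch configuration-archive task details for a parent task
    - name: Get config task details
      cisco.dnac.network_device_config_task_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: "{{ dnac_verify }}"
        parentTaskId: "string"
      register: result
    # result.dnac_response carries the raw response from get_config_task_details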
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_config_write_memory.py b/ansible_collections/cisco/dnac/plugins/action/network_device_config_write_memory.py
new file mode 100644
index 000000000..1f145f04f
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_config_write_memory.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ deviceId=dict(type="list"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ deviceId=params.get("deviceId"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="configuration_archive",
+ function='commit_device_configuration',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
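A sketch of how the new write-memory action might be invoked, assuming the module name cisco.dnac.network_device_config_write_memory and the usual dnac_* connection options; the device UUID is a placeholder. Because the call sets op_modifies=True and check mode is not supported, the task always issues the commit request:

    # Commit (write memory) the running configuration on the listed devices
    - name: Commit device configuration
      cisco.dnac.network_device_config_write_memory:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        deviceId:
          - "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"   # placeholder device UUID
      register: result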
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_count_info.py b/ansible_collections/cisco/dnac/plugins/action/network_device_count_info.py
index b2e2ad364..79424c86a 100644
--- a/ansible_collections/cisco/dnac/plugins/action/network_device_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_count_info.py
@@ -26,6 +26,10 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
deviceId=dict(type="str"),
+ hostname=dict(type="list"),
+ managementIpAddress=dict(type="list"),
+ macAddress=dict(type="list"),
+ locationName=dict(type="list"),
headers=dict(type="dict"),
))
@@ -65,6 +69,10 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
device_id=params.get("deviceId"),
+ hostname=params.get("hostname"),
+ management_ip_address=params.get("managementIpAddress"),
+ mac_address=params.get("macAddress"),
+ location_name=params.get("locationName"),
headers=params.get("headers"),
)
return new_object
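The new filter arguments map straight through to the devices API, so a count query can now be narrowed by hostname, management IP, MAC address, or location. An illustrative task, assuming the module name cisco.dnac.network_device_count_info and placeholder filter values:

    # Count only the devices matching the given hostname/location filters
    - name: Get filtered device count
      cisco.dnac.network_device_count_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        hostname:
          - "edge-switch-01"          # placeholder
        locationName:
          - "Global/HQ/Floor-1"       # placeholder
      register: device_count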
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_equipment_info.py b/ansible_collections/cisco/dnac/plugins/action/network_device_equipment_info.py
index 2365b8a11..96235401f 100644
--- a/ansible_collections/cisco/dnac/plugins/action/network_device_equipment_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_equipment_info.py
@@ -83,7 +83,7 @@ class ActionModule(ActionBase):
response = dnac.exec(
family="devices",
- function='return_power_supply_fan_details_for_the_given_device',
+ function='get_the_details_of_physical_components_of_the_given_device',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_export.py b/ansible_collections/cisco/dnac/plugins/action/network_device_export.py
index ed7447eec..d130203a2 100644
--- a/ansible_collections/cisco/dnac/plugins/action/network_device_export.py
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_export.py
@@ -26,7 +26,6 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
deviceUuids=dict(type="list"),
- id=dict(type="str"),
operationEnum=dict(type="str"),
parameters=dict(type="list"),
password=dict(type="str", no_log=True),
@@ -68,7 +67,6 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
deviceUuids=params.get("deviceUuids"),
- id=params.get("id"),
operationEnum=params.get("operationEnum"),
parameters=params.get("parameters"),
password=params.get("password"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_insight_device_link_info.py b/ansible_collections/cisco/dnac/plugins/action/network_device_insight_device_link_info.py
new file mode 100644
index 000000000..63cdcd94b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_insight_device_link_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ siteId=dict(type="str"),
+ offset=dict(type="int"),
+ limit=dict(type="int"),
+ category=dict(type="str"),
+ sortBy=dict(type="str"),
+ order=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ site_id=params.get("siteId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ category=params.get("category"),
+ sort_by=params.get("sortBy"),
+ order=params.get("order"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="devices",
+ function='inventory_insight_device_link_mismatch',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
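An illustrative task for the new insight device-link action, assuming the module name cisco.dnac.network_device_insight_device_link_info; the siteId, category, and paging values are placeholders:

    # List device-link mismatches for a site, one page at a time
    - name: Get insight device link mismatches
      cisco.dnac.network_device_insight_device_link_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        siteId: "string"        # placeholder site UUID
        category: "string"      # placeholder mismatch category
        offset: 1
        limit: 500
      register: result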
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_ip_address_info.py b/ansible_collections/cisco/dnac/plugins/action/network_device_ip_address_info.py
new file mode 100644
index 000000000..a766c17dc
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_ip_address_info.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ ipAddress=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ ip_address=params.get("ipAddress"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ id = self._task.args.get("ipAddress")
+ if id:
+ response = dnac.exec(
+ family="devices",
+ function='get_network_device_by_ip',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
+ if not id:
+ # NOTE: Does not have a get all method or it is in another action
+ response = None
+ dnac.object_modify_result(changed=False, result="Module does not have get all, check arguments of module")
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
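As the run() logic above shows, the action only calls get_network_device_by_ip when ipAddress is supplied; otherwise it returns a "no get-all" notice. A minimal sketch, assuming the module name cisco.dnac.network_device_ip_address_info:

    # Look up a single device by its management IP address
    - name: Get network device by IP
      cisco.dnac.network_device_ip_address_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        ipAddress: "192.0.2.10"    # placeholder address
      register: device_info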
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_management_address_update.py b/ansible_collections/cisco/dnac/plugins/action/network_device_management_address_update.py
new file mode 100644
index 000000000..ab9d7c670
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_management_address_update.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ newIP=dict(type="str"),
+ deviceid=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ newIP=params.get("newIP"),
+ deviceid=params.get("deviceid"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="devices",
+ function='update_device_management_address',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
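A sketch for the new management-address update action, assuming the module name cisco.dnac.network_device_management_address_update; note the lowercase deviceid key, matching the argument spec above. Both values are placeholders:

    # Point DNA Center at the device's new management IP
    - name: Update device management address
      cisco.dnac.network_device_management_address_update:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        deviceid: "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"   # placeholder device UUID
        newIP: "192.0.2.20"                                # placeholder new management IP
      register: result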
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field.py b/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field.py
index 724221ee9..9f1c83f14 100644
--- a/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field.py
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field.py
@@ -75,6 +75,7 @@ class NetworkDeviceUserDefinedField(object):
new_object_params = {}
new_object_params['name'] = self.new_object.get('name')
new_object_params['description'] = self.new_object.get('description')
+ new_object_params['id'] = self.new_object.get('id')
return new_object_params
def get_object_by_name(self, name):
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_delete.py b/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_delete.py
new file mode 100644
index 000000000..b1ac8e643
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_delete.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ name=dict(type="str"),
+ deviceId=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ name=params.get("name"),
+ device_id=params.get("deviceId"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="devices",
+ function="remove_user_defined_field_from_device",
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
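A sketch for removing a user-defined field from a device, assuming the module name cisco.dnac.network_device_user_defined_field_delete; the field name and device UUID are placeholders:

    # Detach the named user-defined field from one device
    - name: Remove user defined field from device
      cisco.dnac.network_device_user_defined_field_delete:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        deviceId: "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"   # placeholder device UUID
        name: "SNMP-Location"                              # placeholder UDF name
      register: result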
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_update.py b/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_update.py
new file mode 100644
index 000000000..81ffddaaf
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/network_device_user_defined_field_update.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ payload=dict(type="list"),
+ deviceId=dict(type="str"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ payload=params.get("payload"),
+ device_id=params.get("deviceId"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="devices",
+ function='add_user_defined_field_to_device',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
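A sketch for attaching user-defined field values to a device, assuming the module name cisco.dnac.network_device_user_defined_field_update; the payload entries shown as name/value pairs are an assumption, since the diff does not show the payload schema:

    # Set user-defined field values on a device
    - name: Add user defined field values to device
      cisco.dnac.network_device_user_defined_field_update:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        deviceId: "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"   # placeholder device UUID
        payload:
          - name: "SNMP-Location"     # assumed payload shape: name/value pairs
            value: "Building-A"
      register: result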
diff --git a/ansible_collections/cisco/dnac/plugins/action/network_v2.py b/ansible_collections/cisco/dnac/plugins/action/network_v2.py
index c1d4d0c1c..f9087cbf0 100644
--- a/ansible_collections/cisco/dnac/plugins/action/network_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/action/network_v2.py
@@ -60,11 +60,13 @@ class NetworkV2(object):
def create_params(self):
new_object_params = {}
new_object_params['settings'] = self.new_object.get('settings')
+ new_object_params['siteId'] = self.new_object.get('siteId')
return new_object_params
def update_by_id_params(self):
new_object_params = {}
new_object_params['settings'] = self.new_object.get('settings')
+ new_object_params['siteId'] = self.new_object.get('siteId')
return new_object_params
def get_object_by_name(self, name):
diff --git a/ansible_collections/cisco/dnac/plugins/action/path_trace.py b/ansible_collections/cisco/dnac/plugins/action/path_trace.py
index 133122616..d49417255 100644
--- a/ansible_collections/cisco/dnac/plugins/action/path_trace.py
+++ b/ansible_collections/cisco/dnac/plugins/action/path_trace.py
@@ -117,7 +117,7 @@ class PathTrace(object):
try:
items = self.dnac.exec(
family="path_trace",
- function="retrives_all_previous_pathtraces_summary",
+ function="retrieves_all_previous_pathtraces_summary",
params=self.get_all_params(name=name),
)
if isinstance(items, dict):
diff --git a/ansible_collections/cisco/dnac/plugins/action/path_trace_info.py b/ansible_collections/cisco/dnac/plugins/action/path_trace_info.py
index be9d0dbc2..079046eeb 100644
--- a/ansible_collections/cisco/dnac/plugins/action/path_trace_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/path_trace_info.py
@@ -28,16 +28,16 @@ argument_spec.update(dict(
periodicRefresh=dict(type="bool"),
sourceIP=dict(type="str"),
destIP=dict(type="str"),
- sourcePort=dict(type="str"),
- destPort=dict(type="str"),
- gtCreateTime=dict(type="str"),
- ltCreateTime=dict(type="str"),
+ sourcePort=dict(type="float"),
+ destPort=dict(type="float"),
+ gtCreateTime=dict(type="float"),
+ ltCreateTime=dict(type="float"),
protocol=dict(type="str"),
status=dict(type="str"),
taskId=dict(type="str"),
- lastUpdateTime=dict(type="str"),
- limit=dict(type="int"),
- offset=dict(type="int"),
+ lastUpdateTime=dict(type="float"),
+ limit=dict(type="float"),
+ offset=dict(type="float"),
order=dict(type="str"),
sortBy=dict(type="str"),
flowAnalysisId=dict(type="str"),
@@ -122,7 +122,7 @@ class ActionModule(ActionBase):
if not id:
response = dnac.exec(
family="path_trace",
- function='retrives_all_previous_pathtraces_summary',
+ function='retrieves_all_previous_pathtraces_summary',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/planned_access_points.py b/ansible_collections/cisco/dnac/plugins/action/planned_access_points.py
new file mode 100644
index 000000000..180b2fea3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/planned_access_points.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ attributes=dict(type="dict"),
+ isSensor=dict(type="bool"),
+ location=dict(type="dict"),
+ position=dict(type="dict"),
+ radioCount=dict(type="int"),
+ radios=dict(type="list"),
+ floorId=dict(type="str"),
+ plannedAccessPointUuid=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["floorId", "plannedAccessPointUuid"], True),
+ ("state", "absent", ["floorId", "plannedAccessPointUuid"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class PlannedAccessPoints(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ attributes=params.get("attributes"),
+ isSensor=params.get("isSensor"),
+ location=params.get("location"),
+ position=params.get("position"),
+ radioCount=params.get("radioCount"),
+ radios=params.get("radios"),
+ floor_id=params.get("floorId"),
+ planned_access_point_uuid=params.get("plannedAccessPointUuid"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['limit'] = self.new_object.get('limit')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['radios'] = self.new_object.get('radios')
+ new_object_params['floor_id'] = self.new_object.get('floorId') or \
+ self.new_object.get('floor_id')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['attributes'] = self.new_object.get('attributes')
+ new_object_params['isSensor'] = self.new_object.get('isSensor')
+ new_object_params['location'] = self.new_object.get('location')
+ new_object_params['position'] = self.new_object.get('position')
+ new_object_params['radioCount'] = self.new_object.get('radioCount')
+ new_object_params['radios'] = self.new_object.get('radios')
+ new_object_params['floorId'] = self.new_object.get('floorId')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['floor_id'] = self.new_object.get('floor_id')
+ new_object_params['planned_access_point_uuid'] = self.new_object.get('planned_access_point_uuid')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['attributes'] = self.new_object.get('attributes')
+ new_object_params['isSensor'] = self.new_object.get('isSensor')
+ new_object_params['location'] = self.new_object.get('location')
+ new_object_params['position'] = self.new_object.get('position')
+ new_object_params['radioCount'] = self.new_object.get('radioCount')
+ new_object_params['radios'] = self.new_object.get('radios')
+ new_object_params['floorId'] = self.new_object.get('floorId')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="devices",
+ function="get_planned_access_points_for_floor",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="devices",
+ function="get_planned_access_points_for_floor",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self):
+ id_exists = False
+ name_exists = False
+ prev_obj = None
+ o_id = self.new_object.get("id")
+ o_id = o_id or self.new_object.get("planned_access_point_uuid")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ _id = _id or prev_obj.get("plannedAccessPointUuid")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ self.new_object.update(dict(planned_access_point_uuid=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("attributes", "attributes"),
+ ("isSensor", "isSensor"),
+ ("location", "location"),
+ ("position", "position"),
+ ("radioCount", "radioCount"),
+ ("radios", "radios"),
+ ("floorId", "floor_id"),
+ ("plannedAccessPointUuid", "planned_access_point_uuid"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="devices",
+ function="create_planned_access_point_for_floor",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def update(self):
+ id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="devices",
+ function="update_planned_access_point_for_floor",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ id = self.new_object.get("id")
+ id = id or self.new_object.get("planned_access_point_uuid")
+ name = self.new_object.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ id_ = id_ or prev_obj_name.get("plannedAccessPointUuid")
+ if id_:
+ self.new_object.update(dict(planned_access_point_uuid=id_))
+ result = self.dnac.exec(
+ family="devices",
+ function="delete_planned_access_point_for_floor",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = PlannedAccessPoints(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
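A sketch of the new planned_access_points state module, assuming the module name cisco.dnac.planned_access_points; the floorId, plannedAccessPointUuid, and attribute values are placeholders. With state: present the action creates or updates the planned AP on the floor; with state: absent it deletes it by plannedAccessPointUuid:

    # Ensure a planned access point exists on the floor
    - name: Create or update planned access point
      cisco.dnac.planned_access_points:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        state: present
        floorId: "string"                 # placeholder floor UUID
        attributes: {}                    # placeholder planned-AP attributes
        radioCount: 2
        radios: []

    # Remove the planned access point again
    - name: Delete planned access point
      cisco.dnac.planned_access_points:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        state: absent
        floorId: "string"                 # placeholder floor UUID
        plannedAccessPointUuid: "string"  # placeholder planned-AP UUID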
diff --git a/ansible_collections/cisco/dnac/plugins/action/planned_access_points_info.py b/ansible_collections/cisco/dnac/plugins/action/planned_access_points_info.py
index 86cd1a87b..328bd1c63 100644
--- a/ansible_collections/cisco/dnac/plugins/action/planned_access_points_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/planned_access_points_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
floorId=dict(type="str"),
- limit=dict(type="int"),
- offset=dict(type="int"),
+ limit=dict(type="float"),
+ offset=dict(type="float"),
radios=dict(type="bool"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_device.py b/ansible_collections/cisco/dnac/plugins/action/pnp_device.py
index ec9c0f36a..5acf0f3c7 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_device.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_device.py
@@ -31,15 +31,7 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
state=dict(type="str", default="present", choices=["present", "absent"]),
- _id=dict(type="str"),
deviceInfo=dict(type="dict"),
- runSummaryList=dict(type="list"),
- systemResetWorkflow=dict(type="dict"),
- systemWorkflow=dict(type="dict"),
- tenantId=dict(type="str"),
- version=dict(type="int"),
- workflow=dict(type="dict"),
- workflowParameters=dict(type="dict"),
id=dict(type="str"),
))
@@ -56,15 +48,7 @@ class PnpDevice(object):
def __init__(self, params, dnac):
self.dnac = dnac
self.new_object = dict(
- _id=params.get("_id"),
deviceInfo=params.get("deviceInfo"),
- runSummaryList=params.get("runSummaryList"),
- systemResetWorkflow=params.get("systemResetWorkflow"),
- systemWorkflow=params.get("systemWorkflow"),
- tenantId=params.get("tenantId"),
- version=params.get("version"),
- workflow=params.get("workflow"),
- workflowParameters=params.get("workflowParameters"),
id=params.get("id"),
)
@@ -81,17 +65,11 @@ class PnpDevice(object):
self.new_object.get('state')
new_object_params['onb_state'] = self.new_object.get('onbState') or \
self.new_object.get('onb_state')
- new_object_params['cm_state'] = self.new_object.get('cmState') or \
- self.new_object.get('cm_state')
new_object_params['name'] = name or self.new_object.get('name')
new_object_params['pid'] = self.new_object.get('pid')
new_object_params['source'] = self.new_object.get('source')
- new_object_params['project_id'] = self.new_object.get('projectId') or \
- self.new_object.get('project_id')
new_object_params['workflow_id'] = self.new_object.get('workflowId') or \
self.new_object.get('workflow_id')
- new_object_params['project_name'] = self.new_object.get('projectName') or \
- self.new_object.get('project_name')
new_object_params['workflow_name'] = self.new_object.get('workflowName') or \
self.new_object.get('workflow_name')
new_object_params['smart_account_id'] = self.new_object.get('smartAccountId') or \
@@ -109,15 +87,7 @@ class PnpDevice(object):
def create_params(self):
new_object_params = {}
- new_object_params['_id'] = self.new_object.get('_id')
new_object_params['deviceInfo'] = self.new_object.get('deviceInfo')
- new_object_params['runSummaryList'] = self.new_object.get('runSummaryList')
- new_object_params['systemResetWorkflow'] = self.new_object.get('systemResetWorkflow')
- new_object_params['systemWorkflow'] = self.new_object.get('systemWorkflow')
- new_object_params['tenantId'] = self.new_object.get('tenantId')
- new_object_params['version'] = self.new_object.get('version')
- new_object_params['workflow'] = self.new_object.get('workflow')
- new_object_params['workflowParameters'] = self.new_object.get('workflowParameters')
return new_object_params
def delete_by_id_params(self):
@@ -127,15 +97,8 @@ class PnpDevice(object):
def update_by_id_params(self):
new_object_params = {}
- new_object_params['_id'] = self.new_object.get('_id')
+ new_object_params['id'] = self.new_object.get('id')
new_object_params['deviceInfo'] = self.new_object.get('deviceInfo')
- new_object_params['runSummaryList'] = self.new_object.get('runSummaryList')
- new_object_params['systemResetWorkflow'] = self.new_object.get('systemResetWorkflow')
- new_object_params['systemWorkflow'] = self.new_object.get('systemWorkflow')
- new_object_params['tenantId'] = self.new_object.get('tenantId')
- new_object_params['version'] = self.new_object.get('version')
- new_object_params['workflow'] = self.new_object.get('workflow')
- new_object_params['workflowParameters'] = self.new_object.get('workflowParameters')
new_object_params['id'] = self.new_object.get('id')
return new_object_params
@@ -206,15 +169,7 @@ class PnpDevice(object):
requested_obj = self.new_object
obj_params = [
- ("_id", "_id"),
("deviceInfo", "deviceInfo"),
- ("runSummaryList", "runSummaryList"),
- ("systemResetWorkflow", "systemResetWorkflow"),
- ("systemWorkflow", "systemWorkflow"),
- ("tenantId", "tenantId"),
- ("version", "version"),
- ("workflow", "workflow"),
- ("workflowParameters", "workflowParameters"),
("id", "id"),
]
# Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_device_authorize.py b/ansible_collections/cisco/dnac/plugins/action/pnp_device_authorize.py
index 2e6f18aca..140b06bf8 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_device_authorize.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_device_authorize.py
@@ -76,7 +76,7 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="cisco_dna_center_system",
+ family="device_onboarding_pnp",
function='authorize_device',
op_modifies=True,
params=self.get_object(self._task.args),
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py b/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py
index 6bceab4a1..33e1ccb03 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_device_claim_to_site.py
@@ -34,8 +34,8 @@ argument_spec.update(dict(
staticIP=dict(type="str"),
subnetMask=dict(type="str"),
gateway=dict(type="str"),
- vlanID=dict(type="str"),
- interfaceName=dict(type="str"),
+ vlanId=dict(type="str"),
+ ipInterfaceName=dict(type="str"),
sensorProfile=dict(type="str"),
))
@@ -83,8 +83,8 @@ class ActionModule(ActionBase):
staticIP=params.get("staticIP"),
subnetMask=params.get("subnetMask"),
gateway=params.get("gateway"),
- vlanID=params.get("vlanID"),
- interfaceName=params.get("interfaceName"),
+ vlanId=params.get("vlanId"),
+ ipInterfaceName=params.get("ipInterfaceName"),
sensorProfile=params.get("sensorProfile"),
)
return new_object
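The claim-to-site arguments vlanID and interfaceName are renamed to vlanId and ipInterfaceName, which likely requires updating playbooks that used the old keys. A fragmentary sketch, assuming the module name cisco.dnac.pnp_device_claim_to_site; the remaining claim parameters (device and site identifiers, claim type, and so on) are defined outside the hunk shown here and are omitted:

    - name: Claim PnP device to site
      cisco.dnac.pnp_device_claim_to_site:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        # ...device/site claim parameters omitted (not shown in this hunk)...
        vlanId: "100"                   # was vlanID
        ipInterfaceName: "Vlan100"      # was interfaceName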
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_device_count_info.py b/ansible_collections/cisco/dnac/plugins/action/pnp_device_count_info.py
index 933b8bd0c..a43bf5682 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_device_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_device_count_info.py
@@ -28,13 +28,10 @@ argument_spec.update(dict(
serialNumber=dict(type="list"),
state_=dict(type="list"),
onbState=dict(type="list"),
- cmState=dict(type="list"),
name=dict(type="list"),
pid=dict(type="list"),
source=dict(type="list"),
- projectId=dict(type="list"),
workflowId=dict(type="list"),
- projectName=dict(type="list"),
workflowName=dict(type="list"),
smartAccountId=dict(type="list"),
virtualAccountId=dict(type="list"),
@@ -80,13 +77,10 @@ class ActionModule(ActionBase):
serial_number=params.get("serialNumber"),
state=params.get("state_"),
onb_state=params.get("onbState"),
- cm_state=params.get("cmState"),
name=params.get("name"),
pid=params.get("pid"),
source=params.get("source"),
- project_id=params.get("projectId"),
workflow_id=params.get("workflowId"),
- project_name=params.get("projectName"),
workflow_name=params.get("workflowName"),
smart_account_id=params.get("smartAccountId"),
virtual_account_id=params.get("virtualAccountId"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_device_info.py b/ansible_collections/cisco/dnac/plugins/action/pnp_device_info.py
index 529736179..b0d567c20 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_device_info.py
@@ -32,13 +32,10 @@ argument_spec.update(dict(
serialNumber=dict(type="list"),
state_=dict(type="list"),
onbState=dict(type="list"),
- cmState=dict(type="list"),
name=dict(type="list"),
pid=dict(type="list"),
source=dict(type="list"),
- projectId=dict(type="list"),
workflowId=dict(type="list"),
- projectName=dict(type="list"),
workflowName=dict(type="list"),
smartAccountId=dict(type="list"),
virtualAccountId=dict(type="list"),
@@ -92,13 +89,10 @@ class ActionModule(ActionBase):
serial_number=params.get("serialNumber"),
state=params.get("state_"),
onb_state=params.get("onbState"),
- cm_state=params.get("cmState"),
name=params.get("name"),
pid=params.get("pid"),
source=params.get("source"),
- project_id=params.get("projectId"),
workflow_id=params.get("workflowId"),
- project_name=params.get("projectName"),
workflow_name=params.get("workflowName"),
smart_account_id=params.get("smartAccountId"),
virtual_account_id=params.get("virtualAccountId"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_server_profile_update.py b/ansible_collections/cisco/dnac/plugins/action/pnp_server_profile_update.py
index 209812d6d..2414b69b8 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_server_profile_update.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_server_profile_update.py
@@ -25,19 +25,10 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- autoSyncPeriod=dict(type="int"),
- ccoUser=dict(type="str"),
- expiry=dict(type="int"),
- lastSync=dict(type="int"),
- profile=dict(type="dict"),
smartAccountId=dict(type="str"),
- syncResult=dict(type="dict"),
- syncResultStr=dict(type="str"),
- syncStartTime=dict(type="int"),
- syncStatus=dict(type="str"),
- tenantId=dict(type="str"),
- token=dict(type="str"),
virtualAccountId=dict(type="str"),
+ profile=dict(type="dict"),
+ ccoUser=dict(type="str"),
))
required_if = []
@@ -75,19 +66,10 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
- autoSyncPeriod=params.get("autoSyncPeriod"),
- ccoUser=params.get("ccoUser"),
- expiry=params.get("expiry"),
- lastSync=params.get("lastSync"),
- profile=params.get("profile"),
smartAccountId=params.get("smartAccountId"),
- syncResult=params.get("syncResult"),
- syncResultStr=params.get("syncResultStr"),
- syncStartTime=params.get("syncStartTime"),
- syncStatus=params.get("syncStatus"),
- tenantId=params.get("tenantId"),
- token=params.get("token"),
virtualAccountId=params.get("virtualAccountId"),
+ profile=params.get("profile"),
+ ccoUser=params.get("ccoUser"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/pnp_workflow.py b/ansible_collections/cisco/dnac/plugins/action/pnp_workflow.py
index 7a42ea0d9..868106c71 100644
--- a/ansible_collections/cisco/dnac/plugins/action/pnp_workflow.py
+++ b/ansible_collections/cisco/dnac/plugins/action/pnp_workflow.py
@@ -149,6 +149,7 @@ class PnpWorkflow(object):
new_object_params['useState'] = self.new_object.get('useState')
new_object_params['version'] = self.new_object.get('version')
new_object_params['id'] = self.new_object.get('id')
+ new_object_params['id'] = self.new_object.get('id')
return new_object_params
def get_object_by_name(self, name):
diff --git a/ansible_collections/cisco/dnac/plugins/action/reports.py b/ansible_collections/cisco/dnac/plugins/action/reports.py
index 28f656870..decf20545 100644
--- a/ansible_collections/cisco/dnac/plugins/action/reports.py
+++ b/ansible_collections/cisco/dnac/plugins/action/reports.py
@@ -38,6 +38,7 @@ argument_spec.update(dict(
view=dict(type="dict"),
viewGroupId=dict(type="str"),
viewGroupVersion=dict(type="str"),
+ dataCategory=dict(type="str"),
reportId=dict(type="str"),
))
@@ -61,6 +62,7 @@ class Reports(object):
view=params.get("view"),
viewGroupId=params.get("viewGroupId"),
viewGroupVersion=params.get("viewGroupVersion"),
+ dataCategory=params.get("dataCategory"),
report_id=params.get("reportId"),
)
@@ -80,6 +82,7 @@ class Reports(object):
new_object_params['view'] = self.new_object.get('view')
new_object_params['viewGroupId'] = self.new_object.get('viewGroupId')
new_object_params['viewGroupVersion'] = self.new_object.get('viewGroupVersion')
+ new_object_params['dataCategory'] = self.new_object.get('dataCategory')
return new_object_params
def delete_by_id_params(self):
@@ -157,6 +160,7 @@ class Reports(object):
("view", "view"),
("viewGroupId", "viewGroupId"),
("viewGroupVersion", "viewGroupVersion"),
+ ("dataCategory", "dataCategory"),
("reportId", "report_id"),
]
# Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
diff --git a/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool.py b/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool.py
index 22ea8edae..152438824 100644
--- a/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool.py
+++ b/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool.py
@@ -98,6 +98,12 @@ class ReserveIpSubpool(object):
self.new_object.get('site_id')
new_object_params['offset'] = self.new_object.get('offset')
new_object_params['limit'] = self.new_object.get('limit')
+ new_object_params['ignore_inherited_groups'] = self.new_object.get('ignoreInheritedGroups') or \
+ self.new_object.get('ignore_inherited_groups')
+ new_object_params['pool_usage'] = self.new_object.get('poolUsage') or \
+ self.new_object.get('pool_usage')
+ new_object_params['group_name'] = self.new_object.get('groupName') or \
+ self.new_object.get('group_name')
return new_object_params
def create_params(self):
@@ -122,6 +128,7 @@ class ReserveIpSubpool(object):
new_object_params['ipv4TotalHost'] = self.new_object.get('ipv4TotalHost')
new_object_params['ipv6TotalHost'] = self.new_object.get('ipv6TotalHost')
new_object_params['slaacSupport'] = self.new_object.get('slaacSupport')
+ new_object_params['siteId'] = self.new_object.get('siteId')
new_object_params['site_id'] = self.new_object.get('site_id')
return new_object_params
@@ -140,12 +147,13 @@ class ReserveIpSubpool(object):
new_object_params['ipv6Prefix'] = self.new_object.get('ipv6Prefix')
new_object_params['ipv6PrefixLength'] = self.new_object.get('ipv6PrefixLength')
new_object_params['ipv6Subnet'] = self.new_object.get('ipv6Subnet')
+ new_object_params['ipv6TotalHost'] = self.new_object.get('ipv6TotalHost')
new_object_params['ipv6GateWay'] = self.new_object.get('ipv6GateWay')
new_object_params['ipv6DhcpServers'] = self.new_object.get('ipv6DhcpServers')
new_object_params['ipv6DnsServers'] = self.new_object.get('ipv6DnsServers')
- new_object_params['ipv6TotalHost'] = self.new_object.get('ipv6TotalHost')
new_object_params['slaacSupport'] = self.new_object.get('slaacSupport')
new_object_params['ipv4GateWay'] = self.new_object.get('ipv4GateWay')
+ new_object_params['siteId'] = self.new_object.get('siteId')
new_object_params['id'] = self.new_object.get('id')
new_object_params['site_id'] = self.new_object.get('site_id')
return new_object_params
diff --git a/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_info.py b/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_info.py
index c704db1ef..bcf946a93 100644
--- a/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_info.py
@@ -26,8 +26,11 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
siteId=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ ignoreInheritedGroups=dict(type="str"),
+ poolUsage=dict(type="str"),
+ groupName=dict(type="str"),
headers=dict(type="dict"),
))
@@ -69,6 +72,9 @@ class ActionModule(ActionBase):
site_id=params.get("siteId"),
offset=params.get("offset"),
limit=params.get("limit"),
+ ignore_inherited_groups=params.get("ignoreInheritedGroups"),
+ pool_usage=params.get("poolUsage"),
+ group_name=params.get("groupName"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_update.py b/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_update.py
index 4d2f8bf58..c3ef0accc 100644
--- a/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_update.py
+++ b/ansible_collections/cisco/dnac/plugins/action/reserve_ip_subpool_update.py
@@ -33,10 +33,10 @@ argument_spec.update(dict(
ipv6Prefix=dict(type="bool"),
ipv6PrefixLength=dict(type="int"),
ipv6Subnet=dict(type="str"),
+ ipv6TotalHost=dict(type="int"),
ipv6GateWay=dict(type="str"),
ipv6DhcpServers=dict(type="list"),
ipv6DnsServers=dict(type="list"),
- ipv6TotalHost=dict(type="int"),
slaacSupport=dict(type="bool"),
ipv4GateWay=dict(type="str"),
siteId=dict(type="str"),
@@ -86,10 +86,10 @@ class ActionModule(ActionBase):
ipv6Prefix=params.get("ipv6Prefix"),
ipv6PrefixLength=params.get("ipv6PrefixLength"),
ipv6Subnet=params.get("ipv6Subnet"),
+ ipv6TotalHost=params.get("ipv6TotalHost"),
ipv6GateWay=params.get("ipv6GateWay"),
ipv6DhcpServers=params.get("ipv6DhcpServers"),
ipv6DnsServers=params.get("ipv6DnsServers"),
- ipv6TotalHost=params.get("ipv6TotalHost"),
slaacSupport=params.get("slaacSupport"),
ipv4GateWay=params.get("ipv4GateWay"),
site_id=params.get("siteId"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/roles.py b/ansible_collections/cisco/dnac/plugins/action/roles.py
new file mode 100644
index 000000000..38b6ba42a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/roles.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ role=dict(type="str"),
+ description=dict(type="str"),
+ resourceTypes=dict(type="list"),
+ roleId=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["role", "roleId"], True),
+ ("state", "absent", ["role", "roleId"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class Roles(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ role=params.get("role"),
+ description=params.get("description"),
+ resourceTypes=params.get("resourceTypes"),
+ roleId=params.get("roleId"),
+ role_id=params.get("roleId"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['role'] = self.new_object.get('role')
+ new_object_params['description'] = self.new_object.get('description')
+ new_object_params['resourceTypes'] = self.new_object.get('resourceTypes')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['role_id'] = self.new_object.get('role_id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['roleId'] = self.new_object.get('roleId')
+ new_object_params['description'] = self.new_object.get('description')
+ new_object_params['resourceTypes'] = self.new_object.get('resourceTypes')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="userand_roles",
+ function="get_roles_api",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="userand_roles",
+ function="get_roles_api",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self):
+ id_exists = False
+ name_exists = False
+ prev_obj = None
+ o_id = self.new_object.get("id")
+ o_id = o_id or self.new_object.get("role_id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ _id = _id or prev_obj.get("roleId")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ self.new_object.update(dict(role_id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("role", "role"),
+ ("description", "description"),
+ ("resourceTypes", "resourceTypes"),
+ ("roleId", "roleId"),
+ ("roleId", "role_id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="add_role_api",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def update(self):
+ id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="update_role_api",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ id = self.new_object.get("id")
+ id = id or self.new_object.get("role_id")
+ name = self.new_object.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ id_ = id_ or prev_obj_name.get("roleId")
+ if id_:
+ self.new_object.update(dict(role_id=id_))
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="delete_role_api",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = Roles(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
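
The new roles.py action plugin above follows the collection's usual present/absent reconciliation: look up the existing object, then create, update, or report it unchanged, exactly as the branches in run() lay out. Note also that the required_if entries use the trailing True flag, which AnsibleArgSpecValidator reads as "at least one of", so either role or roleId satisfies the condition. Below is a minimal stand-alone sketch of that decision flow; the DNACSDK lookups and writes are replaced by plain callables, so it is illustrative only and not the plugin itself.

    # Minimal sketch of the present/absent reconciliation these action plugins
    # implement. The real run() delegates lookups and writes to DNACSDK; here
    # they are plain callables so the flow can be exercised stand-alone.
    def reconcile(state, exists, requires_update, create, update, delete):
        """Return (changed, response) for the requested state."""
        found, prev_obj = exists()
        if state == "present":
            if found:
                if requires_update(prev_obj):
                    return True, update()      # object drifted, push the changes
                return False, prev_obj         # already in the desired shape
            return True, create()              # nothing there yet
        if state == "absent":
            if found:
                return True, delete()
            return False, None                 # already absent
        raise ValueError("unsupported state: %s" % state)


    if __name__ == "__main__":
        # Pretend the role exists but its description drifted.
        changed, response = reconcile(
            state="present",
            exists=lambda: (True, {"roleId": "r-1", "description": "old"}),
            requires_update=lambda prev: prev["description"] != "new",
            create=lambda: {"roleId": "r-1", "description": "new"},
            update=lambda: {"roleId": "r-1", "description": "new"},
            delete=lambda: None,
        )
        print(changed, response)  # -> True {'roleId': 'r-1', 'description': 'new'}
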
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways.py b/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways.py
new file mode 100644
index 000000000..0fe26ddfb
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways.py
@@ -0,0 +1,284 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaAnycastGateways(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['id'] = id or self.new_object.get('id')
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['virtual_network_name'] = self.new_object.get('virtualNetworkName') or \
+ self.new_object.get('virtual_network_name')
+ new_object_params['ip_pool_name'] = self.new_object.get('ipPoolName') or \
+ self.new_object.get('ip_pool_name')
+ new_object_params['vlan_name'] = self.new_object.get('vlanName') or \
+ self.new_object.get('vlan_name')
+ new_object_params['vlan_id'] = self.new_object.get('vlanId') or \
+ self.new_object.get('vlan_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_anycast_gateways",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_anycast_gateways",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("fabricId", "fabricId"),
+ ("virtualNetworkName", "virtualNetworkName"),
+ ("ipPoolName", "ipPoolName"),
+ ("tcpMssAdjustment", "tcpMssAdjustment"),
+ ("vlanName", "vlanName"),
+ ("vlanId", "vlanId"),
+ ("trafficType", "trafficType"),
+ ("poolType", "poolType"),
+ ("securityGroupName", "securityGroupName"),
+ ("isCriticalPool", "isCriticalPool"),
+ ("isLayer2FloodingEnabled", "isLayer2FloodingEnabled"),
+ ("isWirelessPool", "isWirelessPool"),
+ ("isIpDirectedBroadcast", "isIpDirectedBroadcast"),
+ ("isIntraSubnetRoutingEnabled", "isIntraSubnetRoutingEnabled"),
+ ("isMultipleIpToMacAddresses", "isMultipleIpToMacAddresses"),
+ ("isSupplicantBasedExtendedNodeOnboarding", "isSupplicantBasedExtendedNodeOnboarding"),
+ ("id", "id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_anycast_gateways",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_anycast_gateways",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_anycast_gateway_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaAnycastGateways(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+                    dnac.fail_json("Could not create object: {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get the object to be deleted: {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
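
In the anycast-gateways plugin above, requires_update() walks the (dnac_param, ansible_param) pairs and compares the object returned by DNAC against the first payload entry via dnac_compare_equality. That helper lives in plugin_utils/dnac.py and is not part of this diff; the sketch below uses a simplified stand-in that treats an unset requested value as "not managed", which is roughly how the drift check behaves for omitted parameters.

    # Rough stand-in for the drift check in requires_update(). The real
    # dnac_compare_equality helper lives in plugin_utils/dnac.py and is not
    # shown in this diff; this version assumes a requested value of None
    # means "not managed by the task" and therefore never forces an update.
    def compare_equality(current, requested):
        if requested is None:
            return True
        return current == requested


    def requires_update(current_obj, requested_obj, obj_params):
        return any(
            not compare_equality(current_obj.get(dnac_param),
                                 requested_obj.get(ansible_param))
            for dnac_param, ansible_param in obj_params
        )


    current = {"vlanName": "DATA_VLAN", "vlanId": 100, "trafficType": "DATA"}
    requested = {"vlanName": "DATA_VLAN", "vlanId": 200}  # vlanId drifted
    pairs = [("vlanName", "vlanName"), ("vlanId", "vlanId"),
             ("trafficType", "trafficType")]
    print(requires_update(current, requested, pairs))  # -> True
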
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_count_info.py
new file mode 100644
index 000000000..f33187d70
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_count_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ virtualNetworkName=dict(type="str"),
+ ipPoolName=dict(type="str"),
+ vlanName=dict(type="str"),
+ vlanId=dict(type="int"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ virtual_network_name=params.get("virtualNetworkName"),
+ ip_pool_name=params.get("ipPoolName"),
+ vlan_name=params.get("vlanName"),
+ vlan_id=params.get("vlanId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_anycast_gateway_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_info.py
new file mode 100644
index 000000000..7e13dde41
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_anycast_gateways_info.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ id=dict(type="str"),
+ fabricId=dict(type="str"),
+ virtualNetworkName=dict(type="str"),
+ ipPoolName=dict(type="str"),
+ vlanName=dict(type="str"),
+ vlanId=dict(type="int"),
+ offset=dict(type="int"),
+ limit=dict(type="int"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ id=params.get("id"),
+ fabric_id=params.get("fabricId"),
+ virtual_network_name=params.get("virtualNetworkName"),
+ ip_pool_name=params.get("ipPoolName"),
+ vlan_name=params.get("vlanName"),
+ vlan_id=params.get("vlanId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_anycast_gateways',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
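
The *_count_info and *_info plugins above are thin read-only wrappers: get_object() remaps the camelCase task arguments onto the snake_case keyword names the SDK functions expect, and run() makes a single exec() call. The generic sketch below shows that remapping; the real plugins hard-code the mapping per module rather than deriving it, and the parameter names here are only examples.

    # Generic sketch of the camelCase -> snake_case remapping done in
    # get_object(). Each *_info plugin hard-codes its own parameter list;
    # the names below are only examples.
    import re


    def to_snake_case(name):
        return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()


    def remap_params(task_args, allowed):
        """Keep only the allowed camelCase args, renamed for the SDK call."""
        return {to_snake_case(key): task_args.get(key) for key in allowed}


    args = {"fabricId": "f-1", "virtualNetworkName": "CORP", "limit": 25}
    print(remap_params(args, ["fabricId", "virtualNetworkName", "ipPoolName", "limit"]))
    # -> {'fabric_id': 'f-1', 'virtual_network_name': 'CORP',
    #     'ip_pool_name': None, 'limit': 25}
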
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles.py b/ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles.py
new file mode 100644
index 000000000..fde4c48cc
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present"]),
+ payload=dict(type="list"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaAuthenticationProfiles(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['authentication_profile_name'] = self.new_object.get('authenticationProfileName') or \
+ self.new_object.get('authentication_profile_name')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method, using get all
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_authentication_profiles",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("fabricId", "fabricId"),
+ ("authenticationProfileName", "authenticationProfileName"),
+ ("authenticationOrder", "authenticationOrder"),
+ ("dot1xToMabFallbackTimeout", "dot1xToMabFallbackTimeout"),
+ ("wakeOnLan", "wakeOnLan"),
+ ("numberOfHosts", "numberOfHosts"),
+ ]
+        # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_authentication_profile",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaAuthenticationProfiles(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+                dnac.fail_json("Object does not exist; this plugin only supports update")
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles_info.py
new file mode 100644
index 000000000..cb7a4a143
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_authentication_profiles_info.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ authenticationProfileName=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ authentication_profile_name=params.get("authenticationProfileName"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_authentication_profiles',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies.py b/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies.py
new file mode 100644
index 000000000..265d7c34b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaExtranetPolicies(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['extranet_policy_name'] = self.new_object.get('extranetPolicyName') or \
+ self.new_object.get('extranet_policy_name')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_extranet_policies",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_extranet_policies",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("extranetPolicyName", "extranetPolicyName"),
+ ("fabricIds", "fabricIds"),
+ ("providerVirtualNetworkName", "providerVirtualNetworkName"),
+ ("subscriberVirtualNetworkNames", "subscriberVirtualNetworkNames"),
+ ("id", "id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_extranet_policy",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_extranet_policy",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_extranet_policy_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaExtranetPolicies(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+                    dnac.fail_json("Could not create object: {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get the object to be deleted: {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
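
Both lookup helpers in the extranet-policies plugin above lean on get_dict_result() from plugin_utils/dnac.py to pick the matching entry out of the list the API returned. Its exact behaviour is defined in the collection, not in this diff; a simplified equivalent for the common list-of-dicts case would be:

    # Simplified equivalent of the get_dict_result(items, key, value) lookup
    # used by get_object_by_name() and get_object_by_id(). The collection's
    # helper may handle more shapes (single dict, nested responses); this
    # covers the usual list-of-dicts case.
    def find_by_key(items, key, value):
        if isinstance(items, dict):
            items = [items]
        for item in items or []:
            if isinstance(item, dict) and item.get(key) == value:
                return item
        return None


    policies = [
        {"id": "ep-1", "extranetPolicyName": "shared-services"},
        {"id": "ep-2", "extranetPolicyName": "guest"},
    ]
    print(find_by_key(policies, "extranetPolicyName", "guest"))
    # -> {'id': 'ep-2', 'extranetPolicyName': 'guest'}
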
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_count_info.py
new file mode 100644
index 000000000..b880f0edd
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_count_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_extranet_policy_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_info.py
new file mode 100644
index 000000000..ebb3b4e12
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_extranet_policies_info.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ extranetPolicyName=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ extranet_policy_name=params.get("extranetPolicyName"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_extranet_policies',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_authentication_profile.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_authentication_profile.py
index 91a1f3ae5..d546eed18 100644
--- a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_authentication_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_authentication_profile.py
@@ -159,8 +159,11 @@ class SdaFabricAuthenticationProfile(object):
return result
def delete(self):
- id = self.new_object.get("id")
- name = self.new_object.get("name")
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
result = None
result = self.dnac.exec(
family="sda",
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_border_device.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_border_device.py
index ab907f808..1088414d9 100644
--- a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_border_device.py
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_border_device.py
@@ -144,8 +144,11 @@ class SdaFabricBorderDevice(object):
return result
def delete(self):
- id = self.new_object.get("id")
- name = self.new_object.get("name")
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
result = None
result = self.dnac.exec(
family="sda",
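
The two hunks above change delete() in sda_fabric_authentication_profile.py and sda_fabric_border_device.py so that, when no top-level id or name was supplied, the identifiers are taken from the first payload entry instead. A standalone sketch of that resolution order follows (payload handling only, no SDK calls; error handling is simplified compared to the plugins themselves, which assume a payload list is present):

    # Sketch of the id/name resolution order the patched delete() methods use:
    # prefer the top-level task arguments, then fall back to the first payload
    # item.
    def resolve_identity(new_object):
        payload = new_object.get("payload") or []
        first = payload[0] if payload and isinstance(payload[0], dict) else {}
        obj_id = new_object.get("id") or first.get("id")
        name = new_object.get("name") or first.get("name")
        return obj_id, name


    print(resolve_identity({"payload": [{"id": "abc-123", "name": "border-1"}]}))
    # -> ('abc-123', 'border-1')
    print(resolve_identity({"id": "top-level", "payload": [{"id": "ignored"}]}))
    # -> ('top-level', None)
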
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices.py
new file mode 100644
index 000000000..e1dfa3e56
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ deviceRoles=dict(type="str"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["id"], True),
+ ("state", "present", ["payload"], True),
+ ("state", "absent", ["id"], True),
+ ("state", "absent", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaFabricDevices(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ device_roles=params.get("deviceRoles"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['network_device_id'] = self.new_object.get('networkDeviceId') or \
+ self.new_object.get('network_device_id')
+ new_object_params['device_roles'] = self.new_object.get('deviceRoles') or \
+ self.new_object.get('device_roles')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("networkDeviceId", "networkDeviceId"),
+ ("fabricId", "fabricId"),
+ ("deviceRoles", "deviceRoles"),
+ ("borderDeviceSettings", "borderDeviceSettings"),
+ ("fabricId", "fabric_id"),
+ ("networkDeviceId", "network_device_id"),
+ ("deviceRoles", "device_roles"),
+ ("id", "id"),
+ ]
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_fabric_devices",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_fabric_devices",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_a_fabric_device_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaFabricDevices(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+                    dnac.fail_json("Could not create object: {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get the object to be deleted: {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_count_info.py
new file mode 100644
index 000000000..f61f179fa
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_count_info.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ deviceRoles=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ device_roles=params.get("deviceRoles"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
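The *_info plugins above are read-only: get_object() simply remaps the camelCase task arguments to the snake_case parameter names the SDK call expects and forwards them to a single get_* function. A hedged sketch of that remapping follows; camel_to_snake() and to_sdk_params() are generic helpers written for illustration, not functions from the collection.

import re

def camel_to_snake(name):
    # fabricId -> fabric_id, networkDeviceId -> network_device_id
    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()

def to_sdk_params(task_args, allowed):
    # Keep only the documented arguments and rename them for the SDK call.
    return {camel_to_snake(k): v for k, v in task_args.items() if k in allowed}

print(to_sdk_params(
    {"fabricId": "f-1", "networkDeviceId": "d-9", "deviceRoles": "EDGE_NODE"},
    allowed={"fabricId", "networkDeviceId", "deviceRoles", "headers"},
))
# {'fabric_id': 'f-1', 'network_device_id': 'd-9', 'device_roles': 'EDGE_NODE'}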
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_info.py
new file mode 100644
index 000000000..503be554a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ deviceRoles=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ device_roles=params.get("deviceRoles"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs.py
new file mode 100644
index 000000000..6655e5b6a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs.py
@@ -0,0 +1,261 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaFabricDevicesLayer2Handoffs(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['network_device_id'] = self.new_object.get('networkDeviceId') or \
+ self.new_object.get('network_device_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices_layer2_handoffs",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices_layer2_handoffs",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("networkDeviceId", "networkDeviceId"),
+ ("fabricId", "fabricId"),
+ ("interfaceName", "interfaceName"),
+ ("internalVlanId", "internalVlanId"),
+ ("externalVlanId", "externalVlanId"),
+ ("fabricId", "fabric_id"),
+ ("networkDeviceId", "network_device_id"),
+ ("id", "id"),
+ ]
+ # Method 1. Compare the params requested by Ansible with the current (DNAC) object.
+ # If any pair is not equal, the object requires an update.
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_fabric_devices_layer2_handoffs",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ # NOTE: The SDK exposes no update function for this resource, so result stays None.
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_fabric_device_layer2_handoff_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaFabricDevicesLayer2Handoffs(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not get object to be delete {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
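The idempotency decision in requires_update() above walks a list of (DNAC field, Ansible field) pairs and flags an update as soon as one pair differs under dnac_compare_equality. The sketch below restates that check in isolation; compare() is an assumed simplification of dnac_compare_equality (which lives in plugin_utils/dnac.py) that treats an unspecified requested value as "no change requested".

# Hedged sketch of the requires_update() idempotency check.
def compare(current_value, requested_value):
    if requested_value is None:
        return True  # field not requested, so no drift for this pair
    return current_value == requested_value

def requires_update(current_obj, requested_obj, obj_params):
    # True as soon as one requested field differs from the current DNAC state.
    return any(
        not compare(current_obj.get(dnac_param), requested_obj.get(ansible_param))
        for dnac_param, ansible_param in obj_params
    )

obj_params = [
    ("networkDeviceId", "networkDeviceId"),
    ("interfaceName", "interfaceName"),
    ("internalVlanId", "internalVlanId"),
]
current = {"networkDeviceId": "d-9", "interfaceName": "Te1/0/1", "internalVlanId": 300}
requested = {"networkDeviceId": "d-9", "interfaceName": "Te1/0/1", "internalVlanId": 310}
print(requires_update(current, requested, obj_params))  # True: the VLAN differs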
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_count_info.py
new file mode 100644
index 000000000..60d88025b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_count_info.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_layer2_handoffs_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_info.py
new file mode 100644
index 000000000..fd3e8f8f8
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_layer2_handoffs',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits.py
new file mode 100644
index 000000000..7135b248e
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaFabricDevicesLayer2HandoffsIpTransits(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['network_device_id'] = self.new_object.get('networkDeviceId') or \
+ self.new_object.get('network_device_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices_layer3_handoffs_with_ip_transit",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices_layer3_handoffs_with_ip_transit",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("networkDeviceId", "networkDeviceId"),
+ ("fabricId", "fabricId"),
+ ("transitNetworkId", "transitNetworkId"),
+ ("interfaceName", "interfaceName"),
+ ("externalConnectivityIpPoolName", "externalConnectivityIpPoolName"),
+ ("virtualNetworkName", "virtualNetworkName"),
+ ("vlanId", "vlanId"),
+ ("tcpMssAdjustment", "tcpMssAdjustment"),
+ ("localIpAddress", "localIpAddress"),
+ ("remoteIpAddress", "remoteIpAddress"),
+ ("localIpv6Address", "localIpv6Address"),
+ ("remoteIpv6Address", "remoteIpv6Address"),
+ ("fabricId", "fabric_id"),
+ ("networkDeviceId", "network_device_id"),
+ ("id", "id"),
+ ]
+ # Method 1. Compare the params requested by Ansible with the current (DNAC) object.
+ # If any pair is not equal, the object requires an update.
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_fabric_devices_layer3_handoffs_with_ip_transit",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_fabric_devices_layer3_handoffs_with_ip_transit",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_fabric_device_layer3_handoff_with_ip_transit_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaFabricDevicesLayer2HandoffsIpTransits(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not get object to be delete {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py
new file mode 100644
index 000000000..bab514675
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_layer3_handoffs_with_ip_transit_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_info.py
new file mode 100644
index 000000000..dd843a500
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_ip_transits_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_layer3_handoffs_with_ip_transit',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits.py
new file mode 100644
index 000000000..5353e6183
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits.py
@@ -0,0 +1,264 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaFabricDevicesLayer2HandoffsSdaTransits(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['network_device_id'] = self.new_object.get('networkDeviceId') or \
+ self.new_object.get('network_device_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_all_params(self):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabric_id')
+ new_object_params['network_device_id'] = self.new_object.get('network_device_id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices_layer3_handoffs_with_sda_transit",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_devices_layer3_handoffs_with_sda_transit",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("networkDeviceId", "networkDeviceId"),
+ ("fabricId", "fabricId"),
+ ("transitNetworkId", "transitNetworkId"),
+ ("affinityIdPrime", "affinityIdPrime"),
+ ("affinityIdDecider", "affinityIdDecider"),
+ ("connectedToInternet", "connectedToInternet"),
+ ("isMulticastOverTransitEnabled", "isMulticastOverTransitEnabled"),
+ ("fabricId", "fabric_id"),
+ ("networkDeviceId", "network_device_id"),
+ ]
+ # Method 1. Compare the params requested by Ansible with the current (DNAC) object.
+ # If any pair is not equal, the object requires an update.
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_fabric_devices_layer3_handoffs_with_sda_transit",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_fabric_devices_layer3_handoffs_with_sda_transit",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_fabric_device_layer3_handoffs_with_sda_transit",
+ params=self.delete_all_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaFabricDevicesLayer2HandoffsSdaTransits(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not get object to be delete {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
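One difference worth noting in the SDA-transit variant above: its argspec has no id argument, so delete_all_params() scopes the deletion by fabric_id and network_device_id rather than by a single object id as the layer2 and IP-transit plugins do. A small hedged sketch of that parameter selection; select_delete_params() is written here for illustration and is not part of the collection.

# Hedged sketch: choose delete parameters by id or by fabric/device scope.
def select_delete_params(new_object, delete_by_id):
    if delete_by_id:
        return {"id": new_object.get("id")}
    return {
        "fabric_id": new_object.get("fabric_id"),
        "network_device_id": new_object.get("network_device_id"),
    }

new_object = {"fabric_id": "f-1", "network_device_id": "d-9"}
print(select_delete_params(new_object, delete_by_id=False))
# {'fabric_id': 'f-1', 'network_device_id': 'd-9'}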
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py
new file mode 100644
index 000000000..4a46b53b5
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_layer3_handoffs_with_sda_transit_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_info.py
new file mode 100644
index 000000000..556e656c6
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_devices_layer2_handoffs_sda_transits_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_devices_layer3_handoffs_with_sda_transit',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites.py
new file mode 100644
index 000000000..c9f4a4378
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites.py
@@ -0,0 +1,263 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaFabricSites(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['id'] = id or self.new_object.get('id')
+ new_object_params['site_id'] = self.new_object.get('siteId') or \
+ self.new_object.get('site_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_sites",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_sites",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("siteId", "siteId"),
+ ("authenticationProfileName", "authenticationProfileName"),
+ ("isPubSubEnabled", "isPubSubEnabled"),
+ ("id", "id"),
+ ]
+ # Method 1. Compare the params requested by Ansible with the current (DNAC) object.
+ # If any pair is not equal, the object requires an update.
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_fabric_site",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_fabric_site",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_fabric_site_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaFabricSites(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get object to be deleted {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
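
The requires_update check above is what makes state=present idempotent: each (dnac_param, ansible_param) pair is looked up in the object DNA Center already has and in the first entry of the requested payload, and a single mismatch triggers update(). A minimal standalone sketch of that comparison, assuming dnac_compare_equality reduces to a plain equality test that treats an omitted requested value as a match (compare_equality and the sample values below are illustrative, not part of the collection):

def compare_equality(current, requested):
    # Hypothetical stand-in for dnac_compare_equality: an omitted requested
    # value expresses no opinion, anything else must match exactly.
    if requested is None:
        return True
    return current == requested


def requires_update(current_obj, payload):
    # Only the first payload item is inspected, mirroring the plugin above.
    requested_obj = payload[0] if payload else {}
    obj_params = [
        ("id", "id"),
        ("siteId", "siteId"),
        ("authenticationProfileName", "authenticationProfileName"),
        ("isPubSubEnabled", "isPubSubEnabled"),
    ]
    return any(
        not compare_equality(current_obj.get(dnac), requested_obj.get(ansible))
        for dnac, ansible in obj_params
    )


current = {"id": "abc", "siteId": "site-1",
           "authenticationProfileName": "No Authentication", "isPubSubEnabled": True}
requested = [{"siteId": "site-1", "authenticationProfileName": "Open Authentication"}]
print(requires_update(current, requested))  # True: the profile differs, so update() runs
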
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_count_info.py
new file mode 100644
index 000000000..28d684571
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_count_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_site_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_info.py
new file mode 100644
index 000000000..9bea361ed
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_sites_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ id=dict(type="str"),
+ siteId=dict(type="str"),
+ offset=dict(type="int"),
+ limit=dict(type="int"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ id=params.get("id"),
+ site_id=params.get("siteId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_sites',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
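
The companion *_info actions carry no state logic at all: get_object just remaps the camelCase task options (id, siteId, offset, limit, headers) onto the snake_case keyword names the DNA Center SDK expects, and the result of a single dnac.exec call is returned as dnac_response. A small sketch of that remapping, using the option names from the module above (the helper name and sample values are illustrative):

def to_sdk_params(task_args):
    # Ansible-facing camelCase option -> SDK snake_case keyword argument.
    mapping = {
        "id": "id",
        "siteId": "site_id",
        "offset": "offset",
        "limit": "limit",
        "headers": "headers",
    }
    return {sdk: task_args.get(ansible) for ansible, sdk in mapping.items()}


print(to_sdk_params({"siteId": "site-1", "limit": 25}))
# {'id': None, 'site_id': 'site-1', 'offset': None, 'limit': 25, 'headers': None}
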
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones.py
new file mode 100644
index 000000000..f441b8890
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaFabricZones(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['id'] = id or self.new_object.get('id')
+ new_object_params['site_id'] = self.new_object.get('siteId') or \
+ self.new_object.get('site_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_zones",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_fabric_zones",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("siteId", "siteId"),
+ ("authenticationProfileName", "authenticationProfileName"),
+ ("id", "id"),
+ ]
+        # Method 1. Compare the params present in the request (Ansible) object with the current (DNAC) object
+        # If any pair is not equal, an update is required
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_fabric_zone",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_fabric_zone",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_fabric_zone_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaFabricZones(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get object to be deleted {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_count_info.py
new file mode 100644
index 000000000..28b84a9c0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_count_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_zone_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_info.py
new file mode 100644
index 000000000..c1e5a1016
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_fabric_zones_info.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ id=dict(type="str"),
+ siteId=dict(type="str"),
+ offset=dict(type="int"),
+ limit=dict(type="int"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ id=params.get("id"),
+ site_id=params.get("siteId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_fabric_zones',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments.py b/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments.py
new file mode 100644
index 000000000..2c2158103
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments.py
@@ -0,0 +1,291 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ interfaceName=dict(type="str"),
+ dataVlanName=dict(type="str"),
+ voiceVlanName=dict(type="str"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaPortAssignments(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ interface_name=params.get("interfaceName"),
+ data_vlan_name=params.get("dataVlanName"),
+ voice_vlan_name=params.get("voiceVlanName"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['fabric_id'] = self.new_object.get('fabricId') or \
+ self.new_object.get('fabric_id')
+ new_object_params['network_device_id'] = self.new_object.get('networkDeviceId') or \
+ self.new_object.get('network_device_id')
+ new_object_params['interface_name'] = self.new_object.get('interfaceName') or \
+ self.new_object.get('interface_name')
+ new_object_params['data_vlan_name'] = self.new_object.get('dataVlanName') or \
+ self.new_object.get('data_vlan_name')
+ new_object_params['voice_vlan_name'] = self.new_object.get('voiceVlanName') or \
+ self.new_object.get('voice_vlan_name')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_port_assignments",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_port_assignments",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("fabricId", "fabricId"),
+ ("networkDeviceId", "networkDeviceId"),
+ ("interfaceName", "interfaceName"),
+ ("connectedDeviceType", "connectedDeviceType"),
+ ("dataVlanName", "dataVlanName"),
+ ("voiceVlanName", "voiceVlanName"),
+ ("authenticateTemplateName", "authenticateTemplateName"),
+ ("scalableGroupName", "scalableGroupName"),
+ ("interfaceDescription", "interfaceDescription"),
+ ("fabricId", "fabric_id"),
+ ("networkDeviceId", "network_device_id"),
+ ("interfaceName", "interface_name"),
+ ("dataVlanName", "data_vlan_name"),
+ ("voiceVlanName", "voice_vlan_name"),
+ ("id", "id"),
+ ]
+        # Method 1. Compare the params present in the request (Ansible) object with the current (DNAC) object
+        # If any pair is not equal, an update is required
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="add_port_assignments",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="update_port_assignments",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_port_assignment_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaPortAssignments(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get object to be deleted {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
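
Every CRUD plugin introduced in this patch drives run() through the same decision tree: for state=present the object is looked up and then updated, left alone, or created; for state=absent it is deleted or reported as already absent. A compact sketch of that flow with the SDK stubbed out, to make the idempotency contract explicit (FakeClient and reconcile are illustrative names, not part of the collection):

class FakeClient:
    """Stand-in for DNACSDK: records which terminal state a run reached."""

    def __init__(self):
        self.outcome = None

    def object_created(self): self.outcome = "created"
    def object_updated(self): self.outcome = "updated"
    def object_already_present(self): self.outcome = "already present"
    def object_deleted(self): self.outcome = "deleted"
    def object_already_absent(self): self.outcome = "already absent"


def reconcile(client, state, existing_obj, needs_update):
    # existing_obj is what exists() found (None when nothing matched);
    # needs_update is requires_update() evaluated against that object.
    if state == "present":
        if existing_obj is None:
            client.object_created()
            return {"action": "create"}
        if needs_update:
            client.object_updated()
            return {"action": "update"}
        client.object_already_present()
        return existing_obj
    if existing_obj is None:
        client.object_already_absent()
        return None
    client.object_deleted()
    return {"action": "delete"}


client = FakeClient()
reconcile(client, "present", existing_obj={"id": "p1"}, needs_update=False)
print(client.outcome)  # "already present": nothing is sent to DNA Center
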
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_count_info.py
new file mode 100644
index 000000000..1552d0b04
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_count_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ interfaceName=dict(type="str"),
+ dataVlanName=dict(type="str"),
+ voiceVlanName=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ interface_name=params.get("interfaceName"),
+ data_vlan_name=params.get("dataVlanName"),
+ voice_vlan_name=params.get("voiceVlanName"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_port_assignment_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_info.py
new file mode 100644
index 000000000..58d5080e6
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_port_assignments_info.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ fabricId=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ interfaceName=dict(type="str"),
+ dataVlanName=dict(type="str"),
+ voiceVlanName=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ fabric_id=params.get("fabricId"),
+ network_device_id=params.get("networkDeviceId"),
+ interface_name=params.get("interfaceName"),
+ data_vlan_name=params.get("dataVlanName"),
+ voice_vlan_name=params.get("voiceVlanName"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_port_assignments',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices.py b/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices.py
new file mode 100644
index 000000000..836c8d928
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ AnsibleSDAException,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ payload=dict(type="list"),
+ networkDeviceId=dict(type="str"),
+ siteId=dict(type="str"),
+ id=dict(type="str"),
+))
+
+required_if = [
+ ("state", "present", ["payload"], True),
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class SdaProvisionDevices(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ payload=params.get("payload"),
+ network_device_id=params.get("networkDeviceId"),
+ site_id=params.get("siteId"),
+ id=params.get("id"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ new_object_params['id'] = id or self.new_object.get('id')
+ new_object_params['network_device_id'] = self.new_object.get('networkDeviceId') or \
+ self.new_object.get('network_device_id')
+ new_object_params['site_id'] = self.new_object.get('siteId') or \
+ self.new_object.get('site_id')
+ new_object_params['offset'] = self.new_object.get('offset')
+ new_object_params['limit'] = self.new_object.get('limit')
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['id'] = self.new_object.get('id')
+ return new_object_params
+
+ def update_all_params(self):
+ new_object_params = {}
+ new_object_params['payload'] = self.new_object.get('payload')
+ return new_object_params
+
+ def get_object_by_name(self, name, is_absent=False):
+ result = None
+ # NOTE: Does not have a get by name method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_provisioned_devices",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ if isinstance(items, dict) and items.get("status") == "failed":
+ if is_absent:
+ raise AnsibleSDAException(response=items)
+ result = None
+ return result
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ if is_absent:
+ raise
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="sda",
+ function="get_provisioned_devices",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
+ return result
+
+ def exists(self, is_absent=False):
+ name = self.new_object.get("name")
+ prev_obj = self.get_object_by_name(name, is_absent=is_absent)
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+
+ obj_params = [
+ ("id", "id"),
+ ("siteId", "siteId"),
+ ("networkDeviceId", "networkDeviceId"),
+ ("networkDeviceId", "network_device_id"),
+ ("siteId", "site_id"),
+ ("id", "id"),
+ ]
+        # Method 1. Compare the params present in the request (Ansible) object with the current (DNAC) object
+        # If any pair is not equal, an update is required
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="sda",
+ function="provision_devices",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ if isinstance(result, dict):
+ if 'response' in result:
+ result = result.get('response')
+ if isinstance(result, dict) and result.get("status") == "failed":
+ raise AnsibleSDAException(response=result)
+ return result
+
+ def update(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="sda",
+ function="re_provision_devices",
+ params=self.update_all_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ requested_obj = self.new_object.get('payload')
+ if requested_obj and len(requested_obj) > 0:
+ requested_obj = requested_obj[0]
+ id = self.new_object.get("id") or requested_obj.get("id")
+ name = self.new_object.get("name") or requested_obj.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ if id_:
+ self.new_object.update(dict(id=id_))
+ result = self.dnac.exec(
+ family="sda",
+ function="delete_provisioned_device_by_id",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = SdaProvisionDevices(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = obj.update()
+ dnac.object_updated()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ try:
+ response = obj.create()
+ dnac.object_created()
+ except AnsibleSDAException as e:
+ dnac.fail_json("Could not create object {e}".format(e=e._response))
+
+ elif state == "absent":
+ try:
+ (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+ except AnsibleSDAException as e:
+                dnac.fail_json("Could not get object to be deleted {e}".format(e=e._response))
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
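
delete() in these plugins accepts either an explicit id or only a name: when no id was supplied it resolves one through get_object_by_name first and only then calls the *_by_id delete function. A minimal sketch of that fallback with the name lookup stubbed out (resolve_id and the sample inventory are illustrative):

def resolve_id(new_object, lookup_by_name):
    # Prefer an id given directly or inside the first payload item; otherwise
    # fall back to a name lookup, mirroring delete() above.
    payload = new_object.get("payload") or [{}]
    requested = payload[0]
    object_id = new_object.get("id") or requested.get("id")
    if object_id:
        return object_id
    name = new_object.get("name") or requested.get("name")
    found = lookup_by_name(name) if name else None
    return found.get("id") if found else None


inventory = {"edge-1": {"id": "dev-123", "name": "edge-1"}}
print(resolve_id({"payload": [{"name": "edge-1"}]}, inventory.get))   # dev-123
print(resolve_id({"payload": [{"name": "unknown"}]}, inventory.get))  # None, nothing to delete
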
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_count_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_count_info.py
new file mode 100644
index 000000000..3e36f931d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_count_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ siteId=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ site_id=params.get("siteId"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_provisioned_devices_count',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_info.py
new file mode 100644
index 000000000..b06f1252e
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_provision_devices_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ id=dict(type="str"),
+ networkDeviceId=dict(type="str"),
+ siteId=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ id=params.get("id"),
+ network_device_id=params.get("networkDeviceId"),
+ site_id=params.get("siteId"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sda",
+ function='get_provisioned_devices',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_site_member_member_info.py b/ansible_collections/cisco/dnac/plugins/action/sda_site_member_member_info.py
new file mode 100644
index 000000000..cc1bca386
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_site_member_member_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ id=dict(type="str"),
+ offset=dict(type="str"),
+ limit=dict(type="str"),
+ memberType=dict(type="str"),
+ level=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ id=params.get("id"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ member_type=params.get("memberType"),
+ level=params.get("level"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='get_devices_that_are_assigned_to_a_site',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/sda_virtual_network_ip_pool.py b/ansible_collections/cisco/dnac/plugins/action/sda_virtual_network_ip_pool.py
index 674f55018..b5128268b 100644
--- a/ansible_collections/cisco/dnac/plugins/action/sda_virtual_network_ip_pool.py
+++ b/ansible_collections/cisco/dnac/plugins/action/sda_virtual_network_ip_pool.py
@@ -83,8 +83,7 @@ class SdaVirtualNetworkIpPool(object):
def get_all_params(self, name=None, id=None):
new_object_params = {}
- new_object_params['site_name_hierarchy'] = self.new_object.get('site_name_hierarchy') or \
- self.new_object.get('siteNameHierarchy')
+ new_object_params['siteNameHierarchy'] = self.new_object.get('site_name_hierarchy')
new_object_params['virtual_network_name'] = self.new_object.get('virtualNetworkName') or \
self.new_object.get('virtual_network_name')
new_object_params['ip_pool_name'] = self.new_object.get('ipPoolName') or \
diff --git a/ansible_collections/cisco/dnac/plugins/action/security_advisories_ids_per_device_info.py b/ansible_collections/cisco/dnac/plugins/action/security_advisories_ids_per_device_info.py
index eebfa5627..c298066b8 100644
--- a/ansible_collections/cisco/dnac/plugins/action/security_advisories_ids_per_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/security_advisories_ids_per_device_info.py
@@ -83,7 +83,7 @@ class ActionModule(ActionBase):
if id:
response = dnac.exec(
family="security_advisories",
- function='get_advisory_ids_per_device',
+ function='get_advisory_device_detail',
params=self.get_object(self._task.args),
)
self._result.update(dict(dnac_response=response))
diff --git a/ansible_collections/cisco/dnac/plugins/action/sensor.py b/ansible_collections/cisco/dnac/plugins/action/sensor.py
index 797239834..7d2df435d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/sensor.py
+++ b/ansible_collections/cisco/dnac/plugins/action/sensor.py
@@ -31,11 +31,17 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
state=dict(type="str", default="present", choices=["present", "absent"]),
- ssids=dict(type="list"),
name=dict(type="str"),
+ version=dict(type="int"),
+ modelVersion=dict(type="int"),
connection=dict(type="str"),
+ ssids=dict(type="list"),
+ profiles=dict(type="list"),
+ encryptionMode=dict(type="str"),
+ runNow=dict(type="str"),
+ locationInfoList=dict(type="list"),
+ sensors=dict(type="list"),
apCoverage=dict(type="list"),
- modelVersion=dict(type="int"),
templateName=dict(type="str"),
))
@@ -52,11 +58,17 @@ class Sensor(object):
def __init__(self, params, dnac):
self.dnac = dnac
self.new_object = dict(
- ssids=params.get("ssids"),
name=params.get("name"),
+ version=params.get("version"),
+ modelVersion=params.get("modelVersion"),
connection=params.get("connection"),
+ ssids=params.get("ssids"),
+ profiles=params.get("profiles"),
+ encryptionMode=params.get("encryptionMode"),
+ runNow=params.get("runNow"),
+ locationInfoList=params.get("locationInfoList"),
+ sensors=params.get("sensors"),
apCoverage=params.get("apCoverage"),
- modelVersion=params.get("modelVersion"),
template_name=params.get("templateName"),
)
@@ -68,11 +80,17 @@ class Sensor(object):
def create_params(self):
new_object_params = {}
- new_object_params['ssids'] = self.new_object.get('ssids')
new_object_params['name'] = self.new_object.get('name')
+ new_object_params['version'] = self.new_object.get('version')
+ new_object_params['modelVersion'] = self.new_object.get('modelVersion')
new_object_params['connection'] = self.new_object.get('connection')
+ new_object_params['ssids'] = self.new_object.get('ssids')
+ new_object_params['profiles'] = self.new_object.get('profiles')
+ new_object_params['encryptionMode'] = self.new_object.get('encryptionMode')
+ new_object_params['runNow'] = self.new_object.get('runNow')
+ new_object_params['locationInfoList'] = self.new_object.get('locationInfoList')
+ new_object_params['sensors'] = self.new_object.get('sensors')
new_object_params['apCoverage'] = self.new_object.get('apCoverage')
- new_object_params['modelVersion'] = self.new_object.get('modelVersion')
return new_object_params
def delete_all_params(self):
@@ -127,11 +145,17 @@ class Sensor(object):
requested_obj = self.new_object
obj_params = [
- ("ssids", "ssids"),
("name", "name"),
+ ("version", "version"),
+ ("modelVersion", "modelVersion"),
("connection", "connection"),
+ ("ssids", "ssids"),
+ ("profiles", "profiles"),
+ ("encryptionMode", "encryptionMode"),
+ ("runNow", "runNow"),
+ ("locationInfoList", "locationInfoList"),
+ ("sensors", "sensors"),
("apCoverage", "apCoverage"),
- ("modelVersion", "modelVersion"),
("templateName", "template_name"),
]
# Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
diff --git a/ansible_collections/cisco/dnac/plugins/action/sensor_test_template_edit.py b/ansible_collections/cisco/dnac/plugins/action/sensor_test_template_edit.py
index 5e75c953b..9b3a8d200 100644
--- a/ansible_collections/cisco/dnac/plugins/action/sensor_test_template_edit.py
+++ b/ansible_collections/cisco/dnac/plugins/action/sensor_test_template_edit.py
@@ -26,8 +26,33 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
templateName=dict(type="str"),
+ name=dict(type="str"),
+ _id=dict(type="str"),
+ version=dict(type="int"),
+ modelVersion=dict(type="int"),
+ startTime=dict(type="int"),
+ lastModifiedTime=dict(type="int"),
+ numAssociatedSensor=dict(type="int"),
+ location=dict(type="str"),
+ siteHierarchy=dict(type="str"),
+ status=dict(type="str"),
+ connection=dict(type="str"),
+ actionInProgress=dict(type="str"),
+ frequency=dict(type="dict"),
+ rssiThreshold=dict(type="int"),
+ numNeighborAPThreshold=dict(type="int"),
+ scheduleInDays=dict(type="int"),
+ wlans=dict(type="list"),
+ ssids=dict(type="list"),
+ profiles=dict(type="list"),
+ testScheduleMode=dict(type="str"),
+ showWlcUpgradeBanner=dict(type="bool"),
+ radioAsSensorRemoved=dict(type="bool"),
+ encryptionMode=dict(type="str"),
+ runNow=dict(type="str"),
locationInfoList=dict(type="list"),
- schedule=dict(type="dict"),
+ sensors=dict(type="list"),
+ apCoverage=dict(type="list"),
))
required_if = []
@@ -66,8 +91,33 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
templateName=params.get("templateName"),
+ name=params.get("name"),
+ _id=params.get("_id"),
+ version=params.get("version"),
+ modelVersion=params.get("modelVersion"),
+ startTime=params.get("startTime"),
+ lastModifiedTime=params.get("lastModifiedTime"),
+ numAssociatedSensor=params.get("numAssociatedSensor"),
+ location=params.get("location"),
+ siteHierarchy=params.get("siteHierarchy"),
+ status=params.get("status"),
+ connection=params.get("connection"),
+ actionInProgress=params.get("actionInProgress"),
+ frequency=params.get("frequency"),
+ rssiThreshold=params.get("rssiThreshold"),
+ numNeighborAPThreshold=params.get("numNeighborAPThreshold"),
+ scheduleInDays=params.get("scheduleInDays"),
+ wlans=params.get("wlans"),
+ ssids=params.get("ssids"),
+ profiles=params.get("profiles"),
+ testScheduleMode=params.get("testScheduleMode"),
+ showWlcUpgradeBanner=params.get("showWlcUpgradeBanner"),
+ radioAsSensorRemoved=params.get("radioAsSensorRemoved"),
+ encryptionMode=params.get("encryptionMode"),
+ runNow=params.get("runNow"),
locationInfoList=params.get("locationInfoList"),
- schedule=params.get("schedule"),
+ sensors=params.get("sensors"),
+ apCoverage=params.get("apCoverage"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/site_assign_device.py b/ansible_collections/cisco/dnac/plugins/action/site_assign_device.py
index dec7df437..d905f21e7 100644
--- a/ansible_collections/cisco/dnac/plugins/action/site_assign_device.py
+++ b/ansible_collections/cisco/dnac/plugins/action/site_assign_device.py
@@ -78,7 +78,7 @@ class ActionModule(ActionBase):
Display().warning("This module is currently unmaintained "
"and will be removed in future releases."
"New module is assign_device_to_site "
- "for 2.3.5.3 or higher")
+ "for 2.3.7.6 or higher")
dnac = DNACSDK(params=self._task.args)
diff --git a/ansible_collections/cisco/dnac/plugins/action/site_count_v2_info.py b/ansible_collections/cisco/dnac/plugins/action/site_count_v2_info.py
new file mode 100644
index 000000000..8f74ffe04
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/site_count_v2_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ id=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ id=params.get("id"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='get_site_count_v2',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
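For reference, a minimal task exercising the new site_count_v2_info plugin could look like the sketch below. It assumes the module is exposed as cisco.dnac.site_count_v2_info (mirroring the plugin filename) and that the usual dnac_host/dnac_username/dnac_password connection arguments supplied by dnac_argument_spec are available; the id value is a placeholder matching the optional argument above.

    - name: Get site count (v2)
      cisco.dnac.site_count_v2_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        id: "site-instance-id"        # optional, per the argument_spec above
      register: result

    - name: Show the API payload returned in dnac_response
      ansible.builtin.debug:
        var: result.dnac_response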
diff --git a/ansible_collections/cisco/dnac/plugins/action/site_health_info.py b/ansible_collections/cisco/dnac/plugins/action/site_health_info.py
index 7a38d07db..adf65e082 100644
--- a/ansible_collections/cisco/dnac/plugins/action/site_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/site_health_info.py
@@ -25,10 +25,10 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- timestamp=dict(type="str"),
siteType=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
+ timestamp=dict(type="float"),
headers=dict(type="dict"),
))
@@ -67,10 +67,10 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
- timestamp=params.get("timestamp"),
site_type=params.get("siteType"),
offset=params.get("offset"),
limit=params.get("limit"),
+ timestamp=params.get("timestamp"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/site_membership_info.py b/ansible_collections/cisco/dnac/plugins/action/site_membership_info.py
index 7c5ec8467..445783fac 100644
--- a/ansible_collections/cisco/dnac/plugins/action/site_membership_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/site_membership_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
siteId=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
deviceFamily=dict(type="str"),
serialNumber=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/site_v2_info.py b/ansible_collections/cisco/dnac/plugins/action/site_v2_info.py
new file mode 100644
index 000000000..de6f8d250
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/site_v2_info.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ groupNameHierarchy=dict(type="str"),
+ id=dict(type="str"),
+ type=dict(type="str"),
+ offset=dict(type="str"),
+ limit=dict(type="str"),
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ group_name_hierarchy=params.get("groupNameHierarchy"),
+ id=params.get("id"),
+ type=params.get("type"),
+ offset=params.get("offset"),
+ limit=params.get("limit"),
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="sites",
+ function='get_site_v2',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
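A comparable hedged sketch for the new site_v2_info plugin, again assuming the cisco.dnac.site_v2_info module name and the standard dnac_* connection arguments; note that offset and limit are declared as strings in this argspec, and the hierarchy value is a placeholder.

    - name: Get sites (v2) under a group name hierarchy
      cisco.dnac.site_v2_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        groupNameHierarchy: "Global/Area"   # placeholder hierarchy
        offset: "1"
        limit: "10"
      register: result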
diff --git a/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py b/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py
index cda2e0c06..917e96d3e 100644
--- a/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/snmpv2_read_community_credential.py
@@ -89,7 +89,7 @@ class Snmpv2ReadCommunityCredential(object):
if isinstance(items, dict):
if 'response' in items:
items = items.get('response')
- result = get_dict_result(items, 'description', name)
+ result = get_dict_result(items, 'description', name) or get_dict_result(items, 'username', name)
except Exception:
result = None
return result
@@ -115,7 +115,7 @@ class Snmpv2ReadCommunityCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py b/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py
index 2ff190d75..e1244b868 100644
--- a/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/snmpv2_write_community_credential.py
@@ -89,7 +89,7 @@ class Snmpv2WriteCommunityCredential(object):
if isinstance(items, dict):
if 'response' in items:
items = items.get('response')
- result = get_dict_result(items, 'description', name)
+ result = get_dict_result(items, 'description', name) or get_dict_result(items, 'username', name)
except Exception:
result = None
return result
@@ -115,7 +115,7 @@ class Snmpv2WriteCommunityCredential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py b/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py
index 313087715..480c82e66 100644
--- a/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/action/snmpv3_credential.py
@@ -137,7 +137,7 @@ class Snmpv3Credential(object):
id_exists = False
name_exists = False
o_id = self.new_object.get("id")
- name = self.new_object.get("username") or self.new_object.get("description")
+ name = self.new_object.get("description") or self.new_object.get("username")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
@@ -167,9 +167,6 @@ class Snmpv3Credential(object):
("snmpMode", "snmpMode"),
("username", "username"),
]
-
- print("requested_obj: ", requested_obj)
- print("current_obj: ", current_obj)
# Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
# If any does not have eq params, it requires update
return any(not dnac_compare_equality(current_obj.get(dnac_param),
diff --git a/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py b/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py
index b5e3b8a6c..57946e44e 100644
--- a/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py
+++ b/ansible_collections/cisco/dnac/plugins/action/swim_import_local.py
@@ -20,7 +20,6 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
DNACSDK,
dnac_argument_spec,
)
-import os
# Get common arguements specification
argument_spec = dnac_argument_spec()
@@ -42,8 +41,7 @@ required_together = []
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
- raise AnsibleActionFail(
- "ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = False
self._supports_check_mode = False
@@ -72,12 +70,8 @@ class ActionModule(ActionBase):
is_third_party=params.get("isThirdParty"),
third_party_vendor=params.get("thirdPartyVendor"),
third_party_image_family=params.get("thirdPartyImageFamily"),
- third_party_application_type=params.get(
- "thirdPartyApplicationType"),
+ third_party_application_type=params.get("thirdPartyApplicationType"),
file_path=params.get("filePath"),
- multipart_fields={'file': (os.path.basename(params.get(
- "filePath")), open(params.get("filePath"), 'rb'))},
- multipart_monitor_callback=None
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/system_health_info.py b/ansible_collections/cisco/dnac/plugins/action/system_health_info.py
index 28683d23b..6c97265c2 100644
--- a/ansible_collections/cisco/dnac/plugins/action/system_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/system_health_info.py
@@ -28,8 +28,8 @@ argument_spec.update(dict(
summary=dict(type="bool"),
domain=dict(type="str"),
subdomain=dict(type="str"),
- limit=dict(type="int"),
- offset=dict(type="int"),
+ limit=dict(type="float"),
+ offset=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/system_performance_historical_info.py b/ansible_collections/cisco/dnac/plugins/action/system_performance_historical_info.py
index 287c72245..27eab2963 100644
--- a/ansible_collections/cisco/dnac/plugins/action/system_performance_historical_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/system_performance_historical_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
kpi=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/system_performance_info.py b/ansible_collections/cisco/dnac/plugins/action/system_performance_info.py
index b05406cbf..3535f9fb6 100644
--- a/ansible_collections/cisco/dnac/plugins/action/system_performance_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/system_performance_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
kpi=dict(type="str"),
function=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_count_info.py b/ansible_collections/cisco/dnac/plugins/action/tag_count_info.py
index 03827a43d..6766d9a9e 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_count_info.py
@@ -28,7 +28,6 @@ argument_spec.update(dict(
name=dict(type="str"),
nameSpace=dict(type="str"),
attributeName=dict(type="str"),
- level=dict(type="str"),
size=dict(type="str"),
systemTag=dict(type="str"),
headers=dict(type="dict"),
@@ -72,7 +71,6 @@ class ActionModule(ActionBase):
name=params.get("name"),
name_space=params.get("nameSpace"),
attribute_name=params.get("attributeName"),
- level=params.get("level"),
size=params.get("size"),
system_tag=params.get("systemTag"),
headers=params.get("headers"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_info.py b/ansible_collections/cisco/dnac/plugins/action/tag_info.py
index 773cc8858..acd9b93b4 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_info.py
@@ -29,8 +29,8 @@ argument_spec.update(dict(
additionalInfo_nameSpace=dict(type="str"),
additionalInfo_attributes=dict(type="str"),
level=dict(type="str"),
- offset=dict(type="int"),
- limit=dict(type="int"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
size=dict(type="str"),
field=dict(type="str"),
sortBy=dict(type="str"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_member.py b/ansible_collections/cisco/dnac/plugins/action/tag_member.py
index 54ae99e17..54682fc7f 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_member.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_member.py
@@ -32,7 +32,7 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
state=dict(type="str", default="present", choices=["present", "absent"]),
payload=dict(type="dict"),
- object=dict(type="str"),
+ memberType=dict(type="list"),
id=dict(type="str"),
memberId=dict(type="str"),
))
@@ -51,17 +51,17 @@ class TagMember(object):
self.dnac = dnac
self.new_object = dict(
payload=params.get("payload"),
- object=params.get("object"),
+ memberType=params.get("memberType"),
id=params.get("id"),
member_id=params.get("memberId"),
- member_type=params.get("memberType"),
)
def create_params(self):
new_object_params = {}
new_object_params['payload'] = self.new_object.get('payload')
new_object_params['id'] = self.new_object.get('id')
- new_object_params['object'] = self.new_object.get('object')
+ new_object_params['memberType'] = self.new_object.get('memberType')
+ new_object_params['id'] = self.new_object.get('id')
return new_object_params
def delete_by_id_params(self):
@@ -82,8 +82,7 @@ class TagMember(object):
items = self.dnac.exec(
family="tag",
function="get_tag_members_by_id",
- params={"id": id, "memberType": self.new_object.get(
- 'member_type'), }
+ params={"id": id}
)
if isinstance(items, dict):
if 'response' in items:
@@ -110,8 +109,7 @@ class TagMember(object):
_id = prev_obj.get("id")
_id = _id or prev_obj.get("memberId")
if id_exists and name_exists and o_id != _id:
- raise InconsistentParameters(
- "The 'id' and 'name' params don't refer to the same object")
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
if _id:
self.new_object.update(dict(id=_id))
self.new_object.update(dict(member_id=_id))
@@ -124,7 +122,7 @@ class TagMember(object):
requested_obj = self.new_object
obj_params = [
- ("object", "object"),
+ ("memberType", "memberType"),
("id", "id"),
("memberId", "member_id"),
]
@@ -167,8 +165,7 @@ class TagMember(object):
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
if not ANSIBLE_UTILS_IS_INSTALLED:
- raise AnsibleActionFail(
- "ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
super(ActionModule, self).__init__(*args, **kwargs)
self._supports_async = False
self._supports_check_mode = False
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_member_count_info.py b/ansible_collections/cisco/dnac/plugins/action/tag_member_count_info.py
index f3457c067..93287adc6 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_member_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_member_count_info.py
@@ -28,7 +28,6 @@ argument_spec.update(dict(
id=dict(type="str"),
memberType=dict(type="str"),
memberAssociationType=dict(type="str"),
- level=dict(type="str"),
headers=dict(type="dict"),
))
@@ -70,7 +69,6 @@ class ActionModule(ActionBase):
id=params.get("id"),
member_type=params.get("memberType"),
member_association_type=params.get("memberAssociationType"),
- level=params.get("level"),
headers=params.get("headers"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_member_info.py b/ansible_collections/cisco/dnac/plugins/action/tag_member_info.py
index e90f5551e..36c364086 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_member_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_member_info.py
@@ -27,8 +27,8 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
id=dict(type="str"),
memberType=dict(type="str"),
- offset=dict(type="str"),
- limit=dict(type="str"),
+ offset=dict(type="float"),
+ limit=dict(type="float"),
memberAssociationType=dict(type="str"),
level=dict(type="str"),
headers=dict(type="dict"),
diff --git a/ansible_collections/cisco/dnac/plugins/action/tag_membership.py b/ansible_collections/cisco/dnac/plugins/action/tag_membership.py
index 451501f94..bb7b8a8b7 100644
--- a/ansible_collections/cisco/dnac/plugins/action/tag_membership.py
+++ b/ansible_collections/cisco/dnac/plugins/action/tag_membership.py
@@ -25,7 +25,7 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- memberToTags=dict(type="list"),
+ memberToTags=dict(type="dict"),
memberType=dict(type="str"),
))
@@ -79,7 +79,7 @@ class ActionModule(ActionBase):
response = dnac.exec(
family="tag",
- function='updates_tag_membership',
+ function='update_tag_membership',
op_modifies=True,
params=self.get_object(self._task.args),
)
diff --git a/ansible_collections/cisco/dnac/plugins/action/topology_network_health_info.py b/ansible_collections/cisco/dnac/plugins/action/topology_network_health_info.py
index d8a8e75da..5b64f0398 100644
--- a/ansible_collections/cisco/dnac/plugins/action/topology_network_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/topology_network_health_info.py
@@ -25,7 +25,7 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- timestamp=dict(type="str"),
+ timestamp=dict(type="float"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/action/transit_peer_network.py b/ansible_collections/cisco/dnac/plugins/action/transit_peer_network.py
index a15848230..cbcfe8910 100644
--- a/ansible_collections/cisco/dnac/plugins/action/transit_peer_network.py
+++ b/ansible_collections/cisco/dnac/plugins/action/transit_peer_network.py
@@ -23,7 +23,7 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
get_dict_result,
)
from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
- AnsibleSDAException,
+ InconsistentParameters,
)
# Get common arguments specification
@@ -74,7 +74,7 @@ class TransitPeerNetwork(object):
new_object_params['transit_peer_network_name'] = self.new_object.get('transit_peer_network_name')
return new_object_params
- def get_object_by_name(self, name, is_absent=False):
+ def get_object_by_name(self, name):
result = None
# NOTE: Does not have a get by name method, using get all
try:
@@ -86,15 +86,8 @@ class TransitPeerNetwork(object):
if isinstance(items, dict):
if 'response' in items:
items = items.get('response')
- if isinstance(items, dict) and items.get("status") == "failed":
- if is_absent:
- raise AnsibleSDAException(response=items)
- result = None
- return result
result = get_dict_result(items, 'name', name)
except Exception:
- if is_absent:
- raise
result = None
return result
@@ -103,10 +96,25 @@ class TransitPeerNetwork(object):
# NOTE: Does not have a get by id method or it is in another action
return result
- def exists(self, is_absent=False):
- name = self.new_object.get("transitPeerNetworkName")
- prev_obj = self.get_object_by_name(name, is_absent=is_absent)
- it_exists = prev_obj is not None and isinstance(prev_obj, dict) and prev_obj.get("status") != "failed"
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ o_id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
return (it_exists, prev_obj)
def requires_update(self, current_obj):
@@ -198,7 +206,7 @@ class ActionModule(ActionBase):
response = obj.create()
dnac.object_created()
elif state == "absent":
- (obj_exists, prev_obj) = obj.exists(is_absent=True)
+ (obj_exists, prev_obj) = obj.exists()
if obj_exists:
response = obj.delete()
dnac.object_deleted()
diff --git a/ansible_collections/cisco/dnac/plugins/action/user.py b/ansible_collections/cisco/dnac/plugins/action/user.py
index 1896e57ff..ea9b6930d 100644
--- a/ansible_collections/cisco/dnac/plugins/action/user.py
+++ b/ansible_collections/cisco/dnac/plugins/action/user.py
@@ -30,7 +30,7 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- state=dict(type="str", default="present", choices=["present"]),
+ state=dict(type="str", default="present", choices=["present", "absent"]),
firstName=dict(type="str"),
lastName=dict(type="str"),
username=dict(type="str"),
@@ -41,6 +41,8 @@ argument_spec.update(dict(
))
required_if = [
+ ("state", "present", ["userId"], True),
+ ("state", "absent", ["userId"], True),
]
required_one_of = []
mutually_exclusive = []
@@ -58,12 +60,15 @@ class User(object):
email=params.get("email"),
roleList=params.get("roleList"),
userId=params.get("userId"),
+ user_id=params.get("userId"),
)
def get_all_params(self, name=None, id=None):
new_object_params = {}
new_object_params['invoke_source'] = self.new_object.get('invokeSource') or \
self.new_object.get('invoke_source')
+ new_object_params['auth_source'] = self.new_object.get('authSource') or \
+ self.new_object.get('auth_source')
return new_object_params
def create_params(self):
@@ -76,6 +81,11 @@ class User(object):
new_object_params['roleList'] = self.new_object.get('roleList')
return new_object_params
+ def delete_by_id_params(self):
+ new_object_params = {}
+ new_object_params['user_id'] = self.new_object.get('user_id')
+ return new_object_params
+
def update_all_params(self):
new_object_params = {}
new_object_params['firstName'] = self.new_object.get('firstName')
@@ -88,10 +98,10 @@ class User(object):
def get_object_by_name(self, name):
result = None
- # NOTE: Does not have a get by name method, using get all
+ # NOTE: Does not have a get by name method or it is in another action
try:
items = self.dnac.exec(
- family="user_and_roles",
+ family="userand_roles",
function="get_users_api",
params=self.get_all_params(name=name),
)
@@ -106,13 +116,26 @@ class User(object):
def get_object_by_id(self, id):
result = None
# NOTE: Does not have a get by id method or it is in another action
+ try:
+ items = self.dnac.exec(
+ family="userand_roles",
+ function="get_users_api",
+ params=self.get_all_params(id=id),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'id', id)
+ except Exception:
+ result = None
return result
def exists(self):
- prev_obj = None
id_exists = False
name_exists = False
+ prev_obj = None
o_id = self.new_object.get("id")
+ o_id = o_id or self.new_object.get("user_id")
name = self.new_object.get("name")
if o_id:
prev_obj = self.get_object_by_id(o_id)
@@ -122,10 +145,12 @@ class User(object):
name_exists = prev_obj is not None and isinstance(prev_obj, dict)
if name_exists:
_id = prev_obj.get("id")
+ _id = _id or prev_obj.get("userId")
if id_exists and name_exists and o_id != _id:
raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
if _id:
self.new_object.update(dict(id=_id))
+ self.new_object.update(dict(user_id=_id))
it_exists = prev_obj is not None and isinstance(prev_obj, dict)
return (it_exists, prev_obj)
@@ -139,8 +164,9 @@ class User(object):
("email", "email"),
("roleList", "roleList"),
("userId", "userId"),
+ ("userId", "user_id"),
]
- # Method 1. Params present in request (Ansible) obj are the same as the current (ISE) params
+ # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
# If any does not have eq params, it requires update
return any(not dnac_compare_equality(current_obj.get(dnac_param),
requested_obj.get(ansible_param))
@@ -148,7 +174,7 @@ class User(object):
def create(self):
result = self.dnac.exec(
- family="user_and_roles",
+ family="userand_roles",
function="add_user_api",
params=self.create_params(),
op_modifies=True,
@@ -160,13 +186,33 @@ class User(object):
name = self.new_object.get("name")
result = None
result = self.dnac.exec(
- family="user_and_roles",
+ family="userand_roles",
function="update_user_api",
params=self.update_all_params(),
op_modifies=True,
)
return result
+ def delete(self):
+ id = self.new_object.get("id")
+ id = id or self.new_object.get("user_id")
+ name = self.new_object.get("name")
+ result = None
+ if not id:
+ prev_obj_name = self.get_object_by_name(name)
+ id_ = None
+ if prev_obj_name:
+ id_ = prev_obj_name.get("id")
+ id_ = id_ or prev_obj_name.get("userId")
+ if id_:
+ self.new_object.update(dict(user_id=id_))
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="delete_user_api",
+ params=self.delete_by_id_params(),
+ )
+ return result
+
class ActionModule(ActionBase):
def __init__(self, *args, **kwargs):
@@ -207,6 +253,7 @@ class ActionModule(ActionBase):
state = self._task.args.get("state")
response = None
+
if state == "present":
(obj_exists, prev_obj) = obj.exists()
if obj_exists:
@@ -220,6 +267,14 @@ class ActionModule(ActionBase):
response = obj.create()
dnac.object_created()
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
self._result.update(dict(dnac_response=response))
self._result.update(dnac.exit_json())
return self._result
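The state: absent path added to user.py above can be driven from a play like this hedged sketch; it assumes the cisco.dnac.user module name and the usual dnac_* connection arguments, with the userId value as a placeholder (the new required_if entries make userId mandatory for both states).

    - name: Remove a user by id
      cisco.dnac.user:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        state: absent
        userId: "user-id"   # placeholder; resolved to user_id for delete_user_api
      register: result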
diff --git a/ansible_collections/cisco/dnac/plugins/action/user_info.py b/ansible_collections/cisco/dnac/plugins/action/user_info.py
index 7a5906fcc..f92391406 100644
--- a/ansible_collections/cisco/dnac/plugins/action/user_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/user_info.py
@@ -26,6 +26,7 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
invokeSource=dict(type="str"),
+ authSource=dict(type="str"),
headers=dict(type="dict"),
))
@@ -65,6 +66,7 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
invoke_source=params.get("invokeSource"),
+ auth_source=params.get("authSource"),
headers=params.get("headers"),
)
return new_object
@@ -80,7 +82,7 @@ class ActionModule(ActionBase):
dnac = DNACSDK(params=self._task.args)
response = dnac.exec(
- family="user_and_roles",
+ family="userand_roles",
function='get_users_api',
params=self.get_object(self._task.args),
)
diff --git a/ansible_collections/cisco/dnac/plugins/action/users_external_authentication.py b/ansible_collections/cisco/dnac/plugins/action/users_external_authentication.py
new file mode 100644
index 000000000..2c5ab3af7
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/users_external_authentication.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present"]),
+ enable=dict(type="bool"),
+))
+
+required_if = [
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class UsersExternalAuthentication(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ enable=params.get("enable"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['enable'] = self.new_object.get('enable')
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method, using get all
+ try:
+ items = self.dnac.exec(
+ family="userand_roles",
+ function="get_external_authentication_setting_api",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ return result
+
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ o_id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("enable", "enable"),
+ ]
+        # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="manage_external_authentication_setting_api",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = UsersExternalAuthentication(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = prev_obj
+ dnac.object_present_and_different()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
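A minimal sketch of driving the new users_external_authentication plugin; per its argspec it only supports state: present plus an enable flag, and the module name and dnac_* connection arguments are assumed as in the sketches above.

    - name: Enable external authentication for users
      cisco.dnac.users_external_authentication:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        state: present
        enable: true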
diff --git a/ansible_collections/cisco/dnac/plugins/action/users_external_authentication_info.py b/ansible_collections/cisco/dnac/plugins/action/users_external_authentication_info.py
new file mode 100644
index 000000000..7332d1681
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/users_external_authentication_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="userand_roles",
+ function='get_external_authentication_setting_api',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
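And the matching read-only sketch for users_external_authentication_info, which takes no parameters beyond optional headers and returns the current setting in dnac_response.

    - name: Read the current external authentication setting
      cisco.dnac.users_external_authentication_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
      register: result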
diff --git a/ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute.py b/ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute.py
new file mode 100644
index 000000000..ef9c3aceb
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+ dnac_compare_equality,
+ get_dict_result,
+)
+from ansible_collections.cisco.dnac.plugins.plugin_utils.exceptions import (
+ InconsistentParameters,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ state=dict(type="str", default="present", choices=["present", "absent"]),
+ attributeName=dict(type="str"),
+))
+
+required_if = [
+]
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class UsersExternalServersAaaAttribute(object):
+ def __init__(self, params, dnac):
+ self.dnac = dnac
+ self.new_object = dict(
+ attributeName=params.get("attributeName"),
+ )
+
+ def get_all_params(self, name=None, id=None):
+ new_object_params = {}
+ return new_object_params
+
+ def create_params(self):
+ new_object_params = {}
+ new_object_params['attributeName'] = self.new_object.get('attributeName')
+ return new_object_params
+
+ def delete_all_params(self):
+ new_object_params = {}
+ return new_object_params
+
+ def get_object_by_name(self, name):
+ result = None
+ # NOTE: Does not have a get by name method, using get all
+ try:
+ items = self.dnac.exec(
+ family="userand_roles",
+ function="get_a_a_a_attribute_api",
+ params=self.get_all_params(name=name),
+ )
+ if isinstance(items, dict):
+ if 'response' in items:
+ items = items.get('response')
+ result = get_dict_result(items, 'name', name)
+ except Exception:
+ result = None
+ return result
+
+ def get_object_by_id(self, id):
+ result = None
+ # NOTE: Does not have a get by id method or it is in another action
+ return result
+
+ def exists(self):
+ prev_obj = None
+ id_exists = False
+ name_exists = False
+ o_id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ if o_id:
+ prev_obj = self.get_object_by_id(o_id)
+ id_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if not id_exists and name:
+ prev_obj = self.get_object_by_name(name)
+ name_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ if name_exists:
+ _id = prev_obj.get("id")
+ if id_exists and name_exists and o_id != _id:
+ raise InconsistentParameters("The 'id' and 'name' params don't refer to the same object")
+ if _id:
+ self.new_object.update(dict(id=_id))
+ it_exists = prev_obj is not None and isinstance(prev_obj, dict)
+ return (it_exists, prev_obj)
+
+ def requires_update(self, current_obj):
+ requested_obj = self.new_object
+
+ obj_params = [
+ ("attributeName", "attributeName"),
+ ]
+        # Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
+ # If any does not have eq params, it requires update
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def create(self):
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="add_and_update_a_a_a_attribute_api",
+ params=self.create_params(),
+ op_modifies=True,
+ )
+ return result
+
+ def delete(self):
+ id = self.new_object.get("id")
+ name = self.new_object.get("name")
+ result = None
+ result = self.dnac.exec(
+ family="userand_roles",
+ function="delete_a_a_a_attribute_api",
+ params=self.delete_all_params(),
+ )
+ return result
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(self._task.args)
+ obj = UsersExternalServersAaaAttribute(self._task.args, dnac)
+
+ state = self._task.args.get("state")
+
+ response = None
+ if state == "present":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ if obj.requires_update(prev_obj):
+ response = prev_obj
+ dnac.object_present_and_different()
+ else:
+ response = prev_obj
+ dnac.object_already_present()
+ else:
+ response = obj.create()
+ dnac.object_created()
+ elif state == "absent":
+ (obj_exists, prev_obj) = obj.exists()
+ if obj_exists:
+ response = obj.delete()
+ dnac.object_deleted()
+ else:
+ dnac.object_already_absent()
+
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
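Finally, a hedged sketch for users_external_servers_aaa_attribute; attributeName is the only module-specific parameter, the value shown is a placeholder, and the module name and dnac_* connection arguments are assumed as in the sketches above.

    - name: Add or update the AAA attribute for external servers
      cisco.dnac.users_external_servers_aaa_attribute:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        state: present
        attributeName: "Cisco-AVPair"   # placeholder attribute name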
diff --git a/ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute_info.py b/ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute_info.py
new file mode 100644
index 000000000..d0cc29d0a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/users_external_servers_aaa_attribute_info.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ headers=dict(type="dict"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = True
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ headers=params.get("headers"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ self._result.update(dict(dnac_response={}))
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="userand_roles",
+ function='get_a_a_a_attribute_api',
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_accespoint_configuration.py b/ansible_collections/cisco/dnac/plugins/action/wireless_accespoint_configuration.py
index f99e312ed..45b2e7822 100644
--- a/ansible_collections/cisco/dnac/plugins/action/wireless_accespoint_configuration.py
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_accespoint_configuration.py
@@ -30,8 +30,6 @@ argument_spec.update(dict(
adminStatus=dict(type="bool"),
configureApMode=dict(type="bool"),
apMode=dict(type="int"),
- configureApHeight=dict(type="bool"),
- apHeight=dict(type="int"),
configureFailoverPriority=dict(type="bool"),
failoverPriority=dict(type="int"),
configureLedStatus=dict(type="bool"),
@@ -48,6 +46,7 @@ argument_spec.update(dict(
tertiaryControllerName=dict(type="str"),
tertiaryIpAddress=dict(type="dict"),
radioConfigurations=dict(type="list"),
+ isAssignedSiteAsLocation=dict(type="bool"),
))
required_if = []
@@ -90,8 +89,6 @@ class ActionModule(ActionBase):
adminStatus=params.get("adminStatus"),
configureApMode=params.get("configureApMode"),
apMode=params.get("apMode"),
- configureApHeight=params.get("configureApHeight"),
- apHeight=params.get("apHeight"),
configureFailoverPriority=params.get("configureFailoverPriority"),
failoverPriority=params.get("failoverPriority"),
configureLedStatus=params.get("configureLedStatus"),
@@ -108,6 +105,7 @@ class ActionModule(ActionBase):
tertiaryControllerName=params.get("tertiaryControllerName"),
tertiaryIpAddress=params.get("tertiaryIpAddress"),
radioConfigurations=params.get("radioConfigurations"),
+ isAssignedSiteAsLocation=params.get("isAssignedSiteAsLocation"),
)
return new_object
@@ -121,7 +119,7 @@ class ActionModule(ActionBase):
response = dnac.exec(
family="wireless",
- function='configure_access_points',
+ function='configure_access_points_v1',
op_modifies=True,
params=self.get_object(self._task.args),
)
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_accesspoint_configuration_create.py b/ansible_collections/cisco/dnac/plugins/action/wireless_accesspoint_configuration_create.py
new file mode 100644
index 000000000..8778b6757
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_accesspoint_configuration_create.py
@@ -0,0 +1,140 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+from ansible.plugins.action import ActionBase
+try:
+ from ansible_collections.ansible.utils.plugins.module_utils.common.argspec_validate import (
+ AnsibleArgSpecValidator,
+ )
+except ImportError:
+ ANSIBLE_UTILS_IS_INSTALLED = False
+else:
+ ANSIBLE_UTILS_IS_INSTALLED = True
+from ansible.errors import AnsibleActionFail
+from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
+ DNACSDK,
+ dnac_argument_spec,
+)
+
+# Get common arguments specification
+argument_spec = dnac_argument_spec()
+# Add arguments specific for this module
+argument_spec.update(dict(
+ apList=dict(type="list"),
+ configureAdminStatus=dict(type="bool"),
+ adminStatus=dict(type="bool"),
+ configureApMode=dict(type="bool"),
+ apMode=dict(type="int"),
+ configureFailoverPriority=dict(type="bool"),
+ failoverPriority=dict(type="int"),
+ configureLedStatus=dict(type="bool"),
+ ledStatus=dict(type="bool"),
+ configureLedBrightnessLevel=dict(type="bool"),
+ ledBrightnessLevel=dict(type="int"),
+ configureLocation=dict(type="bool"),
+ location=dict(type="str"),
+ configureHAController=dict(type="bool"),
+ primaryControllerName=dict(type="str"),
+ primaryIpAddress=dict(type="dict"),
+ secondaryControllerName=dict(type="str"),
+ secondaryIpAddress=dict(type="dict"),
+ tertiaryControllerName=dict(type="str"),
+ tertiaryIpAddress=dict(type="dict"),
+ radioConfigurations=dict(type="list"),
+ configureCleanAirSI24Ghz=dict(type="bool"),
+ cleanAirSI24=dict(type="bool"),
+ configureCleanAirSI5Ghz=dict(type="bool"),
+ cleanAirSI5=dict(type="bool"),
+ configureCleanAirSI6Ghz=dict(type="bool"),
+ cleanAirSI6=dict(type="bool"),
+ isAssignedSiteAsLocation=dict(type="bool"),
+))
+
+required_if = []
+required_one_of = []
+mutually_exclusive = []
+required_together = []
+
+
+class ActionModule(ActionBase):
+ def __init__(self, *args, **kwargs):
+ if not ANSIBLE_UTILS_IS_INSTALLED:
+ raise AnsibleActionFail("ansible.utils is not installed. Execute 'ansible-galaxy collection install ansible.utils'")
+ super(ActionModule, self).__init__(*args, **kwargs)
+ self._supports_async = False
+ self._supports_check_mode = False
+ self._result = None
+
+ # Checks the supplied parameters against the argument spec for this module
+ def _check_argspec(self):
+ aav = AnsibleArgSpecValidator(
+ data=self._task.args,
+ schema=dict(argument_spec=argument_spec),
+ schema_format="argspec",
+ schema_conditionals=dict(
+ required_if=required_if,
+ required_one_of=required_one_of,
+ mutually_exclusive=mutually_exclusive,
+ required_together=required_together,
+ ),
+ name=self._task.action,
+ )
+ valid, errors, self._task.args = aav.validate()
+ if not valid:
+ raise AnsibleActionFail(errors)
+
+ def get_object(self, params):
+ new_object = dict(
+ apList=params.get("apList"),
+ configureAdminStatus=params.get("configureAdminStatus"),
+ adminStatus=params.get("adminStatus"),
+ configureApMode=params.get("configureApMode"),
+ apMode=params.get("apMode"),
+ configureFailoverPriority=params.get("configureFailoverPriority"),
+ failoverPriority=params.get("failoverPriority"),
+ configureLedStatus=params.get("configureLedStatus"),
+ ledStatus=params.get("ledStatus"),
+ configureLedBrightnessLevel=params.get("configureLedBrightnessLevel"),
+ ledBrightnessLevel=params.get("ledBrightnessLevel"),
+ configureLocation=params.get("configureLocation"),
+ location=params.get("location"),
+ configureHAController=params.get("configureHAController"),
+ primaryControllerName=params.get("primaryControllerName"),
+ primaryIpAddress=params.get("primaryIpAddress"),
+ secondaryControllerName=params.get("secondaryControllerName"),
+ secondaryIpAddress=params.get("secondaryIpAddress"),
+ tertiaryControllerName=params.get("tertiaryControllerName"),
+ tertiaryIpAddress=params.get("tertiaryIpAddress"),
+ radioConfigurations=params.get("radioConfigurations"),
+ configureCleanAirSI24Ghz=params.get("configureCleanAirSI24Ghz"),
+ cleanAirSI24=params.get("cleanAirSI24"),
+ configureCleanAirSI5Ghz=params.get("configureCleanAirSI5Ghz"),
+ cleanAirSI5=params.get("cleanAirSI5"),
+ configureCleanAirSI6Ghz=params.get("configureCleanAirSI6Ghz"),
+ cleanAirSI6=params.get("cleanAirSI6"),
+ isAssignedSiteAsLocation=params.get("isAssignedSiteAsLocation"),
+ )
+ return new_object
+
+ def run(self, tmp=None, task_vars=None):
+ self._task.diff = False
+ self._result = super(ActionModule, self).run(tmp, task_vars)
+ self._result["changed"] = False
+ self._check_argspec()
+
+ dnac = DNACSDK(params=self._task.args)
+
+ response = dnac.exec(
+ family="wireless",
+ function='configure_access_points_v2',
+ op_modifies=True,
+ params=self.get_object(self._task.args),
+ )
+ self._result.update(dict(dnac_response=response))
+ self._result.update(dnac.exit_json())
+ return self._result
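
The generated action plugins in this patch all follow the same shape: validate the task arguments against the argument spec, map them into an SDK payload via get_object(), and pass a family/function pair to the shared DNACSDK wrapper. As a rough sketch only (this is an assumption about how the wrapper dispatches, not its actual implementation), the call reduces to an attribute lookup on the dnacentersdk client:

# Simplified sketch (assumption): dispatching a family/function pair such as
# ("wireless", "configure_access_points_v2") to the dnacentersdk client.
from dnacentersdk import DNACenterAPI

def call_sdk(api, family, function, params):
    sdk_family = getattr(api, family)             # e.g. api.wireless
    sdk_function = getattr(sdk_family, function)  # e.g. configure_access_points_v2
    return sdk_function(**params)
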
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_dynamic_interface.py b/ansible_collections/cisco/dnac/plugins/action/wireless_dynamic_interface.py
index 51f458f57..416d665a0 100644
--- a/ansible_collections/cisco/dnac/plugins/action/wireless_dynamic_interface.py
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_dynamic_interface.py
@@ -32,13 +32,10 @@ argument_spec = dnac_argument_spec()
argument_spec.update(dict(
state=dict(type="str", default="present", choices=["present", "absent"]),
interfaceName=dict(type="str"),
- vlanId=dict(type="int"),
- headers=dict(type="dict"),
+ vlanId=dict(type="float"),
))
required_if = [
- ("state", "present", ["interfaceName"], True),
- ("state", "absent", ["interfaceName"], True),
]
required_one_of = []
mutually_exclusive = []
@@ -51,7 +48,6 @@ class WirelessDynamicInterface(object):
self.new_object = dict(
interfaceName=params.get("interfaceName"),
vlanId=params.get("vlanId"),
- headers=params.get("headers"),
interface_name=params.get("interfaceName"),
)
@@ -66,7 +62,7 @@ class WirelessDynamicInterface(object):
new_object_params['vlanId'] = self.new_object.get('vlanId')
return new_object_params
- def delete_by_name_params(self):
+ def delete_all_params(self):
new_object_params = {}
new_object_params['interface_name'] = self.new_object.get('interface_name')
new_object_params['headers'] = self.new_object.get('headers')
@@ -100,18 +96,12 @@ class WirelessDynamicInterface(object):
name_exists = False
o_id = self.new_object.get("id")
name = self.new_object.get("name")
- name = name or self.new_object.get("interface_name")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
if not id_exists and name:
prev_obj = self.get_object_by_name(name)
name_exists = prev_obj is not None and isinstance(prev_obj, dict)
- if id_exists:
- _name = prev_obj.get("name")
- _name = _name or prev_obj.get("interfaceName")
- if _name:
- self.new_object.update(dict(interface_name=_name))
if name_exists:
_id = prev_obj.get("id")
if id_exists and name_exists and o_id != _id:
@@ -147,20 +137,11 @@ class WirelessDynamicInterface(object):
def delete(self):
id = self.new_object.get("id")
name = self.new_object.get("name")
- name = name or self.new_object.get("interface_name")
result = None
- if not name:
- prev_obj_id = self.get_object_by_id(id)
- name_ = None
- if prev_obj_id:
- name_ = prev_obj_id.get("name")
- name_ = name_ or prev_obj_id.get("interfaceName")
- if name_:
- self.new_object.update(dict(interface_name=name_))
result = self.dnac.exec(
family="wireless",
function="delete_dynamic_interface",
- params=self.delete_by_name_params(),
+ params=self.delete_all_params(),
)
return result
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_enterprise_ssid.py b/ansible_collections/cisco/dnac/plugins/action/wireless_enterprise_ssid.py
index 7e2d129c0..4773b9bcc 100644
--- a/ansible_collections/cisco/dnac/plugins/action/wireless_enterprise_ssid.py
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_enterprise_ssid.py
@@ -50,6 +50,19 @@ argument_spec.update(dict(
enableNeighborList=dict(type="bool"),
mfpClientProtection=dict(type="str"),
nasOptions=dict(type="list"),
+ profileName=dict(type="str"),
+ policyProfileName=dict(type="str"),
+ aaaOverride=dict(type="bool"),
+ coverageHoleDetectionEnable=dict(type="bool"),
+ protectedManagementFrame=dict(type="str"),
+ multiPSKSettings=dict(type="list"),
+ clientRateLimit=dict(type="float"),
+ authKeyMgmt=dict(type="list"),
+ rsnCipherSuiteGcmp256=dict(type="bool"),
+ rsnCipherSuiteCcmp256=dict(type="bool"),
+ rsnCipherSuiteGcmp128=dict(type="bool"),
+ ghz6PolicyClientSteering=dict(type="bool"),
+ ghz24Policy=dict(type="str"),
ssidName=dict(type="str"),
))
@@ -85,6 +98,19 @@ class WirelessEnterpriseSsid(object):
enableNeighborList=params.get("enableNeighborList"),
mfpClientProtection=params.get("mfpClientProtection"),
nasOptions=params.get("nasOptions"),
+ profileName=params.get("profileName"),
+ policyProfileName=params.get("policyProfileName"),
+ aaaOverride=params.get("aaaOverride"),
+ coverageHoleDetectionEnable=params.get("coverageHoleDetectionEnable"),
+ protectedManagementFrame=params.get("protectedManagementFrame"),
+ multiPSKSettings=params.get("multiPSKSettings"),
+ clientRateLimit=params.get("clientRateLimit"),
+ authKeyMgmt=params.get("authKeyMgmt"),
+ rsnCipherSuiteGcmp256=params.get("rsnCipherSuiteGcmp256"),
+ rsnCipherSuiteCcmp256=params.get("rsnCipherSuiteCcmp256"),
+ rsnCipherSuiteGcmp128=params.get("rsnCipherSuiteGcmp128"),
+ ghz6PolicyClientSteering=params.get("ghz6PolicyClientSteering"),
+ ghz24Policy=params.get("ghz24Policy"),
ssid_name=params.get("ssidName"),
)
@@ -115,6 +141,19 @@ class WirelessEnterpriseSsid(object):
new_object_params['enableNeighborList'] = self.new_object.get('enableNeighborList')
new_object_params['mfpClientProtection'] = self.new_object.get('mfpClientProtection')
new_object_params['nasOptions'] = self.new_object.get('nasOptions')
+ new_object_params['profileName'] = self.new_object.get('profileName')
+ new_object_params['policyProfileName'] = self.new_object.get('policyProfileName')
+ new_object_params['aaaOverride'] = self.new_object.get('aaaOverride')
+ new_object_params['coverageHoleDetectionEnable'] = self.new_object.get('coverageHoleDetectionEnable')
+ new_object_params['protectedManagementFrame'] = self.new_object.get('protectedManagementFrame')
+ new_object_params['multiPSKSettings'] = self.new_object.get('multiPSKSettings')
+ new_object_params['clientRateLimit'] = self.new_object.get('clientRateLimit')
+ new_object_params['authKeyMgmt'] = self.new_object.get('authKeyMgmt')
+ new_object_params['rsnCipherSuiteGcmp256'] = self.new_object.get('rsnCipherSuiteGcmp256')
+ new_object_params['rsnCipherSuiteCcmp256'] = self.new_object.get('rsnCipherSuiteCcmp256')
+ new_object_params['rsnCipherSuiteGcmp128'] = self.new_object.get('rsnCipherSuiteGcmp128')
+ new_object_params['ghz6PolicyClientSteering'] = self.new_object.get('ghz6PolicyClientSteering')
+ new_object_params['ghz24Policy'] = self.new_object.get('ghz24Policy')
return new_object_params
def delete_by_name_params(self):
@@ -143,6 +182,19 @@ class WirelessEnterpriseSsid(object):
new_object_params['enableNeighborList'] = self.new_object.get('enableNeighborList')
new_object_params['mfpClientProtection'] = self.new_object.get('mfpClientProtection')
new_object_params['nasOptions'] = self.new_object.get('nasOptions')
+ new_object_params['profileName'] = self.new_object.get('profileName')
+ new_object_params['policyProfileName'] = self.new_object.get('policyProfileName')
+ new_object_params['aaaOverride'] = self.new_object.get('aaaOverride')
+ new_object_params['coverageHoleDetectionEnable'] = self.new_object.get('coverageHoleDetectionEnable')
+ new_object_params['protectedManagementFrame'] = self.new_object.get('protectedManagementFrame')
+ new_object_params['multiPSKSettings'] = self.new_object.get('multiPSKSettings')
+ new_object_params['clientRateLimit'] = self.new_object.get('clientRateLimit')
+ new_object_params['authKeyMgmt'] = self.new_object.get('authKeyMgmt')
+ new_object_params['rsnCipherSuiteGcmp256'] = self.new_object.get('rsnCipherSuiteGcmp256')
+ new_object_params['rsnCipherSuiteCcmp256'] = self.new_object.get('rsnCipherSuiteCcmp256')
+ new_object_params['rsnCipherSuiteGcmp128'] = self.new_object.get('rsnCipherSuiteGcmp128')
+ new_object_params['ghz6PolicyClientSteering'] = self.new_object.get('ghz6PolicyClientSteering')
+ new_object_params['ghz24Policy'] = self.new_object.get('ghz24Policy')
return new_object_params
def get_object_by_name(self, name):
@@ -231,6 +283,19 @@ class WirelessEnterpriseSsid(object):
("enableNeighborList", "enableNeighborList"),
("mfpClientProtection", "mfpClientProtection"),
("nasOptions", "nasOptions"),
+ ("profileName", "profileName"),
+ ("policyProfileName", "policyProfileName"),
+ ("aaaOverride", "aaaOverride"),
+ ("coverageHoleDetectionEnable", "coverageHoleDetectionEnable"),
+ ("protectedManagementFrame", "protectedManagementFrame"),
+ ("multiPSKSettings", "multiPSKSettings"),
+ ("clientRateLimit", "clientRateLimit"),
+ ("authKeyMgmt", "authKeyMgmt"),
+ ("rsnCipherSuiteGcmp256", "rsnCipherSuiteGcmp256"),
+ ("rsnCipherSuiteCcmp256", "rsnCipherSuiteCcmp256"),
+ ("rsnCipherSuiteGcmp128", "rsnCipherSuiteGcmp128"),
+ ("ghz6PolicyClientSteering", "ghz6PolicyClientSteering"),
+ ("ghz24Policy", "ghz24Policy"),
("ssidName", "ssid_name"),
]
# Method 1. Params present in request (Ansible) obj are the same as the current (DNAC) params
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_profile.py b/ansible_collections/cisco/dnac/plugins/action/wireless_profile.py
index b2b22fb8e..acf011b1e 100644
--- a/ansible_collections/cisco/dnac/plugins/action/wireless_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_profile.py
@@ -53,7 +53,7 @@ class WirelessProfile(object):
def get_all_params(self, name=None, id=None):
new_object_params = {}
- new_object_params['profile_name'] = name or self.new_object.get('wireless_profile_name')
+ new_object_params['profile_name'] = self.new_object.get('wireless_profile_name')
return new_object_params
def create_params(self):
@@ -103,9 +103,6 @@ class WirelessProfile(object):
o_id = self.new_object.get("id")
name = self.new_object.get("name")
name = name or self.new_object.get("wireless_profile_name")
- profile_details = self.new_object.get("profileDetails")
- if profile_details and profile_details.get("name"):
- name = name or profile_details.get("name")
if o_id:
prev_obj = self.get_object_by_id(o_id)
id_exists = prev_obj is not None and isinstance(prev_obj, dict)
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_psk_override.py b/ansible_collections/cisco/dnac/plugins/action/wireless_psk_override.py
index 6fe372d58..1bf29f6e6 100644
--- a/ansible_collections/cisco/dnac/plugins/action/wireless_psk_override.py
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_psk_override.py
@@ -25,7 +25,10 @@ from ansible_collections.cisco.dnac.plugins.plugin_utils.dnac import (
argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
- payload=dict(type="list"),
+ ssidName=dict(type="str"),
+ site=dict(type="str"),
+ passPhrase=dict(type="str"),
+ wlanProfileName=dict(type="str"),
))
required_if = []
@@ -63,7 +66,10 @@ class ActionModule(ActionBase):
def get_object(self, params):
new_object = dict(
- payload=params.get("payload"),
+ ssidName=params.get("ssidName"),
+ site=params.get("site"),
+ passPhrase=params.get("passPhrase"),
+ wlanProfileName=params.get("wlanProfileName"),
)
return new_object
diff --git a/ansible_collections/cisco/dnac/plugins/action/wireless_sensor_test_results_info.py b/ansible_collections/cisco/dnac/plugins/action/wireless_sensor_test_results_info.py
index 5948fad3c..26ff29cd8 100644
--- a/ansible_collections/cisco/dnac/plugins/action/wireless_sensor_test_results_info.py
+++ b/ansible_collections/cisco/dnac/plugins/action/wireless_sensor_test_results_info.py
@@ -26,8 +26,8 @@ argument_spec = dnac_argument_spec()
# Add arguments specific for this module
argument_spec.update(dict(
siteId=dict(type="str"),
- startTime=dict(type="int"),
- endTime=dict(type="int"),
+ startTime=dict(type="float"),
+ endTime=dict(type="float"),
testFailureBy=dict(type="str"),
headers=dict(type="dict"),
))
diff --git a/ansible_collections/cisco/dnac/plugins/doc_fragments/module.py b/ansible_collections/cisco/dnac/plugins/doc_fragments/module.py
index 6c92fb75f..01286a622 100644
--- a/ansible_collections/cisco/dnac/plugins/doc_fragments/module.py
+++ b/ansible_collections/cisco/dnac/plugins/doc_fragments/module.py
@@ -43,7 +43,7 @@ options:
description:
- Informs the SDK which version of Cisco DNA Center to use.
type: str
- default: 2.3.5.3
+ default: 2.3.7.6
dnac_debug:
description:
- Flag for Cisco DNA Center SDK to enable debugging.
diff --git a/ansible_collections/cisco/dnac/plugins/doc_fragments/module_info.py b/ansible_collections/cisco/dnac/plugins/doc_fragments/module_info.py
index 7ac68f449..507ef0608 100644
--- a/ansible_collections/cisco/dnac/plugins/doc_fragments/module_info.py
+++ b/ansible_collections/cisco/dnac/plugins/doc_fragments/module_info.py
@@ -43,7 +43,7 @@ options:
description:
- Informs the SDK which version of Cisco DNA Center to use.
type: str
- default: 2.3.5.3
+ default: 2.3.7.6
dnac_debug:
description:
- Flag for Cisco DNA Center SDK to enable debugging.
diff --git a/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py b/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py
index 1d4b804cd..d4b470dca 100644
--- a/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py
+++ b/ansible_collections/cisco/dnac/plugins/module_utils/dnac.py
@@ -17,6 +17,7 @@ from ansible.module_utils.common import validation
from abc import ABCMeta, abstractmethod
try:
import logging
+ import ipaddress
except ImportError:
LOGGING_IN_STANDARD = False
else:
@@ -28,6 +29,7 @@ import json
import inspect
import re
import socket
+import time
class DnacBase():
@@ -66,6 +68,7 @@ class DnacBase():
'parsed': self.verify_diff_parsed
}
self.dnac_log = dnac_params.get("dnac_log")
+ self.max_timeout = self.params.get('dnac_api_task_timeout')
if self.dnac_log and not DnacBase.__is_log_init:
self.dnac_log_level = dnac_params.get("dnac_log_level") or 'WARNING'
@@ -255,6 +258,30 @@ class DnacBase():
return re.match(pattern, password) is not None
+ def is_valid_email(self, email):
+ """
+ Validate an email address.
+ Args:
+ self (object): An instance of a class that provides access to Cisco Catalyst Center.
+ email (str): The email address to be validated.
+ Returns:
+ bool: True if the email is valid, False otherwise.
+ Description:
+ This function checks if the provided email address is valid based on the following criteria:
+ - It contains one or more alphanumeric characters or allowed special characters before the '@'.
+ - It contains one or more alphanumeric characters, dots, or dashes after the '@' and before the final dot.
+ - It contains a period followed by at least two alphabetic characters at the end of the string.
+ The allowed special characters before the '@' are: ._%+-.
+ """
+
+ # Define the regex pattern for a valid email address
+ pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
+ # Use re.match to see if the email matches the pattern
+ if re.match(pattern, email):
+ return True
+ else:
+ return False
+
def get_dnac_params(self, params):
"""Store the Cisco Catalyst Center parameters from the playbook"""
@@ -299,14 +326,16 @@ class DnacBase():
return result
- def check_task_response_status(self, response, validation_string, data=False):
+ def check_task_response_status(self, response, validation_string, api_name, data=False):
"""
Check the status of a task in Cisco Catalyst Center using its task id.
Parameters:
self - The current object details.
response (dict) - API response.
- validation_string (string) - String used to match the progress status.
+ validation_string (str) - String used to match the progress status.
+ api_name (str) - API name.
+ data (bool) - Set to True if the API is returning any information. Else, False.
Returns:
self
@@ -329,7 +358,15 @@ class DnacBase():
return self
task_id = response.get("taskId")
+ start_time = time.time()
while True:
+ end_time = time.time()
+ if (end_time - start_time) >= self.max_timeout:
+ self.log("Max timeout of {0} sec has reached for the execution id '{1}'. "
+ "Exiting the loop due to unexpected API '{2}' status."
+ .format(self.max_timeout, task_id, api_name), "WARNING")
+ break
+
task_details = self.get_task_details(task_id)
self.log('Getting task details from task ID {0}: {1}'.format(task_id, task_details), "DEBUG")
@@ -348,7 +385,7 @@ class DnacBase():
self.status = "success"
break
- self.log("progress set to {0} for taskid: {1}".format(task_details.get('progress'), task_id), "DEBUG")
+ self.log("Progress is {0} for task ID: {1}".format(task_details.get('progress'), task_id), "DEBUG")
return self
@@ -378,12 +415,13 @@ class DnacBase():
self.log("Response for the current execution: {0}".format(response))
return response
- def check_execution_response_status(self, response):
+ def check_execution_response_status(self, response, api_name):
"""
Checks the response status provided by the API in the Cisco Catalyst Center
Parameters:
response (dict) - API response
+ api_name (str) - API name
Returns:
self
@@ -399,9 +437,17 @@ class DnacBase():
self.status = "failed"
return self
- executionid = response.get("executionId")
+ execution_id = response.get("executionId")
+ start_time = time.time()
while True:
- execution_details = self.get_execution_details(executionid)
+ end_time = time.time()
+ if (end_time - start_time) >= self.max_timeout:
+ self.log("Max timeout of {0} sec has reached for the execution id '{1}'. "
+ "Exiting the loop due to unexpected API '{2}' status."
+ .format(self.max_timeout, execution_id, api_name), "WARNING")
+ break
+
+ execution_details = self.get_execution_details(execution_id)
if execution_details.get("status") == "SUCCESS":
self.result['changed'] = True
self.msg = "Successfully executed"
@@ -510,6 +556,79 @@ class DnacBase():
except socket.error:
return False
+ def check_status_api_events(self, status_execution_id):
+ """
+ Checks the status of API events in Cisco Catalyst Center until completion or timeout.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ status_execution_id (str): The execution ID for the event to check the status.
+ Returns:
+ dict or None: The response from the API once the status is no longer "IN_PROGRESS",
+ or None if the maximum timeout is reached.
+ Description:
+ This method repeatedly checks the status of an API event in Cisco Catalyst Center using the provided
+ execution ID. The status is checked at intervals specified by the 'dnac_task_poll_interval' parameter
+ until the status is no longer "IN_PROGRESS" or the maximum timeout ('dnac_api_task_timeout') is reached.
+ If the status becomes anything other than "IN_PROGRESS" before the timeout, the method returns the
+ response from the API. If the timeout is reached first, the method logs a warning and returns None.
+ """
+
+ events_response = None
+ start_time = time.time()
+
+ while True:
+ end_time = time.time()
+ if (end_time - start_time) >= self.max_timeout:
+ self.log("""Max timeout of {0} sec has reached for the execution id '{1}' for the event and unexpected
+ api status so moving out of the loop.""".format(self.max_timeout, status_execution_id), "WARNING")
+ break
+ # Now we check the status of API Events for configuring destination and notifications
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_status_api_for_events',
+ op_modifies=True,
+ params={"execution_id": status_execution_id}
+ )
+ self.log("Received API response from 'get_status_api_for_events': {0}".format(str(response)), "DEBUG")
+ if response['apiStatus'] != "IN_PROGRESS":
+ events_response = response
+ break
+ time.sleep(self.params.get('dnac_task_poll_interval'))
+
+ return events_response
+
+ def is_valid_server_address(self, server_address):
+ """
+ Validates the server address to check if it's a valid IPv4, IPv6 address, or a valid hostname.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ server_address (str): The server address to validate.
+ Returns:
+ bool: True if the server address is valid, otherwise False.
+ """
+ # Check if the address is a valid IPv4 or IPv6 address
+ try:
+ ipaddress.ip_address(server_address)
+ return True
+ except ValueError:
+ pass
+
+ # Define the regex for a valid hostname
+ hostname_regex = re.compile(
+ r'^(' # Start of the string
+ r'([A-Za-z0-9]+([A-Za-z0-9-]*[A-Za-z0-9])?\.)+[A-Za-z]{2,6}|' # Domain name (e.g., example.com)
+ r'localhost|' # Localhost
+ r'(\d{1,3}\.)+\d{1,3}|' # Custom IPv4-like format (e.g., 2.2.3.31.3.4.4)
+ r'[A-Fa-f0-9:]+$' # IPv6 address (e.g., 2f8:192:3::40:41:41:42)
+ r')$' # End of the string
+ )
+
+ # Check if the address is a valid hostname
+ if hostname_regex.match(server_address):
+ return True
+
+ return False
+
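
is_valid_server_address accepts either a literal IP address (delegated to the ipaddress module) or a hostname-shaped string. A compact standalone sketch of the same idea, with a deliberately simplified hostname pattern:

# Standalone sketch: accept a literal IPv4/IPv6 address or a plausible hostname.
import ipaddress
import re

HOSTNAME_RE = re.compile(r'^([A-Za-z0-9]([A-Za-z0-9-]*[A-Za-z0-9])?\.)+[A-Za-z]{2,6}$|^localhost$')

def looks_like_server_address(value):
    try:
        ipaddress.ip_address(value)  # covers IPv4 and IPv6 literals
        return True
    except ValueError:
        return bool(HOSTNAME_RE.match(value))
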
def is_path_exists(self, file_path):
"""
Check if the file path 'file_path' exists or not.
@@ -521,7 +640,11 @@ class DnacBase():
True/False (bool) - True if the file path exists, else False.
"""
- if not os.path.exists(file_path):
+ current_working_directory = os.getcwd()
+ final_file_path = os.path.join(current_working_directory, file_path)
+ self.log(str(final_file_path))
+ if not os.path.exists(final_file_path):
+ self.log("The specified path '{0}' is not valid. Please provide a valid path.".format(final_file_path), "ERROR")
return False
return True
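
A side note on the is_path_exists change above: os.path.join drops the first argument when the second one is absolute, so absolute playbook paths keep working while relative paths are now resolved against the current working directory.

# Quick illustration of the path resolution behaviour relied on above.
import os

cwd = os.getcwd()
print(os.path.join(cwd, "templates/config.j2"))  # relative: resolved under cwd (illustrative path)
print(os.path.join(cwd, "/etc/hosts"))           # absolute: stays "/etc/hosts"
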
diff --git a/ansible_collections/cisco/dnac/plugins/modules/accesspoint_configuration_details_by_task_id_info.py b/ansible_collections/cisco/dnac/plugins/modules/accesspoint_configuration_details_by_task_id_info.py
index 642034e5c..d990d8610 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/accesspoint_configuration_details_by_task_id_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/accesspoint_configuration_details_by_task_id_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Accesspoint Configuration Details By T
description:
- Get Accesspoint Configuration Details By Task Id by id.
- Users can query the access point configuration result using this intent API.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -24,8 +24,8 @@ options:
- Task_id path parameter. Task id information of ap config.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless GetAccessPointConfigurationTaskResult
description: Complete reference of the GetAccessPointConfigurationTaskResult API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/app_policy_default_info.py b/ansible_collections/cisco/dnac/plugins/modules/app_policy_default_info.py
index 921f7a095..39c441037 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/app_policy_default_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/app_policy_default_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationPolicyDefault
description: Complete reference of the GetApplicationPolicyDefault API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/app_policy_info.py b/ansible_collections/cisco/dnac/plugins/modules/app_policy_info.py
index 3479d1ede..79030d9e3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/app_policy_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/app_policy_info.py
@@ -24,8 +24,8 @@ options:
- PolicyScope query parameter. Policy scope name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationPolicy
description: Complete reference of the GetApplicationPolicy API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/app_policy_intent_create.py b/ansible_collections/cisco/dnac/plugins/modules/app_policy_intent_create.py
index fd487ca0c..f67eeeb07 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/app_policy_intent_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/app_policy_intent_create.py
@@ -28,7 +28,8 @@ options:
elements: dict
suboptions:
groupId:
- description: Group id.
+ description: The site(s) ID where the Application QoS Policy will be
+ deployed.
elements: str
type: list
ssid:
@@ -124,7 +125,8 @@ options:
elements: dict
suboptions:
groupId:
- description: Group id.
+ description: The site(s) ID where the Application QoS Policy will be
+ deployed.
elements: str
type: list
id:
@@ -226,8 +228,8 @@ options:
type: dict
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy ApplicationPolicyIntent
description: Complete reference of the ApplicationPolicyIntent API.
@@ -314,7 +316,6 @@ EXAMPLES = r"""
- idRef: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile.py b/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile.py
index 9260ffbf5..f8a38f535 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile.py
@@ -75,7 +75,7 @@ options:
type: str
type: list
type:
- description: Type.
+ description: The allowed clause types are BANDWIDTH, DSCP_CUSTOMIZATION.
type: str
type: list
description:
@@ -89,8 +89,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy CreateApplicationPolicyQueuingProfile
description: Complete reference of the CreateApplicationPolicyQueuingProfile API.
@@ -183,7 +183,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_count_info.py
index 2a3b2c4df..5054b84eb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationPolicyQueuingProfileCount
description: Complete reference of the GetApplicationPolicyQueuingProfileCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_info.py b/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_info.py
index bb89873a6..2f58e0a53 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/app_policy_queuing_profile_info.py
@@ -24,8 +24,8 @@ options:
- Name query parameter. Queuing profile name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationPolicyQueuingProfile
description: Complete reference of the GetApplicationPolicyQueuingProfile API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set.py b/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set.py
new file mode 100644
index 000000000..279aff243
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: application_policy_application_set
+short_description: Resource module for Application Policy Application Set
+description:
+- Manage operations create and delete of the resource Application Policy Application Set.
+- Create new custom application set/s.
+- Delete existing custom application set by id.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. Id of custom application set to delete.
+ type: str
+ payload:
+ description: Application Policy Application Set's payload.
+ elements: dict
+ suboptions:
+ defaultBusinessRelevance:
+ description: Default business relevance.
+ type: str
+ name:
+ description: Application Set name.
+ type: str
+ namespace:
+ description: Namespace, should be set to scalablegroup application.
+ type: str
+ qualifier:
+ description: Qualifier, should be set to application.
+ type: str
+ scalableGroupExternalHandle:
+ description: Scalable group external handle, should be set to application set
+ name.
+ type: str
+ scalableGroupType:
+ description: Scalable group type, should be set to APPLICATION_GROUP.
+ type: str
+ type:
+ description: Type, should be set to scalablegroup.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Application Policy CreateApplicationSets
+ description: Complete reference of the CreateApplicationSets API.
+ link: https://developer.cisco.com/docs/dna-center/#!create-application-sets
+- name: Cisco DNA Center documentation for Application Policy DeleteApplicationSet
+ description: Complete reference of the DeleteApplicationSet API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-application-set
+notes:
+ - SDK Method used are
+ application_policy.ApplicationPolicy.create_application_sets,
+ application_policy.ApplicationPolicy.delete_application_set,
+
+ - Paths used are
+ post /dna/intent/api/v2/application-policy-application-set,
+ delete /dna/intent/api/v2/application-policy-application-set/{id},
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.application_policy_application_set:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - defaultBusinessRelevance: string
+ name: string
+ namespace: string
+ qualifier: string
+ scalableGroupExternalHandle: string
+ scalableGroupType: string
+ type: string
+
+- name: Delete by id
+ cisco.dnac.application_policy_application_set:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_count_info.py
new file mode 100644
index 000000000..c3807376a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_count_info.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: application_policy_application_set_count_info
+short_description: Information module for Application Policy Application Set Count
+description:
+- Get all Application Policy Application Set Count.
+- Get the number of all existing application sets.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ scalableGroupType:
+ description:
+ - ScalableGroupType query parameter. Scalable group type to retrieve, valid value APPLICATION_GROUP.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Application Policy GetApplicationSetCount
+ description: Complete reference of the GetApplicationSetCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-application-set-count
+notes:
+ - SDK Method used are
+ application_policy.ApplicationPolicy.get_application_set_count,
+
+ - Paths used are
+ get /dna/intent/api/v2/application-policy-application-set-count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Application Policy Application Set Count
+ cisco.dnac.application_policy_application_set_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ scalableGroupType: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": 0,
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_info.py b/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_info.py
new file mode 100644
index 000000000..f6c8fa6cb
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/application_policy_application_set_info.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: application_policy_application_set_info
+short_description: Information module for Application Policy Application Set
+description:
+- Get all Application Policy Application Set.
+- Get application set/s by offset/limit or by name.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ attributes:
+ description:
+ - Attributes query parameter. Attributes to retrieve, valid value applicationSet.
+ type: str
+ name:
+ description:
+ - Name query parameter. Application set name.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. The starting point or index from where the paginated results should begin.
+ type: float
+ limit:
+ description:
+ - >
+ Limit query parameter. The limit which is the maximum number of items to include in a single page of
+ results, max value 500.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Application Policy GetApplicationSets2
+ description: Complete reference of the GetApplicationSets2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-application-sets-2
+notes:
+ - SDK Method used are
+ application_policy.ApplicationPolicy.get_application_sets2,
+
+ - Paths used are
+ get /dna/intent/api/v2/application-policy-application-set,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Application Policy Application Set
+ cisco.dnac.application_policy_application_set_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ attributes: string
+ name: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "instanceId": 0,
+ "displayName": "string",
+ "instanceVersion": 0,
+ "defaultBusinessRelevance": "string",
+ "identitySource": {
+ "id": "string",
+ "type": "string"
+ },
+ "name": "string",
+ "namespace": "string",
+ "scalableGroupExternalHandle": "string",
+ "scalableGroupType": "string",
+ "type": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/application_sets.py b/ansible_collections/cisco/dnac/plugins/modules/application_sets.py
index b4c5dd14c..a32e36a04 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/application_sets.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/application_sets.py
@@ -29,19 +29,19 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy CreateApplicationSet
description: Complete reference of the CreateApplicationSet API.
link: https://developer.cisco.com/docs/dna-center/#!create-application-set
-- name: Cisco DNA Center documentation for Application Policy DeleteApplicationSet
- description: Complete reference of the DeleteApplicationSet API.
- link: https://developer.cisco.com/docs/dna-center/#!delete-application-set
+- name: Cisco DNA Center documentation for Application Policy DeleteApplicationSet2
+ description: Complete reference of the DeleteApplicationSet2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-application-set-2
notes:
- SDK Method used are
application_policy.ApplicationPolicy.create_application_set,
- application_policy.ApplicationPolicy.delete_application_set,
+ application_policy.ApplicationPolicy.delete_application_set2,
- Paths used are
post /dna/intent/api/v1/application-policy-application-set,
@@ -76,7 +76,6 @@ EXAMPLES = r"""
- name: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/application_sets_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/application_sets_count_info.py
index bd6336fb7..e6b8c7359 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/application_sets_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/application_sets_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationSetsCount
description: Complete reference of the GetApplicationSetsCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/application_sets_info.py b/ansible_collections/cisco/dnac/plugins/modules/application_sets_info.py
index 168674c87..ba478221d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/application_sets_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/application_sets_info.py
@@ -22,18 +22,18 @@ options:
offset:
description:
- Offset query parameter.
- type: int
+ type: float
limit:
description:
- Limit query parameter.
- type: int
+ type: float
name:
description:
- Name query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationSets
description: Complete reference of the GetApplicationSets API.
@@ -64,7 +64,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications.py b/ansible_collections/cisco/dnac/plugins/modules/applications.py
index 43c18a12b..069d9a51f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/applications.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications.py
@@ -32,29 +32,6 @@ options:
description: Id Ref.
type: str
type: dict
- indicativeNetworkIdentity:
- description: Applications's indicativeNetworkIdentity.
- elements: dict
- suboptions:
- displayName:
- description: DisplayName.
- type: str
- id:
- description: Id.
- type: str
- lowerPort:
- description: LowerPort.
- type: int
- ports:
- description: Ports.
- type: str
- protocol:
- description: Protocol.
- type: str
- upperPort:
- description: UpperPort.
- type: int
- type: list
name:
description: Name.
type: str
@@ -133,22 +110,22 @@ options:
type: list
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy CreateApplication
description: Complete reference of the CreateApplication API.
link: https://developer.cisco.com/docs/dna-center/#!create-application
-- name: Cisco DNA Center documentation for Application Policy DeleteApplication
- description: Complete reference of the DeleteApplication API.
- link: https://developer.cisco.com/docs/dna-center/#!delete-application
+- name: Cisco DNA Center documentation for Application Policy DeleteApplication2
+ description: Complete reference of the DeleteApplication2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-application-2
- name: Cisco DNA Center documentation for Application Policy EditApplication
description: Complete reference of the EditApplication API.
link: https://developer.cisco.com/docs/dna-center/#!edit-application
notes:
- SDK Method used are
application_policy.ApplicationPolicy.create_application,
- application_policy.ApplicationPolicy.delete_application,
+ application_policy.ApplicationPolicy.delete_application2,
application_policy.ApplicationPolicy.edit_application,
- Paths used are
@@ -172,13 +149,6 @@ EXAMPLES = r"""
payload:
- applicationSet:
idRef: string
- indicativeNetworkIdentity:
- - displayName: string
- id: string
- lowerPort: 0
- ports: string
- protocol: string
- upperPort: 0
name: string
networkApplications:
- appProtocol: string
@@ -258,7 +228,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -266,7 +235,10 @@ dnac_response:
type: dict
sample: >
{
- "taskId": "string",
- "url": "string"
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/applications_count_info.py
index cf4eb7d8d..db3059772 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/applications_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetApplicationsCount
description: Complete reference of the GetApplicationsCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -57,7 +56,7 @@ dnac_response:
type: dict
sample: >
{
- "response": 0,
+ "response": "string",
"version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications_count_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/applications_count_v2_info.py
new file mode 100644
index 000000000..7becc30ff
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications_count_v2_info.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: applications_count_v2_info
+short_description: Information module for Applications Count V2
+description:
+- Get all Applications Count V2.
+- Get the number of all existing applications.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ scalableGroupType:
+ description:
+ - ScalableGroupType query parameter. Scalable group type to retrieve, valid value APPLICATION.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Application Policy GetApplicationCount
+ description: Complete reference of the GetApplicationCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-application-count
+notes:
+ - SDK Method used are
+ application_policy.ApplicationPolicy.get_application_count,
+
+ - Paths used are
+ get /dna/intent/api/v2/applications-count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Applications Count V2
+ cisco.dnac.applications_count_v2_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ scalableGroupType: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": 0,
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications_health_info.py b/ansible_collections/cisco/dnac/plugins/modules/applications_health_info.py
index 883fd5398..3080fcb55 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/applications_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications_health_info.py
@@ -36,11 +36,11 @@ options:
startTime:
description:
- StartTime query parameter. Starting epoch time in milliseconds of time window.
- type: int
+ type: float
endTime:
description:
- EndTime query parameter. Ending epoch time in milliseconds of time window.
- type: int
+ type: float
applicationHealth:
description:
- >
@@ -52,20 +52,20 @@ options:
- >
Offset query parameter. The offset of the first application in the returned data (optionally used with
siteId only).
- type: int
+ type: float
limit:
description:
- >
Limit query parameter. The max number of application entries in returned data 1, 1000 (optionally used with
siteId only).
- type: int
+ type: float
applicationName:
description:
- ApplicationName query parameter. The name of the application to get information on.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Applications Applications
description: Complete reference of the Applications API.
@@ -102,7 +102,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications_info.py b/ansible_collections/cisco/dnac/plugins/modules/applications_info.py
index 6570ca75d..e255fd436 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/applications_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications_info.py
@@ -22,25 +22,25 @@ options:
offset:
description:
- Offset query parameter. The offset of the first application to be returned.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The maximum number of applications to be returned.
- type: int
+ type: float
name:
description:
- Name query parameter. Application's name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Application Policy GetApplications
- description: Complete reference of the GetApplications API.
- link: https://developer.cisco.com/docs/dna-center/#!get-applications
+- name: Cisco DNA Center documentation for Application Policy GetApplications2
+ description: Complete reference of the GetApplications2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-applications-2
notes:
- SDK Method used are
- application_policy.ApplicationPolicy.get_applications,
+ application_policy.ApplicationPolicy.get_applications2,
- Paths used are
get /dna/intent/api/v1/applications,
@@ -64,7 +64,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -76,16 +75,6 @@ dnac_response:
{
"id": "string",
"name": "string",
- "indicativeNetworkIdentity": [
- {
- "id": "string",
- "displayName": "string",
- "lowerPort": 0,
- "ports": "string",
- "protocol": "string",
- "upperPort": 0
- }
- ],
"networkApplications": [
{
"id": "string",
@@ -98,8 +87,8 @@ dnac_response:
"helpString": "string",
"longDescription": "string",
"name": "string",
- "popularity": 0,
- "rank": 0,
+ "popularity": "string",
+ "rank": "string",
"trafficClass": "string",
"serverName": "string",
"url": "string",
@@ -111,10 +100,10 @@ dnac_response:
{
"id": "string",
"displayName": "string",
- "lowerPort": 0,
+ "lowerPort": "string",
"ports": "string",
"protocol": "string",
- "upperPort": 0
+ "upperPort": "string"
}
],
"applicationSet": {
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications_v2.py b/ansible_collections/cisco/dnac/plugins/modules/applications_v2.py
new file mode 100644
index 000000000..5c5d1c2b1
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications_v2.py
@@ -0,0 +1,342 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: applications_v2
+short_description: Resource module for Applications V2
+description:
+- Manage operations create, update and delete of the resource Applications V2.
+- Create new custom application/s.
+- Delete existing custom application by id.
+- Edit the attributes of an existing application.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. Id of custom application to delete.
+ type: str
+ payload:
+ description: Applications V2's payload.
+ elements: dict
+ suboptions:
+ displayName:
+ description: Display name.
+ type: str
+ id:
+ description: Application id.
+ type: str
+ indicativeNetworkIdentity:
+ description: Applications V2's indicativeNetworkIdentity.
+ elements: dict
+ suboptions:
+ displayName:
+ description: Display name.
+ type: str
+ id:
+ description: Id.
+ type: str
+ lowerPort:
+ description: Lower port.
+ type: float
+ ports:
+ description: Ports.
+ type: str
+ protocol:
+ description: Protocol.
+ type: str
+ upperPort:
+ description: Upper port.
+ type: float
+ type: list
+ instanceId:
+ description: Instance id.
+ type: int
+ instanceVersion:
+ description: Instance version.
+ type: float
+ name:
+ description: Application name.
+ type: str
+ namespace:
+ description: Namespace.
+ type: str
+ networkApplications:
+ description: Applications V2's networkApplications.
+ elements: dict
+ suboptions:
+ appProtocol:
+ description: App protocol.
+ type: str
+ applicationSubType:
+        description: Application sub type, LEARNED for discovered applications, NONE
+          for NBAR and custom applications.
+ type: str
+ applicationType:
+        description: Application type, DEFAULT for NBAR applications, DEFAULT_MODIFIED
+          for modified NBAR applications, CUSTOM for custom applications.
+ type: str
+ categoryId:
+ description: Category id.
+ type: str
+ displayName:
+ description: Display name.
+ type: str
+ dscp:
+ description: Dscp.
+ type: str
+ engineId:
+ description: Engine id.
+ type: str
+ helpString:
+ description: Help string.
+ type: str
+ id:
+ description: Id.
+ type: str
+ ignoreConflict:
+ description: Ignore conflict, true or false.
+ type: bool
+ longDescription:
+ description: Long description.
+ type: str
+ name:
+ description: Application name.
+ type: str
+ popularity:
+ description: Popularity.
+ type: float
+ rank:
+        description: Rank, any value between 1 and 65535.
+ type: int
+ selectorId:
+ description: Selector id.
+ type: str
+ serverName:
+ description: Server name.
+ type: str
+ trafficClass:
+ description: Traffic class.
+ type: str
+ url:
+ description: Url.
+ type: str
+ type: list
+ networkIdentity:
+ description: Applications V2's networkIdentity.
+ elements: dict
+ suboptions:
+ displayName:
+ description: Display name.
+ type: str
+ id:
+ description: Id.
+ type: str
+ ipv4Subnet:
+ description: Ipv4 subnet.
+ elements: str
+ type: list
+ ipv6Subnet:
+ description: Ipv6 subnet.
+ elements: dict
+ type: list
+ lowerPort:
+ description: Lower port.
+ type: float
+ ports:
+ description: Ports.
+ type: str
+ protocol:
+ description: Protocol.
+ type: str
+ upperPort:
+ description: Upper port.
+ type: float
+ type: list
+ parentScalableGroup:
+ description: Applications V2's parentScalableGroup.
+ suboptions:
+ idRef:
+ description: Id reference to parent application set.
+ type: str
+ type: dict
+ qualifier:
+ description: Qualifier, valid value application.
+ type: str
+ scalableGroupExternalHandle:
+ description: Scalable group external handle, should be equal to Application
+ name.
+ type: str
+ scalableGroupType:
+ description: Scalable group type, valid value APPLICATION.
+ type: str
+ type:
+ description: Type, valid value scalablegroup.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Application Policy CreateApplications
+ description: Complete reference of the CreateApplications API.
+ link: https://developer.cisco.com/docs/dna-center/#!create-applications
+- name: Cisco DNA Center documentation for Application Policy DeleteApplication
+ description: Complete reference of the DeleteApplication API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-application
+- name: Cisco DNA Center documentation for Application Policy EditApplications
+ description: Complete reference of the EditApplications API.
+ link: https://developer.cisco.com/docs/dna-center/#!edit-applications
+notes:
+ - SDK Method used are
+ application_policy.ApplicationPolicy.create_applications,
+ application_policy.ApplicationPolicy.delete_application,
+ application_policy.ApplicationPolicy.edit_applications,
+
+ - Paths used are
+ post /dna/intent/api/v2/applications,
+ delete /dna/intent/api/v2/applications/{id},
+ put /dna/intent/api/v2/applications,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.applications_v2:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - displayName: string
+ id: string
+ indicativeNetworkIdentity:
+ - displayName: string
+ id: string
+ lowerPort: 0
+ ports: string
+ protocol: string
+ upperPort: 0
+ instanceId: 0
+ instanceVersion: 0
+ name: string
+ namespace: string
+ networkApplications:
+ - appProtocol: string
+ applicationSubType: string
+ applicationType: string
+ categoryId: string
+ displayName: string
+ dscp: string
+ engineId: string
+ helpString: string
+ id: string
+ ignoreConflict: true
+ longDescription: string
+ name: string
+ popularity: 0
+ rank: 0
+ selectorId: string
+ serverName: string
+ trafficClass: string
+ url: string
+ networkIdentity:
+ - displayName: string
+ id: string
+ ipv4Subnet:
+ - string
+ ipv6Subnet:
+ - {}
+ lowerPort: 0
+ ports: string
+ protocol: string
+ upperPort: 0
+ parentScalableGroup:
+ idRef: string
+ qualifier: string
+ scalableGroupExternalHandle: string
+ scalableGroupType: string
+ type: string
+
+- name: Create
+ cisco.dnac.applications_v2:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - indicativeNetworkIdentity:
+ - ipv4Subnet:
+ - string
+ ipv6Subnet:
+ - string
+ lowerPort: 0
+ ports: string
+ protocol: string
+ upperPort: 0
+ name: string
+ networkApplications:
+ - appProtocol: string
+ applicationType: string
+ categoryId: string
+ dscp: string
+ engineId: 0
+ helpString: string
+ ignoreConflict: true
+ rank: 0
+ serverName: string
+ trafficClass: string
+ type: string
+ url: string
+ networkIdentity:
+ - ipv4Subnet:
+ - string
+ lowerPort: 0
+ ports: string
+ protocol: string
+ upperPort: 0
+ parentScalableGroup:
+ idRef: string
+ scalableGroupType: string
+ type: string
+
+- name: Delete by id
+ cisco.dnac.applications_v2:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/applications_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/applications_v2_info.py
new file mode 100644
index 000000000..7e08d4d70
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/applications_v2_info.py
@@ -0,0 +1,152 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: applications_v2_info
+short_description: Information module for Applications V2
+description:
+- Get all Applications V2.
+- Get application/s by offset/limit or by name.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ attributes:
+ description:
+ - Attributes query parameter. Attributes to retrieve, valid value application.
+ type: str
+ name:
+ description:
+ - Name query parameter. The application name.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. The starting point or index from where the paginated results should begin.
+ type: float
+ limit:
+ description:
+ - >
+ Limit query parameter. The limit which is the maximum number of items to include in a single page of
+ results, max value 500.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Application Policy GetApplications
+ description: Complete reference of the GetApplications API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-applications
+notes:
+ - SDK Method used are
+ application_policy.ApplicationPolicy.get_applications,
+
+ - Paths used are
+ get /dna/intent/api/v2/applications,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Applications V2
+ cisco.dnac.applications_v2_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ attributes: string
+ name: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "instanceId": 0,
+ "displayName": "string",
+ "instanceVersion": 0,
+ "identitySource": {
+ "id": "string",
+ "type": "string"
+ },
+ "indicativeNetworkIdentity": [
+ {
+ "id": "string",
+ "displayName": "string",
+ "lowerPort": 0,
+ "ports": "string",
+ "protocol": "string",
+ "upperPort": 0
+ }
+ ],
+ "name": "string",
+ "namespace": "string",
+ "networkApplications": [
+ {
+ "id": "string",
+ "appProtocol": "string",
+ "applicationSubType": "string",
+ "applicationType": "string",
+ "categoryId": "string",
+ "displayName": "string",
+ "dscp": "string",
+ "engineId": "string",
+ "helpString": "string",
+ "longDescription": "string",
+ "name": "string",
+ "popularity": 0,
+ "rank": 0,
+ "selectorId": "string",
+ "serverName": "string",
+ "url": "string",
+ "trafficClass": "string"
+ }
+ ],
+ "networkIdentity": [
+ {
+ "id": "string",
+ "displayName": "string",
+ "ipv4Subnet": [
+ "string"
+ ],
+ "ipv6Subnet": [
+ {}
+ ],
+ "lowerPort": 0,
+ "ports": "string",
+ "protocol": "string",
+ "upperPort": 0
+ }
+ ],
+ "parentScalableGroup": {
+ "id": "string",
+ "idRef": "string"
+ },
+ "qualifier": "string",
+ "scalableGroupExternalHandle": "string",
+ "scalableGroupType": "string",
+ "type": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/assign_device_to_site.py b/ansible_collections/cisco/dnac/plugins/modules/assign_device_to_site.py
index c1da97fe9..9d3e5ad57 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/assign_device_to_site.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/assign_device_to_site.py
@@ -11,7 +11,7 @@ short_description: Resource module for Assign Device To Site
description:
- Manage operation create of the resource Assign Device To Site.
- Assigns unassigned devices to a site. This API does not move assigned devices to other sites.
-version_added: '6.5.0'
+version_added: '6.0.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -21,25 +21,26 @@ options:
elements: dict
suboptions:
ip:
- description: Device ip (eg 10.104.240.64).
+ description: Device IP. It can be either IPv4 or IPv6. IPV4 e.g., 10.104.240.64.
+    IPv6 e.g., 2001:420:284:2004:4:181:500:183.
type: str
type: list
headers:
description: Additional headers.
type: dict
siteId:
- description: SiteId path parameter. Site id to which site the device to assign.
+ description: SiteId path parameter. Site Id where device(s) needs to be assigned.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for AssignDevicesToSite
+- name: Cisco DNA Center documentation for Sites AssignDevicesToSite
description: Complete reference of the AssignDevicesToSite API.
link: https://developer.cisco.com/docs/dna-center/#!assign-devices-to-site
notes:
- SDK Method used are
- ..assign_devices_to_site,
+ sites.Sites.assign_devices_to_site,
- Paths used are
post /dna/intent/api/v1/assign-device-to-site/{siteId}/device,
@@ -62,7 +63,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/associate_site_to_network_profile.py b/ansible_collections/cisco/dnac/plugins/modules/associate_site_to_network_profile.py
index b01851556..57beda1c5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/associate_site_to_network_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/associate_site_to_network_profile.py
@@ -23,8 +23,8 @@ options:
description: SiteId path parameter. Site Id to be associated.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Site Design Associate
description: Complete reference of the Associate API.
@@ -52,7 +52,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/auth_token_create.py b/ansible_collections/cisco/dnac/plugins/modules/auth_token_create.py
new file mode 100644
index 000000000..0e4af137a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/auth_token_create.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: auth_token_create
+short_description: Resource module for Auth Token Create
+description:
+- Manage operation create of the resource Auth Token Create.
+- >
+ API to obtain an access token, which remains valid for 1 hour. The token obtained using this API is required to be
+  set as the value of the X-Auth-Token HTTP header for all API calls to Cisco DNA Center.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options: {}
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Authentication AuthenticationAPI
+ description: Complete reference of the AuthenticationAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!authentication-api
+notes:
+ - SDK Method used are
+ authentication.Authentication.authentication_api,
+
+ - Paths used are
+ post /dna/system/api/v1/auth/token,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.auth_token_create:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "Token": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate.py b/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate.py
index 15cb6d2b6..7f0cdeb47 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate.py
@@ -30,8 +30,8 @@ options:
     description: PkPassword query parameter. Private Key Password.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Authentication Management ImportCertificate
description: Complete reference of the ImportCertificate API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
pkPassword: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate_p12.py b/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate_p12.py
index 23d98d277..2ca313724 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate_p12.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/authentication_import_certificate_p12.py
@@ -30,8 +30,8 @@ options:
     description: PkPassword query parameter. Private Key Password.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Authentication Management ImportCertificateP12
description: Complete reference of the ImportCertificateP12 API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
pkPassword: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers.py b/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers.py
new file mode 100644
index 000000000..10be5c25a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers.py
@@ -0,0 +1,255 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: authentication_policy_servers
+short_description: Resource module for Authentication Policy Servers
+description:
+- Manage operations create, update and delete of the resource Authentication Policy Servers.
+- >
+ API to add AAA/ISE server access configuration. Protocol can be configured as either RADIUS OR TACACS OR
+ RADIUS_TACACS. If configuring Cisco ISE server, after configuration, use 'Cisco ISE Server Integration Status'
+  Intent API to check the integration status. Based on the integration status, if required, use 'Accept Cisco ISE Server
+ Certificate for Cisco ISE Server Integration' Intent API to accept the Cisco ISE certificate for Cisco ISE server
+ integration, then use again 'Cisco ISE Server Integration Status' Intent API to check the integration status.
+- API to delete AAA/ISE server access configuration.
+- >
+ API to edit AAA/ISE server access configuration. After edit, use 'Cisco ISE Server Integration Status' Intent API
+ to check the integration status.
+version_added: '3.1.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ accountingPort:
+ description: Accounting port of RADIUS server (readonly). The range is from 1 to
+ 65535. E.g. 1813.
+ type: int
+ authenticationPort:
+ description: Authentication port of RADIUS server (readonly). The range is from
+ 1 to 65535. E.g. 1812.
+ type: int
+ ciscoIseDtos:
+ description: Authentication Policy Servers's ciscoIseDtos.
+ elements: dict
+ suboptions:
+ description:
+ description: Description about the Cisco ISE server.
+ type: str
+ fqdn:
+ description: Fully-qualified domain name of the Cisco ISE server (readonly).
+ E.g. Xi-62.my.com.
+ type: str
+ ipAddress:
+ description: IP Address of the Cisco ISE Server (readonly).
+ type: str
+ password:
+ description: Password of the Cisco ISE server.
+ type: str
+ sshkey:
+ description: SSH key of the Cisco ISE server.
+ type: str
+ subscriberName:
+ description: Subscriber name of the Cisco ISE server (readonly). E.g. Pxgrid_client_1662589467.
+ type: str
+ userName:
+ description: User name of the Cisco ISE server.
+ type: str
+ type: list
+ encryptionKey:
+ description: Encryption key used to encrypt shared secret (readonly).
+ type: str
+ encryptionScheme:
+ description: Type of encryption scheme for additional security (readonly).
+ type: str
+ externalCiscoIseIpAddrDtos:
+ description: Authentication Policy Servers's externalCiscoIseIpAddrDtos.
+ elements: dict
+ suboptions:
+ externalCiscoIseIpAddresses:
+ description: Authentication Policy Servers's externalCiscoIseIpAddresses.
+ elements: dict
+ suboptions:
+ externalIpAddress:
+ description: External IP Address.
+ type: str
+ type: list
+ type:
+ description: Type.
+ type: str
+ type: list
+ id:
+ description: Id path parameter. Authentication and Policy Server Identifier. Use
+ 'Get Authentication and Policy Servers' intent API to find the identifier.
+ type: str
+ ipAddress:
+ description: IP address of authentication and policy server (readonly).
+ type: str
+ isIseEnabled:
+ description: Value true for Cisco ISE Server (readonly). Default value is false.
+ type: bool
+ messageKey:
+ description: Message key used to encrypt shared secret (readonly).
+ type: str
+ port:
+ description: Port of TACACS server (readonly). The range is from 1 to 65535.
+ type: int
+ protocol:
+ description: Type of protocol for authentication and policy server. If already saved
+ with RADIUS, can update to RADIUS_TACACS. If already saved with TACACS, can update
+ to RADIUS_TACACS.
+ type: str
+ pxgridEnabled:
+ description: Value true for enable, false for disable. Default value is true.
+ type: bool
+ retries:
+ description: Number of communication retries between devices and authentication
+ and policy server. The range is from 1 to 3.
+ type: str
+ role:
+ description: Role of authentication and policy server (readonly). E.g. Primary,
+ secondary.
+ type: str
+ sharedSecret:
+ description: Shared secret between devices and authentication and policy server
+ (readonly).
+ type: str
+ timeoutSeconds:
+ description: Number of seconds before timing out between devices and authentication
+ and policy server. The range is from 2 to 20.
+ type: str
+ useDnacCertForPxgrid:
+ description: Value true to use DNAC certificate for Pxgrid. Default value is false.
+ type: bool
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for System Settings AddAuthenticationAndPolicyServerAccessConfiguration
+ description: Complete reference of the AddAuthenticationAndPolicyServerAccessConfiguration API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-authentication-and-policy-server-access-configuration
+- name: Cisco DNA Center documentation for System Settings DeleteAuthenticationAndPolicyServerAccessConfiguration
+ description: Complete reference of the DeleteAuthenticationAndPolicyServerAccessConfiguration API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-authentication-and-policy-server-access-configuration
+- name: Cisco DNA Center documentation for System Settings EditAuthenticationAndPolicyServerAccessConfiguration
+ description: Complete reference of the EditAuthenticationAndPolicyServerAccessConfiguration API.
+ link: https://developer.cisco.com/docs/dna-center/#!edit-authentication-and-policy-server-access-configuration
+notes:
+ - SDK Method used are
+ system_settings.SystemSettings.add_authentication_and_policy_server_access_configuration,
+ system_settings.SystemSettings.delete_authentication_and_policy_server_access_configuration,
+ system_settings.SystemSettings.edit_authentication_and_policy_server_access_configuration,
+
+ - Paths used are
+ post /dna/intent/api/v1/authentication-policy-servers,
+ delete /dna/intent/api/v1/authentication-policy-servers/{id},
+ put /dna/intent/api/v1/authentication-policy-servers/{id},
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.authentication_policy_servers:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ accountingPort: 0
+ authenticationPort: 0
+ ciscoIseDtos:
+ - description: string
+ fqdn: string
+ ipAddress: string
+ password: string
+ sshkey: string
+ subscriberName: string
+ userName: string
+ encryptionKey: string
+ encryptionScheme: string
+ externalCiscoIseIpAddrDtos:
+ - externalCiscoIseIpAddresses:
+ - externalIpAddress: string
+ type: string
+ ipAddress: string
+ isIseEnabled: true
+ messageKey: string
+ port: 0
+ protocol: string
+ pxgridEnabled: true
+ retries: string
+ role: string
+ sharedSecret: string
+ timeoutSeconds: string
+ useDnacCertForPxgrid: true
+
+- name: Delete by id
+ cisco.dnac.authentication_policy_servers:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+- name: Update by id
+ cisco.dnac.authentication_policy_servers:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ accountingPort: 0
+ authenticationPort: 0
+ ciscoIseDtos:
+ - description: string
+ fqdn: string
+ ipAddress: string
+ password: string
+ sshkey: string
+ subscriberName: string
+ userName: string
+ encryptionKey: string
+ encryptionScheme: string
+ externalCiscoIseIpAddrDtos:
+ - externalCiscoIseIpAddresses:
+ - externalIpAddress: string
+ type: string
+ id: string
+ ipAddress: string
+ isIseEnabled: true
+ messageKey: string
+ port: 0
+ protocol: string
+ pxgridEnabled: true
+ retries: string
+ role: string
+ sharedSecret: string
+ timeoutSeconds: string
+ useDnacCertForPxgrid: true
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "taskId": "string",
+ "url": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers_info.py b/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers_info.py
index d0366ecc2..ced3c63f0 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/authentication_policy_servers_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Authentication Policy Servers
description:
- Get all Authentication Policy Servers.
- API to get Authentication and Policy Servers.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -25,15 +25,15 @@ options:
type: bool
state_:
description:
- - State query parameter. Valid values are INPROGRESS, ACTIVE, DELETED, RBAC-FAILURE, FAILED.
+ - State query parameter. Valid values are ACTIVE, INACTIVE, RBAC_SUCCESS, RBAC_FAILURE, DELETED, FAILED, INPROGRESS.
type: str
role:
description:
- Role query parameter. Authentication and Policy Server Role (Example primary, secondary).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for System Settings GetAuthenticationAndPolicyServers
description: Complete reference of the GetAuthenticationAndPolicyServers API.
@@ -64,7 +64,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -79,8 +78,8 @@ dnac_response:
"protocol": "string",
"role": "string",
"port": 0,
- "authenticationPort": "string",
- "accountingPort": "string",
+ "authenticationPort": 0,
+ "accountingPort": 0,
"retries": 0,
"timeoutSeconds": 0,
"isIseEnabled": true,
@@ -115,7 +114,9 @@ dnac_response:
"encryptionKey": "string",
"useDnacCertForPxgrid": true,
"iseEnabled": true,
- "pxgridEnabled": true
+ "pxgridEnabled": true,
+ "rbacUuid": "string",
+ "multiDnacEnabled": true
}
]
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/buildings_planned_access_points_info.py b/ansible_collections/cisco/dnac/plugins/modules/buildings_planned_access_points_info.py
index 20aaae21a..8e042a25e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/buildings_planned_access_points_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/buildings_planned_access_points_info.py
@@ -26,18 +26,18 @@ options:
limit:
description:
- Limit query parameter.
- type: int
+ type: float
offset:
description:
- Offset query parameter.
- type: int
+ type: float
radios:
description:
     - Radios query parameter. Include planned radio details.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetPlannedAccessPointsForBuilding
description: Complete reference of the GetPlannedAccessPointsForBuilding API.
@@ -69,7 +69,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool.py b/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool.py
index 53a47bdc9..962bbf83d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool.py
@@ -10,16 +10,17 @@ module: business_sda_hostonboarding_ssid_ippool
short_description: Resource module for Business Sda Hostonboarding Ssid Ippool
description:
- Manage operations create and update of the resource Business Sda Hostonboarding Ssid Ippool.
-- Add SSID to IP Pool Mapping.
-- Update SSID to IP Pool Mapping.
+- >
+ Update SSID mapping to a VLAN. The request does not need to include all the SSIDs currently mapped to a VLAN; it
+ can include only the SSIDs that require update. Note ECA is not supported.
+- >
+ Update SSID mapping to a VLAN. The request does not need to include all the SSIDs currently mapped to a VLAN; it
+ can include only the SSIDs that require update. Note ECA is not supported.
version_added: '4.0.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
- headers:
- description: Additional headers.
- type: dict
scalableGroupName:
description: Scalable Group Name.
type: str
@@ -34,8 +35,8 @@ options:
description: VLAN Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Fabric Wireless AddSSIDToIPPoolMapping
description: Complete reference of the AddSSIDToIPPoolMapping API.
@@ -65,7 +66,6 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
- headers: '{{my_headers | from_json}}'
scalableGroupName: string
siteNameHierarchy: string
ssidNames:
@@ -89,18 +89,15 @@ EXAMPLES = r"""
vlanName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
+ type: dict
sample: >
- [
- {
- "executionId": "string",
- "executionStatusURL": "string",
- "message": "string"
- }
- ]
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool_info.py b/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool_info.py
index 7de5bd47f..6522ef615 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/business_sda_hostonboarding_ssid_ippool_info.py
@@ -28,8 +28,8 @@ options:
     - SiteNameHierarchy query parameter. Site Name Hierarchy.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Fabric Wireless GetSSIDToIPPoolMapping
description: Complete reference of the GetSSIDToIPPoolMapping API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/business_sda_virtual_network_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/business_sda_virtual_network_summary_info.py
index beb749da6..cd1738536 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/business_sda_virtual_network_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/business_sda_virtual_network_summary_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Business Sda Virtual Network Summary
description:
- Get all Business Sda Virtual Network Summary.
- Get Virtual Network Summary.
-version_added: '6.5.0'
+version_added: '6.0.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -24,15 +24,15 @@ options:
- SiteNameHierarchy query parameter. Complete fabric siteNameHierarchy Path.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for GetVirtualNetworkSummary
+- name: Cisco DNA Center documentation for SDA GetVirtualNetworkSummary
description: Complete reference of the GetVirtualNetworkSummary API.
link: https://developer.cisco.com/docs/dna-center/#!get-virtual-network-summary
notes:
- SDK Method used are
- ..get_virtual_network_summary,
+ sda.Sda.get_virtual_network_summary,
- Paths used are
get /dna/intent/api/v1/business/sda/virtual-network/summary,
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -65,11 +64,16 @@ dnac_response:
"virtualNetworkCount": 0,
"virtualNetworkSummary": [
{
+ "virtualNetworkContextId": "string",
+ "virtualNetworkId": "string",
"siteNameHierarchy": "string",
- "virtualNetworkName": "string"
+ "virtualNetworkName": "string",
+ "layer3Instance": 0,
+ "virtualNetworkStatus": "string"
}
],
"status": "string",
- "description": "string"
+ "description": "string",
+ "executionId": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_create.py b/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_create.py
index 9b1619f90..a295936c6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_create.py
@@ -17,14 +17,14 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
deviceName:
- description: EWLC Device Name.
+ description: WLC Device Name.
type: str
siteNameHierarchy:
- description: Site Name Hierarchy.
+ description: Fabric Site Name Hierarchy.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Fabric Wireless AddWLCToFabricDomain
description: Complete reference of the AddWLCToFabricDomain API.
@@ -53,7 +53,6 @@ EXAMPLES = r"""
siteNameHierarchy: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,7 +61,7 @@ dnac_response:
sample: >
{
"executionId": "string",
- "executionStatusURL": "string",
+ "executionStatusUrl": "string",
"message": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_delete.py b/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_delete.py
index 3231a6e23..8ddf96ec7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/business_sda_wireless_controller_delete.py
@@ -23,8 +23,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Fabric Wireless RemoveWLCFromFabricDomain
description: Complete reference of the RemoveWLCFromFabricDomain API.
diff --git a/ansible_collections/cisco/dnac/plugins/modules/cli_credential.py b/ansible_collections/cisco/dnac/plugins/modules/cli_credential.py
index 9a7c0e489..59e6f3fbb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/cli_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/cli_credential.py
@@ -18,35 +18,35 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
comments:
- description: Cli Credential's comments.
+ description: Comments to identify the CLI credential.
type: str
credentialType:
- description: Cli Credential's credentialType.
+ description: Credential type to identify the application that uses the CLI credential.
type: str
description:
- description: Cli Credential's description.
+ description: Description for CLI Credentials.
type: str
enablePassword:
- description: Cli Credential's enablePassword.
+ description: CLI Enable Password.
type: str
id:
- description: Cli Credential's id.
+ description: Id of the CLI Credential in UUID format.
type: str
instanceTenantId:
- description: Cli Credential's instanceTenantId.
+ description: Deprecated.
type: str
instanceUuid:
- description: Cli Credential's instanceUuid.
+ description: Deprecated.
type: str
password:
- description: Cli Credential's password.
+ description: CLI Password.
type: str
username:
- description: Cli Credential's username.
+ description: CLI Username.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateCLICredentials
description: Complete reference of the CreateCLICredentials API.
@@ -107,7 +107,6 @@ EXAMPLES = r"""
username: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/client_detail_info.py b/ansible_collections/cisco/dnac/plugins/modules/client_detail_info.py
index 5cc6ca6d9..01010ac50 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/client_detail_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/client_detail_info.py
@@ -19,17 +19,17 @@ options:
headers:
description: Additional headers.
type: dict
- timestamp:
- description:
- - Timestamp query parameter. Epoch time(in milliseconds) when the Client health data is required.
- type: str
macAddress:
description:
- MacAddress query parameter. MAC Address of the client.
type: str
+ timestamp:
+ description:
+ - Timestamp query parameter. Epoch time(in milliseconds) when the Client health data is required.
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Clients GetClientDetail
description: Complete reference of the GetClientDetail API.
@@ -54,12 +54,11 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
- timestamp: string
macAddress: string
+ timestamp: 0
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -70,12 +69,20 @@ dnac_response:
"detail": {
"id": "string",
"connectionStatus": "string",
+ "tracked": "string",
"hostType": "string",
- "userId": {},
+ "userId": "string",
+ "duid": "string",
+ "identifier": "string",
"hostName": "string",
- "hostOs": {},
- "hostVersion": {},
+ "hostOs": "string",
+ "hostVersion": "string",
"subType": "string",
+ "firmwareVersion": "string",
+ "deviceVendor": "string",
+ "deviceForm": "string",
+ "salesCode": "string",
+ "countryCode": "string",
"lastUpdated": 0,
"healthScore": [
{
@@ -91,57 +98,129 @@ dnac_response:
],
"authType": "string",
"vlanId": 0,
+ "l3VirtualNetwork": "string",
+ "l2VirtualNetwork": "string",
"vnid": 0,
+ "upnId": "string",
+ "upnName": "string",
"ssid": "string",
"frequency": "string",
"channel": "string",
- "apGroup": {},
- "location": {},
+ "apGroup": "string",
+ "sgt": "string",
+ "location": "string",
"clientConnection": "string",
"connectedDevice": [
- {}
+ {
+ "type": "string",
+ "name": "string",
+ "mac": "string",
+ "id": "string",
+ "ip address": "string",
+ "mgmtIp": "string",
+ "band": "string",
+ "mode": "string"
+ }
],
"issueCount": 0,
"rssi": "string",
- "avgRssi": {},
+ "rssiThreshold": "string",
+ "rssiIsInclude": "string",
+ "avgRssi": "string",
"snr": "string",
- "avgSnr": {},
+ "snrThreshold": "string",
+ "snrIsInclude": "string",
+ "avgSnr": "string",
"dataRate": "string",
"txBytes": "string",
"rxBytes": "string",
- "dnsSuccess": {},
- "dnsFailure": {},
+ "dnsResponse": "string",
+ "dnsRequest": "string",
"onboarding": {
- "averageRunDuration": {},
- "maxRunDuration": {},
- "averageAssocDuration": {},
- "maxAssocDuration": {},
- "averageAuthDuration": {},
- "maxAuthDuration": {},
- "averageDhcpDuration": {},
- "maxDhcpDuration": {},
+ "averageRunDuration": "string",
+ "maxRunDuration": "string",
+ "averageAssocDuration": "string",
+ "maxAssocDuration": "string",
+ "averageAuthDuration": "string",
+ "maxAuthDuration": "string",
+ "averageDhcpDuration": "string",
+ "maxDhcpDuration": "string",
"aaaServerIp": "string",
- "dhcpServerIp": {},
- "authDoneTime": {},
- "assocDoneTime": {},
- "dhcpDoneTime": {},
+ "dhcpServerIp": "string",
+ "authDoneTime": 0,
+ "assocDoneTime": 0,
+ "dhcpDoneTime": 0,
"assocRootcauseList": [
- {}
+ "string"
],
"aaaRootcauseList": [
- {}
+ "string"
],
"dhcpRootcauseList": [
- {}
+ "string"
],
"otherRootcauseList": [
- {}
+ "string"
+ ],
+ "latestRootCauseList": [
+ "string"
]
},
"clientType": "string",
- "onboardingTime": {},
- "port": {},
- "iosCapable": true
+ "onboardingTime": 0,
+ "port": "string",
+ "iosCapable": true,
+ "usage": 0,
+ "linkSpeed": 0,
+ "linkThreshold": "string",
+ "remoteEndDuplexMode": "string",
+ "txLinkError": 0,
+ "rxLinkError": 0,
+ "txRate": 0,
+ "rxRate": 0,
+ "rxRetryPct": "string",
+ "versionTime": 0,
+ "dot11Protocol": "string",
+ "slotId": 0,
+ "dot11ProtocolCapability": "string",
+ "privateMac": true,
+ "dhcpServerIp": "string",
+ "aaaServerIp": "string",
+ "aaaServerTransaction": 0,
+ "aaaServerFailedTransaction": 0,
+ "aaaServerSuccessTransaction": 0,
+ "aaaServerLatency": 0,
+ "aaaServerMABLatency": 0,
+ "aaaServerEAPLatency": 0,
+ "dhcpServerTransaction": 0,
+ "dhcpServerFailedTransaction": 0,
+ "dhcpServerSuccessTransaction": 0,
+ "dhcpServerLatency": 0,
+ "dhcpServerDOLatency": 0,
+ "dhcpServerRALatency": 0,
+ "maxRoamingDuration": "string",
+ "upnOwner": "string",
+ "connectedUpn": "string",
+ "connectedUpnOwner": "string",
+ "connectedUpnId": "string",
+ "isGuestUPNEndpoint": true,
+ "wlcName": "string",
+ "wlcUuid": "string",
+ "sessionDuration": "string",
+ "intelCapable": true,
+ "hwModel": "string",
+ "powerType": "string",
+ "modelName": "string",
+ "bridgeVMMode": "string",
+ "dhcpNakIp": "string",
+ "dhcpDeclineIp": "string",
+ "portDescription": "string",
+ "latencyVoice": 0,
+ "latencyVideo": 0,
+ "latencyBg": 0,
+ "latencyBe": 0,
+ "trustScore": "string",
+ "trustDetails": "string"
},
"connectionInfo": {
"hostType": "string",
@@ -164,31 +243,61 @@ dnac_response:
"id": "string",
"description": "string",
"deviceType": "string",
- "platformId": {},
- "family": {},
+ "platformId": "string",
+ "family": "string",
"ip": "string",
- "softwareVersion": {},
- "userId": {},
+ "ipv6": [
+ "string"
+ ],
+ "softwareVersion": "string",
+ "userId": "string",
"nodeType": "string",
- "radioFrequency": {},
- "clients": {},
- "count": {},
+ "radioFrequency": "string",
+ "clients": 0,
+ "count": 0,
"healthScore": 0,
"level": 0,
- "fabricGroup": {},
- "connectedDevice": {}
+ "fabricGroup": "string",
+ "fabricRole": [
+ "string"
+ ],
+ "connectedDevice": "string",
+ "stackType": "string"
}
],
"links": [
{
"source": "string",
"linkStatus": "string",
+ "sourceLinkStatus": "string",
+ "targetLinkStatus": "string",
"label": [
"string"
],
"target": "string",
- "id": {},
- "portUtilization": {}
+ "id": "string",
+ "portUtilization": 0,
+ "sourceInterfaceName": "string",
+ "targetInterfaceName": "string",
+ "sourceDuplexInfo": "string",
+ "targetDuplexInfo": "string",
+ "sourcePortMode": "string",
+ "targetPortMode": "string",
+ "sourceAdminStatus": "string",
+ "targetAdminStatus": "string",
+ "apRadioAdminStatus": "string",
+ "apRadioOperStatus": "string",
+ "sourcePortVLANInfo": "string",
+ "targetPortVLANInfo": "string",
+ "interfaceDetails": [
+ {
+ "clientMacAddress": "string",
+ "connectedDeviceIntName": "string",
+ "duplex": "string",
+ "portMode": "string",
+ "adminStatus": "string"
+ }
+ ]
}
]
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/client_enrichment_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/client_enrichment_details_info.py
index 54be2389a..e814ec874 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/client_enrichment_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/client_enrichment_details_info.py
@@ -22,8 +22,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Clients GetClientEnrichmentDetails
description: Complete reference of the GetClientEnrichmentDetails API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/client_health_info.py b/ansible_collections/cisco/dnac/plugins/modules/client_health_info.py
index f036bb5bd..e48cbe4d0 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/client_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/client_health_info.py
@@ -22,10 +22,10 @@ options:
timestamp:
description:
- Timestamp query parameter. Epoch time(in milliseconds) when the Client health data is required.
- type: str
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Clients GetOverallClientHealth
description: Complete reference of the GetOverallClientHealth API.
@@ -50,60 +50,58 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
- timestamp: string
+ timestamp: 0
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
- elements: dict
+ type: dict
sample: >
- [
- {
- "siteId": "string",
- "scoreDetail": [
- {
- "scoreCategory": {
- "scoreCategory": "string",
- "value": "string"
- },
- "scoreValue": 0,
- "clientCount": 0,
- "clientUniqueCount": 0,
- "starttime": 0,
- "endtime": 0,
- "scoreList": [
- {
- "scoreCategory": {
- "scoreCategory": "string",
- "value": "string"
- },
- "scoreValue": 0,
- "clientCount": 0,
- "clientUniqueCount": 0,
- "starttime": 0,
- "endtime": 0,
- "scoreList": [
- {
- "scoreCategory": {
- "scoreCategory": "string",
- "value": "string"
- },
- "scoreValue": 0,
- "clientCount": 0,
- "clientUniqueCount": {},
- "starttime": 0,
- "endtime": 0
- }
- ]
- }
- ]
- }
- ]
- }
- ]
+ {
+ "version": "string",
+ "response": [
+ {
+ "siteId": "string",
+ "scoreDetail": [
+ {
+ "scoreCategory": {
+ "scoreCategory": "string",
+ "value": "string"
+ },
+ "scoreValue": 0,
+ "clientCount": 0,
+ "clientUniqueCount": 0,
+ "maintenanceAffectedClientCount": 0,
+ "randomMacCount": 0,
+ "duidCount": 0,
+ "starttime": 0,
+ "endtime": 0,
+ "connectedToUdnCount": 0,
+ "unconnectedToUdnCount": 0,
+ "scoreList": [
+ {
+ "scoreCategory": {
+ "scoreCategory": "string",
+ "value": "string"
+ },
+ "scoreValue": 0,
+ "clientCount": 0,
+ "clientUniqueCount": 0,
+ "maintenanceAffectedClientCount": 0,
+ "randomMacCount": 0,
+ "duidCount": 0,
+ "starttime": 0,
+ "endtime": 0,
+ "connectedToUdnCount": 0,
+ "unconnectedToUdnCount": 0
+ }
+ ]
+ }
+ ]
+ }
+ ]
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/client_proximity_info.py b/ansible_collections/cisco/dnac/plugins/modules/client_proximity_info.py
index 7d30d7542..67eea124b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/client_proximity_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/client_proximity_info.py
@@ -32,16 +32,16 @@ options:
- >
Number_days query parameter. Number of days to track proximity until current date. Defaults and maximum up
to 14 days.
- type: int
+ type: float
time_resolution:
description:
- >
Time_resolution query parameter. Time interval (in minutes) to measure proximity. Defaults to 15 minutes
with a minimum 5 minutes.
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Clients ClientProximity
description: Complete reference of the ClientProximity API.
@@ -72,7 +72,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/command_runner_run_command.py b/ansible_collections/cisco/dnac/plugins/modules/command_runner_run_command.py
index 5b12002ed..342789b09 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/command_runner_run_command.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/command_runner_run_command.py
@@ -17,25 +17,26 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
commands:
- description: Command Runner Run Command's commands.
+ description: Commands to be executed.
elements: str
type: list
description:
- description: Command Runner Run Command's description.
+ description: Describe the details about the command request.
type: str
deviceUuids:
- description: Command Runner Run Command's deviceUuids.
+ description: Device Id of the device.
elements: str
type: list
name:
- description: Command Runner Run Command's name.
+    description: Name of the request, for example getshowrun or deviceinterfacestatusCli.
type: str
timeout:
- description: Command Runner Run Command's timeout.
+    description: The timeout value in seconds. If no timeout is provided, the command waits
+      up to 300 seconds.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Command Runner RunReadOnlyCommandsOnDevicesToGetTheirRealTimeConfiguration
description: Complete reference of the RunReadOnlyCommandsOnDevicesToGetTheirRealTimeConfiguration API.
@@ -68,7 +69,6 @@ EXAMPLES = r"""
timeout: 0
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/compliance_check_run.py b/ansible_collections/cisco/dnac/plugins/modules/compliance_check_run.py
index c244f41f8..89ce5c0e2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/compliance_check_run.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/compliance_check_run.py
@@ -17,19 +17,23 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
categories:
- description: Compliance Check Run's categories.
+    description: Category can have any value among 'INTENT' (mapped to compliance types
+      NETWORK_SETTINGS, NETWORK_PROFILE, WORKFLOW, FABRIC, APPLICATION_VISIBILITY),
+      'RUNNING_CONFIG', 'IMAGE', 'PSIRT', 'EOX', 'NETWORK_SETTINGS'.
elements: str
type: list
deviceUuids:
- description: Compliance Check Run's deviceUuids.
+ description: UUID of the device.
elements: str
type: list
triggerFull:
- description: TriggerFull flag.
+    description: If true, compliance will be triggered for all categories. If false,
+      compliance will be triggered only for the categories listed in the categories
+      section.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Compliance RunCompliance
description: Complete reference of the RunCompliance API.
@@ -60,7 +64,6 @@ EXAMPLES = r"""
triggerFull: true
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_by_id_info.py b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_by_id_info.py
index 99d967c02..86ae63e50 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_by_id_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_by_id_info.py
@@ -21,33 +21,28 @@ options:
type: dict
deviceUuid:
description:
- - DeviceUuid path parameter.
+ - DeviceUuid path parameter. Device Id.
type: str
category:
description:
- - Category query parameter. ComplianceCategory can have any value among 'INTENT', 'RUNNING_CONFIG'.
+ - >
+ Category query parameter. Category can have any value among 'INTENT', 'RUNNING_CONFIG' , 'IMAGE' , 'PSIRT' ,
+ 'DESIGN_OOD' , 'EOX' , 'NETWORK_SETTINGS'.
type: str
complianceType:
description:
- >
- ComplianceType query parameter. ComplianceType can have any value among 'NETWORK_DESIGN', 'NETWORK_PROFILE',
- 'FABRIC', 'POLICY', 'RUNNING_CONFIG'.
+ ComplianceType query parameter. Specify "Compliance type(s)" separated by commas. The Compliance type can be
+ 'APPLICATION_VISIBILITY', 'EOX', 'FABRIC', 'IMAGE', 'NETWORK_PROFILE', 'NETWORK_SETTINGS', 'PSIRT',
+ 'RUNNING_CONFIG', 'WORKFLOW'.
type: str
diffList:
description:
- DiffList query parameter. Diff list pass true to fetch the diff list.
type: bool
- key:
- description:
- - Key query parameter. Extended attribute key.
- type: str
- value:
- description:
- - Value query parameter. Extended attribute value.
- type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Compliance ComplianceDetailsOfDevice
description: Complete reference of the ComplianceDetailsOfDevice API.
@@ -75,13 +70,10 @@ EXAMPLES = r"""
category: string
complianceType: string
diffList: True
- key: string
- value: string
deviceUuid: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -89,55 +81,63 @@ dnac_response:
type: dict
sample: >
{
- "deviceUuid": "string",
- "version": "string",
"response": [
{
- "displayName": "string",
+ "deviceUuid": "string",
"complianceType": "string",
+ "status": "string",
+ "state": "string",
"lastSyncTime": 0,
- "additionalDataURL": "string",
+ "lastUpdateTime": 0,
"sourceInfoList": [
{
+ "name": "string",
+ "nameWithBusinessKey": "string",
+ "sourceEnum": "string",
+ "type": "string",
+ "appName": "string",
"count": 0,
- "displayName": "string",
+ "ackStatus": "string",
+ "businessKey": {
+ "resourceName": "string",
+ "businessKeyAttributes": {},
+ "otherAttributes": {
+ "name": "string",
+ "cfsAttributes": {
+ "displayName": "string",
+ "appName": "string",
+ "description": "string",
+ "source": "string",
+ "type": "string"
+ }
+ }
+ },
"diffList": [
{
- "displayName": "string",
- "moveFromPath": "string",
"op": "string",
"configuredValue": "string",
"intendedValue": "string",
- "path": "string",
+ "moveFromPath": "string",
"businessKey": "string",
- "extendedAttributes": "string"
+ "path": "string",
+ "extendedAttributes": {
+ "attributeDisplayName": "string",
+ "path": "string",
+ "dataConverter": "string",
+ "type": "string"
+ },
+ "ackStatus": "string",
+ "instanceUUID": "string",
+ "displayName": "string"
}
],
- "sourceEnum": "string",
- "licenseAppName": "string",
- "provisioningArea": "string",
- "networkProfileName": "string",
- "nameWithBusinessKey": "string",
- "appName": "string",
- "name": "string",
- "type": "string",
- "businessKey": {
- "otherAttributes": {
- "cfsAttributes": "string",
- "name": "string"
- },
- "resourceName": "string",
- "businessKeyAttributes": "string"
- }
+ "displayName": "string"
}
],
- "deviceUuid": "string",
- "message": "string",
- "state": "string",
- "status": "string",
- "category": "string",
- "lastUpdateTime": 0
+ "ackStatus": "string",
+ "version": "string"
}
- ]
+ ],
+ "deviceUuid": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_count_info.py
index a51939ed4..0f7dfa1ea 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_count_info.py
@@ -22,18 +22,19 @@ options:
complianceType:
description:
- >
- ComplianceType query parameter. ComplianceType can have any value among 'NETWORK_PROFILE', 'IMAGE',
- 'APPLICATION_VISIBILITY', 'FABRIC', 'PSIRT', 'RUNNING_CONFIG', 'WORKFLOW'.
+ ComplianceType query parameter. Specify "Compliance type(s)" separated by commas. The Compliance type can be
+ 'APPLICATION_VISIBILITY', 'EOX', 'FABRIC', 'IMAGE', 'NETWORK_PROFILE', 'NETWORK_SETTINGS', 'PSIRT',
+ 'RUNNING_CONFIG', 'WORKFLOW'.
type: str
complianceStatus:
description:
- >
- ComplianceStatus query parameter. Compliance status can have value among 'COMPLIANT', 'NON_COMPLIANT',
- 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
+ ComplianceStatus query parameter. Specify "Compliance status(es)" separated by commas. The Compliance status
+ can be 'COMPLIANT', 'NON_COMPLIANT', 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Compliance GetComplianceDetailCount
description: Complete reference of the GetComplianceDetailCount API.
@@ -63,7 +64,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_info.py
index bc06458f2..5ce6c666f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_details_info.py
@@ -22,30 +22,31 @@ options:
complianceType:
description:
- >
- ComplianceType query parameter. ComplianceType can have any value among 'NETWORK_PROFILE', 'IMAGE',
- 'APPLICATION_VISIBILITY', 'FABRIC', 'PSIRT', 'RUNNING_CONFIG', 'WORKFLOW'.
+ ComplianceType query parameter. Specify "Compliance type(s)" separated by commas. The Compliance type can be
+ 'NETWORK_PROFILE', 'IMAGE', 'FABRIC', 'APPLICATION_VISIBILITY', 'RUNNING_CONFIG',
+ 'NETWORK_SETTINGS', 'WORKFLOW', 'EOX'.
type: str
complianceStatus:
description:
- >
- ComplianceStatus query parameter. Compliance status can have value among 'COMPLIANT', 'NON_COMPLIANT',
- 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
+ ComplianceStatus query parameter. Specify "Compliance status(es)" separated by commas. The Compliance status can be
+ 'COMPLIANT', 'NON_COMPLIANT', 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
type: str
deviceUuid:
description:
- - DeviceUuid query parameter. Comma separated deviceUuids.
+ - DeviceUuid query parameter. Comma separated "Device Id(s)".
type: str
offset:
description:
- Offset query parameter. Offset/starting row.
- type: int
+ type: float
limit:
description:
- Limit query parameter. Number of records to be retrieved.
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Compliance GetComplianceDetail
description: Complete reference of the GetComplianceDetail API.
@@ -78,7 +79,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_info.py
index 38eaafb12..40a4e3afb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_info.py
@@ -24,24 +24,16 @@ options:
complianceStatus:
description:
- >
- ComplianceStatus query parameter. Compliance status can be have value among
- 'COMPLIANT','NON_COMPLIANT','IN_PROGRESS', 'ERROR'.
+ ComplianceStatus query parameter. Specify "Compliance status(es)" separated by commas. The Compliance status
+ can be 'COMPLIANT', 'NON_COMPLIANT', 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
type: str
deviceUuid:
description:
- - DeviceUuid query parameter. Comma separated deviceUuids.
+ - DeviceUuid query parameter. Comma separated 'Device Ids'.
type: str
- offset:
- description:
- - Offset query parameter. Offset/starting row.
- type: int
- limit:
- description:
- - Limit query parameter. Number of records to be retrieved.
- type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Compliance DeviceComplianceStatus
description: Complete reference of the DeviceComplianceStatus API.
@@ -73,8 +65,6 @@ EXAMPLES = r"""
headers: "{{my_headers | from_json}}"
complianceStatus: string
deviceUuid: string
- offset: 0
- limit: 0
register: result
- name: Get Compliance Device by id
@@ -91,7 +81,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -99,13 +88,12 @@ dnac_response:
type: dict
sample: >
{
- "version": "string",
"response": {
"deviceUuid": "string",
"complianceStatus": "string",
- "message": "string",
- "scheduleTime": 0,
- "lastUpdateTime": 0
- }
+ "lastUpdateTime": 0,
+ "scheduleTime": "string"
+ },
+ "version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_status_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_status_count_info.py
index 6c7897f94..9feeecf3c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/compliance_device_status_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/compliance_device_status_count_info.py
@@ -22,12 +22,12 @@ options:
complianceStatus:
description:
- >
- ComplianceStatus query parameter. Compliance status can have value among 'COMPLIANT', 'NON_COMPLIANT',
- 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
+ ComplianceStatus query parameter. Specify "Compliance status(es)" separated by commas. The Compliance status
+ can be 'COMPLIANT', 'NON_COMPLIANT', 'IN_PROGRESS', 'NOT_AVAILABLE', 'NOT_APPLICABLE', 'ERROR'.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Compliance GetComplianceStatusCount
description: Complete reference of the GetComplianceStatusCount API.
@@ -56,7 +56,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -65,6 +64,6 @@ dnac_response:
sample: >
{
"version": "string",
- "response": 0
+ "response": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_archive_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_archive_details_info.py
new file mode 100644
index 000000000..3ba95b469
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_archive_details_info.py
@@ -0,0 +1,135 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: configuration_archive_details_info
+short_description: Information module for Network Device Config
+description:
+- Get all Network Device Config.
+- >
+ Returns the historical device configurations (running configuration, startup configuration, and VLAN, if applicable) matching the
+ specified criteria.
+version_added: '6.15.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ deviceId:
+ description:
+ - >
+ DeviceId query parameter. Comma separated device ids, for example
+ cf35b0a1-407f-412f-b2f4-f0c3156695f9,aaa38191-0c22-4158-befd-779a09d7cec1. If no device id is provided, details
+ are fetched for all devices.
+ type: str
+ fileType:
+ description:
+ - FileType query parameter. Config File Type can be RUNNINGCONFIG or STARTUPCONFIG.
+ type: str
+ createdTime:
+ description:
+ - CreatedTime query parameter. Supported with logical filters GT, GTE, LT, LTE & BT; time in milliseconds (epoch format).
+ type: str
+ createdBy:
+ description:
+ - >
+ CreatedBy query parameter. Comma separated values for createdBy - SCHEDULED, USER, CONFIG_CHANGE_EVENT,
+ SCHEDULED_FIRST_TIME, DR_CALL_BACK, PRE_DEPLOY.
+ type: str
+ offset:
+ description:
+ - Offset query parameter.
+ type: float
+ limit:
+ description:
+ - Limit query parameter.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Configuration Archive GetConfigurationArchiveDetails
+ description: Complete reference of the GetConfigurationArchiveDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-configuration-archive-details
+notes:
+ - SDK Method used are
+ configuration_archive.ConfigurationArchive.get_configuration_archive_details,
+
+ - Paths used are
+ get /dna/intent/api/v1/network-device-config,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Network Device Config
+ cisco.dnac.configuration_archive_details_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ deviceId: string
+ fileType: string
+ createdTime: string
+ createdBy: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "ipAddress": "string",
+ "deviceId": "string",
+ "versions": [
+ {
+ "files": [
+ {
+ "fileType": "string",
+ "fileId": "string",
+ "downloadPath": "string"
+ }
+ ],
+ "createdBy": "string",
+ "configChangeType": "string",
+ "syslogConfigEventDto": [
+ {
+ "userName": "string",
+ "deviceUuid": "string",
+ "outOfBand": true,
+ "configMethod": "string",
+ "terminalName": "string",
+ "loginIpAddress": "string",
+ "processName": "string",
+ "syslogTime": 0
+ }
+ ],
+ "createdTime": 0,
+ "startupRunningStatus": "string",
+ "id": "string",
+ "tags": [
+ "string"
+ ],
+ "lastUpdatedTime": 0
+ }
+ ],
+ "deviceName": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template.py
index 05e7acf35..2914807d5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template.py
@@ -512,8 +512,8 @@ options:
description: Current version of template.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates DeletesTheTemplate
description: Complete reference of the DeletesTheTemplate API.
@@ -713,7 +713,6 @@ EXAMPLES = r"""
templateId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_clone.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_clone.py
index c21257ba4..71b45426e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_clone.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_clone.py
@@ -28,8 +28,8 @@ options:
description: TemplateId path parameter. UUID of the template to clone it.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates CreatesACloneOfTheGivenTemplate
description: Complete reference of the CreatesACloneOfTheGivenTemplate API.
@@ -58,7 +58,6 @@ EXAMPLES = r"""
templateId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_create.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_create.py
index f5beee649..3edc2b9b2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_create.py
@@ -508,8 +508,8 @@ options:
description: Current version of template.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates CreateTemplate
description: Complete reference of the CreateTemplate API.
@@ -691,7 +691,6 @@ EXAMPLES = r"""
version: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy.py
index a1b1b5265..cc2c2ede5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy.py
@@ -27,7 +27,8 @@ options:
type: str
memberTemplateDeploymentInfo:
description: MemberTemplateDeploymentInfo.
- type: str
+ elements: dict
+ type: list
targetInfo:
description: Configuration Template Deploy's targetInfo.
elements: dict
@@ -42,8 +43,10 @@ options:
description: Template params/values to be provisioned.
type: dict
resourceParams:
- description: Resource params to be provisioned.
- type: dict
+ description: Resource params to be provisioned. Refer to features page for usage
+ details.
+ elements: dict
+ type: list
type:
description: Target type of device.
type: str
@@ -55,8 +58,8 @@ options:
description: UUID of template to be provisioned.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates DeployTemplate
description: Complete reference of the DeployTemplate API.
@@ -83,18 +86,19 @@ EXAMPLES = r"""
forcePushTemplate: true
isComposite: true
mainTemplateId: string
- memberTemplateDeploymentInfo: string
+ memberTemplateDeploymentInfo:
+ - {}
targetInfo:
- hostName: string
id: string
params: {}
- resourceParams: {}
+ resourceParams:
+ - {}
type: string
versionedTemplateId: string
templateId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_status_info.py
index 46c7439eb..9fd594f6b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_status_info.py
@@ -24,8 +24,8 @@ options:
- DeploymentId path parameter. UUID of deployment to retrieve template deployment status.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates StatusOfTemplateDeployment
description: Complete reference of the StatusOfTemplateDeployment API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_v2.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_v2.py
index af7f1094d..c7bbfac85 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_deploy_v2.py
@@ -27,7 +27,8 @@ options:
type: str
memberTemplateDeploymentInfo:
description: MemberTemplateDeploymentInfo.
- type: str
+ elements: dict
+ type: list
targetInfo:
description: Configuration Template Deploy V2's targetInfo.
elements: dict
@@ -42,8 +43,10 @@ options:
description: Template params/values to be provisioned.
type: dict
resourceParams:
- description: Resource params to be provisioned.
- type: dict
+ description: Resource params to be provisioned. Refer to features page for usage
+ details.
+ elements: dict
+ type: list
type:
description: Target type of device.
type: str
@@ -55,8 +58,8 @@ options:
description: UUID of template to be provisioned.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates DeployTemplateV2
description: Complete reference of the DeployTemplateV2 API.
@@ -83,18 +86,19 @@ EXAMPLES = r"""
forcePushTemplate: true
isComposite: true
mainTemplateId: string
- memberTemplateDeploymentInfo: string
+ memberTemplateDeploymentInfo:
+ - {}
targetInfo:
- hostName: string
id: string
params: {}
- resourceParams: {}
+ resourceParams:
+ - {}
type: string
versionedTemplateId: string
templateId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_project.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_project.py
index f41475005..03265f1ea 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_project.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_project.py
@@ -21,8 +21,8 @@ options:
elements: dict
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates ExportsTheProjectsForAGivenCriteria
description: Complete reference of the ExportsTheProjectsForAGivenCriteria API.
@@ -50,7 +50,6 @@ EXAMPLES = r"""
- {}
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_template.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_template.py
index d460cef40..cb128692e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_template.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_export_template.py
@@ -21,8 +21,8 @@ options:
elements: dict
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates ExportsTheTemplatesForAGivenCriteria
description: Complete reference of the ExportsTheTemplatesForAGivenCriteria API.
@@ -50,7 +50,6 @@ EXAMPLES = r"""
- {}
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_project.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_project.py
index 37b2096e4..6ae1b8bf3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_project.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_project.py
@@ -23,8 +23,8 @@ options:
fails with 'Template already exists' error.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates ImportsTheProjectsProvided
description: Complete reference of the ImportsTheProjectsProvided API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
doVersion: true
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_template.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_template.py
index 7f9cf31f6..76bb086bf 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_template.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_import_template.py
@@ -523,8 +523,8 @@ options:
project.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates ImportsTheTemplatesProvided
description: Complete reference of the ImportsTheTemplatesProvided API.
@@ -709,7 +709,6 @@ EXAMPLES = r"""
projectName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_info.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_info.py
index ad9e264cf..045aff1f5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_info.py
@@ -76,8 +76,8 @@ options:
- LatestVersion query parameter. LatestVersion flag to get the latest versioned template.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates GetsDetailsOfAGivenTemplate
description: Complete reference of the GetsDetailsOfAGivenTemplate API.
@@ -135,7 +135,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project.py
index 7678e283c..98731e78d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project.py
@@ -50,504 +50,10 @@ options:
templates:
description: List of templates within the project.
elements: dict
- suboptions:
- author:
- description: Author of template.
- type: str
- composite:
- description: Is it composite template.
- type: bool
- containingTemplates:
- description: Configuration Template Project's containingTemplates.
- elements: dict
- suboptions:
- composite:
- description: Is it composite template.
- type: bool
- description:
- description: Description of template.
- type: str
- deviceTypes:
- description: Configuration Template Project's deviceTypes.
- elements: dict
- suboptions:
- productFamily:
- description: Device family.
- type: str
- productSeries:
- description: Device series.
- type: str
- productType:
- description: Device type.
- type: str
- type: list
- id:
- description: UUID of template.
- type: str
- language:
- description: Template language (JINJA or VELOCITY).
- type: str
- name:
- description: Name of template.
- type: str
- projectName:
- description: Project name.
- type: str
- rollbackTemplateParams:
- description: Configuration Template Project's rollbackTemplateParams.
- elements: dict
- suboptions:
- binding:
- description: Bind to source.
- type: str
- customOrder:
- description: CustomOrder of template param.
- type: int
- dataType:
- description: Datatype of template param.
- type: str
- defaultValue:
- description: Default value of template param.
- type: str
- description:
- description: Description of template param.
- type: str
- displayName:
- description: Display name of param.
- type: str
- group:
- description: Group.
- type: str
- id:
- description: UUID of template param.
- type: str
- instructionText:
- description: Instruction text for param.
- type: str
- key:
- description: Key.
- type: str
- notParam:
- description: Is it not a variable.
- type: bool
- order:
- description: Order of template param.
- type: int
- paramArray:
- description: Is it an array.
- type: bool
- parameterName:
- description: Name of template param.
- type: str
- provider:
- description: Provider.
- type: str
- range:
- description: Configuration Template Project's range.
- elements: dict
- suboptions:
- id:
- description: UUID of range.
- type: str
- maxValue:
- description: Max value of range.
- type: int
- minValue:
- description: Min value of range.
- type: int
- type: list
- required:
- description: Is param required.
- type: bool
- selection:
- description: Configuration Template Project's selection.
- suboptions:
- defaultSelectedValues:
- description: Default selection values.
- elements: str
- type: list
- id:
- description: UUID of selection.
- type: str
- selectionType:
- description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
- type: str
- selectionValues:
- description: Selection values.
- type: dict
- type: dict
- type: list
- tags:
- description: Configuration Template Project's tags.
- elements: dict
- suboptions:
- id:
- description: UUID of tag.
- type: str
- name:
- description: Name of tag.
- type: str
- type: list
- templateContent:
- description: Template content.
- type: str
- templateParams:
- description: Configuration Template Project's templateParams.
- elements: dict
- suboptions:
- binding:
- description: Bind to source.
- type: str
- customOrder:
- description: CustomOrder of template param.
- type: int
- dataType:
- description: Datatype of template param.
- type: str
- defaultValue:
- description: Default value of template param.
- type: str
- description:
- description: Description of template param.
- type: str
- displayName:
- description: Display name of param.
- type: str
- group:
- description: Group.
- type: str
- id:
- description: UUID of template param.
- type: str
- instructionText:
- description: Instruction text for param.
- type: str
- key:
- description: Key.
- type: str
- notParam:
- description: Is it not a variable.
- type: bool
- order:
- description: Order of template param.
- type: int
- paramArray:
- description: Is it an array.
- type: bool
- parameterName:
- description: Name of template param.
- type: str
- provider:
- description: Provider.
- type: str
- range:
- description: Configuration Template Project's range.
- elements: dict
- suboptions:
- id:
- description: UUID of range.
- type: str
- maxValue:
- description: Max value of range.
- type: int
- minValue:
- description: Min value of range.
- type: int
- type: list
- required:
- description: Is param required.
- type: bool
- selection:
- description: Configuration Template Project's selection.
- suboptions:
- defaultSelectedValues:
- description: Default selection values.
- elements: str
- type: list
- id:
- description: UUID of selection.
- type: str
- selectionType:
- description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
- type: str
- selectionValues:
- description: Selection values.
- type: dict
- type: dict
- type: list
- version:
- description: Current version of template.
- type: str
- type: list
- createTime:
- description: Create time of template.
- type: int
- customParamsOrder:
- description: Custom Params Order.
- type: bool
- description:
- description: Description of template.
- type: str
- deviceTypes:
- description: Configuration Template Project's deviceTypes.
- elements: dict
- suboptions:
- productFamily:
- description: Device family.
- type: str
- productSeries:
- description: Device series.
- type: str
- productType:
- description: Device type.
- type: str
- type: list
- failurePolicy:
- description: Define failure policy if template provisioning fails.
- type: str
- id:
- description: UUID of template.
- type: str
- language:
- description: Template language (JINJA or VELOCITY).
- type: str
- lastUpdateTime:
- description: Update time of template.
- type: int
- latestVersionTime:
- description: Latest versioned template time.
- type: int
- name:
- description: Name of template.
- type: str
- parentTemplateId:
- description: Parent templateID.
- type: str
- projectId:
- description: Project UUID.
- type: str
- projectName:
- description: Project name.
- type: str
- rollbackTemplateContent:
- description: Rollback template content.
- type: str
- rollbackTemplateParams:
- description: Configuration Template Project's rollbackTemplateParams.
- elements: dict
- suboptions:
- binding:
- description: Bind to source.
- type: str
- customOrder:
- description: CustomOrder of template param.
- type: int
- dataType:
- description: Datatype of template param.
- type: str
- defaultValue:
- description: Default value of template param.
- type: str
- description:
- description: Description of template param.
- type: str
- displayName:
- description: Display name of param.
- type: str
- group:
- description: Group.
- type: str
- id:
- description: UUID of template param.
- type: str
- instructionText:
- description: Instruction text for param.
- type: str
- key:
- description: Key.
- type: str
- notParam:
- description: Is it not a variable.
- type: bool
- order:
- description: Order of template param.
- type: int
- paramArray:
- description: Is it an array.
- type: bool
- parameterName:
- description: Name of template param.
- type: str
- provider:
- description: Provider.
- type: str
- range:
- description: Configuration Template Project's range.
- elements: dict
- suboptions:
- id:
- description: UUID of range.
- type: str
- maxValue:
- description: Max value of range.
- type: int
- minValue:
- description: Min value of range.
- type: int
- type: list
- required:
- description: Is param required.
- type: bool
- selection:
- description: Configuration Template Project's selection.
- suboptions:
- defaultSelectedValues:
- description: Default selection values.
- elements: str
- type: list
- id:
- description: UUID of selection.
- type: str
- selectionType:
- description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
- type: str
- selectionValues:
- description: Selection values.
- type: dict
- type: dict
- type: list
- softwareType:
- description: Applicable device software type.
- type: str
- softwareVariant:
- description: Applicable device software variant.
- type: str
- softwareVersion:
- description: Applicable device software version.
- type: str
- tags:
- description: Configuration Template Project's tags.
- elements: dict
- suboptions:
- id:
- description: UUID of tag.
- type: str
- name:
- description: Name of tag.
- type: str
- type: list
- templateContent:
- description: Template content.
- type: str
- templateParams:
- description: Configuration Template Project's templateParams.
- elements: dict
- suboptions:
- binding:
- description: Bind to source.
- type: str
- customOrder:
- description: CustomOrder of template param.
- type: int
- dataType:
- description: Datatype of template param.
- type: str
- defaultValue:
- description: Default value of template param.
- type: str
- description:
- description: Description of template param.
- type: str
- displayName:
- description: Display name of param.
- type: str
- group:
- description: Group.
- type: str
- id:
- description: UUID of template param.
- type: str
- instructionText:
- description: Instruction text for param.
- type: str
- key:
- description: Key.
- type: str
- notParam:
- description: Is it not a variable.
- type: bool
- order:
- description: Order of template param.
- type: int
- paramArray:
- description: Is it an array.
- type: bool
- parameterName:
- description: Name of template param.
- type: str
- provider:
- description: Provider.
- type: str
- range:
- description: Configuration Template Project's range.
- elements: dict
- suboptions:
- id:
- description: UUID of range.
- type: str
- maxValue:
- description: Max value of range.
- type: int
- minValue:
- description: Min value of range.
- type: int
- type: list
- required:
- description: Is param required.
- type: bool
- selection:
- description: Configuration Template Project's selection.
- suboptions:
- defaultSelectedValues:
- description: Default selection values.
- elements: str
- type: list
- id:
- description: UUID of selection.
- type: str
- selectionType:
- description: Type of selection(SINGLE_SELECT or MULTI_SELECT).
- type: str
- selectionValues:
- description: Selection values.
- type: dict
- type: dict
- type: list
- validationErrors:
- description: Configuration Template Project's validationErrors.
- suboptions:
- rollbackTemplateErrors:
- description: Validation or design conflicts errors of rollback template.
- elements: dict
- type: list
- templateErrors:
- description: Validation or design conflicts errors.
- elements: dict
- type: list
- templateId:
- description: UUID of template.
- type: str
- templateVersion:
- description: Current version of template.
- type: str
- type: dict
- version:
- description: Current version of template.
- type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates CreateProject
description: Complete reference of the CreateProject API.
@@ -591,164 +97,7 @@ EXAMPLES = r"""
- id: string
name: string
templates:
- - author: string
- composite: true
- containingTemplates:
- - composite: true
- description: string
- deviceTypes:
- - productFamily: string
- productSeries: string
- productType: string
- id: string
- language: string
- name: string
- projectName: string
- rollbackTemplateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- tags:
- - id: string
- name: string
- templateContent: string
- templateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- version: string
- createTime: 0
- customParamsOrder: true
- description: string
- deviceTypes:
- - productFamily: string
- productSeries: string
- productType: string
- failurePolicy: string
- id: string
- language: string
- lastUpdateTime: 0
- latestVersionTime: 0
- name: string
- parentTemplateId: string
- projectId: string
- projectName: string
- rollbackTemplateContent: string
- rollbackTemplateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- softwareType: string
- softwareVariant: string
- softwareVersion: string
- tags:
- - id: string
- name: string
- templateContent: string
- templateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- validationErrors:
- rollbackTemplateErrors:
- - {}
- templateErrors:
- - {}
- templateId: string
- templateVersion: string
- version: string
+ - {}
- name: Update all
cisco.dnac.configuration_template_project:
@@ -768,165 +117,7 @@ EXAMPLES = r"""
tags:
- id: string
name: string
- templates:
- - author: string
- composite: true
- containingTemplates:
- - composite: true
- description: string
- deviceTypes:
- - productFamily: string
- productSeries: string
- productType: string
- id: string
- language: string
- name: string
- projectName: string
- rollbackTemplateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- tags:
- - id: string
- name: string
- templateContent: string
- templateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- version: string
- createTime: 0
- customParamsOrder: true
- description: string
- deviceTypes:
- - productFamily: string
- productSeries: string
- productType: string
- failurePolicy: string
- id: string
- language: string
- lastUpdateTime: 0
- latestVersionTime: 0
- name: string
- parentTemplateId: string
- projectId: string
- projectName: string
- rollbackTemplateContent: string
- rollbackTemplateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- softwareType: string
- softwareVariant: string
- softwareVersion: string
- tags:
- - id: string
- name: string
- templateContent: string
- templateParams:
- - binding: string
- customOrder: 0
- dataType: string
- defaultValue: string
- description: string
- displayName: string
- group: string
- id: string
- instructionText: string
- key: string
- notParam: true
- order: 0
- paramArray: true
- parameterName: string
- provider: string
- range:
- - id: string
- maxValue: 0
- minValue: 0
- required: true
- selection:
- defaultSelectedValues:
- - string
- id: string
- selectionType: string
- selectionValues: {}
- validationErrors:
- rollbackTemplateErrors:
- - {}
- templateErrors:
- - {}
- templateId: string
- templateVersion: string
- version: string
+ templates: {}
- name: Delete by id
cisco.dnac.configuration_template_project:
@@ -941,7 +132,6 @@ EXAMPLES = r"""
projectId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project_info.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project_info.py
index f200203db..f32cb608f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_project_info.py
@@ -34,8 +34,8 @@ options:
- ProjectId path parameter. ProjectId(UUID) of project to get project details.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates GetsAListOfProjects
description: Complete reference of the GetsAListOfProjects API.
@@ -83,7 +83,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -102,213 +101,6 @@ dnac_response:
"id": "string",
"lastUpdateTime": 0,
"name": "string",
- "templates": [
- {
- "tags": [
- {
- "id": "string",
- "name": "string"
- }
- ],
- "author": "string",
- "composite": true,
- "containingTemplates": [
- {
- "tags": [
- {
- "id": "string",
- "name": "string"
- }
- ],
- "composite": true,
- "description": "string",
- "deviceTypes": [
- {
- "productFamily": "string",
- "productSeries": "string",
- "productType": "string"
- }
- ],
- "id": "string",
- "language": "string",
- "name": "string",
- "projectName": "string",
- "rollbackTemplateParams": [
- {
- "binding": "string",
- "customOrder": 0,
- "dataType": "string",
- "defaultValue": "string",
- "description": "string",
- "displayName": "string",
- "group": "string",
- "id": "string",
- "instructionText": "string",
- "key": "string",
- "notParam": true,
- "order": 0,
- "paramArray": true,
- "parameterName": "string",
- "provider": "string",
- "range": [
- {
- "id": "string",
- "maxValue": 0,
- "minValue": 0
- }
- ],
- "required": true,
- "selection": {
- "defaultSelectedValues": [
- "string"
- ],
- "id": "string",
- "selectionType": "string",
- "selectionValues": {}
- }
- }
- ],
- "templateContent": "string",
- "templateParams": [
- {
- "binding": "string",
- "customOrder": 0,
- "dataType": "string",
- "defaultValue": "string",
- "description": "string",
- "displayName": "string",
- "group": "string",
- "id": "string",
- "instructionText": "string",
- "key": "string",
- "notParam": true,
- "order": 0,
- "paramArray": true,
- "parameterName": "string",
- "provider": "string",
- "range": [
- {
- "id": "string",
- "maxValue": 0,
- "minValue": 0
- }
- ],
- "required": true,
- "selection": {
- "defaultSelectedValues": [
- "string"
- ],
- "id": "string",
- "selectionType": "string",
- "selectionValues": {}
- }
- }
- ],
- "version": "string"
- }
- ],
- "createTime": 0,
- "customParamsOrder": true,
- "description": "string",
- "deviceTypes": [
- {
- "productFamily": "string",
- "productSeries": "string",
- "productType": "string"
- }
- ],
- "failurePolicy": "string",
- "id": "string",
- "language": "string",
- "lastUpdateTime": 0,
- "latestVersionTime": 0,
- "name": "string",
- "parentTemplateId": "string",
- "projectId": "string",
- "projectName": "string",
- "rollbackTemplateContent": "string",
- "rollbackTemplateParams": [
- {
- "binding": "string",
- "customOrder": 0,
- "dataType": "string",
- "defaultValue": "string",
- "description": "string",
- "displayName": "string",
- "group": "string",
- "id": "string",
- "instructionText": "string",
- "key": "string",
- "notParam": true,
- "order": 0,
- "paramArray": true,
- "parameterName": "string",
- "provider": "string",
- "range": [
- {
- "id": "string",
- "maxValue": 0,
- "minValue": 0
- }
- ],
- "required": true,
- "selection": {
- "defaultSelectedValues": [
- "string"
- ],
- "id": "string",
- "selectionType": "string",
- "selectionValues": {}
- }
- }
- ],
- "softwareType": "string",
- "softwareVariant": "string",
- "softwareVersion": "string",
- "templateContent": "string",
- "templateParams": [
- {
- "binding": "string",
- "customOrder": 0,
- "dataType": "string",
- "defaultValue": "string",
- "description": "string",
- "displayName": "string",
- "group": "string",
- "id": "string",
- "instructionText": "string",
- "key": "string",
- "notParam": true,
- "order": 0,
- "paramArray": true,
- "parameterName": "string",
- "provider": "string",
- "range": [
- {
- "id": "string",
- "maxValue": 0,
- "minValue": 0
- }
- ],
- "required": true,
- "selection": {
- "defaultSelectedValues": [
- "string"
- ],
- "id": "string",
- "selectionType": "string",
- "selectionValues": {}
- }
- }
- ],
- "validationErrors": {
- "rollbackTemplateErrors": {},
- "templateErrors": {},
- "templateId": "string",
- "templateVersion": "string"
- },
- "version": "string"
- }
- ]
+ "templates": {}
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_create.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_create.py
index d0317ea84..95476d6bf 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_create.py
@@ -23,8 +23,8 @@ options:
description: UUID of template.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates VersionTemplate
description: Complete reference of the VersionTemplate API.
@@ -52,7 +52,6 @@ EXAMPLES = r"""
templateId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_info.py b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_info.py
index 0dfdb8a93..5608b7912 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/configuration_template_version_info.py
@@ -24,8 +24,8 @@ options:
- TemplateId path parameter. TemplateId(UUID) to get list of versioned templates.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates GetsAllTheVersionsOfAGivenTemplate
description: Complete reference of the GetsAllTheVersionsOfAGivenTemplate API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/credential_to_site_by_siteid_create_v2.py b/ansible_collections/cisco/dnac/plugins/modules/credential_to_site_by_siteid_create_v2.py
index 1ee9e33a9..51e26c9f4 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/credential_to_site_by_siteid_create_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/credential_to_site_by_siteid_create_v2.py
@@ -11,7 +11,7 @@ short_description: Resource module for Credential To Site By Siteid Create V2
description:
- Manage operation create of the resource Credential To Site By Siteid Create V2.
- API to assign Device Credential to a site.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -38,8 +38,8 @@ options:
description: SNMPv3 Credential Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings AssignDeviceCredentialToSiteV2
description: Complete reference of the AssignDeviceCredentialToSiteV2 API.
@@ -72,7 +72,6 @@ EXAMPLES = r"""
snmpV3Id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_configs_backup_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/device_configs_backup_workflow_manager.py
new file mode 100644
index 000000000..2658c09ce
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_configs_backup_workflow_manager.py
@@ -0,0 +1,623 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abinash Mishra, Madhan Sankaranarayanan")
+
+DOCUMENTATION = r"""
+---
+module: device_configs_backup_workflow_manager
+short_description: Resource module for device_configs_backup functions
+description:
+- Manage operations related to taking a backup of the running config, startup config, and vlan.dat.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author:
+- Abinash Mishra (@abimishr)
+- Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged ]
+ default: merged
+ config:
+ description:
+ - List of details regarding the device configuration backups being taken.
+ - At least one of the parameters mentioned in the suboptions must be passed in config.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ hostname:
+ description: Hostname of the device as displayed on the inventory GUI of Cisco Catalyst Center
+ type: str
+ management_ip_address:
+ description: IP address of the device as displayed on the inventory GUI of Cisco Catalyst Center
+ type: str
+ mac_address:
+ description: Mac address of the device as displayed on the inventory GUI of Cisco Catalyst Center
+ type: str
+ serial_number:
+ description: Serial number of the device as displayed on the inventory GUI of Cisco Catalyst Center
+ type: str
+ family:
+ description: Family of the device(s) as displayed on the inventory GUI of Cisco Catalyst Center
+ type: str
+ type:
+ description: Specifies the type of the device(s) from the family, like Cisco Catalyst 9300 Switch or Cisco Catalyst 9500 Switch
+ type: str
+ series:
+ description: Specifies the series of the device(s) from the type, like Cisco Catalyst 9300 Series Switches
+ type: str
+ collection_status:
+ description: Specifies the collection status of the device(s) on the GUI of Cisco Catalyst Center
+ type: str
+ file_path:
+ description:
+ - Location of the path or folder where the configs need to be exported in local system.
+ - If the file_path is not provided, the backup file(s) will be stored in a directory named
+ "tmp" in the same directory as the playbook.
+ type: str
+ default: tmp
+ file_password:
+ description:
+ - Optional file password for zipping and unzipping the config file.
+ - Minimum password length is 8 and it should contain at least one lowercase letter, one uppercase
+ letter, one digit and one special character from -=\\\\\\\\;,./~!@$%^&*()_+{}[]|:?"
+ type: str
+requirements:
+ - dnacentersdk == 2.6.10
+ - python >= 3.5
+notes:
+ - SDK Methods used are devices.Devices.get_device_list,
+ configuration_archive.ConfigurationsArchive.export_device_configurations,
+ task.Task.get_task_by_id
+ - Paths used are get /dna/intent/api/v1/network-device
+ post /dna/intent/api/v1/network-device-archive/cleartext
+ get /dna/intent/api/v1/task/${taskId}
+
+"""
+
+EXAMPLES = r"""
+- name: Take backup of a 9300 wired device
+ cisco.dnac.device_configs_backup_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{dnac_log_level}}"
+ state: merged
+ config:
+ - hostname: NY-BN-9500.cisco.local
+ management_ip_address: 205.1.1.4
+ serial_number: F2AKI0082J
+ family: Switches and Hubs
+ type: Cisco Catalyst 9300 Switch
+ series: Cisco Catalyst 9300 Series Switches
+ collection_status: Managed
+ file_path: /home/admin/madhan_ansible/collections/ansible_collections/cisco/dnac/playbooks/new_tmp
+"""
+
+RETURN = r"""
+# Case_1: Successful creation and exportation of device configs
+response_1:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response":
+ {
+ "response": String,
+ "version": String
+ },
+ "msg": String
+ }
+
+# Case_2: Error while taking a device_configs_backup
+response_2:
+ description: A string with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: list
+ sample: >
+ {
+ "response": [],
+ "msg": String
+ }
+"""
+# Common approach when a module relies on optional dependencies that may not be available during the validation process.
+try:
+ import pyzipper
+ HAS_PYZIPPER = True
+except ImportError:
+ HAS_PYZIPPER = False
+ pyzipper = None
+
+try:
+ import pathlib
+except ImportError:
+ pathlib = None
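+# HAS_PYZIPPER is intended to be checked before any zip/unzip operation so the module can fail
+# with a clear "pyzipper is required" message instead of raising ImportError at load time
+# (assumed usage pattern; the flag itself is defined above).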
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts
+)
+from io import BytesIO
+import random
+import string
+import re
+import time
+
+
+class Device_configs_backup(DnacBase):
+
+ """
+ Class containing member attributes for device_configs_backup workflow_manager module
+ """
+ def __init__(self, module):
+ super().__init__(module)
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook. Checks the
+ configuration provided in the playbook against a predefined
+ specification to ensure it adheres to the expected structure
+ and data types.
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the
+ 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call
+ 'validate_input' on it.If the validation succeeds, 'self.status'
+ will be 'success'and 'self.validated_config' will contain the
+ validated configuration. If it fails, 'self.status' will be
+ 'failed', and 'self.msg' will describe the validation issues.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ device_configs_backup_spec = {
+ 'hostname': {'type': 'str', 'required': False},
+ 'management_ip_address': {'type': 'str', 'required': False},
+ 'mac_address': {'type': 'str', 'required': False},
+ 'serial_number': {'type': 'str', 'required': False},
+ 'family': {'type': 'str', 'required': False},
+ 'type': {'type': 'str', 'required': False},
+ 'series': {'type': 'str', 'required': False},
+ 'collection_status': {'type': 'str', 'required': False},
+ 'file_path': {'type': 'str', 'required': False, 'default': 'tmp'},
+ 'file_password': {'type': 'str', 'required': False}
+ }
+ # Validate device_configs_backup params
+ valid_device_configs_backup, invalid_params = validate_list_of_dicts(
+ self.config, device_configs_backup_spec
+ )
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(
+ "\n".join(invalid_params))
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_device_configs_backup
+ self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_device_configs_backup))
+ self.status = "success"
+ return self
+
+ def validate_ipv4_address(self):
+ """
+ Validates the management IP address passed by the user
+ """
+
+ ip_address = self.validated_config[0].get("management_ip_address")
+
+ if ip_address:
+ if self.is_valid_ipv4(ip_address) is False:
+ msg = "IP address {0} is not valid".format(ip_address)
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ self.log("Validated IP address collected for config collection is {0}".format(ip_address), "INFO")
+
+ def get_have(self):
+ """
+ Get the current device_configs_backup details
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.have: The validated configuration stored for later processing.
+ Example:
+ Stored parameters are used to call the APIs to store the validated configs
+ """
+
+ have = {}
+ have = self.validated_config[0]
+ self.have = have
+ self.log("Parameters collected from get have api are {0}".format(self.have), "INFO")
+ return self
+
+ def get_device_ids_list(self):
+ """
+ Fetches the list of device ids from the various parameters passed in the playbook
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ dev_id_list: The list of device ids based on the parameters passed by the user
+ Example:
+ Stored parameters like management ip address / family can be used to fetch the device ids
+ list
+ """
+
+ device_params = self.validated_config[0]
+ if device_params.get("file_password"):
+ if len(device_params) - 1 == 0:
+ msg = "Please provide atleast one device parameter as mentioned in the documentation to fetch device configs"
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
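+ # For example (values are illustrative), a playbook entry such as
+ #     {"management_ip_address": "10.0.0.1", "family": "Switches and Hubs"}
+ # is passed straight through as query parameters to 'get_device_list',
+ # and the UUIDs of the matching devices are collected below.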
+ response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_device_list',
+ params=device_params,
+ op_modifies=True
+ )
+ self.log("Reponse collected from the API 'get_device_list' is {0}".format(str(response)), "DEBUG")
+ device_list = response.get("response")
+
+ self.log("Length of the device list fetched from the API 'get_device_list' is {0}".format(str(device_list)), "INFO")
+ if len(device_list) == 0:
+ msg = "Couldn't find any devices in the inventory that match the given parameters."
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ dev_id_list = [device.get("id") for device in device_list]
+ self.log("Device Ids list collected is {0}".format(dev_id_list), "INFO")
+ return dev_id_list
+
+ def password_generator(self):
+ """
+ Creates a password that matches Cisco Catalyst Center's requirements.
+ The minimum password length is 8 and it should contain at least one lowercase letter,
+ one uppercase letter, one digit and one special character from -=\\\\;,./~!@#$%^&*()_+{}[]|:?
+ """
+
+ punctuation = "-=;,.~!@#$%^&*()_+{}[]|:?"
+ password_chars = punctuation + string.ascii_letters + string.digits
+ password_list = [
+ random.choice(punctuation),
+ random.choice(string.ascii_uppercase),
+ random.choice(string.ascii_lowercase),
+ random.choice(string.digits),
+ random.choice(password_chars),
+ random.choice(password_chars),
+ random.choice(password_chars),
+ random.choice(password_chars),
+ ]
+ password = []
+ while password_list:
+ password.append(
+ password_list.pop(random.randint(0, len(password_list) - 1))
+ )
+ password = "".join(password)
+
+ self.log("File password is generated using the password generator API", "INFO")
+ return password
+
+ def validate_password(self, password=None):
+ """
+ Validates the user-defined password against Cisco Catalyst Center's requirements.
+ The minimum password length is 8 and it should contain at least one lowercase letter,
+ one uppercase letter, one digit and one special character from -=\\\\;,./~!@#$%^&*()_+{}[]|:?
+ """
+
+ pattern = r"^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[-=\\;,./~!@#$%^&*()_+{}[\]|:?\"]).{8,}$"
+ self.log("User defined password is {0}".format(password), "DEBUG")
+ return re.match(pattern, password) is not None
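+ # Usage sketch (hypothetical values): a generated password such as "T7]q@zk("
+ # passes validate_password(), whereas "cisco123" fails because it lacks an
+ # uppercase letter and a special character.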
+
+ def get_want(self):
+ """
+ Get all device_configs_backup related information from the playbook and prepare it to call
+ the API to export the device configurations.
+ Args:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.want: A dictionary of parameters obtained from the playbook
+ - self.msg: A message indicating all the parameters from the playbook are
+ collected
+ - self.status: Success
+ Example:
+ It stores all the parameters passed from the playbook for further processing
+ before calling the APIs
+ """
+
+ self.want = {}
+
+ self.want["deviceId"] = self.get_device_ids_list()
+ if self.validated_config[0].get("file_password"):
+ password = self.validated_config[0].get("file_password")
+ if self.validate_password(password=password) is True:
+ self.want["password"] = password
+
+ else:
+ msg = "Invalid input as Invalid password. Min password length is 8 and it should contain" + \
+ "atleast one lower case letter, one uppercase letter, one digit and one special characters" + \
+ "from -=\\\\\\\\;,./~!@#$%^&*()_+{}[]|:?"
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ else:
+ self.want["password"] = self.password_generator()
+
+ self.msg = "Successfully collected all parameters from playbook " + \
+ "for comparison"
+ self.status = "success"
+ self.log(self.msg, "INFO")
+ return self
+
+ def get_device_config(self):
+ """
+ Triggers Cisco Catalyst Center to create a password-protected ZIP file of the device configurations by calling the export API
+ """
+
+ response = self.dnac_apply['exec'](
+ family="configuration_archive",
+ function='export_device_configurations',
+ params=self.want,
+ op_modifies=True
+ )
+ response = response.get("response")
+
+ self.log("Response collected from 'export_device_configurations' API is {0}".format(str(response)), "DEBUG")
+ if response.get("errorCode"):
+ msg = response.get("message")
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ task_id = response.get("taskId")
+ self.log("Task Id of the task is {0}".format(task_id), "INFO")
+ return task_id
+
+ def get_task_status(self, task_id=None):
+ """
+ Monitor the status of the device configuration export task in Cisco Catalyst Center.
+ It checks the task status periodically until the task is no longer 'In Progress'
+ or other states. If the task encounters an error or fails, it immediately fails the
+ module and returns False.
+
+ Parameters:
+ - task_id: The ID of the task to monitor.
+
+ Returns:
+ - result: True if the task completed successfully, False otherwise.
+ With True it also returns additionalStatusURL
+ """
+
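+ # Polling sketch: the loop below re-reads the task every 3 seconds until the
+ # progress string reports a password-protected ZIP export, then returns
+ # (True, additionalStatusURL); any 'failed' progress or isError flag aborts
+ # the module via fail_json.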
+ result = False
+ params = dict(task_id=task_id)
+ while True:
+ response = self.dnac_apply['exec'](
+ family="task",
+ function='get_task_by_id',
+ params=params,
+ op_modifies=True,
+ )
+ response = response.response
+
+ self.log("Response collected from 'get task by id' is {0}".format(response), "DEBUG")
+ if response.get('isError') or re.search(
+ 'failed', response.get('progress'), flags=re.IGNORECASE
+ ):
+ msg = 'Device backup task with id {0} has not completed - Reason: {1}'.format(
+ task_id, response.get("failureReason"))
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+ return False
+
+ self.log("Task status for the task id (before checking status) {0} is {1}".format(str(task_id), str(response)), "INFO")
+ progress = response.get('progress')
+ self.log("Progress of the task is {0}".format(str(progress)), "DEBUG")
+
+ if progress == "Device configuration Successfully exported as password protected ZIP.":
+ result = True
+ additionalStatusURL = response.get("additionalStatusURL")
+ self.log("The backup process is completed", "INFO")
+ self.result.update(dict(backup_task=response))
+ return (result, additionalStatusURL)
+
+ self.log("The progress status is {0}, continue to check the status after 3 seconds. Putting into sleep for 3 seconds".format(progress), "INFO")
+ time.sleep(3)
+
+ def download_file(self, additionalStatusURL=None):
+ """
+ Downloads the exported file and stores it locally,
+ using the configured file path as the unzip directory.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ additionalStatusURL: This parameter is used to fetch the file id
+
+ Returns:
+ - result: True if the file is downloaded and unzipped, else False
+ """
+
+ self.log("Downloading: {0}".format(additionalStatusURL), "INFO")
+ file_id = additionalStatusURL.split("/")[-1]
+
+ try:
+ response = self.dnac._exec(
+ family="file",
+ function='download_a_file_by_fileid',
+ op_modifies=True,
+ params={"file_id": file_id},
+ )
+ self.log("Received API response from 'download_a_file_by_fileid': {0}".format(str(response)), "DEBUG")
+ except Exception as e:
+ self.log("File couldn't be downloaded: {0}".format(e), "INFO")
+ return False
+
+ if isinstance(response, dict) and response.get("errorCode"):
+ self.log(response.get("message"), "CRITICAL")
+ self.module.fail_json(msg=response.get("message"))
+
+ zip_data = BytesIO(response.data)
+ self.log("ZIP data collected is {0}".format(zip_data), "INFO")
+
+ pathlib.Path(self.have.get("file_path")).mkdir(parents=True, exist_ok=True)
+ self.log("Unzipping file after completion of download", "INFO")
+
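+ # Extraction sketch (assumes pyzipper is available): AESZipFile opens the
+ # AES-encrypted archive from the in-memory buffer, the export password is
+ # supplied as bytes via f.pwd, and the files are extracted under
+ # have["file_path"] (default "tmp").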
+ try:
+ with pyzipper.AESZipFile(zip_data, 'r') as f:
+ f.pwd = bytes(self.want.get("password"), encoding="utf-8")
+ f.extractall(path=str(self.have.get("file_path")))
+ except Exception as e:
+ self.log("Error in unzipping: {0}".format(e), "CRITICAL")
+ return False
+
+ self.log("Unzipping complete", "INFO")
+ return True
+
+ def get_diff_merged(self):
+ """
+ Export the device configurations and download the resulting backup file
+ Args:
+ self: An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ object: An instance of the class with updated results and status
+ based on the processing of differences.
+ Description:
+ The function validates the management IP address (when provided), triggers
+ the configuration export, monitors the export task and, on success, downloads
+ and unzips the backup file. The updated results and status are stored in the
+ class instance for further use.
+ """
+
+ if self.have.get('management_ip_address'):
+ self.validate_ipv4_address()
+
+ task_id = self.get_device_config()
+ result, additionalStatusURL = self.get_task_status(task_id=task_id)
+
+ if result is True:
+ download_status = self.download_file(additionalStatusURL=additionalStatusURL)
+ if download_status is True:
+ self.result['response'] = task_id
+ self.result['msg'] = "Device configs got downloaded"
+ self.log(self.result['msg'], "INFO")
+ self.result['changed'] = True
+ return self
+ return self
+
+ def verify_diff_merged(self):
+ """
+ Verify the merged status (creation/update) of the device configuration backup in Cisco Catalyst Center.
+ Args:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Return:
+ - self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the merged status of a configuration in Cisco Catalyst Center by
+ retrieving the current state (have) and desired state (want) of the configuration,
+ logging both states, and verifying that backup files have recently been written to
+ the configured file path.
+ """
+
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+ # Code to validate Cisco Catalyst Center config for merged state
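+ # Verification sketch: files under have["file_path"] whose modification time
+ # falls within the last 10 seconds are taken as evidence that the backup
+ # archive was extracted; an empty list only logs a WARNING and does not fail
+ # the module.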
+ window_seconds = 10
+ current_time = time.time()
+ window_start_time = current_time - window_seconds
+ files_modified_within_window = [
+ f.name for f in pathlib.Path(self.have.get("file_path")).iterdir()
+ if f.stat().st_mtime > window_start_time
+ ]
+
+ if len(files_modified_within_window) > 0:
+ self.log("Backup has been taken in the following files {0}".format(str(files_modified_within_window)), "INFO")
+ else:
+ self.log("Backup has not been taken, please check", "WARNING")
+
+ self.status = "success"
+
+ return self
+
+
+def main():
+
+ """
+ main entry point for module execution
+ """
+
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ "dnac_log_level": {"type": 'str', "default": 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ "config_verify": {"type": 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged']}
+ }
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+ ccc_device_configs_backup = Device_configs_backup(module)
+
+ state = ccc_device_configs_backup.params.get("state")
+ if state not in ccc_device_configs_backup.supported_states:
+ ccc_device_configs_backup.status = "invalid"
+ ccc_device_configs_backup.msg = "State {0} is invalid".format(state)
+ ccc_device_configs_backup.check_return_status()
+
+ config_verify = ccc_device_configs_backup.params.get("config_verify")
+ ccc_device_configs_backup.validate_input().check_return_status()
+
+ for config in ccc_device_configs_backup.validated_config:
+ ccc_device_configs_backup.reset_values()
+ ccc_device_configs_backup.get_have().check_return_status()
+ ccc_device_configs_backup.get_want().check_return_status()
+ ccc_device_configs_backup.get_diff_state_apply[state]().check_return_status()
+ if config_verify:
+ ccc_device_configs_backup.verify_diff_state_apply[state]().check_return_status()
+
+ module.exit_json(**ccc_device_configs_backup.result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_configurations_export.py b/ansible_collections/cisco/dnac/plugins/modules/device_configurations_export.py
index 4d7ba14db..41f81ecea 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_configurations_export.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_configurations_export.py
@@ -17,15 +17,17 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
deviceId:
- description: Device Id.
- elements: str
- type: list
+ description: UUIDs of the devices for which configurations need to be exported.
+ type: str
password:
- description: Password.
+ description: Password for the zip file to protect exported configurations. Must
+ contain, at minimum 8 characters, one lowercase letter, one uppercase letter,
+ one number, one special character(-=;,./~!@#$%^&*()_+{}| ?). It may not contain
+ white space or the characters <>.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Archive ExportDeviceConfigurations
description: Complete reference of the ExportDeviceConfigurations API.
@@ -49,12 +51,10 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
- deviceId:
- - string
+ deviceId: string
password: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,10 +62,10 @@ dnac_response:
type: dict
sample: >
{
+ "version": "string",
"response": {
- "taskId": "string",
- "url": "string"
- },
- "version": "string"
+ "url": "string",
+ "taskId": "string"
+ }
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_create.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_create.py
index b26ec83a0..5f10f1cbc 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_credential_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_create.py
@@ -10,7 +10,9 @@ module: device_credential_create
short_description: Resource module for Device Credential Create
description:
- Manage operation create of the resource Device Credential Create.
-- API to create device credentials.
+- >
+ API to create device credentials. This API has been deprecated and will not be available in a Cisco DNA Center
+ release after August 1st 2024 23 59 59 GMT. Please refer new Intent API Create Global Credentials V2.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -48,7 +50,7 @@ options:
type: str
port:
description: Port for http read credential.
- type: int
+ type: float
username:
description: User name of the http read credential.
type: str
@@ -65,7 +67,7 @@ options:
type: str
port:
description: Port for http write credential.
- type: int
+ type: float
username:
description: User name of the http write credential.
type: str
@@ -120,8 +122,8 @@ options:
type: list
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings CreateDeviceCredentials
description: Complete reference of the CreateDeviceCredentials API.
@@ -177,7 +179,6 @@ EXAMPLES = r"""
username: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_delete.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_delete.py
index 73d144997..89686f253 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_credential_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_delete.py
@@ -10,7 +10,9 @@ module: device_credential_delete
short_description: Resource module for Device Credential Delete
description:
- Manage operation delete of the resource Device Credential Delete.
-- Delete device credential.
+- >
+ Delete device credential. This API has been deprecated and will not be available in a Cisco DNA Center release
+ after August 1st 2024 23 59 59 GMT. Please refer new Intent API Delete Global Credentials V2.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -20,8 +22,8 @@ options:
description: Id path parameter. Global credential id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings DeleteDeviceCredential
description: Complete reference of the DeleteDeviceCredential API.
@@ -48,7 +50,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_info.py
index 27175a669..33a8f4ef2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_credential_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_info.py
@@ -10,7 +10,9 @@ module: device_credential_info
short_description: Information module for Device Credential
description:
- Get all Device Credential.
-- API to get device credential details.
+- >
+ API to get device credential details. This API has been deprecated and will not be available in a Cisco DNA Center
+ release after August 1st 2024 23 59 59 GMT. Please refer new Intent API Get All Global Credentials V2.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -24,8 +26,8 @@ options:
- SiteId query parameter. Site id to retrieve the credential details associated with the site.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings GetDeviceCredentialDetails
description: Complete reference of the GetDeviceCredentialDetails API.
@@ -54,7 +56,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py
index 3b887aa8d..0bd15f1fa 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_intent.py
@@ -298,7 +298,7 @@ options:
description: snmp_v3 Credential Id. Use Description or Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Discovery CreateGlobalCredentialsV2
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_update.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_update.py
index 7c5cafd5b..29b82a49e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_credential_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_update.py
@@ -10,7 +10,9 @@ module: device_credential_update
short_description: Resource module for Device Credential Update
description:
- Manage operation update of the resource Device Credential Update.
-- API to update device credentials.
+- >
+ API to update device credentials. This API has been deprecated and will not be available in a Cisco DNA Center
+ release after August 1st 2024 23 59 59 GMT. Please refer new Intent API Update Global Credentials V2.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -132,8 +134,8 @@ options:
type: dict
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings UpdateDeviceCredentials
description: Complete reference of the UpdateDeviceCredentials API.
@@ -195,7 +197,6 @@ EXAMPLES = r"""
username: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py
index a6d188de4..1d412f501 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_credential_workflow_manager.py
@@ -298,7 +298,7 @@ options:
description: snmp_v3 Credential Id. Use Description or Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco Catalyst Center documentation for Discovery CreateGlobalCredentialsV2
@@ -889,7 +889,12 @@ class DeviceCredential(DnacBase):
.format(site_name), "ERROR")
return None
- _id = response.get("response")[0].get("id")
+ response = response.get("response")
+ if not response:
+ self.log("The site with the name '{0}' is not valid".format(site_name), "ERROR")
+ return None
+
+ _id = response[0].get("id")
self.log("Site ID for the site name {0}: {1}".format(site_name, _id), "INFO")
except Exception as e:
self.log("Exception occurred while getting site_id from the site_name: {0}"
@@ -951,15 +956,15 @@ class DeviceCredential(DnacBase):
def get_snmpV2cRead_params(self, snmpV2cReadDetails):
"""
- Format the snmpV2cRead parameters for the snmpV2cRead
+ Format the snmp_v2c_read parameters for the snmp_v2c_read
credential configuration in Cisco Catalyst Center.
Parameters:
snmpV2cReadDetails (list of dict) - Cisco Catalyst Center
- Details containing snmpV2cRead Credentials.
+ Details containing snmp_v2c_read Credentials.
Returns:
- snmpV2cRead (list of dict) - Processed snmpV2cRead credential
+ snmpV2cRead (list of dict) - Processed snmp_v2c_read credential
data in the format suitable for the Cisco Catalyst Center config.
"""
@@ -977,15 +982,15 @@ class DeviceCredential(DnacBase):
def get_snmpV2cWrite_params(self, snmpV2cWriteDetails):
"""
- Format the snmpV2cWrite parameters for the snmpV2cWrite
+ Format the snmp_v2c_write parameters for the snmp_v2c_write
credential configuration in Cisco Catalyst Center.
Parameters:
snmpV2cWriteDetails (list of dict) - Cisco Catalyst Center
- Details containing snmpV2cWrite Credentials.
+ Details containing snmp_v2c_write Credentials.
Returns:
- snmpV2cWrite (list of dict) - Processed snmpV2cWrite credential
+ snmpV2cWrite (list of dict) - Processed snmp_v2c_write credential
data in the format suitable for the Cisco Catalyst Center config.
"""
@@ -1003,15 +1008,15 @@ class DeviceCredential(DnacBase):
def get_httpsRead_params(self, httpsReadDetails):
"""
- Format the httpsRead parameters for the httpsRead
+ Format the https_read parameters for the https_read
credential configuration in Cisco Catalyst Center.
Parameters:
httpsReadDetails (list of dict) - Cisco Catalyst Center
- Details containing httpsRead Credentials.
+ Details containing https_read Credentials.
Returns:
- httpsRead (list of dict) - Processed httpsRead credential
+ httpsRead (list of dict) - Processed https_read credential
data in the format suitable for the Cisco Catalyst Center config.
"""
@@ -1031,15 +1036,15 @@ class DeviceCredential(DnacBase):
def get_httpsWrite_params(self, httpsWriteDetails):
"""
- Format the httpsWrite parameters for the httpsWrite
+ Format the https_write parameters for the https_write
credential configuration in Cisco Catalyst Center.
Parameters:
httpsWriteDetails (list of dict) - Cisco Catalyst Center
- Details containing httpsWrite Credentials.
+ Details containing https_write Credentials.
Returns:
- httpsWrite (list of dict) - Processed httpsWrite credential
+ httpsWrite (list of dict) - Processed https_write credential
data in the format suitable for the Cisco Catalyst Center config.
"""
@@ -1059,13 +1064,13 @@ class DeviceCredential(DnacBase):
def get_snmpV3_params(self, snmpV3Details):
"""
- Format the snmpV3 parameters for the snmpV3 credential configuration in Cisco Catalyst Center.
+ Format the snmp_v3 parameters for the snmp_v3 credential configuration in Cisco Catalyst Center.
Parameters:
- snmpV3Details (list of dict) - Cisco Catalyst Center details containing snmpV3 Credentials.
+ snmpV3Details (list of dict) - Cisco Catalyst Center details containing snmp_v3 Credentials.
Returns:
- snmpV3 (list of dict) - Processed snmpV3 credential
+ snmpV3 (list of dict) - Processed snmp_v3 credential
data in the format suitable for the Cisco Catalyst Center config.
"""
@@ -1128,8 +1133,8 @@ class DeviceCredential(DnacBase):
if item.get("description") == cliOldDescription \
and item.get("username") == cliOldUsername:
if cliDetail:
- self.msg = "More than one CLI credential with same \
- old_description and old_username. Pass ID."
+ self.msg = "There are multiple CLI credentials with the same old_description and old_username. " + \
+ "Kindly provide the ID for the global device credentials."
self.status = "failed"
return self
cliDetail = item
@@ -1145,8 +1150,8 @@ class DeviceCredential(DnacBase):
if item.get("description") == cliDescription \
and item.get("username") == cliUsername:
if cliDetail:
- self.msg = "More than one CLI Credential with same \
- description and username. Pass ID."
+ self.msg = "There are multiple CLI credentials with the same description and username. " + \
+ "Kindly provide the ID for the global device credentials."
self.status = "failed"
return self
cliDetail = item
@@ -1155,7 +1160,7 @@ class DeviceCredential(DnacBase):
def get_snmpV2cRead_credentials(self, CredentialDetails, global_credentials):
"""
- Get the current snmpV2cRead Credential from
+ Get the current snmp_v2c_read Credential from
Cisco Catalyst Center based on the provided playbook details.
Check this API using the check_return_status.
@@ -1164,14 +1169,16 @@ class DeviceCredential(DnacBase):
global_credentials (dict) - All global device credentials details.
Returns:
- snmpV2cReadDetails (List) - The current snmpV2cRead.
+ snmpV2cReadDetails (List) - The current snmp_v2c_read.
"""
- # Playbook snmpV2cRead Credential details
+ # Playbook snmp_v2c_read Credential details
all_snmpV2cRead = CredentialDetails.get("snmp_v2c_read")
- # All snmpV2cRead details from the Cisco Catalyst Center
+
+ # All snmp_v2c_read details from the Cisco Catalyst Center
snmpV2cRead_details = global_credentials.get("snmpV2cRead")
- # Cisco Catalyst Center details for the snmpV2cRead Credential given in the playbook
+
+ # Cisco Catalyst Center details for the snmp_v2c_read Credential given in the playbook
snmpV2cReadDetails = []
if all_snmpV2cRead and snmpV2cRead_details:
for snmpV2cReadCredential in all_snmpV2cRead:
@@ -1180,7 +1187,7 @@ class DeviceCredential(DnacBase):
if snmpV2cReadId:
snmpV2cReadDetail = get_dict_result(snmpV2cRead_details, "id", snmpV2cReadId)
if not snmpV2cReadDetail:
- self.msg = "snmpV2cRead credential ID is invalid"
+ self.msg = "snmp_v2c_read credential ID is invalid"
self.status = "failed"
return self
@@ -1192,7 +1199,7 @@ class DeviceCredential(DnacBase):
snmpV2cReadOldDescription
)
if not snmpV2cReadDetail:
- self.msg = "snmpV2cRead credential old_description is invalid"
+ self.msg = "snmp_v2c_read credential old_description is invalid"
self.status = "failed"
return self
@@ -1208,7 +1215,7 @@ class DeviceCredential(DnacBase):
def get_snmpV2cWrite_credentials(self, CredentialDetails, global_credentials):
"""
- Get the current snmpV2cWrite Credential from
+ Get the current snmp_v2c_write Credential from
Cisco Catalyst Center based on the provided playbook details.
Check this API using the check_return_status.
@@ -1217,14 +1224,16 @@ class DeviceCredential(DnacBase):
global_credentials (dict) - All global device credentials details.
Returns:
- snmpV2cWriteDetails (List) - The current snmpV2cWrite.
+ snmpV2cWriteDetails (List) - The current snmp_v2c_write.
"""
- # Playbook snmpV2cWrite Credential details
+ # Playbook snmp_v2c_write Credential details
all_snmpV2cWrite = CredentialDetails.get("snmp_v2c_write")
- # All snmpV2cWrite details from the Cisco Catalyst Center
+
+ # All snmp_v2c_write details from the Cisco Catalyst Center
snmpV2cWrite_details = global_credentials.get("snmpV2cWrite")
- # Cisco Catalyst Center details for the snmpV2cWrite Credential given in the playbook
+
+ # Cisco Catalyst Center details for the snmp_v2c_write Credential given in the playbook
snmpV2cWriteDetails = []
if all_snmpV2cWrite and snmpV2cWrite_details:
for snmpV2cWriteCredential in all_snmpV2cWrite:
@@ -1233,7 +1242,7 @@ class DeviceCredential(DnacBase):
if snmpV2cWriteId:
snmpV2cWriteDetail = get_dict_result(snmpV2cWrite_details, "id", snmpV2cWriteId)
if not snmpV2cWriteDetail:
- self.msg = "snmpV2cWrite credential ID is invalid"
+ self.msg = "snmp_v2c_write credential ID is invalid"
self.status = "failed"
return self
@@ -1245,7 +1254,7 @@ class DeviceCredential(DnacBase):
snmpV2cWriteOldDescription
)
if not snmpV2cWriteDetail:
- self.msg = "snmpV2cWrite credential old_description is invalid "
+ self.msg = "snmp_v2c_write credential old_description is invalid "
self.status = "failed"
return self
@@ -1261,7 +1270,7 @@ class DeviceCredential(DnacBase):
def get_httpsRead_credentials(self, CredentialDetails, global_credentials):
"""
- Get the current httpsRead Credential from
+ Get the current https_read Credential from
Cisco Catalyst Center based on the provided playbook details.
Check this API using the check_return_status.
@@ -1270,14 +1279,16 @@ class DeviceCredential(DnacBase):
global_credentials (dict) - All global device credentials details.
Returns:
- httpsReadDetails (List) - The current httpsRead.
+ httpsReadDetails (List) - The current https_read.
"""
- # Playbook httpsRead Credential details
+ # Playbook https_read Credential details
all_httpsRead = CredentialDetails.get("https_read")
- # All httpsRead details from the Cisco Catalyst Center
+
+ # All https_read details from the Cisco Catalyst Center
httpsRead_details = global_credentials.get("httpsRead")
- # Cisco Catalyst Center details for the httpsRead Credential given in the playbook
+
+ # Cisco Catalyst Center details for the https_read Credential given in the playbook
httpsReadDetails = []
if all_httpsRead and httpsRead_details:
for httpsReadCredential in all_httpsRead:
@@ -1286,7 +1297,7 @@ class DeviceCredential(DnacBase):
if httpsReadId:
httpsReadDetail = get_dict_result(httpsRead_details, "id", httpsReadId)
if not httpsReadDetail:
- self.msg = "httpsRead credential Id is invalid"
+ self.msg = "https_read credential Id is invalid"
self.status = "failed"
return self
@@ -1297,13 +1308,13 @@ class DeviceCredential(DnacBase):
if item.get("description") == httpsReadOldDescription \
and item.get("username") == httpsReadOldUsername:
if httpsReadDetail:
- self.msg = "More than one httpsRead credential with same \
- old_description and old_username. Pass ID."
+ self.msg = "There are multiple https_read credentials with the same old_description and old_username. " + \
+ "Kindly provide the ID for the global device credentials."
self.status = "failed"
return self
httpsReadDetail = item
if not httpsReadDetail:
- self.msg = "httpsRead credential old_description or old_username is invalid"
+ self.msg = "https_read credential old_description or old_username is invalid"
self.status = "failed"
return self
@@ -1314,8 +1325,8 @@ class DeviceCredential(DnacBase):
if item.get("description") == httpsReadDescription \
and item.get("username") == httpsReadUsername:
if httpsReadDetail:
- self.msg = "More than one httpsRead credential with same \
- description and username. Pass ID."
+ self.msg = "There are multiple https_read credentials with the same description and username. " + \
+ "Kindly provide the ID for the global device credentials."
self.status = "failed"
return self
httpsReadDetail = item
@@ -1324,7 +1335,7 @@ class DeviceCredential(DnacBase):
def get_httpsWrite_credentials(self, CredentialDetails, global_credentials):
"""
- Get the current httpsWrite Credential from
+ Get the current https_write Credential from
Cisco Catalyst Center based on the provided playbook details.
Check this API using the check_return_status.
@@ -1333,14 +1344,16 @@ class DeviceCredential(DnacBase):
global_credentials (dict) - All global device credentials details.
Returns:
- httpsWriteDetails (List) - The current httpsWrite.
+ httpsWriteDetails (List) - The current https_write.
"""
- # Playbook httpsWrite Credential details
+ # Playbook https_write Credential details
all_httpsWrite = CredentialDetails.get("https_write")
- # All httpsWrite details from the Cisco Catalyst Center
+
+ # All https_write details from the Cisco Catalyst Center
httpsWrite_details = global_credentials.get("httpsWrite")
- # Cisco Catalyst Center details for the httpsWrite Credential given in the playbook
+
+ # Cisco Catalyst Center details for the https_write Credential given in the playbook
httpsWriteDetails = []
if all_httpsWrite and httpsWrite_details:
for httpsWriteCredential in all_httpsWrite:
@@ -1349,7 +1362,7 @@ class DeviceCredential(DnacBase):
if httpsWriteId:
httpsWriteDetail = get_dict_result(httpsWrite_details, "id", httpsWriteId)
if not httpsWriteDetail:
- self.msg = "httpsWrite credential Id is invalid"
+ self.msg = "https_write credential Id is invalid"
self.status = "failed"
return self
@@ -1360,14 +1373,14 @@ class DeviceCredential(DnacBase):
if item.get("description") == httpsWriteOldDescription \
and item.get("username") == httpsWriteOldUsername:
if httpsWriteDetail:
- self.msg = "More than one httpsWrite credential with same \
- old_description and old_username. Pass ID"
+ self.msg = "There are multiple https_write credentials with the same old_description and old_username. " + \
+ "Kindly provide the ID for the global device credentials."
self.status = "failed"
return self
httpsWriteDetail = item
if not httpsWriteDetail:
- self.msg = "httpsWrite credential old_description or \
- old_username is invalid"
+ self.msg = "https_write credential old_description or " + \
+ "old_username is invalid"
self.status = "failed"
return self
@@ -1377,13 +1390,18 @@ class DeviceCredential(DnacBase):
for item in httpsWrite_details:
if item.get("description") == httpsWriteDescription \
and item.get("username") == httpsWriteUsername:
+ if httpsWriteDetail:
+ self.msg = "There are multiple https_write credentials with the same description and username. " + \
+ "Kindly provide the ID for the global device credentials."
+ self.status = "failed"
+ return self
httpsWriteDetail = item
httpsWriteDetails.append(httpsWriteDetail)
return httpsWriteDetails
def get_snmpV3_credentials(self, CredentialDetails, global_credentials):
"""
- Get the current snmpV3 Credential from
+ Get the current snmp_v3 Credential from
Cisco Catalyst Center based on the provided playbook details.
Check this API using the check_return_status.
@@ -1392,14 +1410,16 @@ class DeviceCredential(DnacBase):
global_credentials (dict) - All global device credentials details.
Returns:
- snmpV3Details (List) - The current snmpV3.
+ snmpV3Details (List) - The current snmp_v3.
"""
- # Playbook snmpV3 Credential details
+ # Playbook snmp_v3 Credential details
all_snmpV3 = CredentialDetails.get("snmp_v3")
- # All snmpV3 details from the Cisco Catalyst Center
+
+ # All snmp_v3 details from the Cisco Catalyst Center
snmpV3_details = global_credentials.get("snmpV3")
- # Cisco Catalyst Center details for the snmpV3 Credential given in the playbook
+
+ # Cisco Catalyst Center details for the snmp_v3 Credential given in the playbook
snmpV3Details = []
if all_snmpV3 and snmpV3_details:
for snmpV3Credential in all_snmpV3:
@@ -1408,7 +1428,7 @@ class DeviceCredential(DnacBase):
if snmpV3Id:
snmpV3Detail = get_dict_result(snmpV3_details, "id", snmpV3Id)
if not snmpV3Detail:
- self.msg = "snmpV3 credential id is invalid"
+ self.msg = "snmp_v3 credential id is invalid"
self.status = "failed"
return self
@@ -1417,7 +1437,7 @@ class DeviceCredential(DnacBase):
snmpV3Detail = get_dict_result(snmpV3_details,
"description", snmpV3OldDescription)
if not snmpV3Detail:
- self.msg = "snmpV3 credential old_description is invalid"
+ self.msg = "snmp_v3 credential old_description is invalid"
self.status = "failed"
return self
@@ -1533,8 +1553,7 @@ class DeviceCredential(DnacBase):
create_credential[create_cli_ptr] \
.update({values[i]: item.get(values[i])})
else:
- self.msg = values[i] + " is mandatory for creating \
- cliCredential " + str(have_cli_ptr)
+ self.msg = values[i] + " is mandatory for creating cli_credential " + str(have_cli_ptr)
self.status = "failed"
return self
@@ -1551,8 +1570,7 @@ class DeviceCredential(DnacBase):
update_credential[update_cli_ptr] \
.update({"password": item.get("password")})
else:
- self.msg = "password is mandatory for udpating \
- cliCredential " + str(have_cli_ptr)
+ self.msg = "password is mandatory for updating cli_credential " + str(have_cli_ptr)
self.status = "failed"
return self
@@ -1592,8 +1610,7 @@ class DeviceCredential(DnacBase):
create_credential[create_snmpv2cread_ptr] \
.update({keys[i]: item.get(values[i])})
else:
- self.msg = values[i] + " is mandatory for creating \
- snmpV2cRead " + str(have_snmpv2cread_ptr)
+ self.msg = values[i] + " is mandatory for creating snmp_v2c_read " + str(have_snmpv2cread_ptr)
self.status = "failed"
return self
create_snmpv2cread_ptr = create_snmpv2cread_ptr + 1
@@ -1606,8 +1623,7 @@ class DeviceCredential(DnacBase):
update_credential[update_snmpv2cread_ptr] \
.update({"readCommunity": item.get("read_community")})
else:
- self.msg = "read_community is mandatory for updating \
- snmpV2cRead " + str(have_snmpv2cread_ptr)
+ self.msg = "read_community is mandatory for updating snmp_v2c_read " + str(have_snmpv2cread_ptr)
self.status = "failed"
return self
for i in range(1, 3):
@@ -1641,8 +1657,7 @@ class DeviceCredential(DnacBase):
create_credential[create_snmpv2cwrite_ptr] \
.update({keys[i]: item.get(values[i])})
else:
- self.msg = values[i] + " is mandatory for creating \
- snmpV2cWrite " + str(have_snmpv2cwrite_ptr)
+ self.msg = values[i] + " is mandatory for creating snmp_v2c_write " + str(have_snmpv2cwrite_ptr)
self.status = "failed"
return self
create_snmpv2cwrite_ptr = create_snmpv2cwrite_ptr + 1
@@ -1655,8 +1670,7 @@ class DeviceCredential(DnacBase):
update_credential[update_snmpv2cwrite_ptr] \
.update({"writeCommunity": item.get("write_community")})
else:
- self.msg = "write_community is mandatory for updating \
- snmpV2cWrite " + str(have_snmpv2cwrite_ptr)
+ self.msg = "write_community is mandatory for updating snmp_v2c_write " + str(have_snmpv2cwrite_ptr)
self.status = "failed"
return self
for i in range(1, 3):
@@ -1691,8 +1705,7 @@ class DeviceCredential(DnacBase):
create_credential[create_httpsread_ptr] \
.update({values[i]: item.get(values[i])})
else:
- self.msg = values[i] + " is mandatory for creating \
- httpsRead " + str(have_httpsread_ptr)
+ self.msg = values[i] + " is mandatory for creating https_read " + str(have_httpsread_ptr)
self.status = "failed"
return self
if item.get("port"):
@@ -1711,8 +1724,7 @@ class DeviceCredential(DnacBase):
update_credential[update_httpsread_ptr] \
.update({"password": item.get("password")})
else:
- self.msg = "password is mandatory for updating \
- httpsRead " + str(have_httpsread_ptr)
+ self.msg = "The password is mandatory for updating https_read " + str(have_httpsread_ptr)
self.status = "failed"
return self
for i in range(1, 5):
@@ -1745,8 +1757,7 @@ class DeviceCredential(DnacBase):
create_credential[create_httpswrite_ptr] \
.update({values[i]: item.get(values[i])})
else:
- self.msg = values[i] + " is mandatory for creating \
- httpsWrite " + str(have_httpswrite_ptr)
+ self.msg = values[i] + " is mandatory for creating https_write " + str(have_httpswrite_ptr)
self.status = "failed"
return self
if item.get("port"):
@@ -1765,8 +1776,7 @@ class DeviceCredential(DnacBase):
update_credential[update_httpswrite_ptr] \
.update({"password": item.get("password")})
else:
- self.msg = "password is mandatory for updating \
- httpsRead " + str(have_httpswrite_ptr)
+ self.msg = "The password is mandatory for updating https_write " + str(have_httpswrite_ptr)
self.status = "failed"
return self
for i in range(1, 5):
@@ -1799,8 +1809,7 @@ class DeviceCredential(DnacBase):
create_credential[create_snmpv3_ptr] \
.update({values[i]: item.get(values[i])})
else:
- self.msg = values[i] + " is mandatory for creating \
- snmpV3 " + str(have_snmpv3_ptr)
+ self.msg = values[i] + " is mandatory for creating snmp_v3 " + str(have_snmpv3_ptr)
self.status = "failed"
return self
if item.get("snmp_mode"):
@@ -1821,8 +1830,7 @@ class DeviceCredential(DnacBase):
create_credential[create_snmpv3_ptr] \
.update({keys[auth]: item.get(auth)})
else:
- self.msg = auth + " is mandatory for creating \
- snmpV3 " + str(have_snmpv3_ptr)
+ self.msg = auth + " is mandatory for creating snmp_v3 " + str(have_snmpv3_ptr)
self.status = "failed"
return self
if len(item.get("auth_password")) < 8:
@@ -1842,8 +1850,7 @@ class DeviceCredential(DnacBase):
create_credential[create_snmpv3_ptr] \
.update({key[priv]: item.get(priv)})
else:
- self.msg = priv + " is mandatory for creating \
- snmpV3 " + str(have_snmpv3_ptr)
+ self.msg = priv + " is mandatory for creating snmp_v3 " + str(have_snmpv3_ptr)
self.status = "failed"
return self
if len(item.get("privacy_password")) < 8:
@@ -1851,8 +1858,7 @@ class DeviceCredential(DnacBase):
self.status = "failed"
return self
elif create_credential[create_snmpv3_ptr].get("snmpMode") != "NOAUTHNOPRIV":
- self.msg = "snmp_mode in snmpV3 is not \
- ['AUTHPRIV', 'AUTHNOPRIV', 'NOAUTHNOPRIV']"
+ self.msg = "snmp_mode in snmpV3 is not ['AUTHPRIV', 'AUTHNOPRIV', 'NOAUTHNOPRIV']"
self.status = "failed"
return self
create_snmpv3_ptr = create_snmpv3_ptr + 1
@@ -1885,7 +1891,7 @@ class DeviceCredential(DnacBase):
.get("snmpMode")[have_snmpv3_ptr].get("authType")
})
else:
- self.msg = "auth_type is required for updating snmpV3 " + \
+ self.msg = "auth_type is required for updating snmp_v3 " + \
str(have_snmpv3_ptr)
self.status = "failed"
return self
@@ -1893,7 +1899,7 @@ class DeviceCredential(DnacBase):
update_credential[update_snmpv3_ptr] \
.update({"authPassword": item.get("auth_password")})
else:
- self.msg = "auth_password is required for updating snmpV3 " + \
+ self.msg = "auth_password is required for updating snmp_v3 " + \
str(have_snmpv3_ptr)
self.status = "failed"
return self
@@ -1912,7 +1918,7 @@ class DeviceCredential(DnacBase):
.get("snmpMode")[have_snmpv3_ptr].get("privacyType")
})
else:
- self.msg = "privacy_type is required for updating snmpV3 " + \
+ self.msg = "privacy_type is required for updating snmp_v3 " + \
str(have_snmpv3_ptr)
self.status = "failed"
return self
@@ -1920,7 +1926,7 @@ class DeviceCredential(DnacBase):
update_credential[update_snmpv3_ptr] \
.update({"privacyPassword": item.get("privacy_password")})
else:
- self.msg = "privacy_password is required for updating snmpV3 " + \
+ self.msg = "privacy_password is required for updating snmp_v3 " + \
str(have_snmpv3_ptr)
self.status = "failed"
return self
@@ -1951,20 +1957,22 @@ class DeviceCredential(DnacBase):
want = {
"assign_credentials": {}
}
- site_name = AssignCredentials.get("site_name")
- if not site_name:
- self.msg = "site_name is required for AssignCredentials"
+ site_names = AssignCredentials.get("site_name")
+ if not site_names:
+ self.msg = "The 'site_name' is required parameter for 'assign_credentials_to_site'"
self.status = "failed"
return self
- site_id = []
- for site_name in site_name:
- siteId = self.get_site_id(site_name)
- if not site_name:
- self.msg = "site_name is invalid in AssignCredentials"
+
+ site_ids = []
+ for site_name in site_names:
+ current_site_id = self.get_site_id(site_name)
+ if not current_site_id:
+ self.msg = "The site_name '{0}' is invalid in 'assign_credentials_to_site'".format(site_name)
self.status = "failed"
return self
- site_id.append(siteId)
- want.update({"site_id": site_id})
+ site_ids.append(current_site_id)
+
+ want.update({"site_id": site_ids})
global_credentials = self.get_global_credentials_params()
cli_credential = AssignCredentials.get("cli_credential")
if cli_credential:
@@ -2003,17 +2011,17 @@ class DeviceCredential(DnacBase):
snmpV2cReadDescription = snmp_v2c_read.get("description")
if snmpV2cReadId or snmpV2cReadDescription:
- # All snmpV2cRead details from the Cisco Catalyst Center
+ # All snmp_v2c_read details from the Cisco Catalyst Center
snmpV2cRead_details = global_credentials.get("snmpV2cRead")
if not snmpV2cRead_details:
- self.msg = "Global snmpV2cRead credential is not available"
+ self.msg = "Global snmp_v2c_read credential is not available"
self.status = "failed"
return self
snmpV2cReadDetail = None
if snmpV2cReadId:
snmpV2cReadDetail = get_dict_result(snmpV2cRead_details, "id", snmpV2cReadId)
if not snmpV2cReadDetail:
- self.msg = "The ID of the snmpV2cRead credential is not valid."
+ self.msg = "The ID of the snmp_v2c_read credential is not valid."
self.status = "failed"
return self
elif snmpV2cReadDescription:
@@ -2021,7 +2029,7 @@ class DeviceCredential(DnacBase):
if item.get("description") == snmpV2cReadDescription:
snmpV2cReadDetail = item
if not snmpV2cReadDetail:
- self.msg = "The username and description for the snmpV2cRead credential are invalid."
+ self.msg = "The username and description for the snmp_v2c_read credential are invalid."
self.status = "failed"
return self
want.get("assign_credentials").update({"snmpV2ReadId": snmpV2cReadDetail.get("id")})
@@ -2032,17 +2040,17 @@ class DeviceCredential(DnacBase):
snmpV2cWriteDescription = snmp_v2c_write.get("description")
if snmpV2cWriteId or snmpV2cWriteDescription:
- # All snmpV2cWrite details from the Cisco Catalyst Center
+ # All snmp_v2c_write details from the Cisco Catalyst Center
snmpV2cWrite_details = global_credentials.get("snmpV2cWrite")
if not snmpV2cWrite_details:
- self.msg = "Global snmpV2cWrite Credential is not available"
+ self.msg = "Global snmp_v2c_write Credential is not available"
self.status = "failed"
return self
snmpV2cWriteDetail = None
if snmpV2cWriteId:
snmpV2cWriteDetail = get_dict_result(snmpV2cWrite_details, "id", snmpV2cWriteId)
if not snmpV2cWriteDetail:
- self.msg = "The ID of the snmpV2cWrite credential is invalid."
+ self.msg = "The ID of the snmp_v2c_write credential is invalid."
self.status = "failed"
return self
elif snmpV2cWriteDescription:
@@ -2050,7 +2058,7 @@ class DeviceCredential(DnacBase):
if item.get("description") == snmpV2cWriteDescription:
snmpV2cWriteDetail = item
if not snmpV2cWriteDetail:
- self.msg = "The username and description of the snmpV2cWrite credential are invalid."
+ self.msg = "The username and description of the snmp_v2c_write credential are invalid."
self.status = "failed"
return self
want.get("assign_credentials").update({"snmpV2WriteId": snmpV2cWriteDetail.get("id")})
@@ -2123,17 +2131,17 @@ class DeviceCredential(DnacBase):
snmpV3Description = snmp_v3.get("description")
if snmpV3Id or snmpV3Description:
- # All snmpV3 details from the Cisco Catalyst Center
+ # All snmp_v3 details from the Cisco Catalyst Center
snmpV3_details = global_credentials.get("snmpV3")
if not snmpV3_details:
- self.msg = "Global snmpV3 Credential is not available."
+ self.msg = "Global snmp_v3 Credential is not available."
self.status = "failed"
return self
snmpV3Detail = None
if snmpV3Id:
snmpV3Detail = get_dict_result(snmpV3_details, "id", snmpV3Id)
if not snmpV3Detail:
- self.msg = "The ID of the snmpV3 credential is not valid."
+ self.msg = "The ID of the snmp_v3 credential is not valid."
self.status = "failed"
return self
elif snmpV3Description:
@@ -2141,7 +2149,7 @@ class DeviceCredential(DnacBase):
if item.get("description") == snmpV3Description:
snmpV3Detail = item
if not snmpV3Detail:
- self.msg = "The username and description for the snmpV2cWrite credential are invalid."
+ self.msg = "The username and description for the snmp_v2c_write credential are invalid."
self.status = "failed"
return self
want.get("assign_credentials").update({"snmpV3Id": snmpV3Detail.get("id")})
@@ -2215,7 +2223,7 @@ class DeviceCredential(DnacBase):
self.log("Received API response from 'create_global_credentials_v2': {0}"
.format(response), "DEBUG")
validation_string = "global credential addition performed"
- self.check_task_response_status(response, validation_string).check_return_status()
+ self.check_task_response_status(response, validation_string, "create_global_credentials_v2").check_return_status()
self.log("Global credential created successfully", "INFO")
result_global_credential.update({
"Creation": {
@@ -2280,7 +2288,7 @@ class DeviceCredential(DnacBase):
self.log("Received API response for 'update_global_credentials_v2': {0}"
.format(response), "DEBUG")
validation_string = "global credential update performed"
- self.check_task_response_status(response, validation_string).check_return_status()
+ self.check_task_response_status(response, validation_string, "update_global_credentials_v2").check_return_status()
self.log("Updating device credential API input parameters: {0}"
.format(final_response), "DEBUG")
self.log("Global device credential updated successfully", "INFO")
@@ -2336,7 +2344,7 @@ class DeviceCredential(DnacBase):
self.log("Received API response for 'assign_device_credential_to_site_v2': {0}"
.format(response), "DEBUG")
validation_string = "desired common settings operation successful"
- self.check_task_response_status(response, validation_string).check_return_status()
+ self.check_task_response_status(response, validation_string, "assign_device_credential_to_site_v2").check_return_status()
self.log("Device credential assigned to site {0} is successfully."
.format(site_ids), "INFO")
self.log("Desired State for assign credentials to a site: {0}"
@@ -2400,20 +2408,27 @@ class DeviceCredential(DnacBase):
"httpsRead": "https_read",
"httpsWrite": "https_write"
}
+ failed_status = False
+ changed_status = False
for item in have_values:
- config_itr = 0
+ config_itr = -1
final_response.update({item: []})
for value in have_values.get(item):
+ config_itr = config_itr + 1
+ description = config.get("global_credential_details") \
+ .get(credential_mapping.get(item))[config_itr].get("description")
if value is None:
self.log("Credential Name: {0}".format(item), "DEBUG")
self.log("Credential Item: {0}".format(config.get("global_credential_details")
.get(credential_mapping.get(item))), "DEBUG")
- final_response.get(item).append(
- str(config.get("global_credential_details")
- .get(credential_mapping.get(item))[config_itr]) + " is not found."
- )
+ final_response.get(item).append({
+ "description": description,
+ "response": "Global credential not found"
+ })
continue
+
_id = have_values.get(item)[config_itr].get("id")
+ changed_status = True
response = self.dnac._exec(
family="discovery",
function="delete_global_credential_v2",
@@ -2423,21 +2438,59 @@ class DeviceCredential(DnacBase):
self.log("Received API response for 'delete_global_credential_v2': {0}"
.format(response), "DEBUG")
validation_string = "global credential deleted successfully"
- self.check_task_response_status(response, validation_string).check_return_status()
- final_response.get(item).append(_id)
- config_itr = config_itr + 1
+ response = response.get("response")
+ if response.get("errorcode") is not None:
+ self.msg = response.get("response").get("detail")
+ self.status = "failed"
+ return self
+
+ task_id = response.get("taskId")
+ while True:
+ task_details = self.get_task_details(task_id)
+ self.log('Getting task details from task ID {0}: {1}'.format(task_id, task_details), "DEBUG")
+
+ if task_details.get("isError") is True:
+ if task_details.get("failureReason"):
+ failure_msg = str(task_details.get("failureReason"))
+ else:
+ failure_msg = str(task_details.get("progress"))
+ self.status = "failed"
+ break
+
+ if validation_string in task_details.get("progress").lower():
+ self.status = "success"
+ break
+
+ self.log("progress set to {0} for taskid: {1}".format(task_details.get('progress'), task_id), "DEBUG")
+
+ if self.status == "failed":
+ failed_status = True
+ final_response.get(item).append({
+ "description": description,
+ "failure_response": failure_msg
+ })
+ else:
+ final_response.get(item).append({
+ "description": description,
+ "response": "Global credential deleted successfully"
+ })
self.log("Deleting device credential API input parameters: {0}"
.format(final_response), "DEBUG")
- self.log("Successfully deleted global device credential.", "INFO")
result_global_credential.update({
"Deletion": {
"response": final_response,
- "msg": "Global Device Credentials Deleted Successfully"
}
})
- self.msg = "Global Device Credentials Updated Successfully"
- self.status = "success"
+ if failed_status is True:
+ self.msg = "Global device credentials are not deleted."
+ self.module.fail_json(msg=self.msg, response=final_response)
+ else:
+ self.result['changed'] = changed_status
+ self.msg = "Global device credentials deleted successfully"
+ self.log(str(self.msg), "INFO")
+ self.status = "success"
+
return self
def get_diff_deleted(self, config):
@@ -2470,7 +2523,6 @@ class DeviceCredential(DnacBase):
self
"""
- self.log(str("Entered the verify function."), "DEBUG")
self.get_have(config)
self.get_want(config)
self.log("Current State (have): {0}".format(self.have), "INFO")
@@ -2513,8 +2565,7 @@ class DeviceCredential(DnacBase):
self.log("Successfully validated the assign device credential to site", "INFO")
self.result.get("response")[0].get("assignCredential").update({"Validation": "Success"})
- self.msg = "Successfully validated the Global Device Credential and \
- Assign Device Credential to Site."
+ self.msg = "Successfully validated the global device credential and assigned device credential to site."
self.status = "success"
return self
@@ -2540,12 +2591,14 @@ class DeviceCredential(DnacBase):
credential_types = ["cliCredential", "snmpV2cRead", "snmpV2cWrite",
"httpsRead", "httpsWrite", "snmpV3"]
for credential_type in credential_types:
- for item in have_global_credential.get(credential_type):
- if item is not None:
- self.msg = "Delete Global Device Credentials config \
- is not applied to the config"
- self.status = "failed"
- return self
+ have_global_credential_type = have_global_credential.get(credential_type)
+ if have_global_credential_type is not None:
+ for item in have_global_credential_type:
+ if item is not None:
+ self.msg = "The configuration for deleting the global device credentials " + \
+ "is not being applied to the current configuration"
+ self.status = "failed"
+ return self
self.log("Successfully validated absence of global device credential.", "INFO")
self.result.get("response")[0].get("globalCredential").update({"Validation": "Success"})
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_details_info.py
index 05793acd2..7b85c4535 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_details_info.py
@@ -21,19 +21,19 @@ options:
type: dict
timestamp:
description:
- - Timestamp query parameter. Epoch time(in milliseconds) when the device data is required.
- type: str
- searchBy:
+ - Timestamp query parameter. UTC timestamp of device data in milliseconds.
+ type: float
+ identifier:
description:
- - SearchBy query parameter. MAC Address or Device Name value or UUID of the network device.
+ - Identifier query parameter. One of "macAddress", "nwDeviceName", "uuid" (case insensitive).
type: str
- identifier:
+ searchBy:
description:
- - Identifier query parameter. One of keywords macAddress or uuid or nwDeviceName.
+ - SearchBy query parameter. MAC Address, device name, or UUID of the network device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceDetail
description: Complete reference of the GetDeviceDetail API.
@@ -58,13 +58,12 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
- timestamp: string
- searchBy: string
+ timestamp: 0
identifier: string
+ searchBy: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -72,49 +71,91 @@ dnac_response:
type: dict
sample: >
{
- "HALastResetReason": "string",
- "managementIpAddr": "string",
- "HAPrimaryPowerStatus": "string",
- "redundancyMode": "string",
- "communicationState": "string",
+ "noiseScore": 0,
+ "policyTagName": "string",
+ "interferenceScore": 0,
+ "opState": "string",
+ "powerSaveMode": "string",
+ "mode": "string",
+ "resetReason": "string",
+ "nwDeviceRole": "string",
+ "protocol": "string",
+ "powerMode": "string",
+ "connectedTime": "string",
+ "ringStatus": true,
+ "ledFlashSeconds": "string",
+ "ip_addr_managementIpAddr": "string",
+ "stackType": "string",
+ "subMode": "string",
+ "serialNumber": "string",
"nwDeviceName": "string",
- "redundancyUnit": "string",
- "platformId": "string",
- "redundancyPeerState": "string",
+ "deviceGroupHierarchyId": "string",
+ "cpu": "string",
+ "utilization": "string",
"nwDeviceId": "string",
- "redundancyState": "string",
- "nwDeviceRole": "string",
+ "siteHierarchyGraphId": "string",
"nwDeviceFamily": "string",
"macAddress": "string",
- "collectionStatus": "string",
+ "homeApEnabled": "string",
"deviceSeries": "string",
- "osType": "string",
- "clientCount": "string",
- "HASecondaryPowerStatus": "string",
+ "collectionStatus": "string",
+ "utilizationScore": 0,
+ "maintenanceMode": true,
+ "interference": "string",
"softwareVersion": "string",
- "nwDeviceType": "string",
+ "tagIdList": [
+ {}
+ ],
+ "powerType": "string",
"overallHealth": 0,
- "memoryScore": 0,
- "cpuScore": 0,
- "noiseScore": 0,
- "utilizationScore": 0,
- "airQualityScore": 0,
- "interferenceScore": 0,
- "wqeScore": 0,
- "freeMbufScore": 0,
- "packetPoolScore": 0,
- "freeTimerScore": 0,
+ "managementIpAddr": "string",
"memory": "string",
- "cpu": "string",
+ "communicationState": "string",
+ "apType": "string",
+ "adminState": "string",
"noise": "string",
- "utilization": "string",
+ "icapCapability": "string",
+ "regulatoryDomain": "string",
+ "ethernetMac": "string",
+ "nwDeviceType": "string",
"airQuality": "string",
- "interference": "string",
- "wqe": "string",
- "freeMbuf": "string",
- "packetPool": "string",
- "freeTimer": "string",
+ "rfTagName": "string",
+ "siteTagName": "string",
+ "platformId": "string",
+ "upTime": "string",
+ "memoryScore": 0,
+ "powerSaveModeCapable": "string",
+ "powerProfile": "string",
+ "airQualityScore": 0,
"location": "string",
- "timestamp": "string"
+ "flexGroup": "string",
+ "lastBootTime": 0,
+ "powerCalendarProfile": "string",
+ "connectivityStatus": 0,
+ "ledFlashEnabled": "string",
+ "cpuScore": 0,
+ "avgTemperature": 0,
+ "maxTemperature": 0,
+ "haStatus": "string",
+ "osType": "string",
+ "timestamp": 0,
+ "apGroup": "string",
+ "redundancyMode": "string",
+ "featureFlagList": [
+ "string"
+ ],
+ "freeMbufScore": 0,
+ "HALastResetReason": "string",
+ "wqeScore": 0,
+ "redundancyPeerStateDerived": "string",
+ "freeTimerScore": 0,
+ "redundancyPeerState": "string",
+ "redundancyStateDerived": "string",
+ "redundancyState": "string",
+ "packetPoolScore": 0,
+ "freeTimer": 0,
+ "packetPool": 0,
+ "wqe": 0,
+ "freeMbuf": 0
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_enrichment_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_enrichment_details_info.py
index 37a46f792..935691085 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_enrichment_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_enrichment_details_info.py
@@ -22,8 +22,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceEnrichmentDetails
description: Complete reference of the GetDeviceEnrichmentDetails API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_family_identifiers_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_family_identifiers_details_info.py
index 97736aa74..f118d7372 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_family_identifiers_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_family_identifiers_details_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) GetDeviceFamilyIdentifiers
description: Complete reference of the GetDeviceFamilyIdentifiers API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_health_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_health_info.py
index 7b0775f33..eccb009ab 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_health_info.py
@@ -23,35 +23,35 @@ options:
type: dict
deviceRole:
description:
- - DeviceRole query parameter. The device role (One of CORE, ACCESS, DISTRIBUTION, ROUTER, WLC, AP).
+ - DeviceRole query parameter. CORE, ACCESS, DISTRIBUTION, ROUTER, WLC, or AP (case insensitive).
type: str
siteId:
description:
- - SiteId query parameter. Assurance site UUID value.
+ - SiteId query parameter. DNAC site UUID.
type: str
health:
description:
- - Health query parameter. The device overall health (One of POOR, FAIR, GOOD).
+ - Health query parameter. DNAC health category POOR, FAIR, or GOOD (case insensitive).
type: str
startTime:
description:
- StartTime query parameter. UTC epoch time in milliseconds.
- type: int
+ type: float
endTime:
description:
- - EndTime query parameter. UTC epoch time in miliseconds.
- type: int
+ - EndTime query parameter. UTC epoch time in milliseconds.
+ type: float
limit:
description:
- - Limit query parameter. Max number of device entries in the response (default to 50. Max at 1000).
- type: int
+ - Limit query parameter. Max number of device entries in the response (default to 50. Max at 500).
+ type: float
offset:
description:
- - Offset query parameter. The offset of the first device in the returned data.
- type: int
+ - Offset query parameter. The offset of the first device in the returned data (Multiple of 'limit' + 1).
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices Devices
description: Complete reference of the Devices API.
@@ -86,7 +86,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -98,51 +97,84 @@ dnac_response:
"totalCount": 0,
"response": [
{
- "name": "string",
- "model": "string",
- "osVersion": "string",
- "ipAddress": "string",
- "overallHealth": 0,
- "issueCount": 0,
- "location": "string",
- "deviceFamily": "string",
"deviceType": "string",
- "macAddress": "string",
- "interfaceLinkErrHealth": 0,
- "cpuUlitilization": 0,
- "cpuHealth": 0,
- "memoryUtilizationHealth": 0,
- "memoryUtilization": 0,
- "interDeviceLinkAvailHealth": 0,
- "reachabilityHealth": "string",
- "clientCount": {
+ "cpuUtilization": 0,
+ "overallHealth": 0,
+ "utilizationHealth": {
"radio0": 0,
"radio1": 0,
+ "radio2": 0,
+ "radio3": 0,
"Ghz24": 0,
"Ghz50": 0
},
- "interferenceHealth": {
+ "airQualityHealth": {
"radio0": 0,
"radio1": 0,
+ "radio2": 0,
+ "radio3": 0,
"Ghz24": 0,
"Ghz50": 0
},
+ "ipAddress": "string",
+ "cpuHealth": 0,
+ "deviceFamily": "string",
+ "issueCount": 0,
+ "macAddress": "string",
"noiseHealth": {
+ "radio0": 0,
"radio1": 0,
+ "radio2": 0,
+ "radio3": 0,
+ "Ghz24": 0,
"Ghz50": 0
},
- "airQualityHealth": {
+ "osVersion": "string",
+ "name": "string",
+ "interfaceLinkErrHealth": 0,
+ "memoryUtilization": 0,
+ "interDeviceLinkAvailHealth": 0,
+ "interferenceHealth": {
"radio0": 0,
"radio1": 0,
+ "radio2": 0,
+ "radio3": 0,
"Ghz24": 0,
"Ghz50": 0
},
- "utilizationHealth": {
+ "model": "string",
+ "location": "string",
+ "reachabilityHealth": "string",
+ "band": {
+ "radio0": "string",
+ "radio1": "string",
+ "radio2": "string",
+ "radio3": 0
+ },
+ "memoryUtilizationHealth": 0,
+ "clientCount": {
"radio0": 0,
"radio1": 0,
+ "radio2": 0,
+ "radio3": 0,
"Ghz24": 0,
"Ghz50": 0
- }
+ },
+ "avgTemperature": 0,
+ "maxTemperature": 0,
+ "interDeviceLinkAvailFabric": 0,
+ "apCount": 0,
+ "freeTimerScore": 0,
+ "freeTimer": 0,
+ "packetPoolHealth": 0,
+ "packetPool": 0,
+ "freeMemoryBufferHealth": 0,
+ "freeMemoryBuffer": 0,
+ "wqePoolsHealth": 0,
+ "wqePools": 0,
+ "wanLinkUtilization": 0,
+ "cpuUlitilization": 0,
+ "uuid": "string"
}
]
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_interface_by_ip_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_interface_by_ip_info.py
index df7223b15..306d0e51b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_interface_by_ip_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_interface_by_ip_info.py
@@ -24,8 +24,8 @@ options:
- IpAddress path parameter. IP address of the interface.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetInterfaceByIP
description: Complete reference of the GetInterfaceByIP API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -64,9 +63,24 @@ dnac_response:
{
"response": [
{
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -77,11 +91,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_interface_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_interface_count_info.py
index 01d3361fb..750a96207 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_interface_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_interface_count_info.py
@@ -20,12 +20,12 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Devices GetDeviceInterfaceCount
- description: Complete reference of the GetDeviceInterfaceCount API.
- link: https://developer.cisco.com/docs/dna-center/#!get-device-interface-count
+- name: Cisco DNA Center documentation for Devices GetDeviceInterfaceCountForMultipleDevices
+ description: Complete reference of the GetDeviceInterfaceCountForMultipleDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-device-interface-count-for-multiple-devices
notes:
- SDK Method used are
devices.Devices.get_device_interface_count,
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_interface_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_interface_info.py
index 31b2bc4c6..e5ff99aff 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_interface_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_interface_info.py
@@ -42,8 +42,8 @@ options:
- Id path parameter. Interface ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetAllInterfaces
description: Complete reference of the GetAllInterfaces API.
@@ -93,7 +93,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -102,9 +101,24 @@ dnac_response:
sample: >
{
"response": {
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -115,11 +129,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_interface_isis_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_interface_isis_info.py
index 798c47821..4d47bcea2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_interface_isis_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_interface_isis_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetISISInterfaces
description: Complete reference of the GetISISInterfaces API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -59,9 +58,24 @@ dnac_response:
{
"response": [
{
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -72,11 +86,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_interface_ospf_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_interface_ospf_info.py
index ed87a4b0c..5c2db2091 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_interface_ospf_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_interface_ospf_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetOSPFInterfaces
description: Complete reference of the GetOSPFInterfaces API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -59,9 +58,24 @@ dnac_response:
{
"response": [
{
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -72,11 +86,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot.py b/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot.py
index 92e7c41ee..4b86369b2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot.py
@@ -21,8 +21,8 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless RebootAccessPoints
description: Complete reference of the RebootAccessPoints API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot_info.py
index c4e500062..9b71326f7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_reboot_apreboot_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Device Reboot Apreboot
description:
- Get all Device Reboot Apreboot.
- Users can query the access point reboot status using this intent API.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -24,8 +24,8 @@ options:
- ParentTaskId query parameter. Task id of ap reboot request.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless GetAccessPointRebootTaskResult
description: Complete reference of the GetAccessPointRebootTaskResult API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_replacement.py b/ansible_collections/cisco/dnac/plugins/modules/device_replacement.py
index a5353ddd0..4f2d1fdfc 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_replacement.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_replacement.py
@@ -22,51 +22,52 @@ options:
elements: dict
suboptions:
creationTime:
- description: Device Replacement's creationTime.
+ description: Date and time of marking the device for replacement.
type: int
family:
- description: Device Replacement's family.
+ description: Faulty device family.
type: str
faultyDeviceId:
- description: Device Replacement's faultyDeviceId.
+ description: Unique identifier of the faulty device.
type: str
faultyDeviceName:
- description: Device Replacement's faultyDeviceName.
+ description: Faulty device name.
type: str
faultyDevicePlatform:
- description: Device Replacement's faultyDevicePlatform.
+ description: Faulty device platform.
type: str
faultyDeviceSerialNumber:
- description: Device Replacement's faultyDeviceSerialNumber.
+ description: Faulty device serial number.
type: str
id:
- description: Device Replacement's id.
+ description: Unique identifier of the device replacement resource.
type: str
neighbourDeviceId:
- description: Device Replacement's neighbourDeviceId.
+ description: Unique identifier of the neighbor device to create the DHCP server.
type: str
networkReadinessTaskId:
- description: Device Replacement's networkReadinessTaskId.
+ description: Unique identifier of network readiness task.
type: str
replacementDevicePlatform:
- description: Device Replacement's replacementDevicePlatform.
+ description: Replacement device platform.
type: str
replacementDeviceSerialNumber:
- description: Device Replacement's replacementDeviceSerialNumber.
+ description: Replacement device serial number.
type: str
replacementStatus:
- description: Device Replacement's replacementStatus.
+ description: Device replacement status. Use NON-FAULTY to unmark the device
+ for replacement.
type: str
replacementTime:
- description: Device Replacement's replacementTime.
+ description: Date and time of device replacement.
type: int
workflowId:
- description: Device Replacement's workflowId.
+ description: Unique identifier of the device replacement workflow.
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Replacement MarkDeviceForReplacement
description: Complete reference of the MarkDeviceForReplacement API.
@@ -139,7 +140,6 @@ EXAMPLES = r"""
workflowId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_replacement_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_replacement_count_info.py
index 8aae86979..de053a02c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_replacement_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_replacement_count_info.py
@@ -27,8 +27,8 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Replacement ReturnReplacementDevicesCount
description: Complete reference of the ReturnReplacementDevicesCount API.
@@ -57,7 +57,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_replacement_deploy.py b/ansible_collections/cisco/dnac/plugins/modules/device_replacement_deploy.py
index 81dfc9251..d55764327 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_replacement_deploy.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_replacement_deploy.py
@@ -19,14 +19,14 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
faultyDeviceSerialNumber:
- description: Device Replacement Deploy's faultyDeviceSerialNumber.
+ description: Faulty device serial number.
type: str
replacementDeviceSerialNumber:
- description: Device Replacement Deploy's replacementDeviceSerialNumber.
+ description: Replacement device serial number.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Replacement DeployDeviceReplacementWorkflow
description: Complete reference of the DeployDeviceReplacementWorkflow API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
replacementDeviceSerialNumber: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/device_replacement_info.py b/ansible_collections/cisco/dnac/plugins/modules/device_replacement_info.py
index 4dd9756d5..07dfdd6b1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/device_replacement_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/device_replacement_info.py
@@ -71,8 +71,8 @@ options:
- Limit query parameter.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Replacement ReturnListOfReplacementDevicesWithReplacementDetails
description: Complete reference of the ReturnListOfReplacementDevicesWithReplacementDetails API.
@@ -111,7 +111,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -134,7 +133,9 @@ dnac_response:
"replacementDeviceSerialNumber": "string",
"replacementStatus": "string",
"replacementTime": 0,
- "workflowId": "string"
+ "workflowId": "string",
+ "workflowFailedStep": "string",
+ "readinesscheckTaskId": "string"
}
],
"version": "string"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/disassociate_site_to_network_profile.py b/ansible_collections/cisco/dnac/plugins/modules/disassociate_site_to_network_profile.py
index 2972ac7f2..ece1ea010 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/disassociate_site_to_network_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/disassociate_site_to_network_profile.py
@@ -23,8 +23,8 @@ options:
description: SiteId path parameter. Site Id to be associated.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Site Design Disassociate
description: Complete reference of the Disassociate API.
@@ -52,7 +52,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_operationstatus_info.py b/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_operationstatus_info.py
index 1c6621afe..3f821aeef 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_operationstatus_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_operationstatus_info.py
@@ -20,7 +20,7 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_status_info.py
index f1c3c318c..2c3d3bb9d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/disasterrecovery_system_status_info.py
@@ -20,7 +20,7 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery.py b/ansible_collections/cisco/dnac/plugins/modules/discovery.py
index 94fca37e0..41d86c786 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery.py
@@ -22,200 +22,206 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
attributeInfo:
- description: Discovery's attributeInfo.
+ description: Deprecated.
type: dict
cdpLevel:
- description: Discovery's cdpLevel.
+ description: CDP level up to which neighbor devices are to be discovered.
type: int
deviceIds:
- description: Discovery's deviceIds.
+ description: Ids of the devices discovered in a discovery.
type: str
discoveryCondition:
- description: Discovery's discoveryCondition.
+ description: To indicate the discovery status. Available options are Complete or In
+ Progress.
type: str
discoveryStatus:
- description: Discovery's discoveryStatus.
+ description: Status of the discovery. Available options are active, inactive, edit.
type: str
discoveryType:
- description: Discovery's discoveryType.
+ description: Type of the discovery. 'SINGLE', 'RANGE', 'MULTI RANGE', 'CDP', 'LLDP',
+ 'CIDR'.
type: str
enablePasswordList:
- description: Discovery's enablePasswordList.
+ description: Enable Password of the devices to be discovered.
type: str
globalCredentialIdList:
- description: Discovery's globalCredentialIdList.
+ description: List of global credential ids to be used.
elements: str
type: list
httpReadCredential:
description: Discovery's httpReadCredential.
suboptions:
comments:
- description: Discovery's comments.
+ description: Comments to identify the credential.
type: str
credentialType:
- description: Discovery's credentialType.
+ description: Credential type to identify the application that uses the credential.
type: str
description:
- description: Discovery's description.
+ description: Description of the credential.
type: str
id:
- description: Discovery's id.
+ description: Credential Id.
type: str
instanceTenantId:
- description: Discovery's instanceTenantId.
+ description: Credential Tenant Id.
type: str
instanceUuid:
- description: Discovery's instanceUuid.
+ description: Credential Id.
type: str
password:
- description: Discovery's password.
+ description: HTTP(S) password.
type: str
port:
- description: Discovery's port.
+ description: HTTP(S) port.
type: int
secure:
- description: Secure flag.
+ description: Flag for HTTPS.
type: bool
username:
- description: Discovery's username.
+ description: HTTP(S) username.
type: str
type: dict
httpWriteCredential:
description: Discovery's httpWriteCredential.
suboptions:
comments:
- description: Discovery's comments.
+ description: Comments to identify the credential.
type: str
credentialType:
- description: Discovery's credentialType.
+ description: Credential type to identify the application that uses the credential.
type: str
description:
- description: Discovery's description.
+ description: Description of the credential.
type: str
id:
- description: Discovery's id.
+ description: Credential Id.
type: str
instanceTenantId:
- description: Discovery's instanceTenantId.
+ description: Credential Tenant Id.
type: str
instanceUuid:
- description: Discovery's instanceUuid.
+ description: Credential Id.
type: str
password:
- description: Discovery's password.
+ description: HTTP(S) password.
type: str
port:
- description: Discovery's port.
+ description: HTTP(S) port.
type: int
secure:
- description: Secure flag.
+ description: Flag for HTTPS.
type: bool
username:
- description: Discovery's username.
+ description: HTTP(S) username.
type: str
type: dict
id:
- description: Discovery's id.
+ description: Unique Discovery Id.
type: str
ipAddressList:
- description: Discovery's ipAddressList.
+ description: List of IP addresses of the devices to be discovered.
type: str
ipFilterList:
- description: Discovery's ipFilterList.
+ description: IP addresses of the devices to be filtered.
type: str
isAutoCdp:
- description: IsAutoCdp flag.
+ description: Flag to indicate whether this is a CDP discovery or not.
type: bool
lldpLevel:
- description: Discovery's lldpLevel.
+ description: LLDP level up to which neighbor devices are to be discovered.
type: int
name:
- description: Discovery's name.
+ description: Name for the discovery.
type: str
netconfPort:
- description: Discovery's netconfPort.
+ description: Netconf port on the device. Netconf will need valid sshv2 credentials
+ for it to work.
type: str
numDevices:
- description: Discovery's numDevices.
+ description: Number of devices discovered in the discovery.
type: int
parentDiscoveryId:
- description: Discovery's parentDiscoveryId.
+ description: Parent Discovery Id from which the discovery was initiated.
type: str
passwordList:
- description: Discovery's passwordList.
+ description: Password of the devices to be discovered.
type: str
preferredMgmtIPMethod:
- description: Discovery's preferredMgmtIPMethod.
+ description: Preferred management IP method. Available options are 'None' and 'UseLoopBack'.
type: str
protocolOrder:
- description: Discovery's protocolOrder.
+ description: Order of protocol (ssh/telnet) in which device connection will be tried.
+ For example, 'telnet' tries only telnet; 'ssh,telnet' tries ssh with higher priority than telnet.
type: str
retry:
description: Number of times to try establishing connection to device.
type: int
retryCount:
- description: Discovery's retryCount.
+ description: Number of times to try establishing connection to device.
type: int
snmpAuthPassphrase:
- description: Discovery's snmpAuthPassphrase.
+ description: Auth passphrase for SNMP.
type: str
snmpAuthProtocol:
- description: Discovery's snmpAuthProtocol.
+ description: SNMP auth protocol. 'SHA' or 'MD5'.
type: str
snmpMode:
- description: Discovery's snmpMode.
+ description: Mode of SNMP. 'AUTHPRIV' or 'AUTHNOPRIV' or 'NOAUTHNOPRIV'.
type: str
snmpPrivPassphrase:
- description: Discovery's snmpPrivPassphrase.
+ description: Passphrase for SNMP privacy.
type: str
snmpPrivProtocol:
- description: Discovery's snmpPrivProtocol.
+ description: SNMP privacy protocol. 'AES128'.
type: str
snmpROCommunity:
- description: Snmp RO community of the devices to be discovered.
+ description: SNMP RO community of the devices to be discovered.
type: str
snmpROCommunityDesc:
- description: Description for Snmp RO community.
+ description: Description for SNMP RO community.
type: str
snmpRWCommunity:
- description: Snmp RW community of the devices to be discovered.
+ description: SNMP RW community of the devices to be discovered.
type: str
snmpRWCommunityDesc:
- description: Description for Snmp RW community.
+ description: Description for SNMP RW community.
type: str
snmpRoCommunity:
- description: Discovery's snmpRoCommunity.
+ description: SNMP RO community of the devices to be discovered.
type: str
snmpRoCommunityDesc:
- description: Discovery's snmpRoCommunityDesc.
+ description: Description for SNMP RO community.
type: str
snmpRwCommunity:
- description: Discovery's snmpRwCommunity.
+ description: SNMP RW community of the devices to be discovered.
type: str
snmpRwCommunityDesc:
- description: Discovery's snmpRwCommunityDesc.
+ description: Description for SNMP RW community.
type: str
snmpUserName:
- description: Discovery's snmpUserName.
+ description: SNMP username of the device.
type: str
snmpVersion:
description: Version of SNMP. V2 or v3.
type: str
timeOut:
- description: Discovery's timeOut.
+ description: Time to wait for device response.
type: int
timeout:
description: Time to wait for device response in seconds.
type: int
updateMgmtIp:
- description: UpdateMgmtIp flag.
+ description: Updates Management IP if multiple IPs are available for a device. If
+ set to true, when a device is rediscovered with a different IP, the management
+ IP is updated. Default value is false.
type: bool
userNameList:
- description: Discovery's userNameList.
+ description: Username of the devices to be discovered.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery StartDiscovery
description: Complete reference of the StartDiscovery API.
@@ -388,7 +394,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_count_info.py
index 59f92499a..d094d1e7b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetCountOfAllDiscoveryJobs
description: Complete reference of the GetCountOfAllDiscoveryJobs API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_device_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_device_count_info.py
index 51da8df80..8363da33c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_device_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_device_count_info.py
@@ -30,8 +30,8 @@ options:
- TaskId query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetDevicesDiscoveredById
description: Complete reference of the GetDevicesDiscoveredById API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_device_info.py
index 2bc42ae22..f55590840 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_device_info.py
@@ -30,8 +30,8 @@ options:
- TaskId query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetDiscoveredNetworkDevicesByDiscoveryId
description: Complete reference of the GetDiscoveredNetworkDevicesByDiscoveryId API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_device_range_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_device_range_info.py
index ade1f2828..9e09578d8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_device_range_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_device_range_info.py
@@ -27,19 +27,19 @@ options:
type: str
startIndex:
description:
- - StartIndex path parameter. Start index.
+ - StartIndex path parameter. Starting index for the records.
type: int
recordsToReturn:
description:
- - RecordsToReturn path parameter. Number of records to return.
+ - RecordsToReturn path parameter. Number of records to fetch from the start index.
type: int
taskId:
description:
- TaskId query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetDiscoveredDevicesByRange
description: Complete reference of the GetDiscoveredDevicesByRange API.
@@ -71,7 +71,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_info.py
index 68d1cc74f..cd1e7183b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_info.py
@@ -24,8 +24,8 @@ options:
- Id path parameter. Discovery ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetDiscoveryById
description: Complete reference of the GetDiscoveryById API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_job_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_job_info.py
index 2e6de4f14..d3eaf5eff 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_job_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_job_info.py
@@ -44,8 +44,8 @@ options:
- Id path parameter. Discovery ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetDiscoveryJobsByIP
description: Complete reference of the GetDiscoveryJobsByIP API.
@@ -98,7 +98,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_range_delete.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_range_delete.py
index 876881242..6846a0166 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_range_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_range_delete.py
@@ -17,14 +17,15 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
recordsToDelete:
- description: RecordsToDelete path parameter. Number of records to delete.
+ description: RecordsToDelete path parameter. Number of records to delete from the
+ starting index.
type: int
startIndex:
- description: StartIndex path parameter. Start index.
+ description: StartIndex path parameter. Starting index for the records.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery DeleteDiscoveryBySpecifiedRange
description: Complete reference of the DeleteDiscoveryBySpecifiedRange API.
@@ -52,7 +53,6 @@ EXAMPLES = r"""
startIndex: 0
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_range_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_range_info.py
index c4fc60ba7..b4c6b126a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_range_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_range_info.py
@@ -10,7 +10,7 @@ module: discovery_range_info
short_description: Information module for Discovery Range
description:
- Get all Discovery Range.
-- Returns the discovery by specified range.
+- Returns the discoveries by specified range.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -21,15 +21,15 @@ options:
type: dict
startIndex:
description:
- - StartIndex path parameter. Start index.
+ - StartIndex path parameter. Starting index for the records.
type: int
recordsToReturn:
description:
- - RecordsToReturn path parameter. Number of records to return.
+ - RecordsToReturn path parameter. Number of records to fetch from the starting index.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetDiscoveriesByRange
description: Complete reference of the GetDiscoveriesByRange API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/discovery_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/discovery_summary_info.py
index 9cff8b3ee..e7db3d3b7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/discovery_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/discovery_summary_info.py
@@ -11,8 +11,8 @@ short_description: Information module for Discovery Summary
description:
- Get all Discovery Summary.
- >
- Returns the network devices from a discovery job based on given filters. Discovery ID can be obtained using the
- "Get Discoveries by range" API.
+ Returns the devices discovered in the given discovery based on given filters. Discovery ID can be obtained using
+ the "Get Discoveries by range" API.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -31,45 +31,57 @@ options:
type: str
sortBy:
description:
- - SortBy query parameter.
+ - >
+ SortBy query parameter. Sort by field. Available values are pingStatus, cliStatus, snmpStatus, httpStatus and
+ netconfStatus.
type: str
sortOrder:
description:
- - SortOrder query parameter.
+ - SortOrder query parameter. Order of sorting based on sortBy. Available values are 'asc' and 'des'.
type: str
ipAddress:
description:
- - IpAddress query parameter.
+ - IpAddress query parameter. IP Address of the device.
elements: str
type: list
pingStatus:
description:
- - PingStatus query parameter.
+ - >
+ PingStatus query parameter. Ping status for the IP during the job run. Available values are 'SUCCESS',
+ 'FAILURE', 'NOT-PROVIDED' and 'NOT-VALIDATED'.
elements: str
type: list
snmpStatus:
description:
- - SnmpStatus query parameter.
+ - >
+ SnmpStatus query parameter. SNMP status for the IP during the job run. Available values are 'SUCCESS',
+ 'FAILURE', 'NOT-PROVIDED' and 'NOT-VALIDATED'.
elements: str
type: list
cliStatus:
description:
- - CliStatus query parameter.
+ - >
+ CliStatus query parameter. CLI status for the IP during the job run. Available values are 'SUCCESS',
+ 'FAILURE', 'NOT-PROVIDED' and 'NOT-VALIDATED'.
elements: str
type: list
netconfStatus:
description:
- - NetconfStatus query parameter.
+ - >
+ NetconfStatus query parameter. NETCONF status for the IP during the job run. Available values are 'SUCCESS',
+ 'FAILURE', 'NOT-PROVIDED' and 'NOT-VALIDATED'.
elements: str
type: list
httpStatus:
description:
- - HttpStatus query parameter.
+ - >
+ HttpStatus query parameter. HTTP status for the IP during the job run. Available values are 'SUCCESS',
+ 'FAILURE', 'NOT-PROVIDED' and 'NOT-VALIDATED'.
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetNetworkDevicesFromDiscovery
description: Complete reference of the GetNetworkDevicesFromDiscovery API.
@@ -107,7 +119,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/dna_command_runner_keywords_info.py b/ansible_collections/cisco/dnac/plugins/modules/dna_command_runner_keywords_info.py
index 1e63e6b13..7be30c855 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/dna_command_runner_keywords_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/dna_command_runner_keywords_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Command Runner GetAllKeywordsOfCLIsAcceptedByCommandRunner
description: Complete reference of the GetAllKeywordsOfCLIsAcceptedByCommandRunner API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/dna_event_snmp_config_info.py b/ansible_collections/cisco/dnac/plugins/modules/dna_event_snmp_config_info.py
new file mode 100644
index 000000000..f6a40ab57
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/dna_event_snmp_config_info.py
@@ -0,0 +1,103 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: dna_event_snmp_config_info
+short_description: Information module for Dna Event Snmp Config
+description:
+- Get all Dna Event Snmp Config.
+- Get SNMP Destination.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ configId:
+ description:
+ - ConfigId query parameter. List of SNMP configurations.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. The number of SNMP configurations to offset in the result set; the default value is 0.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. The maximum number of SNMP configurations to return in the result set; the default value is 10.
+ type: float
+ sortBy:
+ description:
+ - SortBy query parameter. SortBy field name.
+ type: str
+ order:
+ description:
+ - Order query parameter.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Event Management GetSNMPDestination
+ description: Complete reference of the GetSNMPDestination API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-snmp-destination
+notes:
+ - SDK Method used are
+ event_management.EventManagement.get_snmp_destination,
+
+ - Paths used are
+ get /dna/intent/api/v1/dna-event/snmp-config,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Dna Event Snmp Config
+ cisco.dnac.dna_event_snmp_config_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ configId: string
+ offset: 0
+ limit: 0
+ sortBy: string
+ order: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "version": "string",
+ "tenantId": "string",
+ "configId": "string",
+ "name": "string",
+ "description": "string",
+ "ipAddress": "string",
+ "port": 0,
+ "snmpVersion": "string",
+ "community": "string",
+ "userName": "string",
+ "snmpMode": "string",
+ "snmpAuthType": "string",
+ "authPassword": "string",
+ "snmpPrivacyType": "string",
+ "privacyPassword": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/dnac_packages_info.py b/ansible_collections/cisco/dnac/plugins/modules/dnac_packages_info.py
index ebc2481a5..a833d2352 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/dnac_packages_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/dnac_packages_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Dnac Packages
description:
- Get all Dnac Packages.
- Provides information such as name, version of packages installed on the DNA center.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Platform CiscoDNACenterPackagesSummary
description: Complete reference of the CiscoDNACenterPackagesSummary API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/dnacaap_management_execution_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/dnacaap_management_execution_status_info.py
index 3fc122f4d..9092108e0 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/dnacaap_management_execution_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/dnacaap_management_execution_status_info.py
@@ -24,8 +24,8 @@ options:
- ExecutionId path parameter. Execution Id of API.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Task GetBusinessAPIExecutionDetails
description: Complete reference of the GetBusinessAPIExecutionDetails API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -71,7 +70,7 @@ dnac_response:
"endTimeEpoch": 0,
"timeDuration": 0,
"status": "string",
- "bapiError": "string",
- "runtimeInstanceId": "string"
+ "runtimeInstanceId": "string",
+ "bapiError": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules.py b/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules.py
index 43633824b..bbe87bf9b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules.py
@@ -114,7 +114,7 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules_info.py b/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules_info.py
index 1e3ca88c2..45bf1bf51 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/endpoint_analytics_profiling_rules_info.py
@@ -60,7 +60,7 @@ options:
- RuleId path parameter. Unique rule identifier.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/eox_status_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/eox_status_device_info.py
index dc1073450..30df0dd9a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/eox_status_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/eox_status_device_info.py
@@ -13,7 +13,7 @@ description:
- Get Eox Status Device by id.
- Retrieves EoX details for a device.
- Retrieves EoX status for all devices in the network.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -26,8 +26,8 @@ options:
- DeviceId path parameter. Device instance UUID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for EoX GetEoXDetailsPerDevice
description: Complete reference of the GetEoXDetailsPerDevice API.
@@ -37,8 +37,8 @@ seealso:
link: https://developer.cisco.com/docs/dna-center/#!get-eo-x-status-for-all-devices
notes:
- SDK Method used are
- eo_x.EoX.get_eo_x_details_per_device,
- eo_x.EoX.get_eo_x_status_for_all_devices,
+ eox.Eox.get_eox_details_per_device,
+ eox.Eox.get_eox_status_for_all_devices,
- Paths used are
get /dna/intent/api/v1/eox-status/device,
@@ -73,7 +73,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -86,27 +85,30 @@ dnac_response:
"alertCount": 0,
"eoxDetails": [
{
+ "name": "string",
"bulletinHeadline": "string",
+ "bulletinName": "string",
"bulletinNumber": "string",
"bulletinURL": "string",
- "endOfHardwareNewServiceAttachmentDate": 0,
- "endOfHardwareServiceContractRenewalDate": 0,
- "endOfLastHardwareShipDate": 0,
- "endOfLifeDate": 0,
- "endOfLifeExternalAnnouncementDate": 0,
- "endOfSaleDate": 0,
- "endOfSignatureReleasesDate": 0,
- "endOfSoftwareVulnerabilityOrSecuritySupportDate": 0,
- "endOfSoftwareVulnerabilityOrSecuritySupportDateHw": 0,
- "endOfSoftwareMaintenanceReleasesDate": 0,
+ "endOfHardwareNewServiceAttachmentDate": "string",
+ "endOfHardwareServiceContractRenewalDate": "string",
+ "endOfLastHardwareShipDate": "string",
+ "endOfLifeExternalAnnouncementDate": "string",
+ "endOfSignatureReleasesDate": "string",
+ "endOfSoftwareVulnerabilityOrSecuritySupportDate": "string",
+ "endOfSoftwareVulnerabilityOrSecuritySupportDateHw": "string",
+ "endOfSaleDate": "string",
+ "endOfLifeDate": "string",
+ "lastDateOfSupport": "string",
+ "endOfSoftwareMaintenanceReleasesDate": "string",
"eoxAlertType": "string",
- "lastDateOfSupport": 0,
- "name": "string"
+ "eoXPhysicalType": "string",
+ "bulletinPID": "string"
}
],
"scanStatus": "string",
"comments": [
- {}
+ "string"
],
"lastScanTime": 0
},
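For reference, a minimal playbook sketch for the module documented above, using the renamed EoX endpoint; the connection variables follow the pattern of the other EXAMPLES in this collection, and the device UUID and my_headers variable are placeholder assumptions:

- name: Get Eox Status Device by id
  cisco.dnac.eox_status_device_info:
    dnac_host: "{{dnac_host}}"
    dnac_username: "{{dnac_username}}"
    dnac_password: "{{dnac_password}}"
    dnac_verify: "{{dnac_verify}}"
    headers: "{{my_headers | from_json}}"
    deviceId: "00000000-0000-0000-0000-000000000000"  # placeholder device instance UUID
  register: result

Note that the RETURN sample above now documents the EoX date fields as strings rather than epoch integers, so any downstream parsing should treat them as text.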
diff --git a/ansible_collections/cisco/dnac/plugins/modules/eox_status_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/eox_status_summary_info.py
index 26dc6d99d..5d846e3c7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/eox_status_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/eox_status_summary_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Eox Status Summary
description:
- Get all Eox Status Summary.
- Retrieves EoX summary for all devices in the network.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -20,15 +20,15 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for EoX GetEoXSummary
description: Complete reference of the GetEoXSummary API.
link: https://developer.cisco.com/docs/dna-center/#!get-eo-x-summary
notes:
- SDK Method used are
- eo_x.EoX.get_eo_x_summary,
+ eox.Eox.get_eox_summary,
- Paths used are
get /dna/intent/api/v1/eox-status/summary,
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_api_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_api_status_info.py
index 8c6cf8d13..f649fd675 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_api_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_api_status_info.py
@@ -24,8 +24,8 @@ options:
- ExecutionId path parameter. Execution ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetStatusAPIForEvents
description: Complete reference of the GetStatusAPIForEvents API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_artifact_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_artifact_count_info.py
index fb7d3ab7d..7ab97bb75 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_artifact_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_artifact_count_info.py
@@ -10,7 +10,7 @@ module: event_artifact_count_info
short_description: Information module for Event Artifact Count
description:
- Get all Event Artifact Count.
-- Get the count of registered event artifacts with provided eventIds or tags as mandatory.
+- Get the count of registered event artifacts.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management EventArtifactCount
description: Complete reference of the EventArtifactCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_artifact_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_artifact_info.py
index 196b1233c..cac6a320b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_artifact_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_artifact_info.py
@@ -30,11 +30,11 @@ options:
offset:
description:
- Offset query parameter. Record start offset.
- type: int
+ type: float
limit:
description:
- Limit query parameter. # of records to return in result set.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. Sort by field.
@@ -48,8 +48,8 @@ options:
- Search query parameter. Find matches in name, description, eventId, type, category.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetEventArtifacts
description: Complete reference of the GetEventArtifacts API.
@@ -84,7 +84,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -128,6 +127,10 @@ dnac_response:
"supportedConnectorTypes": [
"string"
],
+ "configs": {
+ "isAlert": true,
+ "isACKnowledgeable": true
+ },
"tenantId": "string"
}
]
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_config_connector_types_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_config_connector_types_info.py
index b30a9659a..69b0c73c6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_config_connector_types_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_config_connector_types_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetConnectorTypes
description: Complete reference of the GetConnectorTypes API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_count_info.py
index 864364655..94910dca3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_count_info.py
@@ -28,8 +28,8 @@ options:
- Tags query parameter. The registered Tags should be provided.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CountOfEvents
description: Complete reference of the CountOfEvents API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_email_config.py b/ansible_collections/cisco/dnac/plugins/modules/event_email_config.py
index fd5fdb882..819ed8708 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_email_config.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_email_config.py
@@ -12,7 +12,7 @@ description:
- Manage operations create and update of the resource Event Email Config.
- Create Email Destination.
- Update Email Destination.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -35,6 +35,9 @@ options:
port:
description: Port.
type: str
+ smtpType:
+ description: SmtpType.
+ type: str
userName:
description: User Name.
type: str
@@ -51,6 +54,9 @@ options:
port:
description: Port.
type: str
+ smtpType:
+ description: SmtpType.
+ type: str
userName:
description: User Name.
type: str
@@ -62,8 +68,8 @@ options:
description: To Email.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateEmailDestination
description: Complete reference of the CreateEmailDestination API.
@@ -99,11 +105,13 @@ EXAMPLES = r"""
hostName: string
password: string
port: string
+ smtpType: string
userName: string
secondarySMTPConfig:
hostName: string
password: string
port: string
+ smtpType: string
userName: string
subject: string
toEmail: string
@@ -124,17 +132,18 @@ EXAMPLES = r"""
hostName: string
password: string
port: string
+ smtpType: string
userName: string
secondarySMTPConfig:
hostName: string
password: string
port: string
+ smtpType: string
userName: string
subject: string
toEmail: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_email_config_create.py b/ansible_collections/cisco/dnac/plugins/modules/event_email_config_create.py
index 4be4ec362..c5e6c875c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_email_config_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_email_config_create.py
@@ -34,6 +34,9 @@ options:
port:
description: Port.
type: str
+ smtpType:
+ description: SmtpType.
+ type: str
userName:
description: User Name.
type: str
@@ -50,6 +53,9 @@ options:
port:
description: Port.
type: str
+ smtpType:
+ description: SmtpType.
+ type: str
userName:
description: User Name.
type: str
@@ -61,8 +67,8 @@ options:
description: To Email.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateEmailDestination
description: Complete reference of the CreateEmailDestination API.
@@ -92,17 +98,18 @@ EXAMPLES = r"""
hostName: string
password: string
port: string
+ smtpType: string
userName: string
secondarySMTPConfig:
hostName: string
password: string
port: string
+ smtpType: string
userName: string
subject: string
toEmail: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_email_config_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_email_config_info.py
index 272118cd7..b576ac4f8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_email_config_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_email_config_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Event Email Config
description:
- Get all Event Email Config.
- Get Email Destination.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetEmailDestination
description: Complete reference of the GetEmailDestination API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -65,14 +64,14 @@ dnac_response:
"port": "string",
"userName": "string",
"password": "string",
- "security": "string"
+ "smtpType": "string"
},
"secondarySMTPConfig": {
"hostName": "string",
"port": "string",
"userName": "string",
"password": "string",
- "security": "string"
+ "smtpType": "string"
},
"fromEmail": "string",
"toEmail": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_email_config_update.py b/ansible_collections/cisco/dnac/plugins/modules/event_email_config_update.py
index c471ff807..5735077a3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_email_config_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_email_config_update.py
@@ -34,6 +34,9 @@ options:
port:
description: Port.
type: str
+ smtpType:
+ description: SmtpType.
+ type: str
userName:
description: User Name.
type: str
@@ -50,6 +53,9 @@ options:
port:
description: Port.
type: str
+ smtpType:
+ description: SmtpType.
+ type: str
userName:
description: User Name.
type: str
@@ -61,8 +67,8 @@ options:
description: To Email.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management UpdateEmailDestination
description: Complete reference of the UpdateEmailDestination API.
@@ -92,17 +98,18 @@ EXAMPLES = r"""
hostName: string
password: string
port: string
+ smtpType: string
userName: string
secondarySMTPConfig:
hostName: string
password: string
port: string
+ smtpType: string
userName: string
subject: string
toEmail: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_info.py
index a3eafb297..abb4d598f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_info.py
@@ -30,11 +30,11 @@ options:
offset:
description:
- Offset query parameter. The number of Registries to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Registries to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -44,8 +44,8 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetEvents
description: Complete reference of the GetEvents API.
@@ -79,7 +79,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_info.py
index b5fa297d9..707523f0e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_info.py
@@ -92,23 +92,23 @@ options:
offset:
description:
- Offset query parameter. Position of a particular Audit Log record in the data.
- type: int
+ type: float
limit:
description:
- Limit query parameter. Number of Audit Log records to be returned per page.
- type: int
+ type: float
startTime:
description:
- >
StartTime query parameter. Start Time in milliseconds since Epoch Eg. 1597950637211 (when provided endTime
is mandatory).
- type: int
+ type: float
endTime:
description:
- >
EndTime query parameter. End Time in milliseconds since Epoch Eg. 1597961437211 (when provided startTime is
mandatory).
- type: int
+ type: float
sortBy:
description:
- >
@@ -122,8 +122,8 @@ options:
values asc, desc.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetAuditLogRecords
description: Complete reference of the GetAuditLogRecords API.
@@ -173,7 +173,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_parent_records_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_parent_records_info.py
index 26bd96d29..ec518bc1a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_parent_records_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_parent_records_info.py
@@ -88,23 +88,23 @@ options:
offset:
description:
- Offset query parameter. Position of a particular Audit Log record in the data.
- type: int
+ type: float
limit:
description:
- Limit query parameter. Number of Audit Log records to be returned per page.
- type: int
+ type: float
startTime:
description:
- >
StartTime query parameter. Start Time in milliseconds since Epoch Eg. 1597950637211 (when provided endTime
is mandatory).
- type: int
+ type: float
endTime:
description:
- >
EndTime query parameter. End Time in milliseconds since Epoch Eg. 1597961437211 (when provided startTime is
mandatory).
- type: int
+ type: float
sortBy:
description:
- >
@@ -118,8 +118,8 @@ options:
values asc, desc.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetAuditLogParentRecords
description: Complete reference of the GetAuditLogParentRecords API.
@@ -168,7 +168,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_summary_info.py
index 2e0d2e133..af24cebeb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_series_audit_logs_summary_info.py
@@ -98,16 +98,16 @@ options:
- >
StartTime query parameter. Start Time in milliseconds since Epoch Eg. 1597950637211 (when provided endTime
is mandatory).
- type: int
+ type: float
endTime:
description:
- >
EndTime query parameter. End Time in milliseconds since Epoch Eg. 1597961437211 (when provided startTime is
mandatory).
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetAuditLogSummary
description: Complete reference of the GetAuditLogSummary API.
@@ -154,7 +154,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_series_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_series_count_info.py
index ebd6576f0..5c7741fc8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_series_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_series_count_info.py
@@ -26,11 +26,11 @@ options:
startTime:
description:
- StartTime query parameter. Start Time in milliseconds.
- type: int
+ type: float
endTime:
description:
- EndTime query parameter. End Time in milliseconds.
- type: int
+ type: float
category:
description:
- Category query parameter.
@@ -56,8 +56,8 @@ options:
- Source query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CountOfNotifications
description: Complete reference of the CountOfNotifications API.
@@ -94,7 +94,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_series_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_series_info.py
index 982aa0cbc..cf96789ad 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_series_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_series_info.py
@@ -26,11 +26,11 @@ options:
startTime:
description:
- StartTime query parameter. Start Time in milliseconds.
- type: int
+ type: float
endTime:
description:
- EndTime query parameter. End Time in milliseconds.
- type: int
+ type: float
category:
description:
- Category query parameter.
@@ -58,11 +58,11 @@ options:
offset:
description:
- Offset query parameter. Start Offset.
- type: int
+ type: float
limit:
description:
- Limit query parameter. # of records.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. Sort By column.
@@ -84,8 +84,8 @@ options:
- SiteId query parameter. Site Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetNotifications
description: Complete reference of the GetNotifications API.
@@ -129,7 +129,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config.py b/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config.py
new file mode 100644
index 000000000..41d9a29d6
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config.py
@@ -0,0 +1,144 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: event_snmp_config
+short_description: Resource module for Event Snmp Config
+description:
+- Manage operations create and update of the resource Event Snmp Config.
+- Create SNMP Destination.
+- Update SNMP Destination.
+version_added: '3.1.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ authPassword:
+ description: Auth Password.
+ type: str
+ community:
+ description: Required only if snmpVersion is V2C.
+ type: str
+ configId:
+ description: Config Id.
+ type: str
+ description:
+ description: Description.
+ type: str
+ ipAddress:
+ description: Ip Address.
+ type: str
+ name:
+ description: Name.
+ type: str
+ port:
+ description: Port.
+ type: str
+ privacyPassword:
+ description: Privacy Password.
+ type: str
+ snmpAuthType:
+ description: Snmp Auth Type.
+ type: str
+ snmpMode:
+ description: If snmpVersion is V3 it is required and cannot be NONE.
+ type: str
+ snmpPrivacyType:
+ description: Snmp Privacy Type.
+ type: str
+ snmpVersion:
+ description: Snmp Version.
+ type: str
+ userName:
+ description: Required only if snmpVersion is V3.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Event Management CreateSNMPDestination
+ description: Complete reference of the CreateSNMPDestination API.
+ link: https://developer.cisco.com/docs/dna-center/#!create-snmp-destination
+- name: Cisco DNA Center documentation for Event Management UpdateSNMPDestination
+ description: Complete reference of the UpdateSNMPDestination API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-snmp-destination
+notes:
+ - SDK Method used are
+ event_management.EventManagement.create_snmp_destination,
+ event_management.EventManagement.update_snmp_destination,
+
+ - Paths used are
+ post /dna/intent/api/v1/event/snmp-config,
+ put /dna/intent/api/v1/event/snmp-config,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.event_snmp_config:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ authPassword: string
+ community: string
+ description: string
+ ipAddress: string
+ name: string
+ port: string
+ privacyPassword: string
+ snmpAuthType: string
+ snmpMode: string
+ snmpPrivacyType: string
+ snmpVersion: string
+ userName: string
+
+- name: Update all
+ cisco.dnac.event_snmp_config:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ authPassword: string
+ community: string
+ configId: string
+ description: string
+ ipAddress: string
+ name: string
+ port: string
+ privacyPassword: string
+ snmpAuthType: string
+ snmpMode: string
+ snmpPrivacyType: string
+ snmpVersion: string
+ userName: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "errorMessage": {
+ "errors": [
+ {}
+ ]
+ },
+ "apiStatus": "string",
+ "statusMessage": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config_info.py
index b87081672..bdcbc5180 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_snmp_config_info.py
@@ -40,7 +40,7 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Event Management GetSNMPDestination
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription.py
index 0aca4d2db..5c5be35ac 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription.py
@@ -94,8 +94,8 @@ options:
description: Subscriptions query parameter. List of EventSubscriptionId's for removal.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateEventSubscriptions
description: Complete reference of the CreateEventSubscriptions API.
@@ -207,7 +207,6 @@ EXAMPLES = r"""
version: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_count_info.py
index daf1a6fdf..6a8bf1055 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_count_info.py
@@ -24,8 +24,8 @@ options:
- EventIds query parameter. List of subscriptions related to the respective eventIds.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CountOfEventSubscriptions
description: Complete reference of the CountOfEventSubscriptions API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_email_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_email_info.py
index b6487d13f..e360e19a2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_email_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_email_info.py
@@ -30,11 +30,11 @@ options:
offset:
description:
- Offset query parameter. The number of Email Subscription detail's to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Email Subscription detail's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -44,8 +44,8 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetEmailSubscriptionDetails
description: Complete reference of the GetEmailSubscriptionDetails API.
@@ -79,7 +79,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_rest_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_rest_info.py
index ee793eff1..b3801252e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_rest_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_rest_info.py
@@ -32,13 +32,13 @@ options:
- >
Offset query parameter. The number of Rest/Webhook Subscription detail's to offset in the resultset whose
default value 0.
- type: int
+ type: float
limit:
description:
- >
Limit query parameter. The number of Rest/Webhook Subscription detail's to limit in the resultset whose
default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -48,8 +48,8 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetRestWebhookSubscriptionDetails
description: Complete reference of the GetRestWebhookSubscriptionDetails API.
@@ -83,7 +83,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -99,7 +98,7 @@ dnac_response:
"connectorType": "string",
"url": "string",
"method": "string",
- "trustCert": "string",
+ "trustCert": true,
"headers": [
{
"name": "string",
@@ -111,7 +110,14 @@ dnac_response:
],
"pathParams": [
"string"
- ]
+ ],
+ "body": "string",
+ "connectTimeout": 0,
+ "readTimeout": 0,
+ "serviceName": "string",
+ "servicePort": "string",
+ "namespace": "string",
+ "proxyRoute": true
}
]
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_syslog_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_syslog_info.py
index 18e68fa77..891a8c719 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_syslog_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_details_syslog_info.py
@@ -32,11 +32,11 @@ options:
- >
Offset query parameter. The number of Syslog Subscription detail's to offset in the resultset whose default
value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Syslog Subscription detail's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -46,8 +46,8 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetSyslogSubscriptionDetails
description: Complete reference of the GetSyslogSubscriptionDetails API.
@@ -81,7 +81,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email.py
index 13dc4cfe8..baccc36eb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email.py
@@ -72,7 +72,8 @@ options:
elements: dict
suboptions:
instanceId:
- description: (From Get Email Subscription Details --> pick InstanceId).
+ description: (From Get Email Subscription Details --> pick InstanceId if
+ available).
type: str
subscriptionDetails:
description: Event Subscription Email's subscriptionDetails.
@@ -106,8 +107,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateEmailEventSubscription
description: Complete reference of the CreateEmailEventSubscription API.
@@ -214,7 +215,6 @@ EXAMPLES = r"""
version: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email_info.py
index 3eab8992b..7c16281ef 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_email_info.py
@@ -28,11 +28,11 @@ options:
offset:
description:
- Offset query parameter. The number of Subscriptions's to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Subscriptions's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -62,8 +62,8 @@ options:
- Name query parameter. List of email subscriptions related to the respective name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetEmailEventSubscriptions
description: Complete reference of the GetEmailEventSubscriptions API.
@@ -101,7 +101,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_info.py
index 9349d81e7..99702a03e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_info.py
@@ -26,11 +26,11 @@ options:
offset:
description:
- Offset query parameter. The number of Subscriptions's to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Subscriptions's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -40,8 +40,8 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetEventSubscriptions
description: Complete reference of the GetEventSubscriptions API.
@@ -74,7 +74,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest.py
index 81eea1f2e..154c54b25 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest.py
@@ -90,8 +90,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateRestWebhookEventSubscription
description: Complete reference of the CreateRestWebhookEventSubscription API.
@@ -186,7 +186,6 @@ EXAMPLES = r"""
version: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest_info.py
index 06e8dab61..8653c9f43 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_rest_info.py
@@ -26,11 +26,11 @@ options:
offset:
description:
- Offset query parameter. The number of Subscriptions's to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Subscriptions's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -60,8 +60,8 @@ options:
- Name query parameter. List of subscriptions related to the respective name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetRestWebhookEventSubscriptions
description: Complete reference of the GetRestWebhookEventSubscriptions API.
@@ -99,7 +99,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog.py
index aeff30dd3..b66e5b4c3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog.py
@@ -90,8 +90,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateSyslogEventSubscription
description: Complete reference of the CreateSyslogEventSubscription API.
@@ -186,7 +186,6 @@ EXAMPLES = r"""
version: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog_info.py
index ef6ffb044..e3420a68e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_subscription_syslog_info.py
@@ -26,11 +26,11 @@ options:
offset:
description:
- Offset query parameter. The number of Subscriptions's to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of Subscriptions's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -60,8 +60,8 @@ options:
- Name query parameter. List of subscriptions related to the respective name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetSyslogEventSubscriptions
description: Complete reference of the GetSyslogEventSubscriptions API.
@@ -99,7 +99,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config.py b/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config.py
index 11c6d9508..5eef3d989 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config.py
@@ -12,7 +12,7 @@ description:
- Manage operations create and update of the resource Event Syslog Config.
- Create Syslog Destination.
- Update Syslog Destination.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -31,13 +31,13 @@ options:
type: str
port:
description: Port.
- type: str
+ type: int
protocol:
description: Protocol.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateSyslogDestination
description: Complete reference of the CreateSyslogDestination API.
@@ -71,7 +71,7 @@ EXAMPLES = r"""
description: string
host: string
name: string
- port: string
+ port: 0
protocol: string
- name: Create
@@ -88,11 +88,10 @@ EXAMPLES = r"""
description: string
host: string
name: string
- port: string
+ port: 0
protocol: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config_info.py
index a2e6009c8..8b78a13ff 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_syslog_config_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Event Syslog Config
description:
- Get all Event Syslog Config.
- Get Syslog Destination.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -34,11 +34,11 @@ options:
offset:
description:
- Offset query parameter. The number of syslog configuration's to offset in the resultset whose default value 0.
- type: int
+ type: float
limit:
description:
- Limit query parameter. The number of syslog configuration's to limit in the resultset whose default value 10.
- type: int
+ type: float
sortBy:
description:
- SortBy query parameter. SortBy field name.
@@ -48,8 +48,8 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management GetSyslogDestination
description: Complete reference of the GetSyslogDestination API.
@@ -84,7 +84,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_webhook_create.py b/ansible_collections/cisco/dnac/plugins/modules/event_webhook_create.py
index 2859b7615..ec8080e00 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_webhook_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_webhook_create.py
@@ -36,6 +36,9 @@ options:
description: Value.
type: str
type: list
+ isProxyRoute:
+ description: Is Proxy Route.
+ type: bool
method:
description: Method.
type: str
@@ -52,8 +55,8 @@ options:
description: Required only for update webhook configuration.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management CreateWebhookDestination
description: Complete reference of the CreateWebhookDestination API.
@@ -83,6 +86,7 @@ EXAMPLES = r"""
encrypt: true
name: string
value: string
+ isProxyRoute: true
method: string
name: string
trustCert: true
@@ -90,7 +94,6 @@ EXAMPLES = r"""
webhookId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_webhook_read_info.py b/ansible_collections/cisco/dnac/plugins/modules/event_webhook_read_info.py
new file mode 100644
index 000000000..077df28d4
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_webhook_read_info.py
@@ -0,0 +1,112 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: event_webhook_read_info
+short_description: Information module for Event Webhook Read
+description:
+- Get all Event Webhook Read.
+- Get Webhook Destination.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ webhookIds:
+ description:
+ - WebhookIds query parameter. List of webhook configurations.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. The number of webhook configuration's to offset in the resultset whose default value 0.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. The number of webhook configuration's to limit in the resultset whose default value 10.
+ type: float
+ sortBy:
+ description:
+ - SortBy query parameter. SortBy field name.
+ type: str
+ order:
+ description:
+ - Order query parameter.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Event Management GetWebhookDestination
+ description: Complete reference of the GetWebhookDestination API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-webhook-destination
+notes:
+ - SDK Method used are
+ event_management.EventManagement.get_webhook_destination,
+
+ - Paths used are
+ get /dna/intent/api/v1/event/webhook,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Event Webhook Read
+ cisco.dnac.event_webhook_read_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ webhookIds: string
+ offset: 0
+ limit: 0
+ sortBy: string
+ order: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "errorMessage": {
+ "errors": [
+ "string"
+ ]
+ },
+ "apiStatus": "string",
+ "statusMessage": [
+ {
+ "version": "string",
+ "tenantId": "string",
+ "webhookId": "string",
+ "name": "string",
+ "description": "string",
+ "url": "string",
+ "method": "string",
+ "trustCert": true,
+ "headers": [
+ {
+ "name": "string",
+ "value": "string",
+ "defaultValue": "string",
+ "encrypt": true
+ }
+ ],
+ "isProxyRoute": true
+ }
+ ]
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/event_webhook_update.py b/ansible_collections/cisco/dnac/plugins/modules/event_webhook_update.py
index 0c50462ed..63f683eea 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/event_webhook_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/event_webhook_update.py
@@ -36,6 +36,9 @@ options:
description: Value.
type: str
type: list
+ isProxyRoute:
+ description: Is Proxy Route.
+ type: bool
method:
description: Method.
type: str
@@ -52,8 +55,8 @@ options:
description: Required only for update webhook configuration.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Event Management UpdateWebhookDestination
description: Complete reference of the UpdateWebhookDestination API.
@@ -83,6 +86,7 @@ EXAMPLES = r"""
encrypt: true
name: string
value: string
+ isProxyRoute: true
method: string
name: string
trustCert: true
@@ -90,7 +94,6 @@ EXAMPLES = r"""
webhookId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/events_and_notifications_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/events_and_notifications_workflow_manager.py
new file mode 100644
index 000000000..917ff615b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/events_and_notifications_workflow_manager.py
@@ -0,0 +1,5162 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2022, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Abhishek Maheshwari, Madhan Sankaranarayanan")
+
+DOCUMENTATION = r"""
+---
+module: events_and_notifications_workflow_manager
+short_description: Configure various types of destinations to deliver event notifications from Cisco Catalyst Center Platform.
+description:
+- Configure various types of destinations to deliver event notifications from Cisco Catalyst Center Platform.
+- Configuring/Updating the Webhook destination details in Cisco Catalyst Center.
+- Configuring/Updating the Email destination details in Cisco Catalyst Center.
+- Configuring/Updating the Syslog destination details in Cisco Catalyst Center.
+- Configuring/Updating the SNMP destination details in Cisco Catalyst Center.
+- Configuring/Updating the ITSM Integration Settings in Cisco Catalyst Center.
+- Deletes the ITSM Integration Settings from Cisco Catalyst Center.
+- Create/Update Notification using the above destination in Cisco Catalyst Center.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Abhishek Maheshwari (@abmahesh)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged, deleted ]
+ default: merged
+ config:
+ description: List containing the subscription configuration for events and site notifications, delivered through one or more channels.
+ type: list
+ elements: dict
+ required: True
+ suboptions:
+ webhook_destination:
+ description: Dictionary containing the details for configuring/updating the REST Endpoint to receive Audit logs and
+ Events from Cisco Catalyst Center Platform.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the webhook destination. A unique identifier for the webhook destination within the system.
+ type: str
+ required: True
+ description:
+ description: A brief explanation of what the webhook destination is used for.
+ type: str
+ url:
+ description: Fully qualified URL to which the webhook will send requests (e.g., "https://ciscocatalyst.com").
+ type: str
+ required: True
+ method:
+ description: The HTTP method used by the webhook when sending requests (e.g., POST, PUT).
+ POST - It is typically used to create a new webhook destination. When you set up a new webhook in Cisco Catalyst Center,
+ you would use the POST method to send the necessary configuration details (like URL, headers, payload format, etc.)
+ to the server.
+ PUT - It is used to update an existing webhook destination, for example when an already configured webhook needs any of its
+ settings changed, such as modifying the URL, adjusting headers, or changing the payload format.
+ type: str
+ trust_cert:
+ description: A boolean that indicates whether to verify the SSL/TLS certificate of the URL; setting this to true will bypass
+ certificate verification. By default, it is set to false.
+ type: bool
+ headers:
+ description: A list of HTTP headers to be included in the webhook request. Each header is represented as a dictionary.
+ While giving the headers details we can categorize them into - "Basic, Token and No Auth".
+ Basic Authentication - It is used to ensure that the receiving server can validate the identity of the requesting server by checking
+ the credentials against its store. This method is straightforward but less secure compared to others since credentials are sent
+ encoded but not encrypted.
+ Token Authentication - It involves security tokens which are typically generated by the server. A client must send this token in the
+ HTTP header to access resources. It is more secure than Basic Authentication as it allows the server to issue tokens that can have
+ a limited lifetime, be revoked, and carry specific permissions without exposing user credentials.
+ No Auth - It implies that no authentication method is required to access the webhook destination. This setting can be used in environments
+ where security is either handled by other means (such as network isolation) or where the data being transmitted is not sensitive.
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description: Name of the HTTP header.
+ type: str
+ value:
+ description: Value assigned to the HTTP header.
+ type: str
+ default_value:
+ description: Default value for the HTTP header that can be used if no specific value is provided.
+ type: str
+ encrypt:
+ description: Indicates whether the value of the header should be encrypted. Useful for sensitive data.
+ type: bool
+ is_proxy_route:
+ description: A boolean value indicating if the request should use a proxy server. Set it to true for proxy routing and to false for a direct
+ connection. By default, it is set to True.
+ type: bool
+ email_destination:
+ description: Configure settings to send out emails from Cisco Catalyst Center. An email destination can be created in Cisco Catalyst
+ Center only once; afterwards it can only be modified. This section only configures the Primary and Secondary SMTP servers for the
+ email destination. It is not related to email event subscription notifications.
+ type: dict
+ suboptions:
+ primary_smtp_config:
+ description: Add the primary configuration for smtp while creating/updating email destination.
+ type: dict
+ suboptions:
+ server_address:
+ description: Hostname or IP address of the primary SMTP server. Supports both IPv4 and IPv6.
+ type: str
+ required: True
+ smtp_type:
+ description: The type of connection used for the SMTP server, with options being DEFAULT, TLS, or SSL. By default, it is set to DEFAULT.
+ DEFAULT - Chooses a standard SMTP connection without encryption. If it's selected then port will be 25 only.
+ TLS - Initiates an unencrypted SMTP connection and upgrades to TLS encryption when available. If it's selected then port will be
+ either 465 or 587.
+ SSL - Begins with an encrypted SMTP connection using SSL from the start. If it's selected then port will be
+ either 465 or 587.
+ type: str
+ required: True
+ port:
+ description: Port number used for configuring the Primary SMTP Server. The port depends on the smtp_type. If smtp_type is DEFAULT
+ the port is 25, and for smtp_type TLS or SSL the port can be either 465 or 587.
+ type: str
+ username:
+ description: Username for Authenticating Primary SMTP Server.
+ type: str
+ password:
+ description: Password for Authenticating Primary SMTP Server.
+ type: str
+ secondary_smtp_config:
+ description: Include an optional secondary SMTP configuration when creating or updating an email destination.
+ type: dict
+ suboptions:
+ server_address:
+ description: Hostname or IP address of the secondary SMTP server. Supports both IPv4 and IPv6.
+ type: str
+ smtp_type:
+ description: The type of connection used for the SMTP server, with options DEFAULT, TLS, or SSL. Defaults to DEFAULT.
+ DEFAULT - Uses a standard SMTP connection without encryption. With this option the port must be 25.
+ TLS - Starts with an unencrypted SMTP connection and upgrades to TLS encryption when available. With this option the port must be
+ either 465 or 587.
+ SSL - Starts with an SMTP connection that is encrypted with SSL from the beginning. With this option the port must be
+ either 465 or 587.
+ type: str
+ port:
+ description: Port number used for the secondary SMTP server. The port depends on the smtp_type - if smtp_type is DEFAULT the port
+ must be 25, and for smtp_type TLS or SSL the port can be either 465 or 587.
+ type: str
+ username:
+ description: Username for Authenticating Secondary SMTP Server.
+ type: str
+ password:
+ description: Password for Authenticating Secondary SMTP Server.
+ type: str
+ from_email:
+ description: Sender's email address used when setting up or modifying an email destination.
+ type: str
+ required: True
+ to_email:
+ description: Recipient's email address that will receive emails when an email destination is created or updated.
+ type: str
+ required: True
+ subject:
+ description: Subject line of the email to be used when sending emails from the specified email destination.
+ type: str
+ required: True
+ syslog_destination:
+ description: Dictionary containing the details for configuring/updating the Syslog Server to collect Audit logs and Events
+ from the Cisco Catalyst Center.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the syslog destination.
+ type: str
+ required: True
+ description:
+ description: A brief explanation detailing the purpose of the syslog destination.
+ type: str
+ required: True
+ server_address:
+ description: Hostname or IP address of the Syslog server.
+ type: str
+ required: True
+ protocol:
+ description: Protocol used for sending syslog messages (e.g., UDP, TCP).
+ Transmission Control Protocol (TCP) - It is a connection-oriented protocol used for reliable and ordered communication
+ between devices on a network. It provides error-checking, retransmission of lost packets, and ensures that data is
+ delivered in the correct order.
+ User Datagram Protocol (UDP) - It is a connectionless protocol used for sending datagrams between devices on a network.
+ It provides a lightweight, best-effort delivery mechanism without guaranteeing delivery or ordering of packets. UDP
+ is commonly used for real-time applications such as streaming media, online gaming, and VoIP.
+ type: str
+ required: True
+ port:
+ description: Port number on which the syslog server is listening. It must be in the range 1-65535. If no port is given,
+ the default port 514 is used. See the UDP syslog example in the EXAMPLES section.
+ type: int
+ required: True
+ snmp_destination:
+ description: Dictionary containing the details for configuring/updating the SNMP Trap Server to receive Audit logs and Events from
+ Cisco Catalyst Center.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the SNMP destination.
+ type: str
+ required: True
+ description:
+ description: Description of the SNMP destination.
+ type: str
+ required: True
+ server_address:
+ description: IP address of the SNMP server.
+ type: str
+ required: True
+ port:
+ description: Port number on which the SNMP server is listening.
+ type: str
+ required: True
+ snmp_version:
+ description: The SNMP protocol version used for network management and monitoring, selectable between SNMPv2c and SNMPv3.
+ V2C - Uses community strings for authentication between the SNMP manager (such as Cisco Catalyst Center) and managed network
+ devices (routers, switches, access points), without encryption, as the strings are sent in plain text.
+ V3 - Offers enhanced security features over V2C, including authentication, integrity, and encryption, using usernames,
+ passwords, and encryption keys for secure communications.
+ type: str
+ community:
+ description: SNMP community string used for authentication, necessary only when the snmp_version is set to V2C.
+ type: str
+ username:
+ description: Username required for SNMP authentication, applicable exclusively when the snmp_version is configured to V3.
+ type: str
+ mode:
+ description: The security mode for SNMP communication (options - AUTH_PRIVACY, AUTH_NO_PRIVACY, NO_AUTH_NO_PRIVACY).
+ Mandatory for snmp_version V3 and must not be set to NONE. See the SNMP examples in the EXAMPLES section.
+ AUTH_PRIVACY - The most secure mode, providing both verification of the message source through authentication and protection of
+ the message contents with encryption.
+ If this option is selected, the auth_type, auth_password, privacy_type, and privacy_password parameters must be provided in the playbook.
+ AUTH_NO_PRIVACY - This mode verifies the authenticity of SNMP messages through authentication but does not encrypt
+ the data, leaving it vulnerable to interception.
+ If this option is selected, the auth_type and auth_password parameters must be provided in the playbook.
+ NO_AUTH_NO_PRIVACY - In this mode, SNMP messages are neither authenticated nor encrypted, making it the least secure option as it
+ requires no credentials or data protection.
+ If this option is selected, the auth_type, auth_password, privacy_type, and privacy_password parameters are not required in the playbook.
+ type: str
+ auth_type:
+ description: Type of SNMP authentication protocol to use, such as MD5 or SHA.
+ SHA - Stands for Secure Hash Algorithm, a suite of cryptographic hash functions developed by the National Security Agency (NSA) offering
+ enhanced security.
+ MD5 - Refers to Message Digest Algorithm 5, a well-established cryptographic hash function generating a 128-bit hash value, employed in
+ SNMPv3 for message authenticity and integrity verification.
+ type: str
+ auth_password:
+ description: Password used for SNMP authentication.
+ type: str
+ privacy_type:
+ description: Encryption algorithm used for SNMP privacy, such as AES128.
+ type: str
+ privacy_password:
+ description: Password used for encryption in SNMP privacy.
+ type: str
+ itsm_setting:
+ description: Dictionary containing the ServiceNow/BMCRemedy settings used to automatically create incidents, problems, or RFCs
+ from Cisco Catalyst Center.
+ type: dict
+ suboptions:
+ instance_name:
+ description: The name of the ITSM configuration. It identifies the integration within the system and is also used when deleting
+ the ITSM integration setting from Cisco Catalyst Center.
+ type: str
+ required: True
+ description:
+ description: A brief description of the ITSM settings, outlining its purpose or usage within the organization.
+ type: str
+ connection_settings:
+ description: A dictionary of settings required to establish a connection with the ITSM system.
+ type: dict
+ suboptions:
+ url:
+ description: The URL of the ITSM system API endpoint. This is the base URL used for ITSM service requests.
+ type: str
+ required: True
+ username:
+ description: The username used for authentication with the ITSM system. This is required for accessing the API.
+ type: str
+ required: True
+ password:
+ description: The password associated with the username for API authentication. It is recommended to handle this data securely.
+ type: str
+ required: True
+ webhook_event_notification:
+ description: Dictionary containing the details for creating/updating the Webhook Event subscription notification in Cisco Catalyst
+ Center.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the Webhook event subscription notification.
+ type: str
+ required: True
+ description:
+ description: A brief explanation detailing the purpose of the webhook events subscription notification.
+ type: str
+ required: True
+ version:
+ description: Version label for the event subscription, helping track updates or changes.
+ type: str
+ destination:
+ description: The name of the destination for sending event notifications via webhook.
+ type: str
+ required: True
+ events:
+ description: List of event names to be subscribed to for notification configurations (e.g., ["AP Flap", "AP Reboot Crash"]).
+ type: list
+ elements: str
+ required: True
+ domain:
+ description: The main category or domain under which events fall (e.g., Know Your Network, Connectivity, etc.).
+ type: str
+ subdomains:
+ description: More specific categories within the main domain to further classify events (e.g., ["Wireless", "Applications"]).
+ type: list
+ elements: str
+ event_types:
+ description: Types of events that trigger the notifications, defining the nature of the event (e.g., ["APP", "NETWORK"]).
+ type: list
+ elements: str
+ event_categories:
+ description: List of event categories to be included in the subscription for notifications
+ (e.g., WARN, INFO, ERROR, ALERT, TASK_COMPLETE, TASK_FAILURE).
+ type: list
+ elements: str
+ event_severities:
+ description: List of event severities to be included in the subscription for notifications (e.g., ["1", "2", "3"]).
+ type: list
+ elements: str
+ event_sources:
+ description: List of event sources to be included in the subscription for notifications.
+ type: list
+ elements: str
+ sites:
+ description: List of site names for which events are included in the notification subscription (e.g., ["Global/India", "Global/USA"]).
+ See the webhook notification example with event filters in the EXAMPLES section.
+ type: list
+ elements: str
+ email_event_notification:
+ description: Configuration for setting up or modifying an Email Event Subscription in Cisco Catalyst Center.
+ This includes parameters for the email notification itself as well as details for the associated email instance.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the Email event subscription notification.
+ type: str
+ required: True
+ description:
+ description: A brief explanation detailing the purpose of the Email events subscription notification.
+ type: str
+ required: True
+ version:
+ description: Version label for the event subscription, helping track updates or changes.
+ type: str
+ events:
+ description: List of event names to be subscribed to for notification configurations (e.g., ["AP Flap", "AP Reboot Crash"]).
+ type: list
+ elements: str
+ required: True
+ sender_email:
+ description: Originating email address for sending out the notifications.
+ type: str
+ required: True
+ recipient_emails:
+ description: Recipient email addresses that will receive the notifications.
+ type: list
+ elements: str
+ required: True
+ subject:
+ description: The Subject line for the email notification, briefly indicating the notification content.
+ type: str
+ required: True
+ instance:
+ description: Name assigned to the specific email instance used for sending the notification.
+ type: str
+ required: True
+ instance_description:
+ description: Detailed explanation of the email instance's purpose and how it relates to the notifications.
+ type: str
+ required: True
+ domain:
+ description: The main category or domain under which events fall (e.g., Know Your Network, Connectivity, etc.).
+ type: str
+ subdomains:
+ description: More specific categories within the main domain to further classify events (e.g., ["Wireless", "Applications"]).
+ type: list
+ elements: str
+ event_types:
+ description: Types of events that trigger the notifications, defining the nature of the event (e.g., ["APP", "NETWORK"]).
+ type: list
+ elements: str
+ event_categories:
+ description: List of event categories to be included in the subscription for notifications
+ (e.g., WARN, INFO, ERROR, ALERT, TASK_COMPLETE, TASK_FAILURE).
+ type: list
+ elements: str
+ event_severities:
+ description: List of event severities to be included in the subscription for notifications (e.g., ["1", "2", "3"]).
+ type: list
+ elements: str
+ event_sources:
+ description: List of event sources to be included in the subscription for notifications.
+ type: list
+ elements: str
+ sites:
+ description: List of site names for which events are included in the notification subscription (e.g., ["Global/India", "Global/USA"]).
+ See the email notification example with event filters in the EXAMPLES section.
+ type: list
+ elements: str
+ syslog_event_notification:
+ description: Configuration for establishing or revising a Syslog Event Subscription in the Cisco Catalyst Center.
+ This allows for the specification of Syslog event notification parameters and destination settings.
+ type: dict
+ suboptions:
+ name:
+ description: Name of the Syslog event subscription notification.
+ type: str
+ required: True
+ description:
+ description: A brief explanation detailing the purpose of the syslog events subscription notification.
+ type: str
+ required: True
+ version:
+ description: Version label for the event subscription, helping track updates or changes.
+ type: str
+ destination:
+ description: The name of the destination for sending event notifications via syslog.
+ type: str
+ required: True
+ events:
+ description: List of event names to be subscribed to for notification configurations (e.g., ["AP Flap", "AP Reboot Crash"]).
+ type: list
+ elements: str
+ required: True
+ domain:
+ description: The main category or domain under which events fall (e.g., Know Your Network, Connectivity, etc.).
+ type: str
+ subdomains:
+ description: More specific categories within the main domain to further classify events (e.g., ["Wireless", "Applications"]).
+ type: list
+ elements: str
+ event_types:
+ description: Types of events that trigger the notifications, defining the nature of the event (e.g., ["APP", "NETWORK"]).
+ type: list
+ elements: str
+ event_categories:
+ description: List of event categories to be included in the subscription for notifications
+ (e.g., WARN, INFO, ERROR, ALERT, TASK_COMPLETE, TASK_FAILURE).
+ type: list
+ elements: str
+ event_severities:
+ description: List of event severities to be included in the subscription for notifications (e.g., ["1", "2", "3"]).
+ type: list
+ elements: str
+ event_sources:
+ description: List of event sources to be included in the subscription for notifications.
+ type: list
+ elements: str
+ sites:
+ description: List of site names for which events are included in the notification subscription (e.g., ["Global/India", "Global/USA"]).
+ type: list
+ elements: str
+
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+
+notes:
+ - Configuring the webhook destination with headers is supported starting from dnacentersdk version 2.9.1 onwards. This enhancement is in
+ alignment with Catalyst Center Release 2.3.7.5.
+ - Configuring the SNMP destination is supported starting from dnacentersdk version 2.9.1 onwards. This enhancement is in
+ alignment with Catalyst Center Release 2.3.7.5.
+ - SDK Methods used are
+ events.Events.get_syslog_destination,
+ events.Events.create_syslog_destination,
+ events.Events.update_syslog_destination,
+ events.Events.get_snmp_destination,
+ events.Events.create_snmp_destination,
+ events.Events.update_snmp_destination,
+ events.Events.get_webhook_destination,
+ events.Events.create_webhook_destination,
+ events.Events.update_webhook_destination,
+ events.Events.get_email_destination,
+ events.Events.create_email_destination,
+ events.Events.get_status_api_for_events,
+ events.Events.get_all_itsm_integration_settings,
+ events.Events.get_itsm_integration_setting_by_id,
+ events.Events.create_itsm_integration_setting,
+ events.Events.update_itsm_integration_setting,
+ events.Events.delete_itsm_integration_setting,
+ events.Events.get_eventartifacts,
+ events.Events.get_site,
+ events.Events.get_syslog_event_subscriptions,
+ events.Events.get_syslog_subscription_details,
+ events.Events.create_syslog_event_subscription,
+ events.Events.update_syslog_event_subscription,
+ events.Events.get_rest_webhook_event_subscriptions,
+ events.Events.get_rest_webhook_subscription_details,
+ events.Events.create_rest_webhook_event_subscription,
+ events.Events.update_rest_webhook_event_subscription,
+ events.Events.get_email_event_subscriptions,
+ events.Events.get_email_subscription_details,
+ events.Events.create_email_event_subscription,
+ events.Events.update_email_event_subscription,
+ events.Events.delete_event_subscriptions
+
+"""
+
+EXAMPLES = r"""
+- name: Create Rest Webhook destination with given name.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - webhook_destination:
+ name: "webhook test"
+ description: "creating webhook for testing"
+ url: "https://10.195.227.14/dna"
+ method: "POST"
+ trust_cert: False
+
+- name: Updating Rest Webhook destination with given name.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - webhook_destination:
+ name: "webhook test"
+ description: "updating webhook for testing"
+
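+# The following task is a minimal sketch showing the optional 'headers' parameter with a token-style Authorization header.
+# The header name and token value are placeholder values, and header support assumes dnacentersdk >= 2.9.1 as noted above.
+- name: Create Rest Webhook destination with a token-based authorization header.
+  cisco.dnac.events_and_notifications_workflow_manager:
+    dnac_host: "{{dnac_host}}"
+    dnac_username: "{{dnac_username}}"
+    dnac_password: "{{dnac_password}}"
+    dnac_verify: "{{dnac_verify}}"
+    dnac_port: "{{dnac_port}}"
+    dnac_version: "{{dnac_version}}"
+    dnac_debug: "{{dnac_debug}}"
+    dnac_log_level: "{{dnac_log_level}}"
+    dnac_log: False
+    state: merged
+    config:
+      - webhook_destination:
+          name: "webhook with headers"
+          description: "Webhook destination using token authentication"
+          url: "https://10.195.227.14/dna"
+          method: "POST"
+          trust_cert: False
+          headers:
+            - name: "Authorization"
+              value: "Bearer placeholder-token"
+              encrypt: True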
+
+- name: Configuring the email destination in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - email_destination:
+ from_email: "test@cisco.com"
+ to_email: "demo@cisco.com"
+ subject: "Ansible testing"
+ primary_smtp_config:
+ server_address: "outbound.cisco.com"
+ port: "25"
+ smtp_type: "DEFAULT"
+
+- name: Updating the email destination in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - email_destination:
+ from_email: "test@cisco.com"
+ to_email: "demo123@cisco.com"
+ subject: "Ansible updated email config testing"
+
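+# A minimal sketch of an email destination that uses a TLS-secured primary SMTP server on port 587. The SMTP server address
+# and credentials below are placeholder values.
+- name: Configuring the email destination with a TLS SMTP server.
+  cisco.dnac.events_and_notifications_workflow_manager:
+    dnac_host: "{{dnac_host}}"
+    dnac_username: "{{dnac_username}}"
+    dnac_password: "{{dnac_password}}"
+    dnac_verify: "{{dnac_verify}}"
+    dnac_port: "{{dnac_port}}"
+    dnac_version: "{{dnac_version}}"
+    dnac_debug: "{{dnac_debug}}"
+    dnac_log_level: "{{dnac_log_level}}"
+    dnac_log: False
+    state: merged
+    config:
+      - email_destination:
+          from_email: "test@cisco.com"
+          to_email: "demo@cisco.com"
+          subject: "Ansible testing"
+          primary_smtp_config:
+            server_address: "smtp.example.com"
+            smtp_type: "TLS"
+            port: "587"
+            username: "smtp-user"
+            password: "smtp-password"
+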
+- name: Create Syslog destination with given name.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - syslog_destination:
+ name: Syslog test
+ description: "Adding syslog destination"
+ server_address: "10.30.0.90"
+ protocol: "TCP"
+ port: 6553
+
+- name: Update Syslog destination with given name.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - syslog_destination:
+ name: Syslog test
+ description: "Updating syslog destination."
+
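+# A minimal sketch of a syslog destination that uses UDP on the default syslog port 514; the server address is a placeholder value.
+- name: Create Syslog destination over UDP using the default port.
+  cisco.dnac.events_and_notifications_workflow_manager:
+    dnac_host: "{{dnac_host}}"
+    dnac_username: "{{dnac_username}}"
+    dnac_password: "{{dnac_password}}"
+    dnac_verify: "{{dnac_verify}}"
+    dnac_port: "{{dnac_port}}"
+    dnac_version: "{{dnac_version}}"
+    dnac_debug: "{{dnac_debug}}"
+    dnac_log_level: "{{dnac_log_level}}"
+    dnac_log: False
+    state: merged
+    config:
+      - syslog_destination:
+          name: "Syslog UDP test"
+          description: "Adding syslog destination over UDP"
+          server_address: "10.30.0.90"
+          protocol: "UDP"
+          port: 514
+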
+- name: Create SNMP destination with given name.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - snmp_destination:
+ name: Snmp test
+ description: "Adding snmp destination for testing."
+ server_address: "10.30.0.90"
+ port: "25"
+ snmp_version: "V3"
+ username: cisco
+ mode: AUTH_PRIVACY
+ auth_type: SHA
+ auth_password: authpass123
+ privacy_type: AES128
+ privacy_password: privacy123
+
+- name: Update SNMP destination with given name.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - snmp_destination:
+ name: Snmp test
+ description: "Updating snmp destination with snmp version v2."
+ server_address: "10.30.0.90"
+ port: "25"
+ snmp_version: "V2C"
+ community: "public123"
+
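+# A minimal sketch of an SNMPv3 destination using the AUTH_NO_PRIVACY mode, which only requires auth_type and auth_password in
+# addition to the common fields. The server address, port, and credentials are placeholder values (162 is the conventional SNMP trap port).
+- name: Create SNMP destination with authentication but without privacy.
+  cisco.dnac.events_and_notifications_workflow_manager:
+    dnac_host: "{{dnac_host}}"
+    dnac_username: "{{dnac_username}}"
+    dnac_password: "{{dnac_password}}"
+    dnac_verify: "{{dnac_verify}}"
+    dnac_port: "{{dnac_port}}"
+    dnac_version: "{{dnac_version}}"
+    dnac_debug: "{{dnac_debug}}"
+    dnac_log_level: "{{dnac_log_level}}"
+    dnac_log: False
+    state: merged
+    config:
+      - snmp_destination:
+          name: "Snmp auth only"
+          description: "Adding snmp destination with AUTH_NO_PRIVACY mode."
+          server_address: "10.30.0.90"
+          port: "162"
+          snmp_version: "V3"
+          username: cisco
+          mode: AUTH_NO_PRIVACY
+          auth_type: SHA
+          auth_password: authpass123
+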
+- name: Create ITSM Integration Setting with given name in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - itsm_setting:
+ instance_name: "ITSM test"
+ description: "ITSM description for testing"
+ connection_settings:
+ url: "http/catalystcenter.com"
+ username: "catalyst"
+ password: "catalyst@123"
+
+- name: Updating ITSM Integration Setting with given name in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - itsm_setting:
+ instance_name: "ITSM test"
+ connection_settings:
+ url: "http/catalystcenterupdate.com"
+ password: "catalyst@123"
+
+- name: Creating Webhook Notification with the list of names of subscribed events in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - webhook_event_notification:
+ name: "Webhook Notification."
+ description: "Notification for webhook events subscription"
+ sites: ["Global/India", "Global/USA"]
+ events: ["AP Flap", "AP Reboot Crash"]
+ destination: "Webhook Demo"
+
+- name: Updating Webhook Notification with the list of names of subscribed events in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - webhook_event_notification:
+ name: "Webhook Notification."
+ description: "Updated notification for webhook events subscription"
+ sites: ["Global/India", "Global/USA", "Global/China"]
+ destination: "Webhook Demo"
+
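+# A minimal sketch of a webhook notification restricted with the optional filter fields (domain, subdomains, event categories
+# and severities). The filter values shown are illustrative placeholders.
+- name: Creating Webhook Notification with event filters.
+  cisco.dnac.events_and_notifications_workflow_manager:
+    dnac_host: "{{dnac_host}}"
+    dnac_username: "{{dnac_username}}"
+    dnac_password: "{{dnac_password}}"
+    dnac_verify: "{{dnac_verify}}"
+    dnac_port: "{{dnac_port}}"
+    dnac_version: "{{dnac_version}}"
+    dnac_debug: "{{dnac_debug}}"
+    dnac_log_level: "{{dnac_log_level}}"
+    dnac_log: False
+    state: merged
+    config:
+      - webhook_event_notification:
+          name: "Webhook Notification with filters"
+          description: "Webhook notification filtered by category and severity"
+          sites: ["Global/India"]
+          events: ["AP Flap"]
+          destination: "Webhook Demo"
+          domain: "Know Your Network"
+          subdomains: ["Wireless"]
+          event_categories: ["WARN", "ERROR"]
+          event_severities: ["1", "2"]
+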
+- name: Creating Email Notification with the list of names of subscribed events in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - email_event_notification:
+ name: "Email Notification"
+ description: "Notification description for email subscription creation"
+ sites: ["Global/India", "Global/USA"]
+ events: ["AP Flap", "AP Reboot Crash"]
+ sender_email: "catalyst@cisco.com"
+ recipient_emails: ["test@cisco.com", "demo@cisco.com"]
+ subject: "Mail test"
+ instance: Email Instance test
+
+- name: Updating Email Notification with the list of names of subscribed events in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - email_event_notification:
+ name: "Email Notification"
+ description: "Notification description for email subscription updation"
+ sites: ["Global/India", "Global/USA"]
+ events: ["AP Flap", "AP Reboot Crash"]
+ sender_email: "catalyst@cisco.com"
+ recipient_emails: ["test@cisco.com", "demo@cisco.com", "update@cisco.com"]
+ subject: "Mail test for updation"
+ instance: Email Instance test
+
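+# A minimal sketch of an email notification that also sets the optional instance_description and event filter fields; the
+# values shown are illustrative placeholders.
+- name: Creating Email Notification with an instance description and event filters.
+  cisco.dnac.events_and_notifications_workflow_manager:
+    dnac_host: "{{dnac_host}}"
+    dnac_username: "{{dnac_username}}"
+    dnac_password: "{{dnac_password}}"
+    dnac_verify: "{{dnac_verify}}"
+    dnac_port: "{{dnac_port}}"
+    dnac_version: "{{dnac_version}}"
+    dnac_debug: "{{dnac_debug}}"
+    dnac_log_level: "{{dnac_log_level}}"
+    dnac_log: False
+    state: merged
+    config:
+      - email_event_notification:
+          name: "Email Notification with filters"
+          description: "Email notification filtered by event type"
+          sites: ["Global/India"]
+          events: ["AP Flap"]
+          sender_email: "catalyst@cisco.com"
+          recipient_emails: ["test@cisco.com"]
+          subject: "Filtered mail test"
+          instance: Email Instance test
+          instance_description: "Email instance used for filtered notifications"
+          event_types: ["NETWORK"]
+          event_severities: ["1", "2", "3"]
+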
+- name: Creating Syslog Notification with the list of names of subscribed events in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - syslog_event_notification:
+ name: "Syslog Notification."
+ description: "Notification for syslog events subscription"
+ sites: ["Global/India", "Global/USA"]
+ events: ["AP Flap", "AP Reboot Crash"]
+ destination: "Syslog Demo"
+
+- name: Updating Syslog Notification with the list of names of subscribed events in the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: merged
+ config:
+ - syslog_event_notification:
+ name: "Syslog Notification."
+ description: "Updated notification for syslog events subscription"
+ sites: ["Global/India", "Global/USA", "Global/China"]
+ events: ["AP Flap", "AP Reboot Crash"]
+
+- name: Deleting ITSM Integration Setting with given name from the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: deleted
+ config:
+ - itsm_setting:
+ instance_name: "ITSM test"
+
+- name: Deleting Webhook Events Subscription Notification with given name from the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: deleted
+ config:
+ - webhook_event_notification:
+ name: "Webhook Notification"
+
+- name: Deleting Email Events Subscription Notification with given name from the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: deleted
+ config:
+ - email_event_notification:
+ name: "Email Notification"
+
+- name: Deleting Syslog Events Subscription Notification with given name from the system.
+ cisco.dnac.events_and_notifications_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ state: deleted
+ config:
+ - syslog_event_notification:
+ name: "Syslog Notification"
+
+"""
+
+RETURN = r"""
+
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+)
+import re
+import time
+
+
+class Events(DnacBase):
+ """Class containing member attributes for inventory workflow manager module"""
+
+ def __init__(self, module):
+ super().__init__(module)
+ self.supported_states = ["merged", "deleted"]
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook.
+ Checks the configuration provided in the playbook against a predefined specification
+ to ensure it adheres to the expected structure and data types.
+ Parameters:
+ self: The instance of the class containing the 'config' attribute to be validated.
+ Returns:
+ The method returns an instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either 'success' or 'failed').
+ - self.validated_config: If successful, a validated version of the 'config' parameter.
+ Example:
+ To use this method, create an instance of the class and call 'validate_input' on it.
+ If the validation succeeds, 'self.status' will be 'success' and 'self.validated_config'
+ will contain the validated configuration. If it fails, 'self.status' will be 'failed', and
+ 'self.msg' will describe the validation issues.
+ """
+
+ temp_spec = {
+ 'webhook_destination': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'url': {'type': 'str'},
+ 'method': {'type': 'str', 'default': 'POST'},
+ 'trust_cert': {'type': 'bool', 'default': False},
+ 'headers': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'value': {'type': 'str'},
+ 'default_value': {'type': 'str'},
+ 'encrypt': {'type': 'bool'},
+ },
+ 'is_proxy_route': {'type': 'bool', 'default': True}
+ },
+ 'email_destination': {
+ 'type': 'dict',
+ 'primary_smtp_config': {
+ 'type': 'dict',
+ 'server_address': {'type': 'str'},
+ 'smtp_type': {'type': 'str', 'default': 'DEFAULT'},
+ 'port': {'type': 'str', 'default': '25'},
+ 'username': {'type': 'str'},
+ 'password': {'type': 'str'},
+ },
+ 'secondary_smtp_config': {
+ 'type': 'dict',
+ 'server_address': {'type': 'str'},
+ 'smtp_type': {'type': 'str'},
+ 'port': {'type': 'str'},
+ 'username': {'type': 'str'},
+ 'password': {'type': 'str'},
+ },
+ 'from_email': {'type': 'str'},
+ 'to_email': {'type': 'str'},
+ 'subject': {'type': 'str'},
+ },
+ 'syslog_destination': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'server_address': {'type': 'str'},
+ 'protocol': {'type': 'str'},
+ 'port': {'type': 'int', 'default': 514},
+ },
+ 'snmp_destination': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'server_address': {'type': 'str'},
+ 'port': {'type': 'str'},
+ 'snmp_version': {'type': 'str'},
+ 'community': {'type': 'str'},
+ 'username': {'type': 'str'},
+ 'mode': {'type': 'str'},
+ 'auth_type': {'type': 'str'},
+ 'auth_password': {'type': 'str'},
+ 'privacy_type': {'type': 'str'},
+ 'privacy_password': {'type': 'str'},
+ },
+ 'itsm_setting': {
+ 'type': 'dict',
+ 'instance_name': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'connection_settings': {
+ 'type': 'dict',
+ 'url': {'type': 'str'},
+ 'username': {'type': 'str'},
+ 'password': {'type': 'str'},
+ },
+ },
+ 'webhook_event_notification': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'version': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'sites': {'type': 'list', 'elements': 'str'},
+ 'events': {'type': 'list', 'elements': 'str'},
+ 'destination': {'type': 'str'},
+ 'domain': {'type': 'str'},
+ 'subdomains': {'type': 'list', 'elements': 'str'},
+ 'event_types': {'type': 'list', 'elements': 'str'},
+ 'event_categories': {'type': 'list', 'elements': 'str'},
+ 'event_severities': {'type': 'list', 'elements': 'str'},
+ 'event_sources': {'type': 'list', 'elements': 'str'},
+ },
+ 'email_event_notification': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'version': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'sites': {'type': 'list', 'elements': 'str'},
+ 'events': {'type': 'list', 'elements': 'str'},
+ 'sender_email': {'type': 'str'},
+ 'recipient_emails': {'type': 'list', 'elements': 'str'},
+ 'subject': {'type': 'str'},
+ 'instance': {'type': 'str'},
+ 'instance_description': {'type': 'str'},
+ 'domain': {'type': 'str'},
+ 'subdomains': {'type': 'list', 'elements': 'str'},
+ 'event_types': {'type': 'list', 'elements': 'str'},
+ 'event_categories': {'type': 'list', 'elements': 'str'},
+ 'event_severities': {'type': 'list', 'elements': 'str'},
+ 'event_sources': {'type': 'list', 'elements': 'str'},
+ },
+ 'syslog_event_notification': {
+ 'type': 'dict',
+ 'name': {'type': 'str'},
+ 'version': {'type': 'str'},
+ 'description': {'type': 'str'},
+ 'sites': {'type': 'list', 'elements': 'str'},
+ 'events': {'type': 'list', 'elements': 'str'},
+ 'destination': {'type': 'str'},
+ 'domain': {'type': 'str'},
+ 'subdomains': {'type': 'list', 'elements': 'str'},
+ 'event_types': {'type': 'list', 'elements': 'str'},
+ 'event_categories': {'type': 'list', 'elements': 'str'},
+ 'event_severities': {'type': 'list', 'elements': 'str'},
+ 'event_sources': {'type': 'list', 'elements': 'str'},
+ },
+ }
+
+ # Validate the playbook configuration against the expected specification
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
+ )
+
+ if invalid_params:
+ self.msg = "The playbook contains invalid parameters: {0}".format(invalid_params)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.msg = "Successfully validated playbook configuration parameters using 'validate_input': {0}".format(str(valid_temp))
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def get_have(self, config):
+ """
+ Retrieve and check destinations information present in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing the configuration details of destinations to be checked.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center having destination details.
+ - syslog_destinations (list): A list of syslog destinations existing in Cisco Catalyst Center.
+ - snmp_destinations (list): A list of SNMP destinations existing in Cisco Catalyst Center.
+ - webhook_destinations (list): A list of webhook destinations existing in Cisco Catalyst Center.
+ - email_destination (list): A list of email destinations existing in Cisco Catalyst Center.
+ - itsm_setting (list): A list of ITSM settings existing in Cisco Catalyst Center.
+ - syslog/webhook/email_subscription_notifications (list): Existing event subscription notifications of each type.
+ Description:
+ This function checks the specified destinations in the playbook against the destinations existing in Cisco Catalyst Center.
+ It retrieves information about various types of destinations (syslog, SNMP, webhook, email, ITSM) and returns a dictionary
+ with keys representing each type of destination and corresponding lists of existing destinations in Cisco Catalyst Center.
+ """
+
+ have = {}
+
+ if config.get('syslog_destination'):
+ syslog_destination = self.get_syslog_destination_in_ccc()
+ if syslog_destination:
+ have['syslog_destinations'] = syslog_destination
+
+ if config.get('snmp_destination'):
+ snmp_destinations = self.get_snmp_destination_in_ccc()
+ if snmp_destinations:
+ have['snmp_destinations'] = snmp_destinations
+
+ if config.get('webhook_destination'):
+ webhook_destinations = self.get_webhook_destination_in_ccc()
+ if webhook_destinations:
+ have['webhook_destinations'] = webhook_destinations
+
+ if config.get('email_destination'):
+ email_destination = self.get_email_destination_in_ccc()
+ if email_destination:
+ have['email_destination'] = email_destination
+
+ if config.get('itsm_setting'):
+ itsm_setting = self.get_itsm_settings_in_ccc()
+ if itsm_setting:
+ have['itsm_setting'] = itsm_setting
+
+ if config.get('syslog_event_notification'):
+ syslog_subscription_notifications = self.get_syslog_notification_details()
+ if syslog_subscription_notifications:
+ have['syslog_subscription_notifications'] = syslog_subscription_notifications
+
+ if config.get('webhook_event_notification'):
+ webhook_subscription_notifications = self.get_webhook_notification_details()
+ if webhook_subscription_notifications:
+ have['webhook_subscription_notifications'] = webhook_subscription_notifications
+
+ if config.get('email_event_notification'):
+ email_subscription_notifications = self.get_email_notification_details()
+ if email_subscription_notifications:
+ have['email_subscription_notifications'] = email_subscription_notifications
+
+ self.have = have
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+
+ return self
+
+ def get_want(self, config):
+ """
+ Retrieve the desired configuration parameters specified in the playbook.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing the desired configuration details specified in the playbook.
+ Returns:
+ self (object): An instance of the class with the desired configuration parameters collected from the playbook.
+ Description:
+ This function retrieves the desired configuration parameters specified in the playbook and organizes them into a dictionary.
+ It collects details related to various types of destinations (syslog, SNMP, webhook, email, ITSM) based on the playbook configuration
+ and stores them in the 'want' attribute of the class instance.
+ """
+
+ want = {}
+
+ if config.get('syslog_destination'):
+ want['syslog_details'] = config.get('syslog_destination')
+
+ if config.get('snmp_destination'):
+ want['snmp_details'] = config.get('snmp_destination')
+
+ if config.get('webhook_destination'):
+ want['webhook_details'] = config.get('webhook_destination')
+
+ if config.get('email_destination'):
+ want['email_details'] = config.get('email_destination')
+
+ if config.get('itsm_setting'):
+ want['itsm_details'] = config.get('itsm_setting')
+
+ if config.get('webhook_event_notification'):
+ want['webhook_event_notification'] = config.get('webhook_event_notification')
+
+ if config.get('email_event_notification'):
+ want['email_event_notification'] = config.get('email_event_notification')
+
+ if config.get('syslog_event_notification'):
+ want['syslog_event_notification'] = config.get('syslog_event_notification')
+
+ self.want = want
+ self.msg = "Successfully collected all parameters from the playbook "
+ self.status = "success"
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+ def get_syslog_destination_in_ccc(self):
+ """
+ Retrieve the details of syslog destinations present in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ str: A string containing the details of syslog destinations present in Cisco Catalyst Center.
+ Description:
+ This function queries Cisco Catalyst Center to retrieve the details of syslog destinations.
+ The response contains the status message indicating the syslog destinations present in Cisco Catalyst Center.
+ If no syslog destinations are found, it returns an empty string.
+ In case of any errors during the API call, an exception is raised with an error message.
+ """
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_syslog_destination'
+ )
+ self.log("Received API response from 'get_syslog_destination': {0}".format(str(response)), "DEBUG")
+ response = response.get('statusMessage')
+
+ if not response:
+ self.log("There is no Syslog destination present in Cisco Catalyst Center", "INFO")
+ return response
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while getting the details of Syslog destination present in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def syslog_dest_needs_update(self, syslog_details, syslog_details_in_ccc):
+ """
+ Check if the syslog destination needs an update based on a comparison between desired and current details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_details (dict): A dictionary containing the desired syslog destination details.
+ syslog_details_in_ccc (dict): A dictionary containing the current syslog destination details in Cisco Catalyst Center.
+ Returns:
+ bool: A boolean indicating whether an update is needed for the syslog destination.
+ Description:
+ This function compares the desired syslog destination details with the current details retrieved from Cisco Catalyst Center.
+ It iterates through each key-value pair in the desired syslog details and checks if the corresponding value in the current
+ details matches or if the desired value is empty. If any discrepancy is found, indicating a difference between desired and
+ current details, the function sets the 'update_needed' flag to True, indicating that an update is needed.
+ If no discrepancies are found, the function returns False, indicating that no update is needed.
+ """
+
+ update_needed = False
+ for key, value in syslog_details.items():
+ if key == "server_address":
+ if syslog_details_in_ccc["host"] != value:
+ update_needed = True
+ break
+ elif str(syslog_details_in_ccc[key]) == value or value == "":
+ continue
+ else:
+ update_needed = True
+ break
+
+ return update_needed
+
+ def add_syslog_destination(self, syslog_details):
+ """
+ Add a syslog destination to Cisco Catalyst Center based on the provided details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_details (dict): A dictionary containing the details of the syslog destination to be added.
+ Returns:
+ self (object): An instance of the class with the result of the operation.
+ - If successful, 'status' is set to 'success', 'result['changed']' is True, and 'msg' contains a success message.
+ - If unsuccessful, 'status' is set to 'failed', 'result['changed']' is False, and 'msg' contains an error message.
+ Description:
+ This function adds a syslog destination to Cisco Catalyst Center using the provided details.
+ It validates the input parameters, including the protocol, and constructs the necessary parameters for the API call.
+ If the operation is successful, the function sets the appropriate status, logs a success message, and returns the result.
+ If the operation fails, the function sets the status to 'failed', logs an error message, and returns the result with
+ details of the failure.
+ """
+
+ try:
+ name = syslog_details.get('name')
+ description = syslog_details.get('description')
+ server_address = syslog_details.get('server_address')
+ protocol = syslog_details.get('protocol')
+
+ if not protocol:
+ self.status = "failed"
+ self.msg = "Protocol is needed while configuring the syslog destionation with name '{0}' in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "ERROR")
+ return self
+
+ protocol = protocol.upper()
+ if protocol not in ["TCP", "UDP"]:
+ self.status = "failed"
+ self.msg = """Invalid protocol name '{0}' for creating syslog destination in Cisco Catalyst Center.
+ Select one of the following protocol 'TCP/UDP'.""".format(protocol)
+ self.log(self.msg, "ERROR")
+ return self
+
+ port = syslog_details.get('port', 514)
+ add_syslog_params = {
+ 'name': name,
+ 'description': description,
+ 'host': server_address,
+ 'protocol': protocol,
+ 'port': int(port)
+ }
+
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_syslog_destination',
+ op_modifies=True,
+ params=add_syslog_params
+ )
+ self.log("Received API response from 'create_syslog_destination': {0}".format(str(response)), "DEBUG")
+ status = response.get('apiStatus')
+
+ if status == 'SUCCESS':
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Syslog Destination with name '{0}' added successfully in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ try:
+ failure_msg = response.get('errorMessage').get('errors')
+ except Exception as e:
+ failure_msg = "Unable to Add syslog destination with name '{0}' in Cisco Catalyst Center".format(name)
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while adding the Syslog destination with the name '{0}' in Cisco Catalyst Center: {1}".format(name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def update_syslog_destination(self, syslog_details, syslog_details_in_ccc):
+ """
+ Update an existing syslog destination in Cisco Catalyst Center with the provided details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_details (dict): A dictionary containing the desired syslog destination details to update.
+ syslog_details_in_ccc (dict): A dictionary containing the current syslog destination details in Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of the class with the result of the operation.
+ - If successful, 'status' is set to 'success', 'result['changed']' is True, and 'msg' contains a success message.
+ - If unsuccessful, 'status' is set to 'failed', 'result['changed']' is False, and 'msg' contains an error message.
+ Description:
+ This function updates an existing syslog destination in Cisco Catalyst Center with the provided details.
+ It constructs the parameters required for the API call by merging the desired syslog details with the current details.
+ If the operation is successful, the function sets the appropriate status, logs a success message, and returns the result.
+ If the operation fails, the function sets the status to 'failed', logs an error message, returns the result with failure details.
+ """
+
+ try:
+ update_syslog_params = {}
+ update_syslog_params['name'] = syslog_details.get('name') or syslog_details_in_ccc.get('name')
+ update_syslog_params['description'] = syslog_details.get('description') or syslog_details_in_ccc.get('description')
+ update_syslog_params['host'] = syslog_details.get('server_address') or syslog_details_in_ccc.get('host')
+ update_syslog_params['protocol'] = syslog_details.get('protocol') or syslog_details_in_ccc.get('protocol')
+ update_syslog_params['port'] = int(syslog_details.get('port') or syslog_details_in_ccc.get('port'))
+ update_syslog_params['configId'] = syslog_details_in_ccc.get('configId')
+ name = update_syslog_params.get('name')
+
+ if update_syslog_params.get('protocol').upper() not in ["TCP", "UDP"]:
+ self.status = "failed"
+ self.msg = """Invalid protocol name '{0}' for updating syslog destination in Cisco Catalyst Center.
+ Select one of the following protocol 'TCP/UDP'.""".format(update_syslog_params.get('protocol'))
+ self.log(self.msg, "ERROR")
+ return self
+
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_syslog_destination',
+ op_modifies=True,
+ params=update_syslog_params
+ )
+ self.log("Received API response from 'update_syslog_destination': {0}".format(str(response)), "DEBUG")
+ status = response.get('apiStatus')
+
+ if status == 'SUCCESS':
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Syslog Destination with name '{0}' updated successfully in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ try:
+ failure_msg = response.get('errorMessage').get('errors')
+ except Exception as e:
+ failure_msg = "Unable to update syslog destination with name '{0}' in Cisco Catalyst Center".format(name)
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while updating the Syslog destination with the name '{0}' in Cisco Catalyst Center: {1}".format(name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_snmp_destination_in_ccc(self):
+ """
+ Retrieve the details of SNMP destinations present in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict: A dictionary containing the details of SNMP destinations present in Cisco Catalyst Center.
+ Description:
+ This function queries Cisco Catalyst Center to retrieve the details of SNMP destinations.
+ It utilizes the 'event_management' API endpoint with the 'get_snmp_destination' function.
+ The response contains information about the SNMP destinations present in Cisco Catalyst Center.
+ If no SNMP destinations are found, it returns an empty dictionary.
+ """
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_snmp_destination'
+ )
+ self.log("Received API response from 'get_snmp_destination': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no SNMP destination present in Cisco Catalyst Center", "INFO")
+ return response
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while getting the details of SNMP destination present in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def collect_snmp_playbook_params(self, snmp_details):
+ """
+ Collect the SNMP playbook parameters based on the provided SNMP details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ snmp_details (dict): A dictionary containing the SNMP destination details.
+ Returns:
+ dict: A dictionary containing the SNMP playbook parameters.
+ Description:
+ This function constructs the SNMP playbook parameters based on the provided SNMP destination details.
+ It extracts relevant information such as name, description etc.
+ The constructed playbook parameters are returned for further use in the playbook.
+ """
+
+ playbook_params = {
+ 'name': snmp_details.get('name'),
+ 'description': snmp_details.get('description'),
+ 'ipAddress': snmp_details.get('server_address'),
+ 'port': snmp_details.get('port'),
+ 'snmpVersion': snmp_details.get('snmp_version')
+ }
+ server_address = snmp_details.get('server_address')
+ snmp_version = playbook_params.get("snmpVersion")
+
+ if snmp_version and snmp_version not in ["V2C", "V3"]:
+ self.status = "failed"
+ self.msg = "Invalid SNMP version '{0}' given in the playbook for configuring SNMP destination".format(snmp_version)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ if server_address and not self.is_valid_server_address(server_address):
+ self.status = "failed"
+ self.msg = "Invalid server address '{0}' given in the playbook for configuring SNMP destination".format(server_address)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ if snmp_version == "V2C":
+ playbook_params['community'] = snmp_details.get('community')
+ elif snmp_version == "V3":
+ playbook_params['userName'] = snmp_details.get('username')
+ playbook_params['snmpMode'] = snmp_details.get('mode')
+ mode = playbook_params['snmpMode']
+ auth_type = snmp_details.get('auth_type')
+
+ if not mode or (mode not in ["AUTH_PRIVACY", "AUTH_NO_PRIVACY", "NO_AUTH_NO_PRIVACY"]):
+ self.status = "failed"
+ self.msg = """Invalid SNMP Mode '{0}' given in the playbook for configuring SNMP destination. Please select one of
+ the mode - AUTH_PRIVACY, AUTH_NO_PRIVACY, NO_AUTH_NO_PRIVACY in the playbook""".format(mode)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ if auth_type and auth_type not in ["SHA", "MD5"]:
+ self.status = "failed"
+ self.msg = """Invalid SNMP Authentication Type '{0}' given in the playbook for configuring SNMP destination. Please
+ select either SHA or MD5 as authentication type in the playbook""".format(auth_type)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ if playbook_params.get("snmpMode") == "AUTH_PRIVACY":
+ playbook_params['snmpAuthType'] = auth_type
+ playbook_params['authPassword'] = snmp_details.get('auth_password')
+ playbook_params['snmpPrivacyType'] = snmp_details.get('privacy_type', 'AES128')
+ playbook_params['privacyPassword'] = snmp_details.get('privacy_password')
+ elif playbook_params.get("snmpMode") == "AUTH_NO_PRIVACY":
+ playbook_params['snmpAuthType'] = auth_type
+ playbook_params['authPassword'] = snmp_details.get('auth_password')
+
+ return playbook_params
+
+ def check_snmp_required_parameters(self, snmp_params):
+ """
+ Check if all the required parameters for adding an SNMP destination in Cisco Catalyst Center are present.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ snmp_params (dict): A dictionary containing the SNMP destination parameters.
+ Returns:
+ self (object): An instance of the class with the result of the parameter check.
+ - If all required parameters are present, 'status' is set to 'success', and 'msg' contains a success message.
+ - If any required parameter is missing, 'status' is set to 'failed', 'msg' contains an error message,
+ and the missing parameters are logged.
+ Description:
+ This function validates whether all the required parameters for adding an SNMP destination in Cisco Catalyst Center
+ are present in the provided SNMP destination parameters. If any required parameter is missing, it logs an error
+ message with the missing parameters and sets the status to 'failed'.
+ If all required parameters are present, it logs a success message and sets the status to 'success'.
+ """
+
+ missing_params_list = []
+ required_parameter_list = ["name", "description", "ipAddress", "port", "snmpVersion"]
+
+ if snmp_params['snmpVersion'] == "V2C":
+ required_parameter_list.append("community")
+ else:
+ required_parameter_list.extend(["userName", "snmpMode"])
+ if snmp_params['snmpMode'] == "AUTH_PRIVACY":
+ required_parameter_list.extend(["snmpAuthType", "authPassword", "privacyPassword"])
+ elif snmp_params['snmpMode'] == "AUTH_NO_PRIVACY":
+ required_parameter_list.extend(["snmpAuthType", "authPassword"])
+
+ for item in required_parameter_list:
+ if snmp_params[item] is None:
+ missing_params_list.append(item)
+
+ if not missing_params_list:
+ self.status = "success"
+ self.msg = "All the required parameters for adding SNMP Destination in Cisco Catalyst Center is present."
+ self.log(self.msg, "INFO")
+ return self
+
+ self.status = "failed"
+ self.msg = "Required parameter '{0}' is missing for adding SNMP Destination in Cisco Catalyst Center".format(str(missing_params_list))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def add_snmp_destination(self, snmp_params):
+ """
+ Add the SNMP destination in Cisco Catalyst Center using the provided SNMP parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ snmp_params (dict): A dictionary containing the SNMP destination parameters.
+
+ Returns:
+ self (object): An instance of the class with the result of the SNMP destination addition.
+ - If the SNMP destination is added successfully, 'status' is set to 'success',
+ 'changed' is set to True, 'msg' contains a success message, and 'response' contains the API response.
+ - If the addition fails, 'status' is set to 'failed', 'msg' contains an error message,
+ and 'response' contains the API error response.
+ Description:
+ This function adds an SNMP destination in Cisco Catalyst Center using the provided SNMP parameters.
+ Upon receiving the API response, it checks the status to determine the success or failure of the operation.
+ If the addition is successful, it sets the appropriate attributes and logs a success message.
+ If the addition fails, it logs the error message from the API response.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_snmp_destination',
+ op_modifies=True,
+ params=snmp_params
+ )
+ self.log("Received API response from 'create_snmp_destination': {0}".format(str(response)), "DEBUG")
+ status = response.get('apiStatus')
+
+ if status == 'SUCCESS':
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "SNMP Destination with name '{0}' added successfully in Cisco Catalyst Center".format(snmp_params.get('name'))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = response.get('errorMessage')
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to Add SNMP destination with name '{0}' in Cisco Catalyst Center".format(snmp_params.get('name'))
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ return self
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Error while adding the SNMP destination with the name '{0}' in Cisco Catalyst Center:
+ {1}""".format(snmp_params.get('name'), str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def snmp_dest_needs_update(self, snmp_params, snmp_dest_detail_in_ccc):
+ """
+ Determine if an update is needed for the SNMP destination in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ snmp_params (dict): A dictionary containing the updated SNMP destination parameters.
+ snmp_dest_detail_in_ccc (dict): A dictionary containing the details of existing SNMP destination in Cisco Catalyst Center.
+ Returns:
+ bool: True if the SNMP destination in Cisco Catalyst Center needs to be updated, False otherwise.
+ Description:
+ This function compares the provided SNMP destination parameters with the existing SNMP destination details
+ in Cisco Catalyst Center to determine if an update is needed.
+ If any value is different or empty in the updated parameters compared to the existing details,
+ it sets 'update_needed' to True, indicating that an update is needed.
+ Otherwise, if all values match or are empty, it sets 'update_needed' to False.
+ """
+
+ update_needed = False
+ for key, value in snmp_params.items():
+ if str(snmp_dest_detail_in_ccc[key]) == str(value) or value == "":
+ continue
+ update_needed = True
+ break
+
+ return update_needed
+
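+ # Note on the comparison above: both sides are cast to str, so a playbook port of "162"
+ # matches an existing integer port 162, and an empty string coming from the playbook is
+ # treated as "keep the existing value" and never triggers an update on its own.
+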
+ def update_snmp_destination(self, snmp_params, snmp_dest_detail_in_ccc):
+ """
+ Update an existing SNMP destination in Cisco Catalyst Center with the provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ snmp_params (dict): A dictionary containing the updated parameters for the SNMP destination.
+ snmp_dest_detail_in_ccc (dict): A dictionary containing the details of the SNMP destination
+ currently configured in Cisco Catalyst Center.
+ Returns:
+ self (object): An object representing the status of the operation, including whether it was successful or failed,
+ any error messages encountered during the operation, and whether changes were made to the system.
+ Description:
+ This function attempts to update an existing SNMP destination in Cisco Catalyst Center with the provided parameters.
+ It compares the parameters specified in the playbook (`snmp_params`) with the current configuration of the SNMP destination
+ in Cisco Catalyst Center (`snmp_dest_detail_in_ccc`). If any parameter differs between the playbook and the current
+ configuration, the function sends a request to update the SNMP destination with the new parameters.
+ If the operation is successful, it sets the status to "success" and logs a success message.
+ If the operation fails, it sets the status to "failed" and logs an error message along with any error details
+ received from the API response.
+ """
+ try:
+ update_snmp_params = {}
+ update_snmp_params['name'] = snmp_params.get('name') or snmp_dest_detail_in_ccc.get('name')
+ update_snmp_params['description'] = snmp_params.get('description') or snmp_dest_detail_in_ccc.get('description')
+ update_snmp_params['ipAddress'] = snmp_params.get('ipAddress') or snmp_dest_detail_in_ccc.get('ipAddress')
+ update_snmp_params['port'] = snmp_params.get('port') or snmp_dest_detail_in_ccc.get('port')
+ update_snmp_params['snmpVersion'] = snmp_params.get('snmpVersion') or snmp_dest_detail_in_ccc.get('snmpVersion')
+
+ if update_snmp_params.get('port'):
+ try:
+ port = int(snmp_params.get('port'))
+ if port not in range(1, 65536):
+ self.status = "failed"
+ self.msg = "Invalid Notification trap port '{0}' given in playbook. Select port from the number range(1, 65535)".format(port)
+ self.log(self.msg, "ERROR")
+ return self
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Invalid datatype for the Notification trap port '{0}' given in playbook. Select port with correct datatype from the
+ number range(1, 65535).""".format(port)
+ self.log(self.msg, "ERROR")
+ return self
+
+ if update_snmp_params['snmpVersion'] == "V2C":
+ update_snmp_params['community'] = snmp_params.get('community') or snmp_dest_detail_in_ccc.get('community')
+ else:
+ update_snmp_params['userName'] = snmp_params.get('userName') or snmp_dest_detail_in_ccc.get('userName')
+ update_snmp_params['snmpMode'] = snmp_params.get('snmpMode') or snmp_dest_detail_in_ccc.get('snmpMode')
+ if update_snmp_params['snmpMode'] == "AUTH_PRIVACY":
+ update_snmp_params['snmpAuthType'] = snmp_params.get('snmpAuthType') or snmp_dest_detail_in_ccc.get('snmpAuthType')
+ update_snmp_params['authPassword'] = snmp_params.get('authPassword') or snmp_dest_detail_in_ccc.get('authPassword')
+ update_snmp_params['snmpPrivacyType'] = snmp_params.get('snmpPrivacyType', 'AES128')
+ update_snmp_params['privacyPassword'] = snmp_params.get('privacyPassword') or snmp_dest_detail_in_ccc.get('privacyPassword')
+ elif update_snmp_params['snmpMode'] == "AUTH_NO_PRIVACY":
+ update_snmp_params['snmpAuthType'] = snmp_params.get('snmpAuthType') or snmp_dest_detail_in_ccc.get('snmpAuthType')
+ update_snmp_params['authPassword'] = snmp_params.get('authPassword') or snmp_dest_detail_in_ccc.get('authPassword')
+
+ update_snmp_params['configId'] = snmp_dest_detail_in_ccc.get('configId')
+
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_snmp_destination',
+ op_modifies=True,
+ params=update_snmp_params
+ )
+ self.log("Received API response from 'update_snmp_destination': {0}".format(str(response)), "DEBUG")
+ status = response.get('apiStatus')
+
+ if status == 'SUCCESS':
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "SNMP Destination with name '{0}' updated successfully in Cisco Catalyst Center".format(update_snmp_params.get('name'))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = response.get('errorMessage')
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to update SNMP destination with name '{0}' in Cisco Catalyst Center".format(update_snmp_params.get('name'))
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while updating the SNMP destination with name '{0}' in Cisco Catalyst Center: {1}".format(update_snmp_params.get('name'), str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_webhook_destination_in_ccc(self):
+ """
+ Retrieve details of Rest Webhook destinations present in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict: A dictionary containing details of Rest Webhook destinations present in Cisco Catalyst Center,
+ or None if no Rest Webhook destinations are found.
+ Description:
+ This function retrieves the details of Rest Webhook destinations present in Cisco Catalyst Center
+ using the 'event_management' API endpoint with the 'get_webhook_destination' function.
+ If an error occurs during the retrieval process, it logs the error message and raises an Exception.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_webhook_destination'
+ )
+ self.log("Received API response from 'get_webhook_destination': {0}".format(str(response)), "DEBUG")
+ response = response.get('statusMessage')
+
+ if not response:
+ self.log("There is no Rest Webhook destination present in Cisco Catalyst Center", "INFO")
+ return response
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while getting the details of Webhook destination(s) present in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def collect_webhook_playbook_params(self, webhook_details):
+ """
+ Collect parameters for configuring a Rest Webhook destination from the playbook.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_details (dict): A dictionary containing the details of the Rest Webhook destination to be configured.
+ Returns:
+ dict: A dictionary containing the collected parameters for configuring the Rest Webhook destination.
+ Description:
+ This function collects parameters for configuring a Rest Webhook destination from the playbook.
+ """
+
+ playbook_params = {
+ 'name': webhook_details.get('name'),
+ 'description': webhook_details.get('description'),
+ 'url': webhook_details.get('url'),
+ 'method': webhook_details.get('method', 'POST').upper(),
+ 'trustCert': webhook_details.get('trust_cert', False),
+ 'isProxyRoute': webhook_details.get('is_proxy_route', True)
+ }
+
+ if webhook_details.get('headers'):
+ custom_header = webhook_details['headers']
+ playbook_params['headers'] = []
+ for header in custom_header:
+ playbook_params['headers'].append(header)
+
+ return playbook_params
+
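+ # Illustrative sketch only (URL and header values are assumptions): a playbook entry such as
+ # {"name": "wh-1", "url": "https://example.com/hook", "method": "post", "trust_cert": False,
+ #  "headers": [{"name": "Authorization", "value": "Token abc"}]}
+ # would be collected as
+ # {"name": "wh-1", "description": None, "url": "https://example.com/hook", "method": "POST",
+ #  "trustCert": False, "isProxyRoute": True, "headers": [{"name": "Authorization", "value": "Token abc"}]}.
+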
+ def add_webhook_destination(self, webhook_params):
+ """
+ Add or configure REST webhook destination in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_params (dict): A dictionary containing the parameters for configuring the REST webhook destination.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function attempts to add a REST webhook destination in Cisco Catalyst Center using the provided parameters.
+ It sends a request to create a webhook destination with the specified parameters.
+ If the operation is successful, it sets the status to "success" and logs a success message.
+ If the operation fails, it sets the status to "failed" and logs an error message along with any error details
+ received from the API response.
+ """
+
+ try:
+ self.log("Requested payload for creating webhook destination - {0}".format(str(webhook_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_webhook_destination',
+ op_modifies=True,
+ params=webhook_params
+ )
+ self.log("Received API response from 'create_webhook_destination': {0}".format(str(response)), "DEBUG")
+ status = response.get('apiStatus')
+
+ if status == 'SUCCESS':
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Webhook Destination with name '{0}' added successfully in Cisco Catalyst Center".format(webhook_params.get('name'))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = response.get('errorMessage')
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to Add Webhook destination with name '{0}' in Cisco Catalyst Center".format(webhook_params.get('name'))
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while adding the Webhook destination with the name '{0}' in Cisco Catalyst Center: {1}".format(webhook_params.get('name'), str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def webhook_dest_needs_update(self, webhook_params, webhook_dest_detail_in_ccc):
+ """
+ Check if updates are needed for a webhook destination in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_params (dict): A dictionary containing the updated parameters for the webhook destination.
+ webhook_dest_detail_in_ccc (dict): A dictionary containing the details of the webhook destination
+ currently configured in Cisco Catalyst Center.
+ Returns:
+ bool: A boolean value indicating whether updates are needed for the webhook destination.
+ Description:
+ This function compares the parameters specified in the playbook (`webhook_params`) with the current configuration
+ of the webhook destination in Cisco Catalyst Center (`webhook_dest_detail_in_ccc`). If any parameter differs between
+ the playbook and the current configuration, it returns True, indicating that updates are needed.
+ If all parameters match or are None, it returns False, indicating that no updates are needed.
+ """
+
+ update_needed = False
+
+ for key, value in webhook_params.items():
+ if isinstance(value, list):
+ update_needed = self.webhook_dest_needs_update(value[0], webhook_dest_detail_in_ccc[key][0])
+ if update_needed:
+ break
+ elif webhook_dest_detail_in_ccc[key] == value or value is None:
+ continue
+ else:
+ update_needed = True
+ break
+
+ return update_needed
+
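+ # Note on the comparison above: list-valued entries (for example 'headers') are compared by
+ # recursing into their first element only, and None values coming from the playbook never
+ # force an update; any other mismatch marks the webhook destination as needing an update.
+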
+ def update_webhook_destination(self, webhook_params, webhook_dest_detail_in_ccc):
+ """
+ Update a webhook destination in Cisco Catalyst Center with the provided details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_params (dict): A dictionary containing the details of the webhook destination to be updated.
+ webhook_dest_detail_in_ccc (dict): A dictionary containing the details of the webhook destination in Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function updates a webhook destination in Cisco Catalyst Center with the provided details.
+ It constructs the parameters needed for the update based on the provided and existing details.
+ Then, it sends an API request to update the webhook destination with the constructed parameters.
+ If the update is successful, it sets the status to "success" and logs the success message.
+ If the update fails, it sets the status to "failed" and logs the failure message.
+ """
+
+ try:
+ update_webhook_params = {}
+ update_webhook_params['name'] = webhook_params.get('name') or webhook_dest_detail_in_ccc.get('name')
+ update_webhook_params['description'] = webhook_params.get('description') or webhook_dest_detail_in_ccc.get('description')
+ update_webhook_params['url'] = webhook_params.get('url') or webhook_dest_detail_in_ccc.get('url')
+ update_webhook_params['method'] = webhook_params.get('method') or webhook_dest_detail_in_ccc.get('method')
+ update_webhook_params['trustCert'] = webhook_params.get('trustCert') or webhook_dest_detail_in_ccc.get('trustCert')
+ update_webhook_params['isProxyRoute'] = webhook_params.get('isProxyRoute') or webhook_dest_detail_in_ccc.get('isProxyRoute')
+ update_webhook_params['headers'] = webhook_params.get('headers')
+
+ if not update_webhook_params['headers'] and webhook_dest_detail_in_ccc.get('headers'):
+ update_webhook_params['headers'] = webhook_dest_detail_in_ccc.get('headers')[0]
+
+ update_webhook_params['webhookId'] = webhook_dest_detail_in_ccc.get('webhookId')
+ name = update_webhook_params.get('name')
+
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_webhook_destination',
+ op_modifies=True,
+ params=update_webhook_params
+ )
+ self.log("Received API response from 'update_webhook_destination': {0}".format(str(response)), "DEBUG")
+ status = response.get('apiStatus')
+
+ if status == 'SUCCESS':
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Rest Webhook Destination with name '{0}' updated successfully in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to update rest webhook destination with name '{0}' in Cisco Catalyst Center".format(name)
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while updating the Rest Webhook destination with the name '{0}' in Cisco Catalyst Center: {1}".format(name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_email_destination_in_ccc(self):
+ """
+ Retrieve the details of the Email destination present in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict or None: A dictionary containing the details of the Email destination if it exists,
+ otherwise returns None.
+ Description:
+ This function retrieves the details of the Email destination present in Cisco Catalyst Center.
+ If the Email destination exists, it returns a dictionary containing its details.
+ If no Email destination is found, it returns None.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_email_destination'
+ )
+ self.log("Received API response from 'get_email_destination': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no Email destination present in Cisco Catalyst Center", "INFO")
+ return response
+
+ return response[0]
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while getting the details of Email destination present in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def collect_email_playbook_params(self, email_details):
+ """
+ Collects the parameters required for configuring Email destinations from the playbook.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_details (dict): A dictionary containing the Email destination details from the playbook.
+ Returns:
+ dict: A dictionary containing the collected parameters for configuring Email destinations.
+ Description:
+ This function collects the parameters required for configuring Email destinations from the playbook.
+ It extracts parameters such as 'fromEmail', 'toEmail', 'subject', and SMTP configurations
+ (primary and secondary) from the provided email_details dictionary.
+ """
+
+ playbook_params = {
+ 'fromEmail': email_details.get('from_email'),
+ 'toEmail': email_details.get('to_email'),
+ 'subject': email_details.get('subject')
+ }
+
+ if email_details.get('primary_smtp_config'):
+ primary_smtp_details = email_details.get('primary_smtp_config')
+ primary_smtp_type = primary_smtp_details.get('smtp_type', "DEFAULT")
+ if primary_smtp_type not in ["DEFAULT", "TLS", "SSL"]:
+ self.status = "failed"
+ self.msg = """Invalid Primary SMTP Type '{0}' given in the playbook for configuring primary smtp server.
+ Please select one of the type - DEFAULT, TLS, SSL in the playbook""".format(primary_smtp_type)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ playbook_params['primarySMTPConfig'] = {}
+ playbook_params['primarySMTPConfig']['hostName'] = primary_smtp_details.get('server_address')
+ playbook_params['primarySMTPConfig']['smtpType'] = primary_smtp_type
+
+ if primary_smtp_type == 'DEFAULT':
+ playbook_params['primarySMTPConfig']['port'] = "25"
+ else:
+ playbook_params['primarySMTPConfig']['port'] = primary_smtp_details.get('port')
+ playbook_params['primarySMTPConfig']['userName'] = primary_smtp_details.get('username', '')
+ playbook_params['primarySMTPConfig']['password'] = primary_smtp_details.get('password', '')
+
+ if email_details.get('secondary_smtp_config'):
+ secondary_smtp_details = email_details.get('secondary_smtp_config')
+ secondary_smtp_type = secondary_smtp_details.get('smtp_type', "DEFAULT")
+
+ if secondary_smtp_type and secondary_smtp_type not in ["DEFAULT", "TLS", "SSL"]:
+ self.status = "failed"
+ self.msg = """Invalid Secondary SMTP Type '{0}' given in the playbook for configuring secondary smtp server.
+ Please select one of the type - DEFAULT, TLS, SSL in the playbook""".format(secondary_smtp_type)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ playbook_params['secondarySMTPConfig'] = {}
+ playbook_params['secondarySMTPConfig']['hostName'] = secondary_smtp_details.get('server_address')
+ playbook_params['secondarySMTPConfig']['smtpType'] = secondary_smtp_type
+
+ if playbook_params['secondarySMTPConfig']['smtpType'] == 'DEFAULT':
+ playbook_params['secondarySMTPConfig']['port'] = "25"
+ else:
+ playbook_params['secondarySMTPConfig']['port'] = secondary_smtp_details.get('port')
+ playbook_params['secondarySMTPConfig']['userName'] = secondary_smtp_details.get('username', '')
+ playbook_params['secondarySMTPConfig']['password'] = secondary_smtp_details.get('password', '')
+
+ return playbook_params
+
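+ # Illustrative sketch only (server name and credentials are assumptions): a playbook
+ # primary_smtp_config of {"server_address": "smtp.example.com", "smtp_type": "TLS",
+ #  "port": "587", "username": "notify", "password": "***"} is collected as
+ # {"hostName": "smtp.example.com", "smtpType": "TLS", "port": "587", "userName": "notify",
+ #  "password": "***"} under 'primarySMTPConfig'; with smtp_type DEFAULT the port is always "25".
+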
+ def add_email_destination(self, email_params):
+ """
+ Adds an Email destination in Cisco Catalyst Center using the provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_params (dict): A dictionary containing the parameters required for adding an Email destination.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function adds an Email destination in Cisco Catalyst Center using the provided parameters.
+ After the API call, it checks the status of the execution using the 'get_status_api_for_events' API.
+ If the status indicates success, it sets the status of the operation as 'success' and logs an informational message.
+ If the status indicates failure, it sets the status of the operation as 'failed' and logs an error message.
+
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_email_destination',
+ op_modifies=True,
+ params=email_params
+ )
+ self.log("Received API response from 'create_email_destination': {0}".format(str(response)), "DEBUG")
+ time.sleep(2)
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Email Destination added successfully in Cisco Catalyst Center"
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to Add Email destination in Cisco Catalyst Center."
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while adding the Email destination in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def email_dest_needs_update(self, email_params, email_dest_detail_in_ccc):
+ """
+ Checks if an update is needed for an Email destination based on the provided parameters and details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_params (dict): A dictionary containing the parameters for the Email destination to be updated.
+ email_dest_detail_in_ccc (dict): A dictionary containing the current details of Email destination in Cisco Catalyst Center.
+ Returns:
+ bool: A boolean value indicating whether an update is needed for the Email destination.
+ Description:
+ This function compares the parameters of the Email destination specified in email_params
+ with the current details of the Email destination in Cisco Catalyst Center specified in email_dest_detail_in_ccc.
+ If any parameter value in email_params differs from the corresponding value in email_dest_detail_in_ccc,
+ it indicates that an update is needed and the function returns True; otherwise, it returns False, indicating that no update is needed.
+ """
+
+ update_needed = False
+
+ for key, value in email_params.items():
+ if not email_dest_detail_in_ccc.get(key):
+ update_needed = True
+ break
+
+ if isinstance(value, dict):
+ # Recurse into nested SMTP configuration dictionaries; the recursive result drives the update_needed flag
+ update_needed = self.email_dest_needs_update(value, email_dest_detail_in_ccc[key])
+ if update_needed:
+ break
+ elif email_dest_detail_in_ccc.get(key) != value and value != "":
+ update_needed = True
+ break
+
+ return update_needed
+
+ def update_email_destination(self, email_details, email_dest_detail_in_ccc):
+ """
+ Updates an Email destination based on the provided parameters and current details.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_details (dict): A dictionary containing the updated parameters for the Email destination.
+ email_dest_detail_in_ccc (dict): A dictionary containing the current details of the Email
+ destination in Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class representing the result of the update operation.
+ Description:
+ This function updates the Email destination in Cisco Catalyst Center based on the provided email_details
+ and the current details of the Email destination specified in email_dest_detail_in_ccc.
+ It constructs the update_email_params dictionary with the updated parameters.
+ If the update is successful, it sets the status to 'success' and logs a success message.
+ If the update fails, it sets the status to 'failed' and logs an error message.
+ Finally, it returns the result object containing the status and response message.
+ """
+
+ try:
+ update_email_params = {}
+ update_email_params['primarySMTPConfig'] = email_details.get('primarySMTPConfig') or email_dest_detail_in_ccc.get('primarySMTPConfig')
+ update_email_params['secondarySMTPConfig'] = email_details.get('secondarySMTPConfig') or email_dest_detail_in_ccc.get('secondarySMTPConfig', 'None')
+ update_email_params['fromEmail'] = email_details.get('fromEmail') or email_dest_detail_in_ccc.get('fromEmail')
+ update_email_params['toEmail'] = email_details.get('toEmail') or email_dest_detail_in_ccc.get('toEmail')
+ update_email_params['subject'] = email_details.get('subject') or email_dest_detail_in_ccc.get('subject')
+ update_email_params['emailConfigId'] = email_dest_detail_in_ccc.get('emailConfigId')
+
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_email_destination',
+ op_modifies=True,
+ params=update_email_params
+ )
+ self.log("Received API response from 'update_email_destination': {0}".format(str(response)), "DEBUG")
+ time.sleep(2)
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Email Destination updated successfully in Cisco Catalyst Center"
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to update Email destination in Cisco Catalyst Center."
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while updating the Email destination in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_itsm_settings_in_ccc(self):
+ """
+ Retrieves the ITSM Integration Settings present in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict: A dictionary containing the list of details of ITSM Integration Settings.
+ Description:
+ This function retrieves the ITSM Integration Settings present in Cisco Catalyst Center
+ by executing the 'get_all_itsm_integration_settings' API call.
+ It logs the API response and extracts the data.
+ If there are no ITSM Integration Settings, it logs an INFO message.
+ If an error occurs during the process, it logs an ERROR message and raises an Exception.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="itsm_integration",
+ function='get_all_itsm_integration_settings'
+ )
+ self.log("Received API response from 'get_all_itsm_integration_settings': {0}".format(str(response)), "DEBUG")
+ response = response.get('data')
+ if not response:
+ self.log("There is no ITSM Integration settings present in Cisco Catalyst Center", "INFO")
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while getting the details of ITSM Integration Settings present in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_itsm_settings_by_id(self, itsm_id):
+ """
+ Retrieves the ITSM Integration Settings with the specified ID from Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ itsm_id (str): The ID of the ITSM Integration Setting to retrieve.
+ Returns:
+ dict: A dictionary containing the ITSM Integration Setting information for the given itsm id.
+ Description:
+ This function retrieves the ITSM Integration Setting with the specified ID from Cisco Catalyst Center.
+ It logs the API response and returns the data if it exists.
+ If there is no ITSM Integration Setting with the given ID, it logs an INFO message.
+ If an error occurs during the process, it logs an ERROR message and raises an Exception.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="itsm_integration",
+ function='get_itsm_integration_setting_by_id',
+ op_modifies=True,
+ params={"instance_id": itsm_id}
+ )
+ self.log("Received API response from 'get_itsm_integration_setting_by_id': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no ITSM Integration settings with given ID present in Cisco Catalyst Center", "INFO")
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while getting the details of ITSM Integration Setting by id present in Cisco Catalyst Center: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def collect_itsm_playbook_params(self, itsm_details):
+ """
+ Constructs the ITSM playbook parameters from the provided ITSM details.
+ Args:
+ self (object): An instance of a class used for ITSM playbook operations.
+ itsm_details (dict): A dictionary containing details about an ITSM integration.
+ Returns:
+ dict: A dictionary structured as required by the ITSM playbook for interaction.
+ Description:
+ This function takes a dictionary containing ITSM integration details, and constructs
+ a set of parameters formatted to meet the requirements of an ITSM playbook. These parameters can then be used to
+ configure ITSM connections through playbook executions.
+ """
+
+ playbook_params = {
+ 'name': itsm_details.get('instance_name'),
+ 'description': itsm_details.get('description'),
+ 'dypName': 'ServiceNowConnection'
+ }
+ playbook_params['data'] = {}
+ connection_details = itsm_details.get('connection_settings')
+
+ if connection_details:
+ playbook_params['data']['ConnectionSettings'] = {}
+ playbook_params['data']['ConnectionSettings']['Url'] = connection_details.get('url')
+ playbook_params['data']['ConnectionSettings']['Auth_UserName'] = connection_details.get('username')
+ playbook_params['data']['ConnectionSettings']['Auth_Password'] = connection_details.get('password')
+
+ return playbook_params
+
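+ # Illustrative sketch only (URL and account are assumptions): connection_settings of
+ # {"url": "https://example.service-now.com", "username": "svc-ccc", "password": "***"}
+ # is nested under data.ConnectionSettings as the keys Url, Auth_UserName and Auth_Password,
+ # while dypName is always fixed to 'ServiceNowConnection'.
+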
+ def check_required_itsm_param(self, itsm_params, invalid_itsm_params):
+ """
+ Recursively checks for required ITSM parameters and collects any that are missing.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ itsm_params (dict): A dictionary of ITSM parameters that need validation.
+ invalid_itsm_params (list): A list to accumulate the keys of missing parameters.
+ Returns:
+ list: A list containing the keys of parameters that are found to be missing or None.
+ Description:
+ This method iteratively and recursively examines a dictionary of ITSM parameters
+ to ensure that all necessary parameters except 'description' are present and not None.
+ If a parameter is found to be missing or explicitly set to None, its key is added to the
+ 'invalid_itsm_params' list. This function is particularly useful for validating nested
+ parameter structures commonly found in configurations for ITSM systems.
+ """
+
+ for key, value in itsm_params.items():
+ if isinstance(value, dict):
+ self.check_required_itsm_param(value, invalid_itsm_params)
+ elif key == "description":
+ continue
+ elif itsm_params.get(key) is None:
+ invalid_itsm_params.append(key)
+
+ return invalid_itsm_params
+
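+ # Example of the recursive check above (hedged, made-up values): for itsm_params
+ # {"name": "SN-1", "description": None, "data": {"ConnectionSettings": {"Url": None,
+ #  "Auth_UserName": "svc-ccc"}}} the returned list would be ["Url"], because 'description'
+ # is exempt and nested dictionaries are walked recursively.
+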
+ def create_itsm_integration_setting(self, itsm_params):
+ """
+ Creates a new ITSM integration setting in the Cisco Catalyst Center using provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ itsm_params (dict): A dictionary containing the parameters necessary to create an ITSM integration setting.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This method sends a request to the Cisco Catalyst Center to create an ITSM integration setting based on the
+ parameters provided in 'itsm_params'.
+ It then makes an API call and logs the response. If the creation is successful, indicated by the presence
+ of a 'createdDate' in the response, it logs a success message, sets the internal state to 'success', and
+ marks the operation as having changed the system state. If the creation fails, it attempts to log any errors
+ returned by the API or logs a generic failure message if no specific error is provided.
+ """
+
+ try:
+ instance_name = itsm_params.get('name')
+ response = self.dnac._exec(
+ family="itsm_integration",
+ function='create_itsm_integration_setting',
+ op_modifies=True,
+ params=itsm_params
+ )
+ self.log("Received API response from 'create_itsm_integration_setting': {0}".format(str(response)), "DEBUG")
+ created_date = response.get('createdDate')
+
+ if created_date:
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "ITSM Integration Settings with name '{0}' has been created successfully in Cisco Catalyst Center".format(instance_name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ failure_msg = response.get('errors')
+
+ if not failure_msg:
+ failure_msg = "Unable to create ITSM Integration Settings with name '{0}' in Cisco Catalyst Center".format(instance_name)
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while creating the ITSM Integration Settings with name '{0}' in Cisco Catalyst Center: {1}".format(instance_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def itsm_needs_update(self, itsm_params, itsm_in_ccc):
+ """
+ Checks if the ITSM settings in Cisco Catalyst Center need to be updated based on provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ itsm_params (dict): A dictionary containing the new ITSM settings parameters.
+ itsm_in_ccc (dict): A dictionary containing the existing ITSM settings in the Cisco Catalyst Center.
+ Returns:
+ bool: True if an update is required based on the differences between the provided parameters and the existing settings, False otherwise.
+ Description:
+ This method compares provided ITSM integration parameters against the current settings stored in the Cisco Catalyst Center
+ to determine if an update is necessary.
+ If any of the checked fields or connection settings differ between the provided parameters and the existing settings, the method
+ will return True indicating an update is required. Otherwise, it returns False.
+ """
+
+ itsm_require_update = False
+ required_params = ["name", "description"]
+ for key in required_params:
+ if key == "description" and itsm_params[key]:
+ if itsm_params[key] != itsm_in_ccc[key]:
+ itsm_require_update = True
+ return itsm_require_update
+ elif itsm_params[key] != itsm_in_ccc[key]:
+ itsm_require_update = True
+ return itsm_require_update
+
+ if itsm_params.get('data') is None or itsm_params.get('data').get('ConnectionSettings') is None:
+ self.log("ITSM Connection settings parameters are not given in the input playbook so no update required.", "INFO")
+ return itsm_require_update
+
+ url = itsm_params.get('data').get('ConnectionSettings').get('Url')
+ username = itsm_params.get('data').get('ConnectionSettings').get('Auth_UserName')
+
+ if url and url != itsm_in_ccc.get('data').get('ConnectionSettings').get('Url'):
+ itsm_require_update = True
+ if username and username != itsm_in_ccc.get('data').get('ConnectionSettings').get('Auth_UserName'):
+ itsm_require_update = True
+
+ return itsm_require_update
+
+ def update_itsm_integration_setting(self, itsm_params, itsm_in_ccc):
+ """
+ Updates the ITSM integration settings in the Cisco Catalyst Center based on the provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ itsm_params (dict): A dictionary containing the new ITSM settings parameters.
+ itsm_in_ccc (dict): A dictionary containing the existing ITSM settings in the Cisco Catalyst Center.
+ Returns:
+ self (object): The instance itself with updated status and message properties reflecting the result of the operation.
+ Description:
+ This method updates existing ITSM integration settings in the Cisco Catalyst Center using the provided new parameters.
+ The method performs several checks:
+ - It verifies that the 'password' is provided when updating the connection settings. If not, it sets the status
+ to 'failed' and logs an informational message.
+ - It validates that the provided URL starts with 'https://'. If the URL is invalid, it sets the status to 'failed' and
+ logs an informational message.
+ Upon successful update, the method logs the success and returns the instance with a 'success' status. If the update
+ fails for any reason (such as an invalid URL or API errors), it logs the failure and returns the instance with a 'failed'
+ status.
+ """
+
+ try:
+ update_itsm_params = {}
+ update_itsm_params['name'] = itsm_params.get('name') or itsm_in_ccc.get('name')
+ update_itsm_params['description'] = itsm_params.get('description') or itsm_in_ccc.get('description')
+ update_itsm_params['dypName'] = 'ServiceNowConnection'
+
+ update_itsm_params['data'] = {}
+ update_itsm_params['data']['ConnectionSettings'] = {}
+ if itsm_params.get('data') is None or itsm_params.get('data').get('ConnectionSettings') is None:
+ update_itsm_params['data']['ConnectionSettings']['Url'] = itsm_in_ccc.get('data').get('ConnectionSettings').get('Url')
+ update_itsm_params['data']['ConnectionSettings']['Auth_UserName'] = itsm_in_ccc.get('data').get('ConnectionSettings').get('Auth_UserName')
+ else:
+ connection_params = itsm_params.get('data').get('ConnectionSettings')
+ update_itsm_params['data']['ConnectionSettings']['Url'] = connection_params.get('Url')
+ update_itsm_params['data']['ConnectionSettings']['Auth_UserName'] = connection_params.get('Auth_UserName')
+
+ if not connection_params.get('Auth_Password'):
+ self.status = "failed"
+ self.msg = """Unable to update ITSM setting '{0}' as 'Auth Password' is the required parameter for updating
+ ITSM Intergartion setting.""".format(update_itsm_params.get('name'))
+ self.log(self.msg, "INFO")
+ return self
+
+ update_itsm_params['data']['ConnectionSettings']['Auth_Password'] = connection_params.get('Auth_Password')
+
+ # Check whether the given url is valid or not
+ url = update_itsm_params.get('data').get('ConnectionSettings').get('Url')
+ regex_pattern = r'https://\S+'
+
+ if not re.match(regex_pattern, url):
+ self.status = "failed"
+ self.msg = "Given url '{0}' is invalid url for ITSM Intergartion setting. It must start with 'https://'".format(url)
+ self.log(self.msg, "ERROR")
+ return self
+
+ itsm_param_dict = {
+ 'payload': update_itsm_params,
+ 'instance_id': itsm_in_ccc.get('id')
+ }
+
+ response = self.dnac._exec(
+ family="itsm_integration",
+ function='update_itsm_integration_setting',
+ op_modifies=True,
+ params=itsm_param_dict,
+ )
+ self.log("Received API response from 'update_itsm_integration_setting': {0}".format(str(response)), "DEBUG")
+
+ updated_date = response.get('updatedDate')
+
+ if updated_date:
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = """ITSM Integration Settings with name '{0}' has been updated successfully in Cisco Catalyst
+ Center.""".format(update_itsm_params.get('name'))
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ failure_msg = response.get('errors')
+
+ if not failure_msg:
+ failure_msg = "Unable to update ITSM Integration Settings with name '{0}' in Cisco Catalyst Center".format(update_itsm_params.get('name'))
+
+ self.msg = failure_msg
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Error while updating the ITSM Integration Settings with name '{0}' in Cisco Catalyst Center due to:
+ {1}""".format(update_itsm_params.get('name'), str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def delete_itsm_integration_setting(self, itsm_name, itsm_id):
+ """
+ Deletes a specified ITSM integration setting from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ itsm_name (str): The name of the ITSM integration setting to be deleted.
+ itsm_id (str): The unique identifier of the ITSM integration setting to be deleted.
+ Returns:
+ self (object): The instance itself with updated status and message properties reflecting the result of the operation.
+ Description:
+ This method attempts to delete an ITSM integration setting based on the provided name and ID.
+ If the deletion is not successful, the method logs an error message and sets the 'status' attribute to 'failed'.
+ This could occur if the ITSM integration setting does not exist or due to a failure in the API call.
+ Exceptions caught during the API call are handled by logging an error message detailing the issue and setting the 'status'
+ attribute to 'failed'.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="itsm_integration",
+ function='delete_itsm_integration_setting',
+ op_modifies=True,
+ params={"instance_id": itsm_id}
+ )
+ self.log("Received API response from 'delete_itsm_integration_setting': {0}".format(str(response)), "DEBUG")
+
+ if "successfully" in response:
+ self.msg = "ITSM Integration settings instance with name '{0}' deleted successfully from Cisco Catalyst Center".format(itsm_name)
+ self.status = "success"
+ self.log(self.msg, "INFO")
+ self.result['changed'] = True
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ self.msg = "Cannot delete ITSM Integration settings instance with name '{0}' from Cisco Catalyst Center".format(itsm_name)
+ self.log(self.msg, "ERROR")
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while deleting ITSM Integration Setting with name '{0}' from Cisco Catalyst Center due to: {1}".format(itsm_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_syslog_notification_details(self):
+ """
+ Retrieves the details of a Syslog Event Notification subscription from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict or None: A dictionary containing the details of the Syslog Event Notification subscription if found.
+ Returns None if no subscription is found or if an error occurs during the API call.
+ Description:
+ This function calls an API to fetch the details of a specified Syslog Event Notification subscription. If the
+ subscription exists, it returns the response containing the subscription details. If no subscription is found
+ or an error occurs, it logs the appropriate message and handles the exception accordingly.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_syslog_event_subscriptions',
+ )
+ self.log("Received API response from 'get_syslog_event_subscriptions': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no Syslog Event Notification present in Cisco Catalyst Center.", "INFO")
+ return response
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = (
+ "An error occurred while retrieving Syslog Event subscription Notification details "
+ "from Cisco Catalyst Center: {0}".format(repr(e))
+ )
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_syslog_subscription_detail(self, destination):
+ """
+ Retrieves the details of a specific Syslog destination subscription from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ destination (str): The name of the Syslog destination for which details needs to be fetched.
+ Returns:
+ dict or list: A dictionary containing the details of the Syslog destination subscription if found.
+ Returns an empty list if no destination is found or if an error occurs during the API call.
+ Description:
+ This function calls an API to fetch the details of all Syslog destination from the Cisco Catalyst Center.
+ It then searches for a subscription that matches the given `destination`. If a match is found, it returns
+ details of the matching subscription. If no match is found or if an error occurs, it logs the appropriate message
+ and handles the exception accordingly.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_syslog_subscription_details',
+ )
+ self.log("Received API response from 'get_syslog_subscription_details': {0}".format(str(response)), "DEBUG")
+ sys_destination_details = []
+
+ if not response:
+ self.log("There is no Syslog destination present in Cisco Catalyst Center.", "INFO")
+ return sys_destination_details
+
+ for dest in response:
+ if dest["name"] == destination:
+ return dest
+ self.log("Syslog destination with the name '{0}' not found in Cisco Catalyst Center.".format(destination), "INFO")
+
+ return sys_destination_details
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = (
+ "Error while getting the details of the Syslog Subscription with the given name '{0}'"
+ " from Cisco Catalyst Center: {1}".format(destination, repr(e))
+ )
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_event_ids(self, events):
+ """
+ Retrieves the event IDs for a given list of event names from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ events (list of str): A list of event names for which the event IDs need to be retrieved.
+ Returns:
+ list of str: A list of event IDs corresponding to the provided event names. If an event name is not
+ found, it is skipped.
+ Description:
+ This function iterates over a list of event names and calls an API to fetch the details of each event from
+ the Cisco Catalyst Center. If the event is found, its event ID is extracted and added to the list of event IDs.
+ The function logs messages for successful API responses, missing events, and any errors encountered during the
+ process. The final list of event IDs is returned.
+ """
+
+ event_ids = []
+
+ for event_name in events:
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_eventartifacts',
+ op_modifies=True,
+ params={"search": event_name}
+ )
+ self.log("Received API response from 'get_eventartifacts': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no Event with name '{0}' present in Cisco Catalyst Center.".format(event_name), "INFO")
+ continue
+
+ response = response[0]
+ event_payload = response.get('eventPayload')
+ if event_payload:
+ event_id = event_payload.get('eventId')
+ event_ids.append(event_id)
+
+ except Exception as e:
+ self.msg = """Error while getting the details of Event with given name '{0}' present in
+ Cisco Catalyst Center: {1}""".format(event_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return event_ids
+
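+ # Hedged example: for events such as ["AP Flap", "Device Unreachable"] (names are assumptions),
+ # each name is searched via 'get_eventartifacts' and the 'eventId' of the first matching
+ # artifact is collected; names that cannot be resolved are simply skipped.
+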
+ def get_site_ids(self, sites):
+ """
+ Retrieves the site IDs for a given list of site names from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ sites (list of str): A list of site names for which the site IDs need to be retrieved.
+ Returns:
+ list of str: A list of site IDs corresponding to the provided site names. If a site name is not
+ found, it is skipped, so the returned list may be empty.
+ Description:
+ This function iterates over a list of site names and calls an API to fetch the details of each site
+ from the Cisco Catalyst Center. If the site is found, its site ID is extracted and added to the list
+ of site IDs. The function logs messages for successful API responses, missing sites, and any errors
+ encountered during the process. The final list of site IDs is returned.
+ """
+
+ site_ids = []
+ for site in sites:
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function='get_site',
+ op_modifies=True,
+ params={"name": site},
+ )
+ self.log("Received API response from 'get_site': {0}".format(str(response)), "DEBUG")
+ response = response.get('response')
+ if not response:
+ self.log("No site with the name '{0}' found in Cisco Catalyst Center.".format(site), "INFO")
+ continue
+ site_id = response[0].get("id")
+ if not site_id:
+ self.log("Site '{0}' found, but no ID available in the response.".format(site), "WARNING")
+ continue
+ site_ids.append(site_id)
+
+ except Exception as e:
+ self.msg = """Error while getting the details of Site with given name '{0}' present in
+ Cisco Catalyst Center: {1}""".format(site, str(e))
+ self.log(self.msg, "ERROR")
+
+ return site_ids
+
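+ # Hedged example: site names such as ["Global/USA/San Jose"] (an assumed hierarchy) are resolved
+ # through the 'get_site' API and only the returned 'id' values are kept; unknown sites are
+ # skipped, so the resulting list may be empty.
+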
+ def collect_syslog_notification_playbook_params(self, syslog_notification_details):
+ """
+ Collects and prepares parameters for creating or updating a Syslog Event Notification.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_notification_details (dict): A dictionary containing the details required for creating or updating
+ the Syslog Event Notification.
+ Returns:
+ list of dict: A list containing a dictionary with the parameters for creating the Syslog Event Notification.
+ Description:
+ This function collects and structures the necessary parameters for creating or updating a Syslog Event Notification.
+ It fetches additional details such as instance IDs and connector types from the Cisco Catalyst Center
+ and prepares the subscription endpoints and filters. The function handles missing or incorrect details by logging
+ appropriate messages and adjusting the status, and returns a list containing the required parameters.
+ """
+
+ syslog_notification_params = []
+ name = syslog_notification_details.get('name')
+ playbook_params = {
+ 'name': name,
+ 'description': syslog_notification_details.get('description'),
+ 'version': syslog_notification_details.get('version'),
+ 'subscriptionEndpoints': [],
+ 'filter': {}
+ }
+ # Collect the Instance ID of the syslog destination
+ self.log("Collecting parameters for Syslog Event Notification named '{0}'.".format(name), "INFO")
+ destination = syslog_notification_details.get('destination')
+
+ if destination:
+ subscription_details = self.get_syslog_subscription_detail(destination)
+
+ if not subscription_details:
+ self.status = "failed"
+ self.msg = """Unable to create/update the syslog event notification '{0}' as syslog desination '{1}' is not configured or
+ present in Cisco Catalyst Center""".format(name, destination)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ instance_id = subscription_details.get('instanceId')
+ connector_type = subscription_details.get('connectorType')
+ temp_subscript_endpoint = {
+ "instanceId": instance_id,
+ "subscriptionDetails": {
+ "connectorType": connector_type
+ }
+ }
+ playbook_params["subscriptionEndpoints"].append(temp_subscript_endpoint)
+
+ events = syslog_notification_details.get('events')
+ if events:
+ events_ids = self.get_event_ids(events)
+ if not events_ids:
+ self.status = "failed"
+ self.msg = (
+ "Unable to create/update Syslog event notification as the given event names '{0}' "
+ "are incorrect or could not be found."
+ ).format(str(events))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ playbook_params["filter"]["eventIds"] = events_ids
+
+ domain = syslog_notification_details.get("domain")
+ subdomains = syslog_notification_details.get("subdomains")
+ if domain and subdomains:
+ playbook_params["filter"]["domainsSubdomains"] = []
+ domain_dict = {
+ "domain": domain,
+ "subDomains": subdomains
+ }
+ playbook_params["filter"]["domainsSubdomains"].append(domain_dict)
+
+ # Add other filter parameters if present
+ filter_keys = ["event_types", "event_categories", "event_severities", "event_sources"]
+ filter_mapping = {
+ "event_types": "types",
+ "event_categories": "categories",
+ "event_severities": "severities",
+ "event_sources": "sources"
+ }
+
+ for key in filter_keys:
+ value = syslog_notification_details.get(key)
+ if value:
+ playbook_params["filter"][filter_mapping[key]] = value
+
+ sites = syslog_notification_details.get("sites")
+ if sites:
+ site_ids = self.get_site_ids(sites)
+ if not site_ids:
+ self.msg = "Unable to find the Site ID's for the given site(s) - '{0}' in the playbook's input.".format(sites)
+ self.log(self.msg, "INFO")
+
+ playbook_params["filter"]["siteIds"] = site_ids
+ syslog_notification_params.append(playbook_params)
+ self.log("Syslog notification playbook parameters collected successfully for '{0}': {1}".format(name, playbook_params), "INFO")
+
+ return syslog_notification_params
+
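+ # Illustrative sketch only (IDs and connector type are assumptions): the single-element list
+ # returned here wraps a payload shaped roughly like
+ # {"name": "syslog-notify-1", "description": "...", "version": "1.0.0",
+ #  "subscriptionEndpoints": [{"instanceId": "<syslog-destination-instance-id>",
+ #                             "subscriptionDetails": {"connectorType": "SYSLOG"}}],
+ #  "filter": {"eventIds": ["..."], "siteIds": ["..."]}}
+ # and is later passed as the 'payload' of 'create_syslog_event_subscription'.
+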
+ def mandatory_syslog_notification_parameter_check(self, syslog_notification_params):
+ """
+ Checks for the presence of mandatory parameters required for adding a Syslog Event Notification.
+ Args:
+ syslog_notification_params (list of dict): A list containing a single dictionary with the parameters
+ for the Syslog Event Notification.
+ Returns:
+ self: The instance of the class with updated status and message if any required parameter is missing.
+ Description:
+ This function verifies the presence of required parameters for creating or updating a Syslog Event Notification.
+ If any required parameter is absent, it logs an error message, updates the status to "failed",
+ and sets the message attribute. It then returns the instance of the class with the updated status and message.
+ """
+
+ required_params_absent = []
+ syslog_notification_params = syslog_notification_params[0]
+ notification_name = syslog_notification_params.get("name")
+ description = syslog_notification_params.get("description")
+
+ if not notification_name:
+ required_params_absent.append("name")
+
+ if not description:
+ required_params_absent.append("description")
+
+ subs_endpoints = syslog_notification_params.get('subscriptionEndpoints')
+
+ if not subs_endpoints:
+ required_params_absent.append("destination")
+
+ filters = syslog_notification_params.get("filter")
+
+ if not filters.get("eventIds"):
+ required_params_absent.append("events")
+
+ if required_params_absent:
+ self.status = "failed"
+ self.msg = """Missing required parameter '{0}' for adding Syslog Event Notification with given
+ name {1}""".format(str(required_params_absent), notification_name)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ return self
+
+ def create_syslog_notification(self, syslog_notification_params):
+ """
+ Creates a Syslog Event Notification subscription in Cisco Catalyst Center based on the provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_notification_params (list): A list containing a dictionary having the required parameter for creating
+ syslog event subscription notification.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function makes an API call to create a Syslog Event Notification subscription in Cisco Catalyst Center.
+ It takes the provided parameters as input and constructs the payload for the API call. After making the
+ API call, it checks the status of the execution and updates the status and result attributes accordingly.
+ If the creation is successful, it sets the status to "success" and updates the result attribute with the
+ success message. If an error occurs during the process, it sets the status to "failed" and logs the
+ appropriate error message.
+ """
+
+ try:
+ notification_name = syslog_notification_params[0].get('name')
+ self.log("Requested payload for create_syslog_event_subscription - {0}".format(str(syslog_notification_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_syslog_event_subscription',
+ op_modifies=True,
+ params={'payload': syslog_notification_params}
+ )
+ time.sleep(1)
+ self.log("Received API response from 'create_syslog_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Syslog Event Notification '{0}' created successfully in Cisco Catalyst Center".format(notification_name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to add Syslog Event Notification '{0}' in Cisco Catalyst Center.".format(notification_name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Error while adding the Syslog Event Subscription Notification with name '{0}' in Cisco Catalyst Center:
+ {1}""".format(notification_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
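+ # Worked example (illustrative, hypothetical URI): the 'statusUri' returned by the create
+ # call ends with the execution id that check_status_api_events() polls, e.g.
+ # "/dna/intent/api/v1/eventSubscriptions/status/0a1b2c3d".split("/")[-1] -> "0a1b2c3d"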
+
+ def is_element_missing(self, playbook_list, ccc_list):
+ """
+ Checks if any element in the playbook list is missing in the CCC list.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ playbook_list (list): List of elements from the playbook.
+ ccc_list (list): List of elements from the CCC.
+ Returns:
+ bool: True if any element from the playbook list is missing in the CCC list, False otherwise.
+ Description:
+ This function iterates through each element in the playbook list and checks if it is present in the CCC list.
+ If any element from the playbook list is not found in the CCC list, it returns True indicating that an element
+ is missing. If all elements are found, it returns False indicating that no element is missing.
+ """
+
+ for item in playbook_list:
+ if item not in ccc_list:
+ return True
+
+ return False
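+ # Illustrative sketch: is_element_missing() only looks for playbook values that are absent
+ # from the Catalyst Center list, so with hypothetical inputs:
+ # is_element_missing(["HIGH", "MEDIUM"], ["HIGH", "MEDIUM", "LOW"]) -> False
+ # is_element_missing(["HIGH", "CRITICAL"], ["HIGH", "MEDIUM"]) -> True (update needed)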
+
+ def compare_notification_filters(self, filters_in_playbook, filters_in_ccc):
+ """
+ Compares notification filters between the playbook and Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ filters_in_playbook (dict): Dictionary containing notification filters from the playbook.
+ filters_in_ccc (dict): Dictionary containing notification filters from Cisco Catalyst Center.
+ Returns:
+ bool: True if notification filters need update, False otherwise.
+ Description:
+ This function compares notification filters between the playbook and Cisco Catalyst Center.
+ It iterates through each key-value pair in the playbook filters and checks if they match with the
+ corresponding key-value pair in the CCC filters.
+ If any mismatch is found, it logs a message indicating the need for an update and returns True.
+ If all filters match, it returns False indicating that no update is required.
+ """
+
+ for key, value in filters_in_playbook.items():
+ if key == "domainsSubdomains":
+ domain_subdomain_input = filters_in_playbook.get("domainsSubdomains")
+ domain_subdomain_in_ccc = filters_in_ccc.get("domainsSubdomains")
+
+ if domain_subdomain_input: # Ensure that there is input for 'domainsSubdomains'
+ domain_input = domain_subdomain_input[0].get("domain")
+ subdomains_input = domain_subdomain_input[0].get("subDomains")
+ else:
+ domain_input = subdomains_input = None
+
+ if not domain_subdomain_in_ccc:
+ self.log("Since no domain or subdomains are present in Catalyst Center, the notification needs an update.", "INFO")
+ return True
+ domain_in_ccc = domain_subdomain_in_ccc.get("domain")
+ subdomain_in_ccc = domain_subdomain_in_ccc.get("subDomains")
+
+ if domain_input and domain_input != domain_in_ccc:
+ self.log("Domain '{0}' given in the playbook does not match with domain in Cisco Catalyst Center".format(domain_input), "INFO")
+ return True
+
+ if subdomains_input:
+ list_needs_update = self.is_element_missing(subdomains_input, subdomain_in_ccc)
+ if list_needs_update:
+ self.log(("Given subdomain_names '{0}' in the playbook do not match with the values present in "
+ "Cisco Catalyst Center, so the notification needs an update.").format(subdomains_input), "INFO")
+ return True
+ elif isinstance(value, list):
+ list_needs_update = self.is_element_missing(value, filters_in_ccc[key])
+ if list_needs_update:
+ self.log(("Parameter '{0}' given in the playbook does not match with the value present in Cisco Catalyst "
+ "Center so notification needs update.").format(key), "INFO")
+ return True
+
+ return False
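+ # Illustrative sketch with hypothetical filters: list-valued keys are compared via
+ # is_element_missing(), so
+ # compare_notification_filters({"eventIds": ["EVT-1"], "severities": ["1", "2"]},
+ #                              {"eventIds": ["EVT-1", "EVT-2"], "severities": ["1", "2"]})
+ # returns False, while adding "EVT-3" to the playbook 'eventIds' would return True
+ # and mark the notification for an update.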
+
+ def syslog_notification_needs_update(self, syslog_notification_params, syslog_notification_in_ccc):
+ """
+ Checks if a syslog notification needs update based on a comparison between playbook and CCC configurations.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_notification_params (dict): Dictionary containing syslog notification parameters from the playbook.
+ syslog_notification_in_ccc (dict): Dictionary containing syslog notification parameters from Cisco Catalyst Center.
+ Returns:
+ bool: True if the syslog notification needs update, False otherwise.
+ Description:
+ This function checks if a syslog notification needs update by comparing its parameters
+ with the corresponding parameters in Cisco Catalyst Center.
+ It compares the description, syslog destination, and filters between the playbook and CCC configurations.
+ If any parameter mismatch is found, it logs a message indicating the need for an update and returns True.
+ If all parameters match, it returns False indicating that no update is required.
+ """
+
+ syslog_notification_params = syslog_notification_params[0]
+ name = syslog_notification_params.get("name")
+ description_in_playbook = syslog_notification_params.get("description")
+ description_in_ccc = syslog_notification_in_ccc.get("description")
+ subs_endpoints = syslog_notification_params.get("subscriptionEndpoints")
+ ccc_endpoints = syslog_notification_in_ccc.get("subscriptionEndpoints")[0]
+
+ if description_in_playbook and description_in_playbook != description_in_ccc:
+ self.log("Parameter 'description' does not match with the value of description present in Cisco Catalyst Center "
+ "so given Syslog Event Notification '{0}' needs an update".format(name), "INFO")
+ return True
+
+ if subs_endpoints:
+ instance_id = subs_endpoints[0].get("instanceId")
+ ccc_instance_id = ccc_endpoints.get("instanceId")
+ if instance_id != ccc_instance_id:
+ self.log("Given Syslog destination in the playbook is different from Syslog destination present in Cisco Catalyst Center "
+ "so given Syslog Event Notification '{0}' needs an update".format(name), "INFO")
+ return True
+
+ filters_in_playbook = syslog_notification_params.get("filter")
+ filters_in_ccc = syslog_notification_in_ccc.get("filter")
+
+ if self.compare_notification_filters(filters_in_playbook, filters_in_ccc):
+ self.log("Notification filters differ between the playbook and Cisco Catalyst Center. Syslog Event Subscription Notification "
+ "'{0}' needs an update.".format(name), "INFO")
+ return True
+
+ return False
+
+ def collect_notification_filter_params(self, playbook_params, filter, ccc_filter):
+ """
+ Collects notification filter parameters from playbook and CCC configurations.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ playbook_params (dict): Dictionary containing parameters from the playbook.
+ filter (dict): Dictionary containing filter parameters from the playbook.
+ ccc_filter (dict): Dictionary containing filter parameters from Cisco Catalyst Center.
+ Returns:
+ dict: Dictionary containing updated playbook parameters with notification filter parameters.
+ Description:
+ This function collects notification filter parameters from both the playbook and CCC configurations.
+ It checks if filter parameters are provided in the playbook. If provided, it updates the playbook parameters
+ with the filter parameters from the playbook. If not provided, it updates the playbook parameters
+ with the filter parameters from Cisco Catalyst Center.
+ """
+
+ filter_keys = ["eventIds", "domainsSubdomains", "types", "categories", "severities", "sources", "siteIds"]
+
+ if filter:
+ for key in filter_keys:
+ playbook_params["filter"][key] = filter.get(key) or ccc_filter.get(key)
+ else:
+ # Need to take all required/optional parameter from Cisco Catalyst Center
+ for key in filter_keys:
+ playbook_params["filter"][key] = ccc_filter.get(key)
+
+ return playbook_params
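+ # Illustrative sketch (hypothetical values): the playbook value wins and Catalyst Center
+ # fills the gaps, so with
+ # filter     = {"eventIds": ["EVT-1"], "severities": None}
+ # ccc_filter = {"eventIds": ["EVT-9"], "severities": ["1"], "siteIds": ["<site-uuid>"]}
+ # the merged playbook_params["filter"] ends up with eventIds=["EVT-1"],
+ # severities=["1"] and siteIds=["<site-uuid>"].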
+
+ def update_syslog_notification(self, syslog_notification_params, syslog_notification_in_ccc):
+ """
+ Updates a Syslog Event Notification subscription in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ syslog_notification_params (dict): Parameters for updating the Syslog Event Notification.
+ syslog_notification_in_ccc (dict): Current configuration of the Syslog Event Notification in CCC.
+
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function updates a Syslog Event Notification subscription in Cisco Catalyst Center based on the provided parameters.
+ It constructs the payload for the update operation and sends it as an API request to the Cisco Catalyst Center.
+ After the update operation, it checks the status of the API request and logs appropriate messages based on the response.
+ """
+
+ syslog_notification_params = syslog_notification_params[0]
+ sys_notification_update_params = []
+ name = syslog_notification_params.get("name")
+
+ playbook_params = {
+ "subscriptionId": syslog_notification_in_ccc.get("subscriptionId"),
+ "name": name,
+ "description": syslog_notification_params.get("description") or syslog_notification_in_ccc.get("description"),
+ "version": syslog_notification_params.get("version") or syslog_notification_in_ccc.get("version"),
+ "filter": {}
+ }
+ subs_endpoints = syslog_notification_params.get("subscriptionEndpoints")
+
+ if subs_endpoints:
+ playbook_params["subscriptionEndpoints"] = subs_endpoints
+ else:
+ playbook_params["subscriptionEndpoints"] = []
+ instance_id = syslog_notification_in_ccc.get("subscriptionEndpoints")[0].get("instanceId")
+ playbook_params["subscriptionEndpoints"] = [{
+ "instanceId": instance_id,
+ "subscriptionDetails": {
+ "connectorType": "SYSLOG"
+ }
+ }]
+
+ filter = syslog_notification_params.get("filter")
+ ccc_filter = syslog_notification_in_ccc.get("filter")
+ notification_params = self.collect_notification_filter_params(playbook_params, filter, ccc_filter)
+ sys_notification_update_params.append(notification_params)
+
+ try:
+ self.log("Requested payload for update_syslog_event_subscription - {0}".format(str(sys_notification_update_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_syslog_event_subscription',
+ op_modifies=True,
+ params={'payload': sys_notification_update_params}
+ )
+ time.sleep(1)
+ self.log("Received API response from 'update_syslog_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Syslog Event Notification '{0}' updated successfully in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to update Syslog Event Notification '{0}' in Cisco Catalyst Center.".format(name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while updating the Syslog Event Notification with name '{0}' in Cisco Catalyst Center: {1}".format(name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
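+ # Illustrative sketch (hypothetical values): unlike the create payload, the update payload
+ # built above also carries the existing 'subscriptionId', e.g.
+ # [{
+ #     "subscriptionId": "<existing-subscription-uuid>",
+ #     "name": "Syslog Alert",
+ #     "description": "Forward assurance events to syslog",
+ #     "version": "1.0.0",
+ #     "subscriptionEndpoints": [{"instanceId": "<destination-instance-uuid>",
+ #                                "subscriptionDetails": {"connectorType": "SYSLOG"}}],
+ #     "filter": {"eventIds": ["<event-id>"]}
+ # }]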
+
+ def get_webhook_notification_details(self):
+ """
+ Retrieves the details of a Webhook Event Notification subscription from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict or None: A dictionary containing the details of the Webhook Event Notification subscription if found.
+ Returns None if no subscription is found or if an error occurs during the API call.
+ Description:
+ This function calls an API to fetch the details of a specified Webhook Event Notification subscription. If the
+ subscription exists, it returns the response containing the subscription details. If no subscription is found
+ or an error occurs, it logs the appropriate message and handles the exception accordingly.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_rest_webhook_event_subscriptions',
+ )
+ self.log("Received API response from 'get_rest_webhook_event_subscriptions': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no Webhook Events Subscription Notification present in Cisco Catalyst Center.", "INFO")
+ return response
+
+ return response
+
+ except Exception as e:
+ self.status = "failed"
+ self.log("Error while retrieving Webhook Event Notification details: {0}".format(str(e)), "ERROR")
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_webhook_subscription_detail(self, destination):
+ """
+ Retrieves the details of a specific webhook destination subscription from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ destination (str): The name of the webhook destination for which details need to be fetched.
+ Returns:
+ dict or list: A dictionary containing the details of the webhook destination subscription if found.
+ Returns an empty list if no destination is found or if an error occurs during the API call.
+ Description:
+ This function calls an API to fetch the details of all webhook destinations from the Cisco Catalyst Center.
+ It then searches for a subscription that matches the given `destination`. If a match is found, it returns
+ details of the matching subscription. If no match is found or if an error occurs, it logs the appropriate message
+ and handles the exception accordingly.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_rest_webhook_subscription_details',
+ )
+ self.log("Received API response from 'get_rest_webhook_subscription_details': {0}".format(str(response)), "DEBUG")
+ web_destination_details = []
+
+ if not response:
+ self.log("There is no webhook destination present in Cisco Catalyst Center.", "INFO")
+ return web_destination_details
+
+ for dest in response:
+ if dest["name"] == destination:
+ return dest
+ self.log("There is no webhook destination with given name '{0}' present in Cisco Catalyst Center.".format(destination), "INFO")
+
+ return web_destination_details
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Error while getting the details of webhook Subscription with given name '{0}' present in
+ Cisco Catalyst Center: {1}""".format(destination, str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def collect_webhook_notification_playbook_params(self, webhook_notification_details):
+ """
+ Collects and prepares parameters for creating or updating a webhook Event Notification.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_notification_details (dict): A dictionary containing the details required for creating or updating
+ the webhook Event Notification.
+ Returns:
+ list of dict: A list containing a dictionary with the parameters for creating the webhook Event Notification.
+ Description:
+ This function collects and structures the necessary parameters for creating or updating a webhook Event Notification.
+ It fetches additional details such as instance IDs and connector types from the Cisco Catalyst Center
+ and prepares the subscription endpoints and filters. The function handles missing or incorrect details by logging
+ appropriate messages and adjusting the status, and returns a list containing the required parameters.
+ """
+
+ webhook_notification_params = []
+ name = webhook_notification_details.get('name')
+ playbook_params = {
+ 'name': name,
+ 'description': webhook_notification_details.get('description'),
+ 'version': webhook_notification_details.get('version'),
+ 'subscriptionEndpoints': [],
+ 'filter': {}
+
+ }
+ # Collect the Instance ID of the webhook destination
+ self.log("Collecting parameters for Webhook Event Notification named '{0}'.".format(name), "INFO")
+ destination = webhook_notification_details.get('destination')
+
+ if destination:
+ subscription_details = self.get_webhook_subscription_detail(destination)
+
+ if not subscription_details:
+ self.status = "failed"
+ self.msg = """Unable to create/update the webhook event notification '{0}' as webhook desination '{1}' is not configured or
+ present in Cisco Catalyst Center""".format(name, destination)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ instance_id = subscription_details.get('instanceId')
+ connector_type = subscription_details.get('connectorType')
+ temp_subscript_endpoint = {
+ "instanceId": instance_id,
+ "subscriptionDetails": {
+ "connectorType": connector_type
+ }
+ }
+ playbook_params["subscriptionEndpoints"].append(temp_subscript_endpoint)
+
+ events = webhook_notification_details.get('events')
+ if events:
+ events_ids = self.get_event_ids(events)
+ if not events_ids:
+ self.status = "failed"
+ self.msg = (
+ "Unable to create/update Webhook event notification as the given event names '{0}' "
+ "are incorrect or could not be found."
+ ).format(str(events))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ playbook_params["filter"]["eventIds"] = events_ids
+
+ domain = webhook_notification_details.get("domain")
+ subdomains = webhook_notification_details.get("subdomains")
+ if domain and subdomains:
+ playbook_params["filter"]["domainsSubdomains"] = []
+ domain_dict = {
+ "domain": domain,
+ "subDomains": subdomains
+ }
+ playbook_params["filter"]["domainsSubdomains"].append(domain_dict)
+
+ # Add other filter parameters if present
+ filter_keys = ["event_types", "event_categories", "event_severities", "event_sources"]
+ filter_mapping = {
+ "event_types": "types",
+ "event_categories": "categories",
+ "event_severities": "severities",
+ "event_sources": "sources"
+ }
+
+ for key in filter_keys:
+ value = webhook_notification_details.get(key)
+ if value:
+ playbook_params["filter"][filter_mapping[key]] = value
+
+ sites = webhook_notification_details.get("sites")
+ if sites:
+ site_ids = self.get_site_ids(sites)
+ if not site_ids:
+ self.msg = "Unable to find the Site IDs for the given site(s) - '{0}' in the playbook's input.".format(sites)
+ self.log(self.msg, "INFO")
+
+ playbook_params["filter"]["siteIds"] = site_ids
+ self.log("Site IDs '{0}' found for site names '{1}'. Added to filter.".format(site_ids, sites), "INFO")
+
+ self.log("Webhook notification playbook parameters collected successfully for '{0}': {1}".format(name, playbook_params), "INFO")
+ webhook_notification_params.append(playbook_params)
+
+ return webhook_notification_params
+
+ def mandatory_webhook_notification_parameter_check(self, webhook_notification_params):
+ """
+ Checks for the presence of mandatory parameters required for adding a webhook Event Notification.
+ Args:
+ webhook_notification_params (list of dict): A list containing a single dictionary with the parameters
+ for the webhook Event Notification.
+ Returns:
+ self: The instance of the class with updated status and message if any required parameter is missing.
+ Description:
+ This function verifies the presence of required parameters for creating or updating a webhook Event Notification.
+ If any required parameter is absent, it logs an error message, updates the status to "failed",
+ and sets the message attribute. It then returns the instance of the class with the updated status and message.
+ """
+
+ required_params_absent = []
+ webhook_params = webhook_notification_params[0]
+ notification_name = webhook_params.get("name")
+ description = webhook_params.get("description")
+ subs_endpoints = webhook_params.get('subscriptionEndpoints')
+ filters = webhook_params.get("filter")
+
+ if not notification_name:
+ required_params_absent.append("name")
+
+ if not description:
+ required_params_absent.append("description")
+
+ if not subs_endpoints:
+ required_params_absent.append("destination")
+
+ if not filters.get("eventIds"):
+ required_params_absent.append("events")
+
+ if required_params_absent:
+ self.status = "failed"
+ self.msg = (
+ "Missing required parameter(s) '{0}' for adding Webhook Event Notification with the given "
+ "name '{1}'."
+ ).format(str(required_params_absent), notification_name)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ return self
+
+ def create_webhook_notification(self, webhook_notification_params):
+ """
+ Creates a webhook Event Notification subscription in Cisco Catalyst Center based on the provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_notification_params (list): A list containing a dictionary having the required parameter for creating
+ webhook event subscription notification.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function makes an API call to create a webhook Event Notification subscription in Cisco Catalyst Center.
+ It takes the provided parameters as input and constructs the payload for the API call. After making the
+ API call, it checks the status of the execution and updates the status and result attributes accordingly.
+ If the creation is successful, it sets the status to "success" and updates the result attribute with the
+ success message. If an error occurs during the process, it sets the status to "failed" and logs the
+ appropriate error message.
+ """
+
+ try:
+ notification_name = webhook_notification_params[0].get('name')
+ self.log("Requested payload for create_rest_webhook_event_subscription - {0}".format(str(webhook_notification_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_rest_webhook_event_subscription',
+ op_modifies=True,
+ params={'payload': webhook_notification_params}
+ )
+ time.sleep(1)
+ self.log("Received API response from 'create_rest_webhook_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Webhook Event Subscription Notification '{0}' created successfully in Cisco Catalyst Center".format(notification_name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to add Webhook Events Subscription Notification '{0}' in Cisco Catalyst Center.".format(notification_name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while adding the webhook Event Notification with name '{0}' in Cisco Catalyst Center: {1}".format(notification_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def webhook_notification_needs_update(self, webhook_notification_params, webhook_notification_in_ccc):
+ """
+ Checks if a webhook notification needs update based on a comparison between playbook and CCC configurations.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_notification_params (dict): Dictionary containing webhook notification parameters from the playbook.
+ webhook_notification_in_ccc (dict): Dictionary containing webhook notification parameters from Cisco Catalyst Center.
+ Returns:
+ bool: True if the webhook notification needs update, False otherwise.
+ Description:
+ This function checks if a webhook notification needs update by comparing its parameters
+ with the corresponding parameters in Cisco Catalyst Center.
+ It compares the description, webhook destination, and filters between the playbook and CCC configurations.
+ If any parameter mismatch is found, it logs a message indicating the need for an update and returns True.
+ If all parameters match, it returns False indicating that no update is required.
+ """
+
+ webhook_params = webhook_notification_params[0]
+ name = webhook_params.get("name")
+ description_in_playbook = webhook_params.get("description")
+ description_in_ccc = webhook_notification_in_ccc.get("description")
+ subs_endpoints = webhook_params.get("subscriptionEndpoints")
+ ccc_endpoints = webhook_notification_in_ccc.get("subscriptionEndpoints")[0]
+
+ if description_in_playbook and description_in_playbook != description_in_ccc:
+ self.log("Parameter 'description' does not match with the value of description present in Cisco Catalyst Center "
+ "so given Webhook Event Notification '{0}' needs an update".format(name), "INFO")
+ return True
+
+ if subs_endpoints:
+ instance_id = subs_endpoints[0].get("instanceId")
+ ccc_instance_id = ccc_endpoints.get("instanceId")
+ if instance_id != ccc_instance_id:
+ self.log("Given Webhook destination in the playbook is different from Webhook destination present in Cisco Catalyst "
+ "Center so given Webhook Event Subscription Notification '{0}' needs an update".format(name), "INFO")
+ return True
+
+ filters_in_playbook = webhook_params.get("filter")
+ filters_in_ccc = webhook_notification_in_ccc.get("filter")
+
+ if self.compare_notification_filters(filters_in_playbook, filters_in_ccc):
+ self.log("Notification filters differ between the playbook and Cisco Catalyst Center. Webhook Event Subscription Notification "
+ "'{0}' needs an update.".format(name), "INFO")
+ return True
+
+ return False
+
+ def update_webhook_notification(self, webhook_notification_params, webhook_notification_in_ccc):
+ """
+ Updates a Webhook Event Notification subscription in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ webhook_notification_params (dict): Dictionary containing parameters for updating the webhook Event Notification.
+ webhook_notification_in_ccc (dict): Dictionary containing current configuration of the webhook Event Notification in CCC.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function updates a Webhook Event Notification subscription in Cisco Catalyst Center based on the provided parameters.
+ It constructs the payload for the update operation and sends it as an API request to the Cisco Catalyst Center.
+ After the update operation, it checks the status of the API request and logs appropriate messages based on the response.
+ """
+
+ webhook_params = webhook_notification_params[0]
+ web_notification_update_params = []
+ name = webhook_params.get("name")
+
+ playbook_params = {
+ "subscriptionId": webhook_notification_in_ccc.get("subscriptionId"),
+ "name": name,
+ "description": webhook_params.get("description") or webhook_notification_in_ccc.get("description"),
+ "version": webhook_params.get("version") or webhook_notification_in_ccc.get("version"),
+ "filter": {},
+ }
+ subs_endpoints = webhook_params.get("subscriptionEndpoints")
+
+ if subs_endpoints:
+ playbook_params["subscriptionEndpoints"] = subs_endpoints
+ else:
+ playbook_params["subscriptionEndpoints"] = []
+ instance_id = webhook_notification_in_ccc.get("subscriptionEndpoints")[0].get("instanceId")
+ temp_subscript_endpoint = {
+ "instanceId": instance_id,
+ "subscriptionDetails": {
+ "connectorType": "REST"
+ }
+ }
+ playbook_params['subscriptionEndpoints'].append(temp_subscript_endpoint)
+
+ filter = webhook_params.get("filter")
+ ccc_filter = webhook_notification_in_ccc.get("filter")
+ webhook_update_params = self.collect_notification_filter_params(playbook_params, filter, ccc_filter)
+ web_notification_update_params.append(webhook_update_params)
+
+ try:
+ self.log("Requested payload for update_rest_webhook_event_subscription - {0}".format(str(web_notification_update_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_rest_webhook_event_subscription',
+ op_modifies=True,
+ params={'payload': web_notification_update_params}
+ )
+ time.sleep(1)
+ self.log("Received API response from 'update_rest_webhook_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Webhook Event Subscription Notification '{0}' updated successfully in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to update webhook Event Subscription Notification '{0}' in Cisco Catalyst Center.".format(name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Exception occurred while updating Webhook Notification with name '{0}': {1}.".format(name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_email_notification_details(self):
+ """
+ Retrieves the details of an email Event Notification subscription from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ dict or None: A dictionary containing the details of the email Event Notification subscription if found.
+ Returns None if no subscription is found or if an error occurs during the API call.
+ Description:
+ This function calls an API to fetch the details of a specified email Event Notification subscription. If the
+ subscription exists, it returns the response containing the subscription details. If no subscription is found
+ or an error occurs, it logs the appropriate message and handles the exception accordingly.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_email_event_subscriptions',
+ )
+ self.log("Received API response from 'get_email_event_subscriptions': {0}".format(str(response)), "DEBUG")
+
+ if not response:
+ self.log("There is no Email Events Subscription Notification present in Cisco Catalyst Center.", "INFO")
+ return response
+
+ return response
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Exception occurred while retrieving Email Event Subscription Notification: {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_email_subscription_detail(self, instance):
+ """
+ Retrieves the details of a specific email destination subscription from the Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ instance (str): The name of the email destination for which details need to be fetched.
+ Returns:
+ dict or None: A dictionary containing the details of the email destination subscription if found.
+ Returns None if no destination is found or if an error occurs during the API call.
+ Description:
+ This function calls an API to fetch the details of all email destinations from the Cisco Catalyst Center.
+ It then searches for a subscription that matches the given `instance`. If a match is found, it returns
+ details of the matching subscription. If no match is found or if an error occurs, it logs the appropriate message
+ and handles the exception accordingly.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function='get_email_subscription_details',
+ )
+ self.log("Received API response from 'get_email_subscription_details': {0}".format(str(response)), "DEBUG")
+ email_destination_details = None
+
+ if not response:
+ self.log("There is no email destination present in Cisco Catalyst Center.", "INFO")
+ return email_destination_details
+
+ for dest in response:
+ if dest["name"] == instance:
+ return dest
+ self.log("There is no email destination with given name '{0}' present in Cisco Catalyst Center.".format(instance), "INFO")
+
+ return email_destination_details
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = """Error while getting the details of Email event Subscription with given destination name '{0}' present in
+ Cisco Catalyst Center: {1}""".format(instance, str(e))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ def collect_email_notification_playbook_params(self, email_notification_details):
+ """
+ Collects and prepares parameters for creating or updating an email Event Notification.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_notification_details (dict): A dictionary containing the details required for creating or updating
+ the email Event Notification.
+ Returns:
+ list of dict: A list containing a dictionary with the parameters for creating the email Event Notification.
+ Description:
+ This function collects and structures the necessary parameters for creating or updating an email Event Notification.
+ It fetches additional details such as instance IDs and connector types from the Cisco Catalyst Center
+ and prepares the subscription endpoints and filters. The function handles missing or incorrect details by logging
+ appropriate messages and adjusting the status, and returns a list containing the required parameters.
+ """
+
+ email_notification_params = []
+ email_notf_name = email_notification_details.get('name')
+ playbook_params = {
+ 'name': email_notf_name,
+ 'description': email_notification_details.get('description'),
+ 'version': email_notification_details.get('version'),
+ 'subscriptionEndpoints': [],
+ 'filter': {}
+
+ }
+ # Collect the Instance ID of the email destination
+ self.log("Collecting parameters for Email Event Notification named '{0}'.".format(email_notf_name), "INFO")
+ instance = email_notification_details.get('instance')
+
+ if not instance:
+ self.status = "failed"
+ self.msg = "Instance name for Subscription Endpoints is required for Email notification '{0}'.".format(email_notf_name)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ subscription_details = self.get_email_subscription_detail(instance)
+ instance_id = None
+
+ if subscription_details:
+ instance_id = subscription_details.get("instanceId")
+ fromEmailAddress = email_notification_details.get("sender_email") or subscription_details.get("fromEmailAddress")
+ toEmailAddresses = email_notification_details.get("recipient_emails") or subscription_details.get("toEmailAddresses")
+ subject = email_notification_details.get("subject") or subscription_details.get("subject")
+ description = email_notification_details.get("instance_description") or subscription_details.get("description")
+
+ if not self.is_valid_email(fromEmailAddress):
+ self.status = "failed"
+ self.msg = (
+ "Unable to create/update Email event notification as the given sender_email '{0}' "
+ "are incorrect or invalid given in the playbook."
+ ).format(fromEmailAddress)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ for email in toEmailAddresses:
+ if not self.is_valid_email(email):
+ self.status = "failed"
+ self.msg = (
+ "Unable to create/update Email event notification as the given recipient_email '{0}' "
+ "is incorrect or invalid given in the playbook."
+ ).format(email)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ temp_subscript_endpoint = {
+ "instanceId": instance_id,
+ "subscriptionDetails": {
+ "connectorType": "EMAIL",
+ "fromEmailAddress": fromEmailAddress,
+ "toEmailAddresses": toEmailAddresses,
+ "subject": subject,
+ "name": instance,
+ "description": description
+ }
+ }
+ playbook_params["subscriptionEndpoints"].append(temp_subscript_endpoint)
+ else:
+ self.log("No subscription details found for instance '{0}'.".format(instance), "WARNING")
+
+ events = email_notification_details.get('events')
+ if events:
+ events_ids = self.get_event_ids(events)
+ if not events_ids:
+ self.status = "failed"
+ self.msg = (
+ "Unable to create/update Email event notification as the given event names '{0}' "
+ "are incorrect or could not be found."
+ ).format(str(events))
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+
+ playbook_params["filter"]["eventIds"] = events_ids
+
+ domain = email_notification_details.get("domain")
+ subdomains = email_notification_details.get("subdomains")
+ if domain and subdomains:
+ playbook_params["filter"]["domainsSubdomains"] = []
+ domain_dict = {
+ "domain": domain,
+ "subDomains": subdomains
+ }
+ playbook_params["filter"]["domainsSubdomains"].append(domain_dict)
+
+ # Add other filter parameters if present
+ filter_keys = ["event_types", "event_categories", "event_severities", "event_sources"]
+ filter_mapping = {
+ "event_types": "types",
+ "event_categories": "categories",
+ "event_severities": "severities",
+ "event_sources": "sources"
+ }
+
+ for key in filter_keys:
+ value = email_notification_details.get(key)
+ if value:
+ playbook_params["filter"][filter_mapping[key]] = value
+
+ sites = email_notification_details.get("sites")
+ if sites:
+ site_ids = self.get_site_ids(sites)
+ if not site_ids:
+ self.msg = "Unable to find the Site IDs for the given site(s) - '{0}' in the playbook's input.".format(sites)
+ self.log(self.msg, "INFO")
+
+ playbook_params["filter"]["siteIds"] = site_ids
+ email_notification_params.append(playbook_params)
+ self.log(
+ "Email notification playbook parameters collected successfully for "
+ "'{0}': {1}"
+ .format(email_notf_name, playbook_params), "INFO"
+ )
+
+ return email_notification_params
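+ # Illustrative sketch (hypothetical values): for email notifications the subscription
+ # endpoint carries the delivery details under 'subscriptionDetails', e.g.
+ # {
+ #     "instanceId": "<email-instance-uuid>",
+ #     "subscriptionDetails": {
+ #         "connectorType": "EMAIL",
+ #         "fromEmailAddress": "noc@example.com",
+ #         "toEmailAddresses": ["netops@example.com"],
+ #         "subject": "Catalyst Center Alert",
+ #         "name": "Email Instance",
+ #         "description": "Primary email instance"
+ #     }
+ # }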
+
+ def mandatory_email_notification_parameter_check(self, email_notification_params):
+ """
+ Checks for the presence of mandatory parameters required for adding an Email Event Subscription Notification.
+ Args:
+ email_notification_params (list of dict): A list containing a single dictionary with the parameters
+ for the email Event Notification.
+ Returns:
+ self: The instance of the class with updated status and message if any required parameter is missing.
+ Description:
+ This function verifies the presence of required parameters for creating or updating an email Event Notification.
+ If any required parameter is absent, it logs an error message, updates the status to "failed",
+ and sets the message attribute. It then returns the instance of the class with the updated status and message.
+ """
+
+ required_params_absent = []
+ email_notification_params = email_notification_params[0]
+ notification_name = email_notification_params.get("name")
+ description = email_notification_params.get("description")
+
+ if not notification_name:
+ required_params_absent.append("name")
+
+ if not description:
+ required_params_absent.append("description")
+
+ subs_endpoints = email_notification_params.get('subscriptionEndpoints')
+
+ if not subs_endpoints:
+ required_params_absent.extend(["instance", "sender_email", "recipient_emails", "subject"])
+ else:
+ subs_endpoints = subs_endpoints[0].get("subscriptionDetails")
+ if not subs_endpoints.get("fromEmailAddress"):
+ required_params_absent.append("sender_email")
+ if not subs_endpoints.get("toEmailAddresses"):
+ required_params_absent.append("recipient_emails")
+ if not subs_endpoints.get("subject"):
+ required_params_absent.append("subject")
+ if not subs_endpoints.get("name"):
+ required_params_absent.append("instance")
+
+ filters = email_notification_params.get("filter")
+ if not filters.get("eventIds"):
+ required_params_absent.append("events")
+
+ if required_params_absent:
+ self.status = "failed"
+ missing_params = ", ".join(required_params_absent)
+ self.msg = "Missing required parameters [{0}] for adding Email Events Subscription Notification '{1}'.".format(missing_params, notification_name)
+ self.log(self.msg, "ERROR")
+ self.check_return_status()
+ self.log("All mandatory parameters for Email Event Subscription Notification are present.", "INFO")
+
+ return self
+
+ def create_email_notification(self, email_notification_params):
+ """
+ Creates an Email Event Notification Subscription in Cisco Catalyst Center based on the provided parameters.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_notification_params (list): A list containing a dictionary having the required parameter for creating
+ email event subscription notification.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function makes an API call to create an Email Event Notification subscription in Cisco Catalyst Center.
+ It takes the provided parameters as input and constructs the payload for the API call. After making the
+ API call, it checks the status of the execution and updates the status and result attributes accordingly.
+ If the creation is successful, it sets the status to "success" and updates the result attribute with the
+ success message. If an error occurs during the process, it sets the status to "failed" and logs the
+ appropriate error message.
+ """
+
+ try:
+ notification_name = email_notification_params[0].get('name')
+ self.log("Requested payload for create_email_event_subscription - {0}".format(str(email_notification_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='create_email_event_subscription',
+ op_modifies=True,
+ params={'payload': email_notification_params}
+ )
+ time.sleep(1)
+ self.log("Received API response from 'create_email_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Email Event Subscription Notification '{0}' created successfully in Cisco Catalyst Center".format(notification_name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to add Email Events Subscription Notification '{0}' in Cisco Catalyst Center.".format(notification_name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Error while adding the Email Event Notification with name '{0}' in Cisco Catalyst Center: {1}".format(notification_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def compare_email_subs_endpoints(self, subs_endpoints, ccc_endpoints):
+ """
+ Compare email subscription endpoints parameters to determine if they match or not.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ subs_endpoints (dict): A dictionary containing the subscription endpoint parameters from the playbook.
+ ccc_endpoints (dict): A dictionary containing the current subscription endpoint parameters in Cisco Catalyst Center.
+ Returns:
+ bool: Returns True if there is any difference between the parameters in subs_endpoints and ccc_endpoints, otherwise False.
+ Description:
+ This function compares the specified parameters of email subscription endpoints from the provided dictionaries.
+ If any of the parameters differ between subs_endpoints and ccc_endpoints, the function returns True, indicating
+ that the subscription endpoints need to be updated. If all parameters match, the function returns False.
+ """
+
+ params_to_compare = ["fromEmailAddress", "toEmailAddresses", "subject", "name", "description"]
+ subs_endpoints = subs_endpoints.get("subscriptionDetails")
+ ccc_endpoints = ccc_endpoints.get("subscriptionDetails")
+
+ for param in params_to_compare:
+ playbook_param = subs_endpoints.get(param)
+ if isinstance(playbook_param, list):
+ ccc_list_param = ccc_endpoints.get(param)
+ list_needs_update = self.is_element_missing(playbook_param, ccc_list_param)
+
+ if list_needs_update:
+ self.log("""Parameter '{0}' given in the playbook does not match with the value present in Cisco Catalyst Center
+ so notification needs update.""".format(param), "INFO")
+ return True
+ elif subs_endpoints.get(param) != ccc_endpoints.get(param):
+ return True
+
+ return False
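+ # Illustrative sketch with hypothetical details: scalar fields are compared directly and
+ # 'toEmailAddresses' via is_element_missing(), so
+ # playbook: {"subscriptionDetails": {"subject": "New subject", "toEmailAddresses": ["a@example.com"]}}
+ # ccc:      {"subscriptionDetails": {"subject": "Old subject", "toEmailAddresses": ["a@example.com"]}}
+ # -> True (the subject differs), and the notification is updated.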
+
+ def email_notification_needs_update(self, email_notification_params, email_notification_in_ccc):
+ """
+ Checks if an Email notification needs an update based on a comparison between playbook and CCC configurations.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_notification_params (dict): Dictionary containing email notification parameters from the playbook.
+ email_notification_in_ccc (dict): Dictionary containing email notification parameters from Cisco Catalyst Center.
+ Returns:
+ bool: True if the email notification needs update, False otherwise.
+ Description:
+ This function checks if an email notification needs an update by comparing its parameters
+ with the corresponding parameters in Cisco Catalyst Center.
+ It compares the description, email destination, and filters between the playbook and CCC configurations.
+ If any parameter mismatch is found, it logs a message indicating the need for an update and returns True.
+ If all parameters match, it returns False indicating that no update is required.
+ """
+
+ email_notification_params = email_notification_params[0]
+ name = email_notification_params.get("name")
+ description_in_playbook = email_notification_params.get("description")
+ description_in_ccc = email_notification_in_ccc.get("description")
+ subs_endpoints = email_notification_params.get("subscriptionEndpoints")
+ ccc_endpoints = email_notification_in_ccc.get("subscriptionEndpoints")[0]
+
+ if description_in_playbook and description_in_playbook != description_in_ccc:
+ self.log("Parameter 'description' does not match with the value of description present in Cisco Catalyst Center "
+ "so given Email Event Notification '{0}' needs an update".format(name), "INFO")
+ return True
+
+ if subs_endpoints:
+ subs_endpoints = subs_endpoints[0]
+ notification_update = self.compare_email_subs_endpoints(subs_endpoints, ccc_endpoints)
+
+ if notification_update:
+ self.log("Given Email Instance details in the playbook is different from email instance present in Cisco Catalyst "
+ "Center so given email Event Subscription Notification {0} needs an update".format(name), "INFO")
+ return True
+
+ filters_in_playbook = email_notification_params.get("filter")
+ filters_in_ccc = email_notification_in_ccc.get("filter")
+
+ if self.compare_notification_filters(filters_in_playbook, filters_in_ccc):
+ self.log("Notification filters differ between the playbook and Cisco Catalyst Center. Email Event Subscription Notification "
+ "'{0}' needs an update.".format(name), "INFO")
+ return True
+
+ return False
+
+ def update_email_notification(self, email_notification_params, email_notification_in_ccc):
+ """
+ Updates an Email Event Notification subscription in Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ email_notification_params (dict): Dictionary containing parameters for updating the Email Event Notification.
+ email_notification_in_ccc (dict): Dictionary containing current configuration of the Email Event Notification in CCC.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This function updates an Email Event Notification subscription in Cisco Catalyst Center based on the provided parameters.
+ It constructs the payload for the update operation and sends it as an API request to the Cisco Catalyst Center.
+ After the update operation, it checks the status of the API request and logs appropriate messages based on the response.
+ """
+
+ email_notification_params = email_notification_params[0]
+ notification_update_params = []
+ name = email_notification_params.get("name")
+
+ # Prepare the parameters for the update operation
+ playbook_params = {
+ "subscriptionId": email_notification_in_ccc.get("subscriptionId"),
+ "name": name,
+ "description": email_notification_params.get("description", email_notification_in_ccc.get("description")),
+ "version": email_notification_params.get("version", email_notification_in_ccc.get("version")),
+ "filter": {},
+ "subscriptionEndpoints": []
+ }
+ subs_endpoints = email_notification_params.get("subscriptionEndpoints")
+ subs_endpoints_in_ccc = email_notification_in_ccc.get("subscriptionEndpoints")[0]
+ instance_id = subs_endpoints_in_ccc.get("instanceId")
+
+ if subs_endpoints:
+ playbook_params["subscriptionEndpoints"] = subs_endpoints
+ else:
+ playbook_params["subscriptionEndpoints"] = [{
+ "instanceId": instance_id,
+ "subscriptionDetails": {
+ "connectorType": "EMAIL"
+ },
+ "fromEmailAddress": subs_endpoints_in_ccc.get('fromEmailAddress'),
+ "toEmailAddresses": subs_endpoints_in_ccc.get('toEmailAddresses'),
+ "subject": subs_endpoints_in_ccc.get('subject'),
+ "name": subs_endpoints_in_ccc.get('name'),
+ "description": subs_endpoints_in_ccc.get('description')
+ }]
+
+ filter = email_notification_params.get("filter")
+ ccc_filter = email_notification_in_ccc.get("filter")
+ email_update_params = self.collect_notification_filter_params(playbook_params, filter, ccc_filter)
+ notification_update_params.append(email_update_params)
+
+ try:
+ self.log("Updating Email Event Notification '{0}' with following payload: {1}".format(name, str(notification_update_params)), "INFO")
+ response = self.dnac._exec(
+ family="event_management",
+ function='update_email_event_subscription',
+ op_modifies=True,
+ params={'payload': notification_update_params}
+ )
+ time.sleep(2)
+ self.log("Received API response from 'update_email_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Email Event Subscription Notification '{0}' updated successfully in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to update Email Event Subscription Notification '{0}' in Cisco Catalyst Center.".format(name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+ self.msg = failure_msg
+
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "An error occurred while updating Email Event Subscription Notification '{0}': {1}".format(name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def delete_events_subscription_notification(self, subscription_id, subscription_name):
+ """
+ Delete an event subscription notification from Cisco Catalyst Center.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ subscription_id (str): The ID of the subscription notification to be deleted.
+ subscription_name (str): The name of the subscription notification to be deleted.
+ Returns:
+ self (object): Returns the instance of the class with updated status and result.
+ Description:
+ This function deletes an event subscription notification from Cisco Catalyst Center using the provided
+ subscription ID for Webhook, Email and Syslog events subscription notification. If the deletion is successful,
+ it updates the status to 'success' and logs the success message. If the deletion fails, it updates the status
+ to 'failed' and logs the error message.
+ The function also calls 'check_status_api_events' to monitor the deletion status and ensure the process
+ is completed successfully before returning the result.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="event_management",
+ function="delete_event_subscriptions",
+ op_modifies=True,
+ params={"subscriptions": subscription_id},
+ )
+ self.log("Received API response from 'update_email_event_subscription': {0}".format(str(response)), "DEBUG")
+ status = response.get('statusUri')
+ status_execution_id = status.split("/")[-1]
+ status_response = self.check_status_api_events(status_execution_id)
+
+ if status_response['apiStatus'] == "SUCCESS":
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Event Subscription Notification '{0}' deleted successfully from Cisco Catalyst Center".format(subscription_name)
+ self.log(self.msg, "INFO")
+ self.result['response'] = self.msg
+ return self
+
+ self.status = "failed"
+ error_messages = status_response.get('errorMessage')
+
+ if error_messages:
+ failure_msg = error_messages.get('errors')
+ else:
+ failure_msg = "Unable to delete Event Subscription Notification '{0}' from Cisco Catalyst Center.".format(subscription_name)
+
+ self.log(failure_msg, "ERROR")
+ self.result['response'] = failure_msg
+ self.msg = failure_msg
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Exception occurred while deleting Event Subscription Notification '{0}' due to: {1}".format(subscription_name, str(e))
+ self.log(self.msg, "ERROR")
+
+ return self
+
+ def get_diff_merged(self, config):
+ """
+ Processes the configuration difference and merges them into Cisco Catalyst Center.
+ This method updates Cisco Catalyst Center configurations based on the differences detected
+ between the desired state (`want`) and the current state (`have`). It handles different
+ types of configurations such as syslog, SNMP, REST webhook, email, and ITSM settings.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing various destination settings that may include
+ syslog destination, SNMP destination, REST webhook destination,
+ email destination, and ITSM settings. Each key should point to a dictionary
+ that defines specific configuration for that setting type.
+ Return:
+ self (object): Returns the instance itself after potentially modifying internal state to reflect
+ the status of operation, messages to log, and response details.
+ Description:
+ This method acts as a controller that delegates specific tasks such as adding or updating
+ configurations for syslog, SNMP, REST webhook, email, and ITSM settings in the Cisco Catalyst
+ Center. It ensures required parameters are present, validates them, and calls the appropriate
+ methods to add or update the configurations. Error handling is included to manage any exceptions
+ or invalid configurations, updating the internal state to reflect these errors.
+ """
+
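+ # A minimal, illustrative sketch of the 'config' shape this controller walks through below
+ # (all keys are optional; only the sections supplied in the playbook are processed):
+ #   {
+ #       "webhook_destination": {...}, "email_destination": {...},
+ #       "syslog_destination": {...}, "snmp_destination": {...},
+ #       "itsm_setting": {...},
+ #       "webhook_event_notification": {...},
+ #       "email_event_notification": {...},
+ #       "syslog_event_notification": {...},
+ #   }
+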
+ # Create/Update Rest Webhook destination in Cisco Catalyst Center
+ if config.get('webhook_destination'):
+ webhook_details = self.want.get('webhook_details')
+ destination = webhook_details.get('name')
+
+ if not destination:
+ self.status = "failed"
+ self.msg = "Name is required parameter for adding/updating Webhook destination for creating/updating the event."
+ self.log(self.msg, "ERROR")
+ return self
+
+ is_destination_exist = False
+ for webhook_dict in self.have.get('webhook_destinations'):
+ if webhook_dict['name'] == destination:
+ webhook_dest_detail_in_ccc = webhook_dict
+ is_destination_exist = True
+ break
+ webhook_params = self.collect_webhook_playbook_params(webhook_details)
+
+ if webhook_params.get('method') not in ["POST", "PUT"]:
+ self.status = "failed"
+ self.msg = (
+ "Invalid Webhook method name '{0}' for creating/updating Webhook destination in Cisco Catalyst Center."
+ "Select one of the following method 'POST/PUT'.".format(webhook_params.get('method'))
+ )
+ self.log(self.msg, "ERROR")
+ return self
+
+ regex_pattern = re.compile(
+ r'^https://' # Ensure the URL starts with "https://"
+ r'('
+ r'(([A-Za-z0-9-*.&@]+\.)+[A-Za-z]{2,6})|' # Domain name with wildcards and special characters
+ r'localhost|' # Localhost
+ r'(?:(?:\d{1,3}\.){3}\d{1,3}\b\.?)' # Partial or complete IPv4 address with optional trailing dot
+ r'(\[[A-Fa-f0-9:]+\])?' # Optional IPv6 address in square brackets (e.g., [2001:db8::1])
+ r'|' # Alternation for different valid segments
+ r'([A-Za-z-_.&@]+)' # Hostname with allowed special characters
+ r')'
+ r'(:\d+)?' # Optional port
+ r'(\/[A-Za-z0-9._~:/?#[@!$&\'()*+,;=-]*)?' # Optional path
+ r'$' # End of the string
+ )
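+ # Illustrative (hypothetical) inputs checked against this pattern:
+ #   "https://webhook.example.com:8443/events" -> accepted
+ #   "https://10.20.30.40/notify"              -> accepted
+ #   "http://webhook.example.com"              -> rejected, the scheme must be https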
+ url = webhook_params.get('url')
+
+ # Check if the input string matches the pattern
+ if url and not regex_pattern.match(url):
+ self.status = "failed"
+ self.msg = (
+ "Given url '{0}' is invalid url for Creating/Updating Webhook destination. It must starts with "
+ "'https://' and follow the valid https url format.".format(url)
+ )
+ self.log(self.msg, "ERROR")
+ return self
+
+ if not is_destination_exist:
+ # Need to add the Webhook destination in Cisco Catalyst Center with the given playbook params
+ if not url:
+ self.status = "failed"
+ self.msg = "Url is required parameter for creating Webhook destination for creating/updating the event in Cisco Catalyst Center."
+ self.log(self.msg, "ERROR")
+ return self
+
+ self.add_webhook_destination(webhook_params).check_return_status()
+ else:
+ # Check whether the destination needs an update and, if so, update the Webhook destination
+ webhook_need_update = self.webhook_dest_needs_update(webhook_params, webhook_dest_detail_in_ccc)
+
+ if not webhook_need_update:
+ self.msg = "Webhook Destination with name '{0}' needs no update in Cisco Catalyst Center".format(destination)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the webhook destination with the given details in the playbook
+ self.update_webhook_destination(webhook_params, webhook_dest_detail_in_ccc).check_return_status()
+
+ # Create/Update Email destination in Cisco Catalyst Center
+ if config.get('email_destination'):
+ email_details = self.want.get('email_details')
+ email_params = self.collect_email_playbook_params(email_details)
+ primary_config = email_params.get("primarySMTPConfig")
+
+ if primary_config and primary_config.get("hostName"):
+ server_address = primary_config.get("hostName")
+ special_chars = r'[!@#$%^&*()_+\=\[\]{};\'\\:"|,<>\/?]'
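+ # Illustrative (hypothetical) hostnames: "smtp.example.com" passes this check, while
+ # "smtp#01.example.com" or "relay@corp" contain listed special characters and are rejected.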
+
+ if server_address and re.search(special_chars, server_address):
+ self.status = "failed"
+ self.msg = (
+ "Invalid Primary SMTP server hostname '{0}' as special character present in the input server "
+ "address so unable to add/update the email destination in CCC".format(server_address)
+ )
+ self.log(self.msg, "ERROR")
+ return self
+
+ if not self.have.get('email_destination'):
+ # Need to Add email destination in Cisco Catalyst Center with given playbook params
+ invalid_email_params = []
+
+ if email_params.get('primarySMTPConfig') and not email_params.get('primarySMTPConfig').get('hostName'):
+ invalid_email_params.append('hostName')
+ self.status = "failed"
+ self.msg = (
+ "Required parameter(s) '{0}' for configuring Email Destination in Cisco Catalyst Center "
+ "are missing.".format(str(invalid_email_params))
+ )
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+ self.log("Required parameter validated successfully for adding Email Destination in Cisco Catalyst Center.", "INFO")
+ self.add_email_destination(email_params).check_return_status()
+ else:
+ # Check destination needs update and if yes then update Email Destination
+ email_dest_detail_in_ccc = self.have.get('email_destination')
+ email_need_update = self.email_dest_needs_update(email_params, email_dest_detail_in_ccc)
+
+ if not email_need_update:
+ self.msg = "Email Destination needs no update in Cisco Catalyst Center"
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the email destination with given details in the playbook
+ self.update_email_destination(email_params, email_dest_detail_in_ccc).check_return_status()
+
+ # Create/Update Syslog destination in Cisco Catalyst Center
+ if config.get('syslog_destination'):
+ syslog_details = self.want.get('syslog_details')
+ name = syslog_details.get('name')
+ port = syslog_details.get('port')
+ server_address = syslog_details.get("server_address")
+
+ if not name:
+ self.status = "failed"
+ self.msg = "Name is required parameter for adding/updating syslog destination for creating/updating the event."
+ self.log(self.msg, "ERROR")
+ return self
+
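+ # The two port checks below accept, for example (hypothetical values), 514 or "514",
+ # and reject 0, 70000 or "syslog" as out-of-range or non-numeric ports.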
+ if isinstance(port, str):
+ if not port.isdigit() or (int(port) not in range(1, 65536)):
+ self.status = "failed"
+ self.msg = """Invalid Syslog destination port '{0}' given in playbook. Please choose a port within the range of
+ numbers (1, 65535)""".format(port)
+ self.log(self.msg, "ERROR")
+ return self
+
+ if isinstance(port, int) and (int(port) not in range(1, 65536)):
+ self.status = "failed"
+ self.msg = "Invalid Syslog destination port '{0}' given in playbook. Please choose a port within the range of numbers (1, 65535)".format(port)
+ self.log(self.msg, "ERROR")
+ return self
+
+ if server_address and not self.is_valid_server_address(server_address):
+ self.status = "failed"
+ self.msg = "Invalid server address '{0}' given in the playbook for configuring Syslog destination".format(server_address)
+ self.log(self.msg, "ERROR")
+ return self
+
+ destinations_in_ccc = self.have.get('syslog_destinations')
+ is_destination_exist_in_ccc = False
+
+ if destinations_in_ccc:
+ for destination in destinations_in_ccc:
+ if destination["name"] == name:
+ is_destination_exist_in_ccc = True
+ syslog_details_in_ccc = destination
+ break
+
+ if not is_destination_exist_in_ccc:
+ # We need to Add the Syslog Destination in the Catalyst Center
+ self.add_syslog_destination(syslog_details).check_return_status()
+ else:
+ # Check destination needs update and if yes then update Syslog Destination
+ syslog_need_update = self.syslog_dest_needs_update(syslog_details, syslog_details_in_ccc)
+ if not syslog_need_update:
+ self.msg = "Syslog Destination with name '{0}' needs no update in Cisco Catalyst Center".format(name)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the syslog destination with given
+ self.update_syslog_destination(syslog_details, syslog_details_in_ccc).check_return_status()
+
+ # Create/Update snmp destination in Cisco Catalyst Center
+ if config.get('snmp_destination'):
+ snmp_details = self.want.get("snmp_details")
+ destination = snmp_details.get("name")
+
+ if not destination:
+ self.status = "failed"
+ self.msg = "Name is required parameter for adding/updating SNMP destination for creating/updating the event."
+ self.log(self.msg, "ERROR")
+ return self
+ is_destination_exist = False
+
+ for snmp_dict in self.have.get('snmp_destinations'):
+ if snmp_dict['name'] == destination:
+ snmp_dest_detail_in_ccc = snmp_dict
+ is_destination_exist = True
+ break
+ snmp_params = self.collect_snmp_playbook_params(snmp_details)
+
+ if snmp_params.get('port'):
+ try:
+ port = int(snmp_params.get('port'))
+ if port not in range(1, 65536):
+ self.status = "failed"
+ self.msg = "Invalid Notification trap port '{0}' given in playbook. Select port from the number range(1, 65535)".format(port)
+ self.log(self.msg, "ERROR")
+ return self
+ except Exception as e:
+ self.status = "failed"
+ self.msg = "Invalid Notification trap port '{0}' given in playbook. Select a port from the range (1, 65535).".format(snmp_params.get('port'))
+ self.log(self.msg, "ERROR")
+ return self
+ privacy_type = snmp_params.get("snmpPrivacyType")
+
+ if privacy_type and privacy_type not in ["AES128", "DES"]:
+ self.status = "failed"
+ self.msg = """Invalid SNMP Privacy type '{0}' given in playbook. Select either AES128/DES as privacy type to add/update the snmp
+ destination '{1}' in the Cisco Catalyst Center.""".format(privacy_type, destination)
+ self.log(self.msg, "ERROR")
+ return self
+
+ if not is_destination_exist:
+ # Need to Add snmp destination in Cisco Catalyst Center with given playbook params
+ self.check_snmp_required_parameters(snmp_params).check_return_status()
+ self.log("""Required parameter validated successfully for adding SNMP Destination with name '{0}' in Cisco
+ Catalyst Center.""".format(destination), "INFO")
+ self.add_snmp_destination(snmp_params).check_return_status()
+ else:
+ # Check destination needs update and if yes then update SNMP Destination
+ snmp_need_update = self.snmp_dest_needs_update(snmp_params, snmp_dest_detail_in_ccc)
+ if not snmp_need_update:
+ self.msg = "SNMP Destination with name '{0}' needs no update in Cisco Catalyst Center".format(destination)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the SNMP destination with the given details in the playbook
+ self.update_snmp_destination(snmp_params, snmp_dest_detail_in_ccc).check_return_status()
+
+ # Create/Update ITSM Integration Settings in Cisco Catalyst Center
+ if config.get('itsm_setting'):
+ itsm_details = self.want.get('itsm_details')
+ itsm_name = itsm_details.get('instance_name')
+ if not itsm_name:
+ self.status = "failed"
+ self.msg = "Instance name is required parameter for adding/updating ITSM integration setting in Cisco Catalyst Center."
+ self.log(self.msg, "ERROR")
+ return self
+
+ itsm_params = self.collect_itsm_playbook_params(itsm_details)
+
+ is_itsm_exist = False
+ itsm_detail_in_ccc = self.have.get('itsm_setting')
+ if not itsm_detail_in_ccc:
+ self.log("There is no ITSM Intergartion setting present in Cisco Catalyst Center", "INFO")
+ else:
+ # Check whether the given ITSM integration is present in Cisco Catalyst Center or not.
+ for itsm in itsm_detail_in_ccc:
+ if itsm['name'] == itsm_name:
+ itsm_id = itsm['id']
+ is_itsm_exist = True
+ break
+
+ if not is_itsm_exist:
+ # Need to add the ITSM integration setting in Cisco Catalyst Center with the given playbook params
+ invalid_itsm_params = []
+ invalid_itsm_params = self.check_required_itsm_param(itsm_params, invalid_itsm_params)
+ connection_setting = itsm_params.get('data').get('ConnectionSettings')
+
+ if not connection_setting:
+ invalid_itsm_params.extend(["url", "username", "password"])
+ self.status = "failed"
+ self.msg = (
+ "Required parameter '{0}' for configuring ITSM Intergartion setting in Cisco Catalyst "
+ "is missing.".format(str(invalid_itsm_params))
+ )
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+ # Check whether the URL exists and, if it does, whether it is valid
+ url = connection_setting.get('Url')
+ if not url and "Url" not in invalid_itsm_params:
+ invalid_itsm_params.append("URL")
+
+ if invalid_itsm_params:
+ self.status = "failed"
+ self.msg = (
+ "Required parameter '{0}' for configuring ITSM Intergartion setting in Cisco Catalyst "
+ "is missing.".format(str(invalid_itsm_params))
+ )
+ self.log(self.msg, "ERROR")
+ self.result['response'] = self.msg
+ return self
+
+ regex_pattern = r'https://\S+'
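+ # Illustrative (hypothetical) values: "https://itsm.example.com/api" matches this pattern,
+ # while "http://itsm.example.com" does not, since the URL must start with 'https://'.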
+ if not re.match(regex_pattern, url):
+ self.status = "failed"
+ self.msg = "Given url '{0}' is invalid url for ITSM Intergartion setting. It must starts with 'https://'".format(url)
+ self.log(self.msg, "ERROR")
+ return self
+
+ self.log("Required parameter validated successfully for configuring ITSM Intergartion setting in Cisco Catalyst Center.", "INFO")
+ self.create_itsm_integration_setting(itsm_params).check_return_status()
+ else:
+ itsm_in_ccc = self.get_itsm_settings_by_id(itsm_id)
+ if not itsm_in_ccc:
+ self.status = "failed"
+ self.msg = "Unable to update as there is no ITSM Integration setting with name '{0}' present in Cisco Catalyst Center".format(itsm_name)
+ self.log(self.msg, "ERROR")
+ return self
+
+ # Check whether the ITSM integration setting needs an update and, if so, update it
+ itsm_need_update = self.itsm_needs_update(itsm_params, itsm_in_ccc)
+
+ if not itsm_need_update:
+ self.msg = "ITSM Intergartion setting with name '{0}' needs no update in Cisco Catalyst Center".format(itsm_name)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the ITSM integration settings with given details in the playbook
+ self.update_itsm_integration_setting(itsm_params, itsm_in_ccc).check_return_status()
+
+ # Create Rest Webhook Events Subscription Notification in Cisco Catalyst Center
+ if config.get('webhook_event_notification'):
+ webhook_notification_details = self.want.get('webhook_event_notification')
+ notification_name = webhook_notification_details.get('name')
+
+ if not notification_name:
+ self.status = "failed"
+ self.msg = (
+ "Name is required parameter for creating/updating webhook events subscription notification"
+ "in Cisco Catalyst Center."
+ )
+ self.log(self.msg, "ERROR")
+ return self
+
+ webhook_notification_params = self.collect_webhook_notification_playbook_params(webhook_notification_details)
+ current_webhook_notifications = self.have.get("webhook_subscription_notifications")
+ is_webhook_notification_exist = False
+
+ if current_webhook_notifications:
+ for notification in current_webhook_notifications:
+ if notification["name"] == notification_name:
+ is_webhook_notification_exist = True
+ webhook_notification_in_ccc = notification
+ break
+
+ if not is_webhook_notification_exist:
+ # Need to create webhook event notification in Cisco Catalyst Center
+ self.mandatory_webhook_notification_parameter_check(webhook_notification_params).check_return_status()
+ self.log("""Successfully validated the required parameter for creating the Webhook Event Notification with
+ given name '{0}'""".format(notification_name), "INFO")
+ self.create_webhook_notification(webhook_notification_params).check_return_status()
+ else:
+ # Check whether the webhook event notification needs any update or not.
+ notification_update = self.webhook_notification_needs_update(webhook_notification_params, webhook_notification_in_ccc)
+ if not notification_update:
+ self.msg = "Webhook Notification with name '{0}' needs no update in Cisco Catalyst Center".format(notification_name)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the webhook notification with given playbook parameters
+ self.update_webhook_notification(webhook_notification_params, webhook_notification_in_ccc).check_return_status()
+
+ # Create Email Events Subscription Notification in Cisco Catalyst Center
+ if config.get('email_event_notification'):
+ email_notification_details = self.want.get('email_event_notification')
+ notification_name = email_notification_details.get('name')
+
+ if not notification_name:
+ self.status = "failed"
+ self.msg = (
+ "Name is required parameter for creating/updating Email events subscription notification"
+ "in Cisco Catalyst Center."
+ )
+ self.log(self.msg, "ERROR")
+ return self
+
+ email_notification_params = self.collect_email_notification_playbook_params(email_notification_details)
+ current_email_notifications = self.have.get("email_subscription_notifications")
+ is_email_notification_exist = False
+
+ if current_email_notifications:
+ for notification in current_email_notifications:
+ if notification["name"] == notification_name:
+ is_email_notification_exist = True
+ email_notification_in_ccc = notification
+ break
+
+ if not is_email_notification_exist:
+ # Need to create email event notification in Cisco Catalyst Center
+ self.mandatory_email_notification_parameter_check(email_notification_params).check_return_status()
+ self.log("""Successfully validated the required parameter for creating the email Event Notification with
+ given name '{0}'""".format(notification_name), "INFO")
+ self.create_email_notification(email_notification_params).check_return_status()
+ else:
+ # Check whether the email event notification needs any update or not.
+ notification_update = self.email_notification_needs_update(email_notification_params, email_notification_in_ccc)
+
+ if not notification_update:
+ self.msg = "Email Notification with name '{0}' needs no update in Cisco Catalyst Center".format(notification_name)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the email notification with given playbook parameters
+ self.update_email_notification(email_notification_params, email_notification_in_ccc).check_return_status()
+
+ # Create Syslog Events Subscription Notification in Cisco Catalyst Center
+ if config.get('syslog_event_notification'):
+ syslog_notification_details = self.want.get('syslog_event_notification')
+ notification_name = syslog_notification_details.get('name')
+
+ if not notification_name:
+ self.status = "failed"
+ self.msg = (
+ "Name is required parameter for creating/updating Syslog events subscription notification"
+ "in Cisco Catalyst Center."
+ )
+ self.log(self.msg, "ERROR")
+ return self
+
+ syslog_notification_params = self.collect_syslog_notification_playbook_params(syslog_notification_details)
+ current_syslog_notifications = self.have.get("syslog_subscription_notifications")
+ is_syslog_notification_exist = False
+
+ if current_syslog_notifications:
+ for notification in current_syslog_notifications:
+ if notification["name"] == notification_name:
+ is_syslog_notification_exist = True
+ syslog_notification_in_ccc = notification
+ break
+
+ if not is_syslog_notification_exist:
+ # Need to create syslog event notification in Cisco Catalyst Center
+ self.mandatory_syslog_notification_parameter_check(syslog_notification_params).check_return_status()
+ self.log("""Successfully validated the required parameter for creating the Syslog Event Notification with
+ given name '{0}'""".format(notification_name), "INFO")
+ self.create_syslog_notification(syslog_notification_params).check_return_status()
+ else:
+ # Check whether the syslog event notification needs any update or not.
+ sys_notification_update = self.syslog_notification_needs_update(syslog_notification_params, syslog_notification_in_ccc)
+ if not sys_notification_update:
+ self.msg = "Syslog Notification with name '{0}' needs no update in Cisco Catalyst Center".format(notification_name)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+ else:
+ # Update the syslog notification with given playbook parameters
+ self.update_syslog_notification(syslog_notification_params, syslog_notification_in_ccc).check_return_status()
+
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Handles the deletion of ITSM integration settings in Cisco Catalyst Center based on the configuration provided.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): A dictionary containing the 'itsm_setting' key with details about the ITSM integration to be deleted.
+ Returns:
+ self (object): The instance of the class with updated status, results, and message based on the deletion operation.
+ Description:
+ This function is responsible for deleting an ITSM setting from Cisco Catalyst Center if it exists.
+ It checks whether the specified ITSM setting exists in the current Catalyst Center configuration. If it exists,
+ the function proceeds to delete it. If it does not exist or is already deleted, the function updates the instance
+ status and results to reflect that no change was needed.
+ """
+
+ if config.get('webhook_destination'):
+ self.status = "failed"
+ self.msg = "Deleting the Webhook destination is not supported in Cisco Catalyst Center because of API limitations"
+ self.log(self.msg, "ERROR")
+ self.result['changed'] = False
+ return self
+
+ if config.get('email_destination'):
+ self.status = "failed"
+ self.msg = "Deleting the Email destination is not supported in Cisco Catalyst Center because of API limitations"
+ self.log(self.msg, "ERROR")
+ self.result['changed'] = False
+ return self
+
+ if config.get('syslog_destination'):
+ self.status = "failed"
+ self.msg = "Deleting the Syslog destination is not supported in Cisco Catalyst Center because of API limitations"
+ self.log(self.msg, "ERROR")
+ self.result['changed'] = False
+ return self
+
+ if config.get('snmp_destination'):
+ self.status = "failed"
+ self.msg = "Deleting the SNMP destination is not supported in Cisco Catalyst Center because of API limitations"
+ self.log(self.msg, "ERROR")
+ self.result['changed'] = False
+ return self
+
+ # Delete ITSM Integration setting from Cisco Catalyst Center
+ if config.get('itsm_setting'):
+ itsm_details = self.want.get('itsm_details')
+ itsm_name = itsm_details.get('instance_name')
+ itsm_detail_in_ccc = self.have.get('itsm_setting')
+ if not itsm_detail_in_ccc:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = """There is no ITSM Intergartion setting present in Cisco Catalyst Center so cannot delete
+ the ITSM Integartion setting with name '{0}'""".format(itsm_name)
+ self.log(self.name, "INFO")
+ return self
+
+ # Check whether the given ITSM integration is present in Cisco Catalyst Center or not
+ itsm_exist = False
+ for itsm in itsm_detail_in_ccc:
+ if itsm['name'] == itsm_name:
+ itsm_id = itsm.get('id')
+ itsm_exist = True
+ break
+ if itsm_exist:
+ self.delete_itsm_integration_setting(itsm_name, itsm_id).check_return_status()
+ else:
+ self.msg = "Unable to delete ITSM Integartion setting with name '{0}' as it is not present in Cisco Catalyst Center".format(itsm_name)
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+
+ # Delete Webhook Events Subscription Notification from Cisco Catalyst Center
+ if config.get('webhook_event_notification'):
+ webhook_notification_details = self.want.get('webhook_event_notification')
+ webhook_notification_name = webhook_notification_details.get('name')
+ current_webhook_notifications = self.have.get("webhook_subscription_notifications")
+ webhook_notification_id = None
+
+ if not current_webhook_notifications:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = (
+ "There is no Webhook Event Subscription Notification with name '{0}' present in in Cisco Catalyst Center "
+ "so cannot delete the notification.".format(webhook_notification_name)
+ )
+ self.log(self.msg, "INFO")
+ return self
+
+ for notification in current_webhook_notifications:
+ if notification["name"] == webhook_notification_name:
+ webhook_notification_id = notification["subscriptionId"]
+ break
+
+ if webhook_notification_id:
+ self.delete_events_subscription_notification(webhook_notification_id, webhook_notification_name).check_return_status()
+ else:
+ self.msg = (
+ "Unable to delete Webhook Event Subscription Notification with name '{0}' as it is not present in "
+ "Cisco Catalyst Center.".format(webhook_notification_name)
+ )
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+
+ # Delete Email Events Subscription Notification from Cisco Catalyst Center
+ if config.get('email_event_notification'):
+ email_notification_details = self.want.get('email_event_notification')
+ email_notification_name = email_notification_details.get('name')
+ current_email_notifications = self.have.get("email_subscription_notifications")
+ email_notification_id = None
+
+ if not current_email_notifications:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = (
+ "There is no Email Event Subscription Notification with name '{0}' present in in Cisco Catalyst Center "
+ "so cannot delete the notification.".format(email_notification_name)
+ )
+ self.log(self.msg, "INFO")
+ return self
+
+ for notification in current_email_notifications:
+ if notification["name"] == email_notification_name:
+ email_notification_id = notification["subscriptionId"]
+ break
+
+ if email_notification_id:
+ self.delete_events_subscription_notification(email_notification_id, email_notification_name).check_return_status()
+ else:
+ self.msg = (
+ "Unable to delete Email Event Subscription Notification with name '{0}' as it is not present in "
+ "Cisco Catalyst Center.".format(email_notification_name)
+ )
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+
+ # Delete Syslog Events Subscription Notification from Cisco Catalyst Center
+ if config.get('syslog_event_notification'):
+ syslog_notification_details = self.want.get('syslog_event_notification')
+ syslog_notification_name = syslog_notification_details.get('name')
+ current_syslog_notifications = self.have.get("syslog_subscription_notifications")
+ syslog_notification_id = None
+
+ if not current_syslog_notifications:
+ self.status = "success"
+ self.result['changed'] = False
+ self.msg = (
+ "There is no Syslog Event Subscription Notification with name '{0}' present in in Cisco Catalyst Center "
+ "so cannot delete the notification.".format(syslog_notification_name)
+ )
+ self.log(self.msg, "INFO")
+ return self
+
+ for notification in current_syslog_notifications:
+ if notification["name"] == syslog_notification_name:
+ syslog_notification_id = notification["subscriptionId"]
+ break
+
+ if syslog_notification_id:
+ self.delete_events_subscription_notification(syslog_notification_id, syslog_notification_name).check_return_status()
+ else:
+ self.msg = (
+ "Unable to delete Syslog Event Subscription Notification with name '{0}' as it is not present in "
+ "Cisco Catalyst Center.".format(syslog_notification_name)
+ )
+ self.log(self.msg, "INFO")
+ self.result['changed'] = False
+ self.result['response'] = self.msg
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the addition/update status of configurations in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): The configuration details to be verified.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method verifies whether the specified configurations have been successfully added/updated
+ in Cisco Catalyst Center as desired.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ if config.get('syslog_destination'):
+ syslog_details = self.want.get('syslog_details')
+ syslog_name = syslog_details.get('name')
+ destinations_in_ccc = self.have.get('syslog_destinations')
+ is_destination_exist_in_ccc = False
+
+ if destinations_in_ccc:
+ for destination in destinations_in_ccc:
+ if destination["name"] == syslog_name:
+ is_destination_exist_in_ccc = True
+ break
+
+ if is_destination_exist_in_ccc:
+ self.status = "success"
+ msg = """Requested Syslog Destination '{0}' have been successfully added/updated to the Cisco Catalyst Center and their
+ addition/updation has been verified.""".format(syslog_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that the Syslog destination with name
+ '{0}' addition/updation task may not have executed successfully.""".format(syslog_name), "INFO")
+
+ if config.get('snmp_destination'):
+ snmp_details = self.want.get('snmp_details')
+ snmp_dest_name = snmp_details.get('name')
+ is_snmp_dest_exist = False
+
+ for snmp_dict in self.have.get('snmp_destinations'):
+ if snmp_dict['name'] == snmp_dest_name:
+ is_snmp_dest_exist = True
+ break
+
+ if is_snmp_dest_exist:
+ self.status = "success"
+ msg = """Requested SNMP Destination '{0}' have been successfully added/updated to the Cisco Catalyst Center and their
+ addition/updation has been verified.""".format(snmp_dest_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that the SNMP destination with name
+ '{0}' addition/updation task may not have executed successfully.""".format(snmp_dest_name), "INFO")
+
+ if config.get('webhook_destination'):
+ webhook_details = self.want.get('webhook_details')
+ webhook_name = webhook_details.get('name')
+
+ is_webhook_dest_exist = False
+ for webhook_dict in self.have.get('webhook_destinations'):
+ if webhook_dict['name'] == webhook_name:
+ is_webhook_dest_exist = True
+ break
+ if is_webhook_dest_exist:
+ self.status = "success"
+ msg = """Requested Rest Webhook Destination '{0}' have been successfully added/updated to the Cisco Catalyst Center and their
+ addition/updation has been verified.""".format(webhook_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Rest Webhook destination with name
+ '{0}' addition/updation task may not have executed successfully.""".format(webhook_name), "INFO")
+
+ if config.get('email_destination'):
+
+ if self.have.get('email_destination'):
+ self.status = "success"
+ msg = """Requested Email Destination have been successfully configured to the Cisco Catalyst Center and their
+ configuration has been verified."""
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Email destination configuration
+ task may not have executed successfully.""", "INFO")
+
+ if config.get('itsm_setting'):
+ itsm_details = self.want.get('itsm_details')
+ itsm_name = itsm_details.get('instance_name')
+ is_itsm_exist = False
+ itsm_detail_in_ccc = self.have.get('itsm_setting')
+
+ if not itsm_detail_in_ccc:
+ self.log("There is no ITSM Intergartion setting present in Cisco Catalyst Center", "INFO")
+ else:
+ # Check whether the given ITSM integration is present in Cisco Catalyst Center or not.
+ for itsm in itsm_detail_in_ccc:
+ if itsm['name'] == itsm_name:
+ is_itsm_exist = True
+ break
+
+ if is_itsm_exist:
+ self.status = "success"
+ msg = """Requested ITSM Integration setting '{0}' have been successfully added/updated to the Cisco Catalyst Center
+ and their addition/updation has been verified.""".format(itsm_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that ITSM Integration setting with
+ name '{0}' addition/updation task may not have executed successfully.""".format(itsm_name), "INFO")
+
+ if config.get('webhook_event_notification'):
+ webhook_notification_details = self.want.get('webhook_event_notification')
+ web_notification_name = webhook_notification_details.get('name')
+ current_webhook_notifications = self.have.get("webhook_subscription_notifications")
+ is_webhook_notification_exist = False
+
+ if current_webhook_notifications:
+ for notification in current_webhook_notifications:
+ if notification["name"] == web_notification_name:
+ is_webhook_notification_exist = True
+ break
+
+ if is_webhook_notification_exist:
+ self.status = "success"
+ msg = """Requested Webhook Events Subscription Notification '{0}' have been successfully created/updated to the Cisco Catalyst Center
+ and their creation/updation has been verified.""".format(web_notification_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Webhook Event Subscription Notification with
+ name '{0}' creation/updation task may not have executed successfully.""".format(web_notification_name), "INFO")
+
+ if config.get('email_event_notification'):
+ email_notification_details = self.want.get('email_event_notification')
+ email_notification_name = email_notification_details.get('name')
+ current_email_notifications = self.have.get("email_subscription_notifications")
+ is_email_notification_exist = False
+
+ if current_email_notifications:
+ for notification in current_email_notifications:
+ if notification["name"] == email_notification_name:
+ is_email_notification_exist = True
+ break
+
+ if is_email_notification_exist:
+ self.status = "success"
+ msg = """Requested Email Events Subscription Notification '{0}' have been successfully created/updated to the Cisco Catalyst Center
+ and their creation/updation has been verified.""".format(email_notification_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Email Event Subscription Notification with
+ name '{0}' creation/updation task may not have executed successfully.""".format(email_notification_name), "INFO")
+
+ if config.get('syslog_event_notification'):
+ syslog_notification_details = self.want.get('syslog_event_notification')
+ syslog_notification_name = syslog_notification_details.get('name')
+ current_syslog_notifications = self.have.get("syslog_subscription_notifications")
+ is_syslog_notification_exist = False
+
+ if current_syslog_notifications:
+ for notification in current_syslog_notifications:
+ if notification["name"] == syslog_notification_name:
+ is_syslog_notification_exist = True
+ break
+
+ if is_syslog_notification_exist:
+ self.status = "success"
+ msg = """Requested Syslog Events Subscription Notification '{0}' have been successfully created/updated to the Cisco Catalyst Center
+ and their creation/updation has been verified.""".format(syslog_notification_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Syslog Event Subscription Notification with
+ name '{0}' creation/updation task may not have executed successfully.""".format(syslog_notification_name), "INFO")
+
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Verify the deletion status of ITSM Integration Setting in Cisco Catalyst Center.
+ Parameters:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ config (dict): The configuration details to be verified.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method checks the deletion status of a configuration in Cisco Catalyst Center.
+ It validates whether the specified ITSM Integration setting was deleted from Cisco Catalyst Center.
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(str(self.have)), "INFO")
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ if config.get('itsm_setting'):
+ itsm_details = self.want.get('itsm_details')
+ itsm_name = itsm_details.get('instance_name')
+ itsm_detail_in_ccc = self.have.get('itsm_setting')
+ itsm_deleted = True
+
+ # Check whether the given ITSM integration is present in Cisco Catalyst Center or not
+ if not itsm_detail_in_ccc:
+ itsm_deleted = True
+ else:
+ for itsm in itsm_detail_in_ccc:
+ if itsm['name'] == itsm_name:
+ itsm_deleted = False
+ break
+
+ if itsm_deleted:
+ self.status = "success"
+ msg = """Requested ITSM Integration setting '{0}' have been successfully deleted from the Cisco Catalyst Center
+ and their deletion has been verified.""".format(itsm_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that ITSM Integration setting with
+ name '{0}' deletion task may not have executed successfully.""".format(itsm_name), "INFO")
+
+ if config.get('webhook_event_notification'):
+ webhook_notification_details = self.want.get('webhook_event_notification')
+ web_notification_name = webhook_notification_details.get('name')
+ current_webhook_notifications = self.have.get("webhook_subscription_notifications")
+ is_webhook_notification_deleted = True
+
+ if current_webhook_notifications:
+ for notification in current_webhook_notifications:
+ if notification["name"] == web_notification_name:
+ is_webhook_notification_deleted = False
+ break
+
+ if is_webhook_notification_deleted:
+ self.status = "success"
+ msg = """Requested Webhook Events Subscription Notification '{0}' have been successfully deleted from the Cisco Catalyst Center
+ and their deletion has been verified.""".format(web_notification_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Webhook Events Subscription Notification
+ with name '{0}' deletion task may not have executed successfully.""".format(web_notification_name), "INFO")
+
+ if config.get('email_event_notification'):
+ email_notification_details = self.want.get('email_event_notification')
+ email_notification_name = email_notification_details.get('name')
+ current_email_notifications = self.have.get("email_subscription_notifications")
+ is_email_notification_deleted = True
+
+ if current_email_notifications:
+ for notification in current_email_notifications:
+ if notification["name"] == email_notification_name:
+ is_email_notification_deleted = False
+ break
+
+ if is_email_notification_deleted:
+ self.status = "success"
+ msg = """Requested Email Events Subscription Notification '{0}' have been successfully deleted from the Cisco Catalyst Center
+ and their deletion has been verified.""".format(email_notification_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Email Events Subscription Notification
+ with name '{0}' deletion task may not have executed successfully.""".format(email_notification_name), "INFO")
+
+ if config.get('syslog_event_notification'):
+ syslog_notification_details = self.want.get('syslog_event_notification')
+ syslog_notification_name = syslog_notification_details.get('name')
+ current_syslog_notifications = self.have.get("syslog_subscription_notifications")
+ is_syslog_notification_deleted = True
+
+ if current_syslog_notifications:
+ for notification in current_syslog_notifications:
+ if notification["name"] == syslog_notification_name:
+ is_syslog_notification_deleted = False
+ break
+
+ if is_syslog_notification_deleted:
+ self.status = "success"
+ msg = """Requested Syslog Events Subscription Notification '{0}' have been successfully deleted from the Cisco Catalyst Center
+ and their deletion has been verified.""".format(syslog_notification_name)
+ self.log(msg, "INFO")
+ else:
+ self.log("""Playbook's input does not match with Cisco Catalyst Center, indicating that Syslog Events Subscription Notification
+ with name '{0}' deletion task may not have executed successfully.""".format(syslog_notification_name), "INFO")
+
+ return self
+
+
+def main():
+ """ main entry point for module execution
+ """
+
+ element_spec = {'dnac_host': {'required': True, 'type': 'str'},
+ 'dnac_port': {'type': 'str', 'default': '443'},
+ 'dnac_username': {'type': 'str', 'default': 'admin', 'aliases': ['user']},
+ 'dnac_password': {'type': 'str', 'no_log': True},
+ 'dnac_verify': {'type': 'bool', 'default': 'True'},
+ 'dnac_version': {'type': 'str', 'default': '2.2.3.3'},
+ 'dnac_debug': {'type': 'bool', 'default': False},
+ 'dnac_log_level': {'type': 'str', 'default': 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ 'dnac_log': {'type': 'bool', 'default': False},
+ 'validate_response_schema': {'type': 'bool', 'default': True},
+ 'config_verify': {'type': 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ 'config': {'required': True, 'type': 'list', 'elements': 'dict'},
+ 'state': {'default': 'merged', 'choices': ['merged', 'deleted']}
+ }
+
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+
+ ccc_events = Events(module)
+ state = ccc_events.params.get("state")
+
+ if state not in ccc_events.supported_states:
+ ccc_events.status = "invalid"
+ ccc_events.msg = "State {0} is invalid".format(state)
+ ccc_events.check_return_status()
+
+ ccc_events.validate_input().check_return_status()
+ config_verify = ccc_events.params.get("config_verify")
+
+ for config in ccc_events.validated_config:
+ ccc_events.reset_values()
+ ccc_events.get_want(config).check_return_status()
+ ccc_events.get_have(config).check_return_status()
+ ccc_events.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ ccc_events.verify_diff_state_apply[state](config).check_return_status()
+
+ module.exit_json(**ccc_events.result)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/execute_suggested_actions_commands.py b/ansible_collections/cisco/dnac/plugins/modules/execute_suggested_actions_commands.py
index 43fce3203..8c1c23085 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/execute_suggested_actions_commands.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/execute_suggested_actions_commands.py
@@ -14,7 +14,7 @@ description:
This API triggers the execution of the suggested actions for an issue, given the Issue Id. It will return an
execution Id. At the completion of the execution, the output of the commands associated with the suggested actions
will be provided.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -27,8 +27,8 @@ options:
description: Contains the actual value for the entity type that has been defined.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Issues ExecuteSuggestedActionsCommands
description: Complete reference of the ExecuteSuggestedActionsCommands API.
@@ -56,7 +56,6 @@ EXAMPLES = r"""
entity_value: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/file_import.py b/ansible_collections/cisco/dnac/plugins/modules/file_import.py
index f9909eddd..0360b7c87 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/file_import.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/file_import.py
@@ -23,8 +23,8 @@ options:
description: NameSpace path parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for File UploadFile
description: Complete reference of the UploadFile API.
@@ -52,7 +52,6 @@ EXAMPLES = r"""
nameSpace: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/file_info.py b/ansible_collections/cisco/dnac/plugins/modules/file_info.py
index b9bdba4b9..c6d3f9108 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/file_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/file_info.py
@@ -36,8 +36,8 @@ options:
- The filename used to save the download file.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for File DownloadAFileByFileId
description: Complete reference of the DownloadAFileByFileId API.
@@ -63,13 +63,9 @@ EXAMPLES = r"""
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
fileId: string
- dirPath: /tmp/downloads/Test-242.bin
- saveFile: true
- filename: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/file_namespace_files_info.py b/ansible_collections/cisco/dnac/plugins/modules/file_namespace_files_info.py
index 0e16dbc1a..dbe7b697a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/file_namespace_files_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/file_namespace_files_info.py
@@ -24,8 +24,8 @@ options:
- NameSpace path parameter. A listing of fileId's.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for File GetListOfFiles
description: Complete reference of the GetListOfFiles API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/file_namespaces_info.py b/ansible_collections/cisco/dnac/plugins/modules/file_namespaces_info.py
index 849268111..e8370f1aa 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/file_namespaces_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/file_namespaces_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for File GetListOfAvailableNamespaces
description: Complete reference of the GetListOfAvailableNamespaces API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/flexible_report_content_info.py b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_content_info.py
new file mode 100644
index 000000000..5e4edffe5
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_content_info.py
@@ -0,0 +1,72 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: flexible_report_content_info
+short_description: Information module for Flexible Report Content
+description:
+- Get Flexible Report Content by id.
+- >
+ This is used to download the flexible report. The API returns report content. Save the response to a file by
+ converting the response data as a blob and setting the file format available from content-disposition response
+ header.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ reportId:
+ description:
+ - ReportId path parameter. Id of the report.
+ type: str
+ executionId:
+ description:
+ - ExecutionId path parameter. Id of execution.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Reports DownloadFlexibleReport
+ description: Complete reference of the DownloadFlexibleReport API.
+ link: https://developer.cisco.com/docs/dna-center/#!download-flexible-report
+notes:
+ - SDK Method used are
+ reports.Reports.download_flexible_report,
+
+ - Paths used are
+ get /dna/data/api/v1/flexible-report/report/content/{reportId}/{executionId},
+
+"""
+
+EXAMPLES = r"""
+- name: Get Flexible Report Content by id
+ cisco.dnac.flexible_report_content_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ reportId: string
+ executionId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: str
+ sample: >
+ "'string'"
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/flexible_report_execute.py b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_execute.py
new file mode 100644
index 000000000..54286db87
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_execute.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: flexible_report_execute
+short_description: Resource module for Flexible Report Execute
+description:
+- Manage operation create of the resource Flexible Report Execute.
+- This API is used for executing the report.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ reportId:
+ description: ReportId path parameter. Id of the Report.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Reports ExecutingTheFlexibleReport
+ description: Complete reference of the ExecutingTheFlexibleReport API.
+ link: https://developer.cisco.com/docs/dna-center/#!executing-the-flexible-report
+notes:
+ - SDK Method used are
+ reports.Reports.executing_the_flexible_report,
+
+ - Paths used are
+ post /dna/data/api/v1/flexible-report/report/{reportId}/execute,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.flexible_report_execute:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ reportId: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "executionId": "string",
+ "startTime": 0,
+ "endTime": 0,
+ "processStatus": {},
+ "requestStatus": "string",
+ "errors": [
+ "string"
+ ],
+ "warnings": [
+ {}
+ ]
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/flexible_report_executions_info.py b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_executions_info.py
new file mode 100644
index 000000000..ba987929a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_executions_info.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: flexible_report_executions_info
+short_description: Information module for Flexible Report Executions
+description:
+- Get all Flexible Report Executions.
+- Get Execution Id by Report Id.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ reportId:
+ description:
+ - ReportId path parameter. Id of the report.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Reports GetExecutionIdByReportId
+ description: Complete reference of the GetExecutionIdByReportId API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-execution-id-by-report-id
+notes:
+ - SDK Method used are
+ reports.Reports.get_execution_id_by_report_id,
+
+ - Paths used are
+ get /dna/data/api/v1/flexible-report/report/{reportId}/executions,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Flexible Report Executions
+ cisco.dnac.flexible_report_executions_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ reportId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "reportId": "string",
+ "reportName": "string",
+ "executions": [
+ {
+ "executionId": "string",
+ "startTime": 0,
+ "endTime": 0,
+ "processStatus": "string",
+ "requestStatus": "string",
+ "errors": [
+ "string"
+ ],
+ "warnings": [
+ {}
+ ]
+ }
+ ],
+ "executionCount": 0,
+ "reportWasExecuted": true
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule.py b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule.py
new file mode 100644
index 000000000..204886170
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule.py
@@ -0,0 +1,65 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: flexible_report_schedule
+short_description: Resource module for Flexible Report Schedule
+description:
+- Manage operation update of the resource Flexible Report Schedule.
+- Update schedule of flexible report.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ reportId:
+ description: ReportId path parameter. Id of the report.
+ type: str
+ schedule:
+ description: Schedule information.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Reports UpdateScheduleOfFlexibleReport
+ description: Complete reference of the UpdateScheduleOfFlexibleReport API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-schedule-of-flexible-report
+notes:
+ - SDK Method used are
+ reports.Reports.update_schedule_of_flexible_report,
+
+ - Paths used are
+ put /dna/data/api/v1/flexible-report/schedule/{reportId},
+
+"""
+
+EXAMPLES = r"""
+- name: Update by id
+ cisco.dnac.flexible_report_schedule:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ reportId: string
+ schedule: {}
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "schedule": {}
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule_info.py b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule_info.py
new file mode 100644
index 000000000..ac0987925
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedule_info.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: flexible_report_schedule_info
+short_description: Information module for Flexible Report Schedule
+description:
+- Get Flexible Report Schedule by id.
+- Get flexible report schedule by report id.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ reportId:
+ description:
+ - ReportId path parameter. Id of the report.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Reports GetFlexibleReportScheduleByReportId
+ description: Complete reference of the GetFlexibleReportScheduleByReportId API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-flexible-report-schedule-by-report-id
+notes:
+ - SDK Methods used are
+ reports.Reports.get_flexible_report_schedule_by_report_id,
+
+ - Paths used are
+ get /dna/data/api/v1/flexible-report/schedule/{reportId},
+
+"""
+
+EXAMPLES = r"""
+- name: Get Flexible Report Schedule by id
+ cisco.dnac.flexible_report_schedule_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ reportId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample:
+ - {}
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedules_info.py b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedules_info.py
new file mode 100644
index 000000000..684ed4947
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/flexible_report_schedules_info.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: flexible_report_schedules_info
+short_description: Information module for Flexible Report Schedules
+description:
+- Get all Flexible Report Schedules.
+- Get all flexible report schedules.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Reports GetAllFlexibleReportSchedules
+ description: Complete reference of the GetAllFlexibleReportSchedules API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-all-flexible-report-schedules
+notes:
+ - SDK Methods used are
+ reports.Reports.get_all_flexible_report_schedules,
+
+ - Paths used are
+ get /dna/data/api/v1/flexible-report/schedules,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Flexible Report Schedules
+ cisco.dnac.flexible_report_schedules_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "reportId": "string",
+ "schedule": {
+ "type": "string",
+ "dateTime": 0
+ },
+ "reportName": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_credential_delete.py b/ansible_collections/cisco/dnac/plugins/modules/global_credential_delete.py
index 3dd9ad1d1..4999bb6e1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_credential_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_credential_delete.py
@@ -20,8 +20,8 @@ options:
description: GlobalCredentialId path parameter. ID of global-credential.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery DeleteGlobalCredentialsById
description: Complete reference of the DeleteGlobalCredentialsById API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
globalCredentialId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_credential_info.py b/ansible_collections/cisco/dnac/plugins/modules/global_credential_info.py
index 2d46ca10f..6fb74241c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_credential_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_credential_info.py
@@ -29,19 +29,19 @@ options:
type: str
sortBy:
description:
- - SortBy query parameter.
+ - SortBy query parameter. Field to sort the results by. Sorts by 'instanceId' if no value is provided.
type: str
order:
description:
- - Order query parameter.
+ - Order query parameter. Order of sorting. 'asc' or 'des'.
type: str
id:
description:
- Id path parameter. Global Credential ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetCredentialSubTypeByCredentialId
description: Complete reference of the GetCredentialSubTypeByCredentialId API.
@@ -90,7 +90,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_credential_update.py b/ansible_collections/cisco/dnac/plugins/modules/global_credential_update.py
index 8f2ea144a..b2880e538 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_credential_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_credential_update.py
@@ -20,12 +20,12 @@ options:
description: GlobalCredentialId path parameter. Global credential Uuid.
type: str
siteUuids:
- description: Global Credential Update's siteUuids.
+ description: List of siteUuids where credential is to be updated.
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery UpdateGlobalCredentials
description: Complete reference of the UpdateGlobalCredentials API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2.py b/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2.py
index 2d27cca33..a30cfbf36 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2.py
@@ -17,7 +17,7 @@ description:
- >
API to update device credentials. Multiple credentials can be passed at once, but only a single credential of a
given type can be passed at once. Please refer sample Request Body for more information.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -26,57 +26,57 @@ options:
description: Global Credential V2's cliCredential.
suboptions:
description:
- description: Description.
+ description: Description for CLI credential.
type: str
enablePassword:
- description: Enable Password.
+ description: CLI Enable Password.
type: str
id:
- description: Id.
+ description: Id of the CLI Credential in UUID format.
type: str
password:
- description: Password.
+ description: CLI Password.
type: str
username:
- description: Username.
+ description: CLI Username.
type: str
type: dict
httpsRead:
description: Global Credential V2's httpsRead.
suboptions:
- id:
- description: Id.
+ description:
+ description: Description for HTTP(S) Read Credentials.
type: str
- name:
- description: Name.
+ id:
+ description: Id of the HTTP(S) Read Credential in UUID format.
type: str
password:
- description: Password.
+ description: HTTP(S) Read Password.
type: str
port:
- description: Port.
+ description: HTTP(S) Port.
type: int
username:
- description: Username.
+ description: HTTP(S) Read Username.
type: str
type: dict
httpsWrite:
description: Global Credential V2's httpsWrite.
suboptions:
- id:
- description: Id.
+ description:
+ description: Description for HTTP(S) Write Credentials.
type: str
- name:
- description: Name.
+ id:
+ description: Id of the HTTP(S) Write Credential in UUID format.
type: str
password:
- description: Password.
+ description: HTTP(S) Write Password.
type: str
port:
- description: Port.
+ description: HTTP(S) Port.
type: int
username:
- description: Username.
+ description: HTTP(S) Write Username.
type: str
type: dict
id:
@@ -86,59 +86,59 @@ options:
description: Global Credential V2's snmpV2cRead.
suboptions:
description:
- description: Description.
+ description: Description for Snmp RO community.
type: str
id:
- description: Id.
+ description: Id of the SNMP Read Credential in UUID format.
type: str
readCommunity:
- description: Read Community.
+ description: Snmp RO community.
type: str
type: dict
snmpV2cWrite:
description: Global Credential V2's snmpV2cWrite.
suboptions:
description:
- description: Description.
+ description: Description for Snmp RW community.
type: str
id:
- description: Id.
+ description: Id of the SNMP Write Credential in UUID format.
type: str
writeCommunity:
- description: Write Community.
+ description: Snmp RW community.
type: str
type: dict
snmpV3:
description: Global Credential V2's snmpV3.
suboptions:
authPassword:
- description: Auth Password.
+ description: Auth Password for SNMP V3.
type: str
authType:
- description: Auth Type.
+ description: SNMP auth protocol. 'SHA' or 'MD5'.
type: str
description:
- description: Description.
+ description: Description for Snmp V3 Credential.
type: str
id:
- description: Id.
+ description: Id of the SNMP V3 Credential in UUID format.
type: str
privacyPassword:
- description: Privacy Password.
+ description: Privacy Password for SNMP privacy.
type: str
privacyType:
- description: Privacy Type.
+ description: SNMP privacy protocol. 'AES128', 'AES192', 'AES256'.
type: str
snmpMode:
- description: Snmp Mode.
+ description: Mode of SNMP. 'AUTHPRIV' or 'AUTHNOPRIV' or 'NOAUTHNOPRIV'.
type: str
username:
- description: Username.
+ description: SNMP V3 Username.
type: str
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateGlobalCredentialsV2
description: Complete reference of the CreateGlobalCredentialsV2 API.
@@ -180,14 +180,14 @@ EXAMPLES = r"""
password: string
username: string
httpsRead:
+ description: string
id: string
- name: string
password: string
port: 0
username: string
httpsWrite:
+ description: string
id: string
- name: string
password: string
port: 0
username: string
@@ -225,12 +225,12 @@ EXAMPLES = r"""
password: string
username: string
httpsRead:
- - name: string
+ - description: string
password: string
port: 0
username: string
httpsWrite:
- - name: string
+ - description: string
password: string
port: 0
username: string
@@ -262,7 +262,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2_info.py
index 6b0bffd4d..697cd2f3c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_credential_v2_info.py
@@ -13,7 +13,7 @@ description:
- >
API to get device credentials' details. It fetches all global credentials of all types at once, without the need
to pass any input parameters.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -22,8 +22,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetAllGlobalCredentialsV2
description: Complete reference of the GetAllGlobalCredentialsV2 API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_pool.py b/ansible_collections/cisco/dnac/plugins/modules/global_pool.py
index f52c316d6..ef4a4cd8d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_pool.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_pool.py
@@ -48,8 +48,8 @@ options:
type: list
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings CreateGlobalPool
description: Complete reference of the CreateGlobalPool API.
@@ -129,7 +129,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/global_pool_info.py b/ansible_collections/cisco/dnac/plugins/modules/global_pool_info.py
index 4d95ba9ea..ee7d4a118 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/global_pool_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/global_pool_info.py
@@ -10,7 +10,7 @@ module: global_pool_info
short_description: Information module for Global Pool
description:
- Get all Global Pool.
-- API to get global pool.
+- API to get the global pool.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -21,15 +21,15 @@ options:
type: dict
offset:
description:
- - Offset query parameter. Offset/starting row.
- type: int
+ - Offset query parameter. Offset/starting row. Indexed from 1. Default value of 1.
+ type: float
limit:
description:
- - Limit query parameter. No of Global Pools to be retrieved.
- type: int
+ - Limit query parameter. Number of Global Pools to be retrieved. Default is 25 if not specified.
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings GetGlobalPool
description: Complete reference of the GetGlobalPool API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -76,20 +75,26 @@ dnac_response:
"gateways": [
"string"
],
- "createTime": "string",
- "lastUpdateTime": "string",
- "totalIpAddressCount": "string",
- "usedIpAddressCount": "string",
+ "createTime": 0,
+ "lastUpdateTime": 0,
+ "totalIpAddressCount": 0,
+ "usedIpAddressCount": 0,
"parentUuid": "string",
"owner": "string",
- "shared": "string",
- "overlapping": "string",
- "configureExternalDhcp": "string",
+ "shared": true,
+ "overlapping": true,
+ "configureExternalDhcp": true,
"usedPercentage": "string",
"clientOptions": {},
+ "ipPoolType": "string",
+ "unavailableIpAddressCount": 0,
+ "availableIpAddressCount": 0,
+ "totalAssignableIpAddressCount": 0,
"dnsServerIps": [
"string"
],
+ "hasSubpools": true,
+ "defaultAssignedIpAddressCount": 0,
"context": [
{
"owner": "string",
@@ -97,7 +102,7 @@ dnac_response:
"contextValue": "string"
}
],
- "ipv6": "string",
+ "ipv6": true,
"id": "string",
"ipPoolCidr": "string"
}
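Because the corrected sample now types the counters as integers and the flags as booleans, a consumer no longer has to parse strings. A small illustrative sketch of computing pool utilization from a registered result; the literal data is made up and only shaped like the sample.

# "pools" stands in for dnac_response.response from the registered module result.
pools = [
    {"ipPoolCidr": "10.0.0.0/16", "totalIpAddressCount": 65536,
     "usedIpAddressCount": 4096, "shared": True, "ipv6": False},
]

for pool in pools:
    total = pool["totalIpAddressCount"]
    used = pool["usedIpAddressCount"]
    utilization = (used / total * 100) if total else 0.0
    print("{0}: {1:.1f}% used, shared={2}".format(
        pool["ipPoolCidr"], utilization, pool["shared"]))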
diff --git a/ansible_collections/cisco/dnac/plugins/modules/golden_image_create.py b/ansible_collections/cisco/dnac/plugins/modules/golden_image_create.py
index 95385c59d..3357ca740 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/golden_image_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/golden_image_create.py
@@ -30,8 +30,8 @@ options:
description: SiteId in uuid format. For Global Site "-1" to be used.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) TagAsGoldenImage
description: Complete reference of the TagAsGoldenImage API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_delete.py b/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_delete.py
index 392165663..2fd210071 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_delete.py
@@ -32,8 +32,8 @@ options:
Global site.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) RemoveGoldenTagForImage
description: Complete reference of the RemoveGoldenTagForImage API.
@@ -63,7 +63,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_details_info.py
index c92ed9503..d18bff423 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/golden_tag_image_details_info.py
@@ -38,8 +38,8 @@ options:
- ImageId path parameter. Image Id in uuid format.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) GetGoldenTagStatusOfAnImage
description: Complete reference of the GetGoldenTagStatusOfAnImage API.
@@ -71,7 +71,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/http_read_credential.py b/ansible_collections/cisco/dnac/plugins/modules/http_read_credential.py
index 89a5f7f47..a8dcc260b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/http_read_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/http_read_credential.py
@@ -18,38 +18,39 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
comments:
- description: Http Read Credential's comments.
+ description: Comments to identify the HTTP(S) Read credential.
type: str
credentialType:
- description: Http Read Credential's credentialType.
+ description: Credential type to identify the application that uses the HTTP(S) Read
+ credential.
type: str
description:
- description: Http Read Credential's description.
+ description: Description for HTTP(S) Read Credential.
type: str
id:
- description: Http Read Credential's id.
+ description: Id of the HTTP(S) Read Credential in UUID format.
type: str
instanceTenantId:
- description: Http Read Credential's instanceTenantId.
+ description: Deprecated.
type: str
instanceUuid:
- description: Http Read Credential's instanceUuid.
+ description: Deprecated.
type: str
password:
- description: Http Read Credential's password.
+ description: HTTP(S) Read Password.
type: str
port:
- description: Http Read Credential's port.
+ description: HTTP(S) Port. Valid port should be in the range of 1 to 65535.
type: int
secure:
- description: Secure flag.
+ description: Flag for HTTPS Read.
type: bool
username:
- description: Http Read Credential's username.
+ description: HTTP(S) Read Username.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateHTTPReadCredentials
description: Complete reference of the CreateHTTPReadCredentials API.
@@ -112,7 +113,6 @@ EXAMPLES = r"""
username: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/http_write_credential.py b/ansible_collections/cisco/dnac/plugins/modules/http_write_credential.py
index 611f40716..2353a0005 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/http_write_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/http_write_credential.py
@@ -18,38 +18,39 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
comments:
- description: Http Write Credential's comments.
+ description: Comments to identify the HTTP(S) Write credential.
type: str
credentialType:
- description: Http Write Credential's credentialType.
+ description: Credential type to identify the application that uses the HTTP(S) Write
+ credential.
type: str
description:
- description: Http Write Credential's description.
+ description: Description for HTTP(S) Write Credential.
type: str
id:
- description: Http Write Credential's id.
+ description: Id of the HTTP(S) Write Credential in UUID format.
type: str
instanceTenantId:
- description: Http Write Credential's instanceTenantId.
+ description: Deprecated.
type: str
instanceUuid:
- description: Http Write Credential's instanceUuid.
+ description: Deprecated.
type: str
password:
- description: Http Write Credential's password.
+ description: HTTP(S) Write Password.
type: str
port:
- description: Http Write Credential's port.
+ description: HTTP(S) Port. Valid port should be in the range of 1 to 65535.
type: int
secure:
- description: Secure flag.
+ description: Flag for HTTPS Write.
type: bool
username:
- description: Http Write Credential's username.
+ description: HTTP(S) Write Username.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateHTTPWriteCredentials
description: Complete reference of the CreateHTTPWriteCredentials API.
@@ -112,7 +113,6 @@ EXAMPLES = r"""
username: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm.py b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm.py
index 86fa8bd76..0a5571bf9 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm.py
@@ -13,7 +13,7 @@ description:
- Creates ITSM Integration setting.
- Deletes the ITSM Integration setting.
- Updates the ITSM Integration setting.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -39,7 +39,7 @@ options:
description: Description of the setting instance.
type: str
dypName:
- description: It should be ServiceNowConnection.
+ description: It can be ServiceNowConnection.
type: str
instanceId:
description: InstanceId path parameter. Instance Id of the Integration setting instance.
@@ -48,8 +48,8 @@ options:
description: Name of the setting instance.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for ITSM Integration CreateITSMIntegrationSetting
description: Complete reference of the CreateITSMIntegrationSetting API.
@@ -126,7 +126,6 @@ EXAMPLES = r"""
instanceId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm_info.py b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm_info.py
index 0519f5e5a..d84cf38d7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_instances_itsm_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Integration Settings Instances Itsm
description:
- Get Integration Settings Instances Itsm by id.
- Fetches ITSM Integration setting by ID.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -24,8 +24,8 @@ options:
- InstanceId path parameter. Instance Id of the Integration setting instance.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for ITSM Integration GetITSMIntegrationSettingById
description: Complete reference of the GetITSMIntegrationSettingById API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/integration_settings_itsm_instances_info.py b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_itsm_instances_info.py
new file mode 100644
index 000000000..58b435032
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_itsm_instances_info.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: integration_settings_itsm_instances_info
+short_description: Information module for Integration Settings Itsm Instances
+description:
+- Get all Integration Settings Itsm Instances.
+- Fetches all ITSM Integration settings.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for ITSM Integration GetAllITSMIntegrationSettings
+ description: Complete reference of the GetAllITSMIntegrationSettings API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-all-itsm-integration-settings
+notes:
+ - SDK Methods used are
+ itsm_integration.ItsmIntegration.get_all_itsm_integration_settings,
+
+ - Paths used are
+ get /dna/intent/api/v1/integration-settings/itsm/instances,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Integration Settings Itsm Instances
+ cisco.dnac.integration_settings_itsm_instances_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "id": "string",
+ "dypId": "string",
+ "dypName": "string",
+ "name": "string",
+ "uniqueKey": "string",
+ "dypMajorVersion": 0,
+ "description": "string",
+ "createdDate": 0,
+ "createdBy": "string",
+ "updatedBy": "string",
+ "softwareVersionLog": [
+ {}
+ ],
+ "schemaVersion": 0,
+ "tenantId": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/integration_settings_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_status_info.py
new file mode 100644
index 000000000..5681fcfa9
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/integration_settings_status_info.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: integration_settings_status_info
+short_description: Information module for Integration Settings Status
+description:
+- Get all Integration Settings Status.
+- Fetches ITSM Integration status.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for ITSM Integration GetITSMIntegrationStatus
+ description: Complete reference of the GetITSMIntegrationStatus API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-itsm-integration-status
+notes:
+ - SDK Methods used are
+ itsm_integration.ItsmIntegration.get_itsm_integration_status,
+
+ - Paths used are
+ get /dna/intent/api/v1/integration-settings/status,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Integration Settings Status
+ cisco.dnac.integration_settings_status_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "name": "string",
+ "status": "string",
+ "configurations": [
+ {
+ "dypSchemaName": "string",
+ "dypInstanceId": "string"
+ }
+ ]
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/interface_info.py b/ansible_collections/cisco/dnac/plugins/modules/interface_info.py
index a6f0706ad..868bb4839 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/interface_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/interface_info.py
@@ -24,8 +24,8 @@ options:
- InterfaceUuid path parameter. Interface ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices LegitOperationsForInterface
description: Complete reference of the LegitOperationsForInterface API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -63,62 +62,22 @@ dnac_response:
sample: >
{
"response": {
- "type": "string",
- "properties": {
- "interfaceUuid": {
- "type": "string"
- },
- "properties": {
- "type": "string",
- "items": [
- {
- "type": "string",
- "properties": {
- "name": {
- "type": "string"
- },
- "applicable": {
- "type": "string"
- },
- "failureReason": {
- "type": "string"
- }
- },
- "required": [
- "string"
- ]
- }
- ]
- },
- "operations": {
- "type": "string",
- "items": [
- {
- "type": "string",
- "properties": {
- "name": {
- "type": "string"
- },
- "applicable": {
- "type": "string"
- },
- "failureReason": {
- "type": "string"
- }
- },
- "required": [
- "string"
- ]
- }
- ]
+ "interfaceUuid": "string",
+ "properties": [
+ {
+ "name": "string",
+ "applicable": "string",
+ "failureReason": "string"
+ }
+ ],
+ "operations": [
+ {
+ "name": "string",
+ "applicable": "string",
+ "failureReason": "string"
}
- },
- "required": [
- "string"
]
},
- "version": {
- "type": "string"
- }
+ "version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_detail_info.py b/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_detail_info.py
index 81a3203d6..0a0c3b436 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_detail_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_detail_info.py
@@ -28,8 +28,8 @@ options:
- Name query parameter. Interface name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetInterfaceDetailsByDeviceIdAndInterfaceName
description: Complete reference of the GetInterfaceDetailsByDeviceIdAndInterfaceName API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -68,9 +67,24 @@ dnac_response:
sample: >
{
"response": {
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -81,11 +95,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
@@ -97,7 +114,13 @@ dnac_response:
"speed": "string",
"status": "string",
"vlanId": "string",
- "voiceVlan": "string"
+ "voiceVlan": "string",
+ "poweroverethernet": "string",
+ "networkdevice_id": "string",
+ "managedComputeElement": "string",
+ "managedNetworkElement": "string",
+ "managedNetworkElementUrl": "string",
+ "managedComputeElementUrl": "string"
},
"version": "string"
}
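The RETURN sample above now exposes an "addresses" list with nested ipAddress/ipMask objects. An illustrative sketch of flattening it from a registered result; the literal dict is made-up data shaped like the sample, and the "type" value is a guess rather than a documented constant.

interface = {
    "addresses": [
        {
            "address": {
                "ipAddress": {"address": "10.10.20.1"},
                "ipMask": {"address": "255.255.255.0"},
                "isInverseMask": False,
            },
            "type": "IPV4_PRIMARY",   # assumed value, not taken from the sample
        }
    ],
    "portName": "GigabitEthernet1/0/1",
}

# Collect (address, mask) pairs from the new nested structure.
pairs = [
    (a["address"]["ipAddress"]["address"], a["address"]["ipMask"]["address"])
    for a in interface.get("addresses", [])
]
print(interface["portName"], pairs)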
diff --git a/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_info.py
index 62bb64e03..d95c8aaf3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_info.py
@@ -24,8 +24,8 @@ options:
- DeviceId path parameter. Device ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetInterfaceInfoById
description: Complete reference of the GetInterfaceInfoById API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -64,9 +63,24 @@ dnac_response:
{
"response": [
{
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -77,11 +91,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_range_info.py b/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_range_info.py
index 2f2e3a6ea..cc65b4359 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_range_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/interface_network_device_range_info.py
@@ -32,8 +32,8 @@ options:
- RecordsToReturn path parameter. Number of records to return.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceInterfacesBySpecifiedRange
description: Complete reference of the GetDeviceInterfacesBySpecifiedRange API.
@@ -64,7 +64,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -74,9 +73,24 @@ dnac_response:
{
"response": [
{
+ "addresses": [
+ {
+ "address": {
+ "ipAddress": {
+ "address": "string"
+ },
+ "ipMask": {
+ "address": "string"
+ },
+ "isInverseMask": true
+ },
+ "type": "string"
+ }
+ ],
"adminStatus": "string",
"className": "string",
"description": "string",
+ "name": "string",
"deviceId": "string",
"duplex": "string",
"id": "string",
@@ -87,11 +101,14 @@ dnac_response:
"ipv4Address": "string",
"ipv4Mask": "string",
"isisSupport": "string",
+ "lastOutgoingPacketTime": 0,
+ "lastIncomingPacketTime": 0,
"lastUpdated": "string",
"macAddress": "string",
"mappedPhysicalInterfaceId": "string",
"mappedPhysicalInterfaceName": "string",
"mediaType": "string",
+ "mtu": "string",
"nativeVlanId": "string",
"ospfSupport": "string",
"pid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/interface_operation_create.py b/ansible_collections/cisco/dnac/plugins/modules/interface_operation_create.py
index 1bc065c31..3aaf16e0d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/interface_operation_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/interface_operation_create.py
@@ -27,14 +27,14 @@ options:
description: InterfaceUuid path parameter. Interface Id.
type: str
operation:
- description: Operation.
+ description: Operation needs to be specified as 'ClearMacAddress'.
type: str
payload:
- description: Payload.
+ description: Payload is not applicable.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices ClearMacAddressTable
description: Complete reference of the ClearMacAddressTable API.
@@ -64,7 +64,6 @@ EXAMPLES = r"""
payload: {}
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/interface_update.py b/ansible_collections/cisco/dnac/plugins/modules/interface_update.py
index 261703d48..b31502f31 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/interface_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/interface_update.py
@@ -19,7 +19,7 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
adminStatus:
- description: Admin Status.
+ description: Admin status as ('UP'/'DOWN').
type: str
deploymentMode:
description: DeploymentMode query parameter. Preview/Deploy 'Preview' means the
@@ -27,20 +27,20 @@ options:
to the device.
type: str
description:
- description: Description.
+ description: Description for the Interface.
type: str
interfaceUuid:
description: InterfaceUuid path parameter. Interface ID.
type: str
vlanId:
- description: Vlan Id.
+ description: VLAN Id to be Updated.
type: int
voiceVlanId:
- description: Voice Vlan Id.
+ description: Voice Vlan Id to be Updated.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices UpdateInterfaceDetails
description: Complete reference of the UpdateInterfaceDetails API.
@@ -72,7 +72,6 @@ EXAMPLES = r"""
voiceVlanId: 0
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py b/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py
index af2572486..cada74a18 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/inventory_intent.py
@@ -57,7 +57,7 @@ options:
default: "NETWORK_DEVICE"
cli_transport:
description: The essential prerequisite for adding Network devices is the specification of the transport
- protocol (either SSH or Telnet) used by the device.
+ protocol (either ssh or telnet) used by the device.
type: str
compute_device:
description: Indicates whether a device is a compute device.
@@ -307,7 +307,7 @@ options:
version_added: 6.12.0
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco Catalyst Center documentation for Devices AddDevice2
@@ -775,6 +775,7 @@ class DnacDevice(DnacBase):
'clean_config': {'type': 'bool'},
'add_user_defined_field': {
'type': 'list',
+ 'elements': 'dict',
'name': {'type': 'str'},
'description': {'type': 'str'},
'value': {'type': 'str'},
@@ -797,6 +798,7 @@ class DnacDevice(DnacBase):
},
'provision_wired_device': {
'type': 'list',
+ 'elements': 'dict',
'device_ip': {'type': 'str'},
'site_name': {'type': 'str'},
'resync_retry_count': {'default': 200, 'type': 'int'},
@@ -1586,6 +1588,21 @@ class DnacDevice(DnacBase):
'siteNameHierarchy': site_name
}
+ # Check the provisioning status of device
+ device_prov_status = self.get_provision_wired_device(device_ip)
+ if device_prov_status == 2:
+ self.status = "success"
+ already_provision_count += 1
+ self.result['changed'] = False
+ self.msg = "Device '{0}' is already provisioned in the Cisco Catalyst Center".format(device_ip)
+ self.log(self.msg, "INFO")
+ continue
+ if device_prov_status == 3:
+ self.status = "failed"
+ error_msg = "Cannot do Provisioning for device {0}.".format(device_ip)
+ self.log(error_msg, "ERROR")
+ continue
+
# Check till device comes into managed state
while resync_retry_count:
response = self.get_device_response(device_ip)
@@ -1629,7 +1646,7 @@ class DnacDevice(DnacBase):
if response.get("status") == "failed":
description = response.get("description")
error_msg = "Cannot do Provisioning for device {0} beacuse of {1}".format(device_ip, description)
- self.log(error_msg)
+ self.log(error_msg, "ERROR")
continue
task_id = response.get("taskId")
@@ -1650,9 +1667,6 @@ class DnacDevice(DnacBase):
# Not returning from here as there might be possiblity that for some devices it comes into exception
# but for others it gets provision successfully or If some devices are already provsioned
self.handle_provisioning_exception(device_ip, e, device_type)
- if "already provisioned" in str(e):
- self.log(str(e), "INFO")
- already_provision_count += 1
# Check If all the devices are already provsioned, return from here only
if already_provision_count == total_devices_to_provisioned:
@@ -2465,18 +2479,24 @@ class DnacDevice(DnacBase):
logs the response.
"""
- response = self.dnac._exec(
- family="sda",
- function='get_provisioned_wired_device',
- op_modifies=True,
- params={"device_management_ip_address": device_ip}
- )
+ try:
+ flag = 3
+ response = self.dnac._exec(
+ family="sda",
+ function='get_provisioned_wired_device',
+ op_modifies=True,
+ params={"device_management_ip_address": device_ip}
+ )
- if response.get("status") == "failed":
- self.log("Cannot do provisioning for wired device {0} because of {1}.".format(device_ip, response.get('description')), "ERROR")
- return False
+ if response.get("status") == "success" and "Wired Provisioned device detail retrieved successfully." in response.get("description"):
+ flag = 2
+ self.log("Wired device '{0}' already provisioned in the Cisco Catalyst Center.".format(device_ip), "INFO")
- return True
+ except Exception as e:
+ if "not provisioned to any site" in str(e):
+ flag = 1
+
+ return flag
def clear_mac_address(self, interface_id, deploy_mode, interface_name):
"""
@@ -2879,10 +2899,26 @@ class DnacDevice(DnacBase):
credential_update = self.config[0].get("credential_update", False)
config['type'] = device_type
+ config['ip_address_list'] = devices_to_add
if device_type == "FIREPOWER_MANAGEMENT_SYSTEM":
config['http_port'] = self.config[0].get("http_port", "443")
- config['ip_address_list'] = devices_to_add
+ if self.config[0].get('provision_wired_device'):
+ provision_wired_list = self.config[0]['provision_wired_device']
+ device_not_available = []
+ device_in_ccc = self.device_exists_in_dnac()
+
+ for prov_dict in provision_wired_list:
+ device_ip = prov_dict['device_ip']
+ if device_ip not in device_in_ccc:
+ device_not_available.append(device_ip)
+ if device_not_available:
+ self.status = "failed"
+ self.msg = """Unable to Provision Wired Device(s) because the device(s) listed: {0} are not present in the
+ Cisco Catalyst Center.""".format(str(device_not_available))
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
if self.config[0].get('update_mgmt_ipaddresslist'):
device_ip = self.config[0].get('update_mgmt_ipaddresslist')[0].get('existMgmtIpAddress')
@@ -3032,6 +3068,7 @@ class DnacDevice(DnacBase):
devices_to_update_role = self.get_device_ips_from_config_priority()
device_role = self.config[0].get('role')
role_update_count = 0
+ role_updated_list = []
for device_ip in devices_to_update_role:
device_id = self.get_device_ids([device_ip])
@@ -3076,10 +3113,8 @@ class DnacDevice(DnacBase):
if 'successfully' in progress or 'succesfully' in progress:
self.status = "success"
- self.result['changed'] = True
- self.msg = "Device(s) '{0}' role updated successfully to '{1}'".format(str(devices_to_update_role), device_role)
- self.result['response'] = self.msg
- self.log(self.msg, "INFO")
+ self.log("Device '{0}' role updated successfully to '{1}'".format(device_ip, device_role), "INFO")
+ role_updated_list.append(device_ip)
break
elif execution_details.get("isError"):
self.status = "failed"
@@ -3100,10 +3135,17 @@ class DnacDevice(DnacBase):
self.status = "success"
self.result['changed'] = False
self.msg = """The device role '{0}' is already set in Cisco Catalyst Center, no device role update is needed for the
- devices {1}.""".format(device_role, str(devices_to_update_role))
+ device(s) {1}.""".format(device_role, str(devices_to_update_role))
self.log(self.msg, "INFO")
self.result['response'] = self.msg
+ if role_updated_list:
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Device(s) '{0}' role updated successfully to '{1}'".format(str(role_updated_list), device_role)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+
if credential_update:
device_to_update = self.get_device_ips_from_config_priority()
# Update Device details and credentails
@@ -3229,6 +3271,10 @@ class DnacDevice(DnacBase):
if not playbook_params['httpPort']:
playbook_params['httpPort'] = device_data.get('http_port', None)
+ for key, value in playbook_params.items():
+ if value == " ":
+ playbook_params[key] = None
+
try:
if playbook_params['updateMgmtIPaddressList']:
new_mgmt_ipaddress = playbook_params['updateMgmtIPaddressList'][0]['newMgmtIpAddress']
@@ -3598,7 +3644,8 @@ class DnacDevice(DnacBase):
for prov_dict in provision_wired_list:
device_ip = prov_dict['device_ip']
provision_device_list.append(device_ip)
- if not self.get_provision_wired_device(device_ip):
+ device_prov_status = self.get_provision_wired_device(device_ip)
+ if device_prov_status == 1 or device_prov_status == 3:
provision_wired_flag = False
break
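The reworked get_provision_wired_device above replaces its boolean result with a three-valued status: 1 means not provisioned to any site, 2 means already provisioned, and 3 means the lookup failed or is unknown. A standalone sketch of how callers branch on that convention; only the numeric convention comes from this change, the helper and data here are made up.

def get_provision_status(device_ip, provisioned_ips):
    """Return 1 (not provisioned), 2 (already provisioned) or 3 (lookup failed)."""
    try:
        if device_ip is None:
            raise RuntimeError("lookup failed")    # stands in for an SDK exception
        return 2 if device_ip in provisioned_ips else 1
    except RuntimeError:
        return 3

provisioned = {"10.0.0.5"}
for ip in ("10.0.0.5", "10.0.0.9", None):
    status = get_provision_status(ip, provisioned)
    if status == 2:
        print("{0}: already provisioned, skipping".format(ip))
    elif status == 3:
        print("{0}: cannot provision (status lookup failed)".format(ip))
    else:
        print("{0}: proceeding with provisioning".format(ip))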
diff --git a/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py
index 17f63f475..9af856ba6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/inventory_workflow_manager.py
@@ -57,7 +57,7 @@ options:
default: "NETWORK_DEVICE"
cli_transport:
description: The essential prerequisite for adding Network devices is the specification of the transport
- protocol (either SSH or Telnet) used by the device.
+ protocol (either ssh or telnet) used by the device.
type: str
compute_device:
description: Indicates whether a device is a compute device.
@@ -307,7 +307,7 @@ options:
version_added: 6.12.0
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco Catalyst Center documentation for Devices AddDevice2
@@ -774,6 +774,7 @@ class Inventory(DnacBase):
'clean_config': {'type': 'bool'},
'add_user_defined_field': {
'type': 'list',
+ 'elements': 'dict',
'name': {'type': 'str'},
'description': {'type': 'str'},
'value': {'type': 'str'},
@@ -796,6 +797,7 @@ class Inventory(DnacBase):
},
'provision_wired_device': {
'type': 'list',
+ 'elements': 'dict',
'device_ip': {'type': 'str'},
'site_name': {'type': 'str'},
'resync_retry_count': {'default': 200, 'type': 'int'},
@@ -1584,6 +1586,21 @@ class Inventory(DnacBase):
'siteNameHierarchy': site_name
}
+ # Check the provisioning status of device
+ device_prov_status = self.get_provision_wired_device(device_ip)
+ if device_prov_status == 2:
+ self.status = "success"
+ already_provision_count += 1
+ self.result['changed'] = False
+ self.msg = "Device '{0}' is already provisioned in the Cisco Catalyst Center".format(device_ip)
+ self.log(self.msg, "INFO")
+ continue
+ if device_prov_status == 3:
+ self.status = "failed"
+ error_msg = "Cannot do Provisioning for device {0}.".format(device_ip)
+ self.log(error_msg, "ERROR")
+ continue
+
# Check till device comes into managed state
while resync_retry_count:
response = self.get_device_response(device_ip)
@@ -1626,7 +1643,7 @@ class Inventory(DnacBase):
if response.get("status") == "failed":
description = response.get("description")
error_msg = "Cannot do Provisioning for device {0} beacuse of {1}".format(device_ip, description)
- self.log(error_msg)
+ self.log(error_msg, "ERROR")
continue
task_id = response.get("taskId")
@@ -1647,9 +1664,6 @@ class Inventory(DnacBase):
# Not returning from here as there might be possiblity that for some devices it comes into exception
# but for others it gets provision successfully or If some devices are already provsioned
self.handle_provisioning_exception(device_ip, e, device_type)
- if "already provisioned" in str(e):
- self.log(str(e), "INFO")
- already_provision_count += 1
# Check If all the devices are already provsioned, return from here only
if already_provision_count == total_devices_to_provisioned:
@@ -2457,18 +2471,24 @@ class Inventory(DnacBase):
logs the response.
"""
- response = self.dnac._exec(
- family="sda",
- function='get_provisioned_wired_device',
- op_modifies=True,
- params={"device_management_ip_address": device_ip}
- )
+ try:
+ flag = 3
+ response = self.dnac._exec(
+ family="sda",
+ function='get_provisioned_wired_device',
+ op_modifies=True,
+ params={"device_management_ip_address": device_ip}
+ )
- if response.get("status") == "failed":
- self.log("Cannot do provisioning for wired device {0} because of {1}.".format(device_ip, response.get('description')), "ERROR")
- return False
+ if response.get("status") == "success" and "retrieved successfully" in response.get("description"):
+ flag = 2
+ self.log("Wired device '{0}' already provisioned in the Cisco Catalyst Center.".format(device_ip), "INFO")
- return True
+ except Exception as e:
+ if "not provisioned to any site" in str(e):
+ flag = 1
+
+ return flag
def clear_mac_address(self, interface_id, deploy_mode, interface_name):
"""
@@ -2870,10 +2890,26 @@ class Inventory(DnacBase):
credential_update = self.config[0].get("credential_update", False)
config['type'] = device_type
+ config['ip_address_list'] = devices_to_add
if device_type == "FIREPOWER_MANAGEMENT_SYSTEM":
config['http_port'] = self.config[0].get("http_port", "443")
- config['ip_address_list'] = devices_to_add
+ if self.config[0].get('provision_wired_device'):
+ provision_wired_list = self.config[0]['provision_wired_device']
+ device_not_available = []
+ device_in_ccc = self.device_exists_in_ccc()
+
+ for prov_dict in provision_wired_list:
+ device_ip = prov_dict['device_ip']
+ if device_ip not in device_in_ccc:
+ device_not_available.append(device_ip)
+ if device_not_available:
+ self.status = "failed"
+ self.msg = """Unable to Provision Wired Device(s) because the device(s) listed: {0} are not present in the
+ Cisco Catalyst Center.""".format(str(device_not_available))
+ self.result['response'] = self.msg
+ self.log(self.msg, "ERROR")
+ return self
if self.config[0].get('update_mgmt_ipaddresslist'):
device_ip = self.config[0].get('update_mgmt_ipaddresslist')[0].get('existMgmtIpAddress')
@@ -3023,6 +3059,7 @@ class Inventory(DnacBase):
devices_to_update_role = self.get_device_ips_from_config_priority()
device_role = self.config[0].get('role')
role_update_count = 0
+ role_updated_list = []
for device_ip in devices_to_update_role:
device_id = self.get_device_ids([device_ip])
@@ -3067,10 +3104,8 @@ class Inventory(DnacBase):
if 'successfully' in progress or 'succesfully' in progress:
self.status = "success"
- self.result['changed'] = True
- self.msg = "Device(s) '{0}' role updated successfully to '{1}'".format(str(devices_to_update_role), device_role)
- self.result['response'] = self.msg
- self.log(self.msg, "INFO")
+ self.log("Device '{0}' role updated successfully to '{1}'".format(device_ip, device_role), "INFO")
+ role_updated_list.append(device_ip)
break
elif execution_details.get("isError"):
self.status = "failed"
@@ -3091,10 +3126,17 @@ class Inventory(DnacBase):
self.status = "success"
self.result['changed'] = False
self.msg = """The device role '{0}' is already set in Cisco Catalyst Center, no device role update is needed for the
- devices {1}.""".format(device_role, str(devices_to_update_role))
+ device(s) {1}.""".format(device_role, str(devices_to_update_role))
self.log(self.msg, "INFO")
self.result['response'] = self.msg
+ if role_updated_list:
+ self.status = "success"
+ self.result['changed'] = True
+ self.msg = "Device(s) '{0}' role updated successfully to '{1}'".format(str(role_updated_list), device_role)
+ self.result['response'] = self.msg
+ self.log(self.msg, "INFO")
+
if credential_update:
device_to_update = self.get_device_ips_from_config_priority()
@@ -3221,6 +3263,10 @@ class Inventory(DnacBase):
if not playbook_params['httpPort']:
playbook_params['httpPort'] = device_data.get('http_port', None)
+ for key, value in playbook_params.items():
+ if value == " ":
+ playbook_params[key] = None
+
try:
if playbook_params['updateMgmtIPaddressList']:
new_mgmt_ipaddress = playbook_params['updateMgmtIPaddressList'][0]['newMgmtIpAddress']
@@ -3591,7 +3637,8 @@ class Inventory(DnacBase):
for prov_dict in provision_wired_list:
device_ip = prov_dict['device_ip']
provision_device_list.append(device_ip)
- if not self.get_provision_wired_device(device_ip):
+ device_prov_status = self.get_provision_wired_device(device_ip)
+ if device_prov_status == 1 or device_prov_status == 3:
provision_wired_flag = False
break
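Both inventory modules above also change how role updates are reported: the success message is no longer emitted inside the per-device loop with the full device list; instead the IPs that actually changed are collected in role_updated_list and reported once afterwards. A self-contained sketch of that collect-then-report pattern; the helper and data are illustrative only.

def update_roles(device_ips, current_roles, desired_role):
    """Return the list of device IPs whose role actually changed."""
    role_updated_list = []
    for device_ip in device_ips:
        if current_roles.get(device_ip) == desired_role:
            continue                              # already set, no update needed
        current_roles[device_ip] = desired_role   # stands in for the real role-update API call
        role_updated_list.append(device_ip)
    return role_updated_list

current = {"10.0.0.1": "ACCESS", "10.0.0.2": "CORE"}
updated = update_roles(["10.0.0.1", "10.0.0.2"], current, "CORE")
if updated:
    print("Device(s) {0} role updated successfully to '{1}'".format(updated, "CORE"))
else:
    print("No device role update was needed")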
diff --git a/ansible_collections/cisco/dnac/plugins/modules/ise_integration_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/ise_integration_status_info.py
new file mode 100644
index 000000000..bd6a5b196
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/ise_integration_status_info.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: ise_integration_status_info
+short_description: Information module for Ise Integration Status
+description:
+- Get all Ise Integration Status.
+- API to check Cisco ISE server integration status.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for System Settings CiscoISEServerIntegrationStatus
+ description: Complete reference of the CiscoISEServerIntegrationStatus API.
+ link: https://developer.cisco.com/docs/dna-center/#!cisco-ise-server-integration-status
+notes:
+ - SDK Method used are
+ system_settings.SystemSettings.cisco_ise_server_integration_status,
+
+ - Paths used are
+ get /dna/intent/api/v1/ise-integration-status,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Ise Integration Status
+ cisco.dnac.ise_integration_status_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "aaaServerSettingId": "string",
+ "overallStatus": "string",
+ "overallErrorMessage": "string",
+ "steps": [
+ {
+ "stepId": "string",
+ "stepOrder": 0,
+ "stepName": "string",
+ "stepDescription": "string",
+ "stepStatus": "string",
+ "certAcceptedByUser": true,
+ "stepTime": 0
+ }
+ ]
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/ise_radius_integration_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/ise_radius_integration_workflow_manager.py
new file mode 100644
index 000000000..d8d463f50
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/ise_radius_integration_workflow_manager.py
@@ -0,0 +1,1578 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Ansible module to operate the Authentication and Policy Servers in Cisco Catalyst Center."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ["Muthu Rakesh, Madhan Sankaranarayanan"]
+
+DOCUMENTATION = r"""
+---
+module: ise_radius_integration_workflow_manager
+short_description: Resource module for Authentication and Policy Servers
+description:
+- Manage operations on Authentication and Policy Servers.
+- API to create Authentication and Policy Server Access Configuration.
+- API to update Authentication and Policy Server Access Configuration.
+- API to delete Authentication and Policy Server Access Configuration.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author: Muthu Rakesh (@MUTHU-RAKESH-27)
+ Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: The state of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ "merged", "deleted" ]
+ default: merged
+ config:
+ description:
+ - List of details of Authentication and Policy Servers being managed.
+ type: list
+ elements: dict
+ required: true
+ suboptions:
+ authentication_policy_server:
+ description: Manages the Authentication and Policy Servers.
+ type: dict
+ suboptions:
+ server_type:
+ description:
+ - Type of the Authentication and Policy Server.
+ - ISE for Cisco ISE servers.
+ - AAA for Non-Cisco ISE servers.
+ type: str
+ choices: [ "AAA", "ISE" ]
+ default: AAA
+ server_ip_address:
+ description: IP Address of the Authentication and Policy Server.
+ type: str
+ required: True
+ shared_secret:
+ description:
+ - Shared secret between devices and authentication and policy server.
+ - Shared secret must have 4 to 100 characters with no spaces or the following characters - ["<", "?"].
+ - Shared secret is a Read-Only parameter.
+ type: str
+ protocol:
+ description:
+ - Type of protocol for authentication and policy server.
+ - RADIUS provides centralized services (AAA) for users in remote access scenarios.
+ - TACACS focuses on access control and administrative authentication for network devices.
+ type: str
+ choices: [ "TACACS", "RADIUS", "RADIUS_TACACS" ]
+ default: RADIUS
+ encryption_scheme:
+ description:
+ - Type of encryption scheme for additional security.
+ - If an encryption scheme is given, the message authenticator code key and the encryption key are required.
+ - Updating the encryption scheme is not possible.
+ - >
+ KEYWRAP is used for securely wrapping and unwrapping encryption keys,
+ ensuring their confidentiality during transmission or storage.
+ - >
+ RADSEC is an extension of RADIUS that provides secure communication
+ between RADIUS clients and servers over TLS/SSL, enhancing the
+ confidentiality and integrity of authentication and accounting data exchange.
+ type: str
+ choices: [ "KEYWRAP", "RADSEC" ]
+ encryption_key:
+ description:
+ - Encryption key used to encrypt shared secret.
+ - Updating the encryption key is not possible.
+ - Required when encryption_scheme is provided.
+ - >
+ When ASCII format is selected, Encryption Key may contain
+ alphanumeric and special characters. Key must be 16 char long.
+ type: str
+ message_authenticator_code_key:
+ description:
+ - Message key used to encrypt shared secret.
+ - Updating the message key is not possible.
+ - Required when encryption_scheme is provided.
+ - >
+ Message Authentication Code Key may contain alphanumeric and special characters.
+ Key must be 20 char long.
+ type: str
+ authentication_port:
+ description:
+ - Authentication port of RADIUS server.
+ - Updating the authentication port is not possible.
+ - Authentication port should be from 1 to 65535.
+ type: int
+ default: 1812
+ accounting_port:
+ description:
+ - Accounting port of RADIUS server.
+ - Updating the accounting port is not possible.
+ - Accounting port should be from 1 to 65535.
+ type: int
+ default: 1813
+ retries:
+ description:
+ - Number of communication retries between devices and authentication and policy server.
+ - Retries should be from 1 to 3.
+ type: int
+ default: 3
+ timeout:
+ description:
+ - Number of seconds before timing out between devices and authentication and policy server.
+ - Timeout should be from 2 to 20.
+ type: int
+ default: 4
+ role:
+ description:
+ - Role of authentication and policy server.
+ - Updating the role is not possible.
+ type: str
+ default: secondary
+ pxgrid_enabled:
+ description:
+ - Set to True to enable Pxgrid and False to disable it.
+ - Pxgrid is available only for the Cisco ISE Servers.
+ - >
+ PxGrid facilitates seamless integration and information sharing across products,
+ enhancing threat detection and response capabilities within the network ecosystem.
+ type: bool
+ default: True
+ use_dnac_cert_for_pxgrid:
+ description: Set True to use the Cisco Catalyst Center certificate for the Pxgrid.
+ type: bool
+ default: False
+ cisco_ise_dtos:
+ description:
+ - List of Cisco ISE Data Transfer Objects (DTOs).
+ - Required when server_type is set to ISE.
+ type: list
+ elements: dict
+ suboptions:
+ user_name:
+ description:
+ - User name of the Cisco ISE server.
+ - Required for passing the cisco_ise_dtos.
+ type: str
+ password:
+ description:
+ - Password of the Cisco ISE server.
+ - Password must have 4 to 127 characters with no spaces or the following characters - "<".
+ - Required for passing the cisco_ise_dtos.
+ type: str
+ fqdn:
+ description:
+ - Fully-qualified domain name of the Cisco ISE server.
+ - Required for passing the cisco_ise_dtos.
+ type: str
+ ip_address:
+ description:
+ - IP Address of the Cisco ISE Server.
+ - Required for passing the cisco_ise_dtos.
+ type: str
+ description:
+ description: Description about the Cisco ISE server.
+ type: str
+ ssh_key:
+ description: SSH key of the Cisco ISE server.
+ type: str
+ external_cisco_ise_ip_addr_dtos:
+ description: External Cisco ISE IP address data transfer objects for future use.
+ type: list
+ elements: dict
+ suboptions:
+ external_cisco_ise_ip_addresses:
+ description: External Cisco ISE IP addresses.
+ type: list
+ elements: dict
+ suboptions:
+ external_ip_address:
+ description: External Cisco ISE IP address.
+ type: str
+ ise_type:
+ description: Type of the Authentication and Policy Server.
+ type: str
+ trusted_server:
+ description:
+ - Indicates whether the certificate is trustworthy for the server.
+ - Serves as a validation of its authenticity and reliability in secure connections.
+ default: True
+ type: bool
+ ise_integration_wait_time:
+ description:
+ - Indicates the sleep time after initiating the Cisco ISE integration process.
+ - Maximum sleep time should be less than or equal to 60 seconds.
+ default: 20
+ type: int
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.9
+notes:
+ - SDK Method used are
+ system_settings.SystemSettings.add_authentication_and_policy_server_access_configuration,
+ system_settings.SystemSettings.edit_authentication_and_policy_server_access_configuration,
+ system_settings.SystemSettings.accept_cisco_ise_server_certificate_for_cisco_ise_server_integration,
+ system_settings.SystemSettings.delete_authentication_and_policy_server_access_configuration,
+
+ - Paths used are
+ post /dna/intent/api/v1/authentication-policy-servers,
+ put /dna/intent/api/v1/authentication-policy-servers/${id},
+ put /dna/intent/api/v1/integrate-ise/${id},
+ delete /dna/intent/api/v1/authentication-policy-servers/${id}
+
+"""
+
+EXAMPLES = r"""
+- name: Create an AAA server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_type: AAA
+ server_ip_address: 10.0.0.1
+ shared_secret: "12345"
+ protocol: RADIUS_TACACS
+ encryption_scheme: KEYWRAP
+ encryption_key: "1234567890123456"
+ message_authenticator_code_key: asdfghjklasdfghjklas
+ authentication_port: 1812
+ accounting_port: 1813
+ retries: 3
+ timeout: 4
+ role: secondary
+
+- name: Create a Cisco ISE server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_type: ISE
+ server_ip_address: 10.0.0.2
+ shared_secret: "12345"
+ protocol: RADIUS_TACACS
+ encryption_scheme: KEYWRAP
+ encryption_key: "1234567890123456"
+ message_authenticator_code_key: asdfghjklasdfghjklas
+ authentication_port: 1812
+ accounting_port: 1813
+ retries: 3
+ timeout: 4
+ role: primary
+ use_dnac_cert_for_pxgrid: False
+ pxgrid_enabled: True
+ cisco_ise_dtos:
+ - user_name: Cisco ISE
+ password: "12345"
+ fqdn: abs.cisco.com
+ ip_address: 10.0.0.2
+ description: Cisco ISE
+ trusted_server: True
+ ise_integration_wait_time: 20
+
+- name: Update an AAA server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_type: AAA
+ server_ip_address: 10.0.0.1
+ protocol: RADIUS_TACACS
+ retries: 3
+ timeout: 5
+
+- name: Update a Cisco ISE server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_type: ISE
+ server_ip_address: 10.0.0.2
+ protocol: RADIUS_TACACS
+ retries: 3
+ timeout: 5
+ use_dnac_cert_for_pxgrid: False
+ pxgrid_enabled: True
+ cisco_ise_dtos:
+ - user_name: Cisco ISE
+ password: "12345"
+ fqdn: abs.cisco.com
+ ip_address: 10.0.0.2
+ description: Cisco ISE
+
+- name: Delete an Authentication and Policy server.
+ cisco.dnac.ise_radius_integration_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: deleted
+ config_verify: True
+ config:
+ - authentication_policy_server:
+ server_ip_address: 10.0.0.1
+"""
+
+RETURN = r"""
+# Case_1: Successful creation of Authentication and Policy Server.
+response_1:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+
+# Case_2: Successful update of Authentication and Policy Server.
+response_2:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+
+# Case_3: Successful creation/update of network
+response_3:
+ description: A dictionary or list with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
+
+import copy
+import time
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts,
+ get_dict_result,
+ dnac_compare_equality,
+)
+
+
+class IseRadiusIntegration(DnacBase):
+ """Class containing member attributes for ise_radius_integration_workflow_manager module"""
+
+ def __init__(self, module):
+ super().__init__(module)
+ self.result["response"] = [
+ {"authenticationPolicyServer": {"response": {}, "msg": {}}}
+ ]
+ self.authentication_policy_server_obj_params = \
+ self.get_obj_params("authenticationPolicyServer")
+
+ def validate_input(self):
+ """
+ Checks if the configuration parameters provided in the playbook
+ meet the expected structure and data types,
+ as defined in the 'temp_spec' dictionary.
+
+ Parameters:
+ None
+
+ Returns:
+ self
+
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ return self
+
+ # temp_spec is the specification for the expected structure of configuration parameters
+ temp_spec = {
+ "authentication_policy_server": {
+ "type": "dict",
+ "server_type": {"type": 'string', "choices": ["AAA", "ISE"]},
+ "server_ip_address": {"type": 'string'},
+ "shared_secret": {"type": 'string'},
+ "protocol": {"type": 'string', "choices": ["TACACS", "RADIUS", "RADIUS_TACACS"]},
+ "encryption_scheme": {"type": 'string'},
+ "message_authenticator_code_key": {"type": 'string'},
+ "encryption_key": {"type": 'string'},
+ "authentication_port": {"type": 'integer'},
+ "accounting_port": {"type": 'integer'},
+ "retries": {"type": 'integer'},
+ "timeout": {"type": 'integer'},
+ "role": {"type": 'string'},
+ "pxgrid_enabled": {"type": 'bool'},
+ "use_dnac_cert_for_pxgrid": {"type": 'bool'},
+ "cisco_ise_dtos": {
+ "type": 'list',
+ "user_name": {"type": 'string'},
+ "password": {"type": 'string'},
+ "fqdn": {"type": 'string'},
+ "ip_address": {"type": 'string'},
+ "description": {"type": 'string'},
+ "ssh_key": {"type": 'string'},
+ },
+ "external_cisco_ise_ip_addr_dtos": {
+ "type": 'list',
+ "external_cisco_ise_ip_addresses": {
+ "type": 'list',
+ "external_ip_address": {"type": 'string'},
+ },
+ "ise_type": {"type": 'string'},
+ },
+ "trusted_server": {"type": 'bool'},
+ "ise_integration_wait_time": {"type": 'integer'}
+ }
+ }
+
+ # Validate playbook params against the specification (temp_spec)
+ valid_temp, invalid_params = validate_list_of_dicts(self.config, temp_spec)
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format("\n".join(invalid_params))
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+ self.log("Successfully validated playbook config params: {0}".format(valid_temp), "INFO")
+ self.msg = "Successfully validated input from the playbook"
+ self.status = "success"
+ return self
+
+ def requires_update(self, have, want, obj_params):
+ """
+ Check if the template config given requires update by comparing
+ current information with the requested information.
+
+ This method compares the current global pool, reserve pool,
+ or network details from Cisco Catalyst Center with the user-provided details
+ from the playbook, using a specified schema for comparison.
+
+ Parameters:
+ have (dict) - Current information from the Cisco Catalyst Center
+ (global pool, reserve pool, network details)
+ want (dict) - Users provided information from the playbook
+ obj_params (list of tuples) - A list of parameter mappings specifying which
+ Cisco Catalyst Center parameters (dnac_param)
+ correspond to the user-provided
+ parameters (ansible_param).
+
+ Returns:
+ bool - True if any parameter specified in obj_params differs between
+ current_obj and requested_obj, indicating that an update is required.
+ False if all specified parameters are equal.
+
+ """
+
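+ # Illustrative example: with obj_params = [("protocol", "protocol")], have = {"protocol": "RADIUS"}
+ # and want = {"protocol": "RADIUS_TACACS"}, the values differ, so this method returns True.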
+ current_obj = have
+ requested_obj = want
+ self.log("Current State (have): {0}".format(current_obj), "DEBUG")
+ self.log("Desired State (want): {0}".format(requested_obj), "DEBUG")
+
+ return any(not dnac_compare_equality(current_obj.get(dnac_param),
+ requested_obj.get(ansible_param))
+ for (dnac_param, ansible_param) in obj_params)
+
+ def get_obj_params(self, get_object):
+ """
+ Get the required comparison obj_params value
+
+ Parameters:
+ get_object (str) - identifier for the required obj_params
+
+ Returns:
+ obj_params (list) - obj_params value for comparison.
+ """
+
+ try:
+ obj_params = []
+ if get_object == "authenticationPolicyServer":
+ obj_params = [
+ ("protocol", "protocol"),
+ ("retries", "retries"),
+ ("timeoutSeconds", "timeoutSeconds")
+ ]
+ else:
+ raise ValueError("Received an unexpected value for 'get_object': {0}"
+ .format(get_object))
+ except Exception as msg:
+ self.log("Received exception: {0}".format(msg), "CRITICAL")
+
+ return obj_params
+
+ def get_auth_server_params(self, auth_server_info):
+ """
+ Process Authentication and Policy Server params from playbook data for
+ Authentication and Policy Server config in Cisco Catalyst Center.
+
+ Parameters:
+ auth_server_info (dict) - Cisco Catalyst Center data containing
+ information about the Authentication and Policy Server.
+
+ Returns:
+ dict or None - Processed Authentication and Policy Server data in a format suitable
+ for Cisco Catalyst Center configuration, or None if auth_server_info is empty.
+ """
+
+ if not auth_server_info:
+ self.log("Authentication and Policy Server data is empty", "INFO")
+ return None
+
+ self.log("Authentication and Policy Server Details: {0}".format(auth_server_info), "DEBUG")
+ auth_server = {
+ "authenticationPort": auth_server_info.get("authenticationPort"),
+ "accountingPort": auth_server_info.get("accountingPort"),
+ "isIseEnabled": auth_server_info.get("iseEnabled"),
+ "ipAddress": auth_server_info.get("ipAddress"),
+ "pxgridEnabled": auth_server_info.get("pxgridEnabled"),
+ "useDnacCertForPxgrid": auth_server_info.get("useDnacCertForPxgrid"),
+ "port": auth_server_info.get("port"),
+ "protocol": auth_server_info.get("protocol"),
+ "retries": str(auth_server_info.get("retries")),
+ "role": auth_server_info.get("role"),
+ "timeoutSeconds": str(auth_server_info.get("timeoutSeconds")),
+ "encryptionScheme": auth_server_info.get("encryptionScheme")
+ }
+ self.log("Formated Authentication and Policy Server details: {0}"
+ .format(auth_server), "DEBUG")
+ if auth_server.get("isIseEnabled") is True:
+ auth_server_ise_info = auth_server_info.get("ciscoIseDtos")
+ auth_server.update({"ciscoIseDtos": []})
+ for ise_credential in auth_server_ise_info:
+ auth_server.get("ciscoIseDtos").append({
+ "userName": ise_credential.get("userName"),
+ "fqdn": ise_credential.get("fqdn"),
+ "ipAddress": ise_credential.get("ipAddress"),
+ "subscriberName": ise_credential.get("subscriberName"),
+ "description": ise_credential.get("description")
+ })
+
+ return auth_server
+
+ def auth_server_exists(self, ipAddress):
+ """
+ Check if the Authentication and Policy Server with the given ipAddress exists
+
+ Parameters:
+ ipAddress (str) - The ipAddress of the Authentication and
+ Policy Server to check for existence.
+
+ Returns:
+ dict - A dictionary containing information about the
+ Authentication and Policy Server's existence:
+ - 'exists' (bool): True if the Authentication and Policy Server exists, False otherwise.
+ - 'id' (str or None): The ID of the Authentication and Policy Server if it exists
+ or None if it doesn't.
+ - 'details' (dict or None): Details of the Authentication and Policy Server if it exists
+ else None.
+ """
+
+ AuthServer = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ response = self.dnac._exec(
+ family="system_settings",
+ function='get_authentication_and_policy_servers',
+ )
+ if not isinstance(response, dict):
+ self.log("Failed to retrieve the Authentication and Policy Server details - "
+ "Response is not a dictionary", "CRITICAL")
+ return AuthServer
+
+ all_auth_server_details = response.get("response")
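+ # get_dict_result returns the entry whose 'ipAddress' matches the requested IP, or None when no entry matches.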
+ auth_server_details = get_dict_result(all_auth_server_details, "ipAddress", ipAddress)
+ self.log("Authentication and Policy Server Ip Address: {0}"
+ .format(ipAddress), "DEBUG")
+ self.log("Authentication and Policy Server details: {0}"
+ .format(auth_server_details), "DEBUG")
+ if not auth_server_details:
+ self.log("Global pool {0} does not exist".format(ipAddress), "INFO")
+ return AuthServer
+
+ AuthServer.update({"exists": True})
+ AuthServer.update({"id": auth_server_details.get("instanceUuid")})
+ AuthServer["details"] = self.get_auth_server_params(auth_server_details)
+
+ self.log("Formatted Authenticaion and Policy Server details: {0}".format(AuthServer), "DEBUG")
+ return AuthServer
+
+ def get_have_authentication_policy_server(self, config):
+ """
+ Get the current Authentication and Policy Server information from
+ Cisco Catalyst Center based on the provided playbook details.
+ Check the return value of the API with check_return_status().
+
+ Parameters:
+ config (dict) - Playbook details containing
+ Authentication and Policy Server configuration.
+
+ Returns:
+ self - The current object with updated
+ Authentication and Policy Server information.
+ """
+
+ AuthServer = {
+ "exists": False,
+ "details": None,
+ "id": None
+ }
+ authentication_policy_server = config.get("authentication_policy_server")
+ if authentication_policy_server is None:
+ self.msg = "authentication_policy_server in config is missing in the playbook"
+ self.status = "failed"
+ return self
+
+ ip_address = authentication_policy_server.get("server_ip_address")
+ if ip_address is None:
+ self.msg = "Missing parameter 'server_ip_address' is required."
+ self.status = "failed"
+ return self
+
+ AuthServer = self.auth_server_exists(ip_address)
+ self.log("Authentication and Policy Server exists: {0}"
+ .format(AuthServer.get("exists")), "DEBUG")
+ self.log("Authentication and Policy Server details: {0}"
+ .format(AuthServer.get("details")), "DEBUG")
+ self.log("Authentication and Policy Server Id: {0}"
+ .format(AuthServer.get("id")), "DEBUG")
+ self.have.update({"authenticationPolicyServer": AuthServer})
+ self.msg = "Collecting the Authentication and Policy Server " + \
+ "details from the Cisco Catalyst Center."
+ self.status = "success"
+ return self
+
+ def get_have(self, config):
+ """
+ Get the current Authentication and Policy Server details from Cisco Catalyst Center
+
+ Parameters:
+ config (dict) - Playbook details containing
+ Authentication and Policy Server configuration.
+
+ Returns:
+ self - The current object with updated
+ Authentication and Policy Server information.
+ """
+
+ if config.get("authentication_policy_server") is not None:
+ self.get_have_authentication_policy_server(config).check_return_status()
+
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.msg = "Successfully retrieved the details from the Cisco Catalyst Center"
+ self.status = "success"
+ return self
+
+ def get_want_authentication_policy_server(self, auth_policy_server):
+ """
+ Get all the Authentication Policy Server information from playbook
+ Set the status and the msg before returning from the API
+ Check the return value of the API with check_return_status()
+
+ Parameters:
+ auth_policy_server (dict) - Playbook authentication policy server details
+ containing IpAddress, authentication port, accounting port, Cisco ISE Details,
+ protocol, retries, role, timeout seconds, encryption details.
+
+ Returns:
+ self - The current object with updated desired Authentication Policy Server information.
+ """
+
+ auth_server = {}
+ auth_server_exists = self.have.get("authenticationPolicyServer").get("exists")
+ auth_server_details = self.have.get("authenticationPolicyServer").get("details")
+ trusted_server = False
+ if not auth_server_exists:
+ server_type = auth_policy_server.get("server_type")
+ if server_type not in ["ISE", "AAA", None]:
+ self.msg = "The server_type should either be ISE or AAA but not {0}.".format(server_type)
+ self.status = "failed"
+ return self
+
+ if server_type == "ISE":
+ auth_server.update({"isIseEnabled": True})
+ else:
+ auth_server.update({"isIseEnabled": False})
+ else:
+ auth_server.update({"isIseEnabled": auth_server_details.get("isIseEnabled")})
+
+ auth_server.update({"ipAddress": auth_policy_server.get("server_ip_address")})
+
+ auth_server_exists = self.have.get("authenticationPolicyServer").get("exists")
+
+ if not auth_server_exists:
+ shared_secret = auth_policy_server.get("shared_secret")
+ if not shared_secret:
+ self.msg = "Missing parameter 'shared_secret' is required."
+ self.status = "failed"
+ return self
+
+ shared_secret = str(shared_secret)
+ if len(shared_secret) < 4 or len(shared_secret) > 100:
+ self.msg = "The 'shared_secret' should contain between 4 and 100 characters."
+ self.status = "failed"
+ return self
+
+ invalid_chars = " ?<"
+ for char in invalid_chars:
+ if char in shared_secret:
+ self.msg = "The 'shared_secret' should not contain spaces or the characters '?', '<'."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"sharedSecret": shared_secret})
+
+ protocol = auth_policy_server.get("protocol")
+ if protocol not in ["RADIUS", "TACACS", "RADIUS_TACACS", None]:
+ self.msg = "protocol should either be ['RADIUS', 'TACACS', 'RADIUS_TACACS']." + \
+ "It should not be {0}".format(protocol)
+ self.status = "failed"
+ return self
+
+ if protocol is not None:
+ auth_server.update({"protocol": protocol})
+ else:
+ if not auth_server_exists:
+ auth_server.update({"protocol": "RADIUS"})
+ else:
+ auth_server.update({"protocol": auth_server_details.get("protocol")})
+
+ auth_server.update({"port": 49})
+
+ if not auth_server_exists:
+ encryption_scheme = auth_policy_server.get("encryption_scheme")
+ if encryption_scheme not in ["KEYWRAP", "RADSEC", None]:
+ self.msg = "The encryption_scheme should be in ['KEYWRAP', 'RADSEC']. " + \
+ "It should not be {0}.".format(encryption_scheme)
+ self.status = "failed"
+ return self
+
+ if encryption_scheme:
+ auth_server.update({"encryptionScheme": encryption_scheme})
+
+ if encryption_scheme == "KEYWRAP":
+ message_key = auth_policy_server.get("message_authenticator_code_key")
+ if not message_key:
+ self.msg = "The 'message_authenticator_code_key' should not be empty if the encryption_scheme is 'KEYWRAP'."
+ self.status = "failed"
+ return self
+
+ message_key = str(message_key)
+ message_key_length = len(message_key)
+ if message_key_length != 20:
+ self.msg = "The 'message_authenticator_code_key' should be exactly 20 characters."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"messageKey": message_key})
+
+ encryption_key = auth_policy_server.get("encryption_key")
+ if not encryption_key:
+ self.msg = "The encryption_key should not be empty if encryption_scheme is 'KEYWRAP'."
+ self.status = "failed"
+ return self
+
+ encryption_key = str(encryption_key)
+ encryption_key_length = len(encryption_key)
+ if encryption_key_length != 16:
+ self.msg = "The 'encryption_key' must be 16 characters long. It may contain alphanumeric and special characters."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"encryptionKey": encryption_key})
+
+ if not auth_server_exists:
+ authentication_port = auth_policy_server.get("authentication_port")
+ if not authentication_port:
+ authentication_port = 1812
+
+ if not str(authentication_port).isdigit():
+ self.msg = "The 'authentication_port' should contain only digits."
+ self.status = "failed"
+ return self
+
+ if authentication_port < 1 or authentication_port > 65535:
+ self.msg = "The 'authentication_port' should be from 1 to 65535."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"authenticationPort": authentication_port})
+ else:
+ auth_server.update({"authenticationPort": auth_server_details.get("authenticationPort")})
+
+ if not auth_server_exists:
+ accounting_port = auth_policy_server.get("accounting_port")
+ if not accounting_port:
+ accounting_port = 1813
+
+ if not str(accounting_port).isdigit():
+ self.msg = "The 'accounting_port' should contain only digits."
+ self.status = "failed"
+ return self
+
+ if accounting_port < 1 or accounting_port > 65535:
+ self.msg = "The 'accounting_port' should be from 1 to 65535."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"accountingPort": accounting_port})
+ else:
+ auth_server.update({"accountingPort": auth_server_details.get("accountingPort")})
+
+ retries = auth_policy_server.get("retries")
+ if not retries:
+ if not auth_server_exists:
+ auth_server.update({"retries": "3"})
+ else:
+ auth_server.update({"retries": auth_server_details.get("retries")})
+ else:
+ try:
+ retries_int = int(retries)
+ if retries_int < 1 or retries_int > 3:
+ self.msg = "The 'retries' should be from 1 to 3."
+ self.status = "failed"
+ return self
+ except ValueError:
+ self.msg = "The 'retries' should contain only from 0-9."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"retries": str(retries)})
+
+ timeout = auth_policy_server.get("timeout")
+ if not auth_server_exists:
+ default_timeout = "4"
+ else:
+ default_timeout = str(auth_server_details.get("timeoutSeconds"))
+
+ # If 'timeout' is not provided, use 'default_timeout'
+ if timeout is None:
+ auth_server.update({"timeoutSeconds": default_timeout})
+ else:
+ try:
+ timeout_int = int(timeout)
+ if timeout_int < 2 or timeout_int > 20:
+ self.msg = "The 'timeout' should be from 2 to 20."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"timeoutSeconds": str(timeout)})
+ except ValueError:
+ self.msg = "The 'time_out' must contain only digits."
+ self.status = "failed"
+ return self
+
+ # Determine the role based on whether the auth server exists and if the role is specified
+ if not auth_server_exists:
+ # Use the role from 'auth_policy_server' if available, otherwise default to "secondary"
+ role = auth_policy_server.get("role", "secondary")
+ else:
+ # Use the role from 'auth_server_details'
+ role = auth_server_details.get("role")
+
+ auth_server.update({"role": role})
+
+ if auth_server.get("isIseEnabled"):
+ cisco_ise_dtos = auth_policy_server.get("cisco_ise_dtos")
+ if not cisco_ise_dtos:
+ self.msg = "Missing parameter 'cisco_ise_dtos' " + \
+ "required when server_type is 'ISE'."
+ self.status = "failed"
+ return self
+
+ auth_server.update({"ciscoIseDtos": []})
+ position_ise_creds = 0
+ for ise_credential in cisco_ise_dtos:
+ auth_server.get("ciscoIseDtos").append({})
+ user_name = ise_credential.get("user_name")
+ if not user_name:
+ if not auth_server_exists:
+ self.msg = "Missing parameter 'user_name' is required when server_type is ISE."
+ self.status = "failed"
+ return self
+
+ user_name = auth_server_details.get("ciscoIseDtos")[0].get("userName")
+
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "userName": user_name
+ })
+
+ password = ise_credential.get("password")
+ if not password:
+ self.msg = "Missing parameter 'password' is required when server_type is ISE."
+ self.status = "failed"
+ return self
+
+ if not 4 <= len(password) <= 127:
+ self.msg = ""
+ self.status = "failed"
+ return self
+
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "password": password
+ })
+
+ fqdn = ise_credential.get("fqdn")
+ if not fqdn:
+ if not auth_server_exists:
+ self.msg = "Missing parameter 'fqdn' is required when server_type is ISE."
+ self.status = "failed"
+ return self
+
+ fqdn = auth_server_details.get("ciscoIseDtos")[0].get("fqdn")
+
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({"fqdn": fqdn})
+
+ ip_address = ise_credential.get("ip_address")
+ if not ip_address:
+ self.msg = "Missing parameter 'ip_address' is required when server_type is ISE."
+ self.status = "failed"
+ return self
+
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "ipAddress": ip_address
+ })
+
+ if not auth_server_exists:
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "subscriberName": "ersadmin"
+ })
+ else:
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "subscriberName": auth_server_details.get("ciscoIseDtos")[0].get("subscriberName")
+ })
+
+ description = ise_credential.get("description")
+ if description:
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "description": description
+ })
+
+ ssh_key = ise_credential.get("ssh_key")
+ if ssh_key:
+ auth_server.get("ciscoIseDtos")[position_ise_creds].update({
+ "sshkey": str(ssh_key)
+ })
+
+ position_ise_creds += 1
+
+ pxgrid_enabled = auth_policy_server.get("pxgrid_enabled")
+ if pxgrid_enabled is None:
+ if auth_server_exists:
+ pxgrid_enabled = auth_server_details.get("pxgridEnabled")
+ else:
+ pxgrid_enabled = True
+
+ auth_server.update({"pxgridEnabled": pxgrid_enabled})
+
+ use_dnac_cert_for_pxgrid = auth_policy_server.get("use_dnac_cert_for_pxgrid")
+ if use_dnac_cert_for_pxgrid is None:
+ if auth_server_exists:
+ use_dnac_cert_for_pxgrid = auth_server_details.get("useDnacCertForPxgrid")
+ else:
+ use_dnac_cert_for_pxgrid = False
+
+ auth_server.update({"useDnacCertForPxgrid": use_dnac_cert_for_pxgrid})
+
+ external_cisco_ise_ip_addr_dtos = auth_policy_server \
+ .get("external_cisco_ise_ip_addr_dtos")
+ if external_cisco_ise_ip_addr_dtos:
+ auth_server.update({"externalCiscoIseIpAddrDtos": []})
+ position_ise_addresses = 0
+ for external_cisco_ise in external_cisco_ise_ip_addr_dtos:
+ external_cisco_ise_ip_addresses = external_cisco_ise \
+ .get("external_cisco_ise_ip_addresses")
+ if external_cisco_ise_ip_addresses:
+ auth_server.get("externalCiscoIseIpAddrDtos").append({})
+ auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \
+ .update({"externalCiscoIseIpAddresses": []})
+ position_ise_address = 0
+ for external_ip_address in external_cisco_ise_ip_addresses:
+ auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \
+ .get("externalCiscoIseIpAddresses").append({})
+ auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \
+ .get("externalCiscoIseIpAddresses")[position_ise_address].update({
+ "externalIpAddress": external_ip_address.get("external_ip_address")
+ })
+ position_ise_address += 1
+ ise_type = external_cisco_ise.get("ise_type")
+ if ise_type:
+ auth_server.get("externalCiscoIseIpAddrDtos")[position_ise_addresses] \
+ .update({"type": ise_type})
+ position_ise_addresses += 1
+
+ trusted_server = auth_policy_server.get("trusted_server")
+ if auth_policy_server.get("trusted_server") is None:
+ trusted_server = True
+ else:
+ trusted_server = auth_policy_server.get("trusted_server")
+
+ self.want.update({"trusted_server": trusted_server})
+
+ ise_integration_wait_time = auth_policy_server.get("ise_integration_wait_time")
+ if ise_integration_wait_time is None:
+ ise_integration_wait_time = 20
+ else:
+ try:
+ ise_integration_wait_time_int = int(ise_integration_wait_time)
+ if ise_integration_wait_time_int < 1 or ise_integration_wait_time_int > 60:
+ self.msg = "The ise_integration_wait_time should be from 1 to 60 seconds."
+ self.status = "failed"
+ return self
+
+ except ValueError:
+ self.msg = "The 'ise_integration_wait_time' should contain only digits."
+ self.status = "failed"
+ return self
+
+ self.want.update({"ise_integration_wait_time": ise_integration_wait_time})
+
+ self.log("Authentication and Policy Server playbook details: {0}"
+ .format(auth_server), "DEBUG")
+ self.want.update({"authenticationPolicyServer": auth_server})
+ self.msg = "Collecting the Authentication and Policy Server details from the playbook"
+ self.status = "success"
+ return self
+
+ def get_want(self, config):
+ """
+ Get all the Authentication Policy Server related information from playbook
+
+ Parameters:
+ config (list of dict) - Playbook details
+
+ Returns:
+ None
+ """
+
+ if config.get("authentication_policy_server"):
+ auth_policy_server = config.get("authentication_policy_server")
+ self.get_want_authentication_policy_server(auth_policy_server).check_return_status()
+
+ self.log("Desired State (want): {0}".format(self.want), "INFO")
+ self.msg = "Successfully retrieved details from the playbook"
+ self.status = "success"
+ return self
+
+ def accept_cisco_ise_server_certificate(self, ipAddress, trusted_server):
+ """
+ Accept the Cisco ISE server certificate in Cisco Catalyst
+ Center provided in the playbook.
+
+ Parameters:
+ ipAddress (str) - The IP address of the Cisco ISE server whose certificate is to be accepted.
+ trusted_server (bool) - Indicates whether the certificate is trustworthy for the server.
+
+ Returns:
+ None
+ """
+
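+ # Look up the server's instance UUID by IP address, then mark its certificate as accepted (or rejected) via the SDK call below.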
+ try:
+ AuthServer = self.auth_server_exists(ipAddress)
+ if not AuthServer:
+ self.msg = "Error while retrieving the Authentication and Policy Server {0} \
+ details.".format(ipAddress)
+ self.log(str(self.msg, "CRITICAL"))
+ self.status = "failed"
+ return self
+
+ cisco_ise_id = AuthServer.get("id")
+ if not cisco_ise_id:
+ self.msg = "Error while retrieving the Authentication and Policy Server {0} id." \
+ .format(ipAddress)
+ self.log(str(self.msg, "CRITICAL"))
+ self.status = "failed"
+ return self
+
+ response = self.dnac._exec(
+ family="system_settings",
+ function="accept_cisco_ise_server_certificate_for_cisco_ise_server_integration",
+ params={
+ "id": cisco_ise_id,
+ "isCertAcceptedByUser": trusted_server
+ },
+ )
+ self.log("Received API response for 'accept_cisco_ise_server_certificate_"
+ "for_cisco_ise_server_integration': {0}".format(response), "DEBUG")
+ except Exception as msg:
+ self.log("Exception occurred while accepting the certificate of {0}: {1}"
+ .format(ipAddress, msg))
+ return None
+ return
+
+ def format_payload_for_update(self, have_auth_server, want_auth_server):
+ """
+ Format the parameter of the payload for updating the authentication and policy server
+ in accordance with the information in the Cisco Catalyst Center.
+
+ Parameters:
+ have_auth_server (dict) - Authentication and policy server information from the Cisco Catalyst Center.
+ want_auth_server (dict) - Authentication and policy server information from the Playbook.
+
+ Returns:
+ self - The current object with updated desired Authentication Policy Server information.
+ """
+
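+ # The authentication port, accounting port and role cannot be changed by the edit API; copy them from the current server when omitted and fail if the playbook tries to change them.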
+ update_params = ["authenticationPort", "accountingPort", "role"]
+ for item in update_params:
+ have_auth_server_item = have_auth_server.get(item)
+ want_auth_server_item = want_auth_server.get(item)
+ if want_auth_server_item is None:
+ want_auth_server.update({item: have_auth_server_item})
+
+ elif have_auth_server_item != want_auth_server_item:
+ self.msg = "Update does not support modifying '{0}'. Here you are trying to update '{1}'." \
+ .format(update_params, item)
+ self.status = "failed"
+ return self
+
+ have_auth_server_protocol = have_auth_server.get("protocol")
+ want_auth_server_protocol = want_auth_server.get("protocol")
+ if have_auth_server_protocol != want_auth_server_protocol:
+ if want_auth_server_protocol != "RADIUS_TACACS":
+ self.msg = "'protocol' can only be updated to 'RADIUS_TACACS' not from '{0}' to '{1}'" \
+ .format(have_auth_server_protocol, want_auth_server_protocol)
+ self.status = "failed"
+ return self
+
+ self.log("Successfully formatted the parameter of the payload for updating the authentication and policy server.")
+ self.msg = "Successfully formatted the parameter of the payload for updating the authentication and policy server."
+ self.status = "success"
+ return self
+
+ def update_auth_policy_server(self, ipAddress):
+ """
+ Update/Create Authentication and Policy Server in Cisco
+ Catalyst Center with fields provided in playbook.
+
+ Parameters:
+ ipAddress (str) - The IP address of the Authentication and Policy Server to be created or updated.
+
+ Returns:
+ None
+ """
+
+ result_auth_server = self.result.get("response")[0].get("authenticationPolicyServer")
+ result_auth_server.get("response").update({ipAddress: {}})
+
+ # Check Authentication and Policy Server exist, if not create and return
+ is_ise_server = self.want.get("authenticationPolicyServer").get("isIseEnabled")
+ if not self.have.get("authenticationPolicyServer").get("exists"):
+ auth_server_params = self.want.get("authenticationPolicyServer")
+ self.log("Desired State for Authentication and Policy Server (want): {0}"
+ .format(auth_server_params), "DEBUG")
+ response = self.dnac._exec(
+ family="system_settings",
+ function="add_authentication_and_policy_server_access_configuration",
+ params=auth_server_params,
+ )
+ validation_string_set = ("successfully created aaa settings", "operation sucessful")
+ response = response.get("response")
+ if response.get("errorcode") is not None:
+ self.msg = response.get("detail")
+ self.status = "failed"
+ return self
+
+ task_id = response.get("taskId")
+ is_certificate_required = False
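+ # Poll the task until it reports an error or its progress contains one of the expected success strings.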
+ while True:
+ task_details = self.get_task_details(task_id)
+ self.log('Getting task details from task ID {0}: {1}'.format(task_id, task_details), "DEBUG")
+ if task_details.get("isError") is True:
+ failure_reason = task_details.get("failureReason")
+ if failure_reason:
+ self.msg = str(failure_reason)
+ else:
+ self.msg = str(task_details.get("progress"))
+ self.status = "failed"
+ break
+
+ for validation_string in validation_string_set:
+ if validation_string in task_details.get("progress").lower():
+ self.result['changed'] = True
+ if validation_string == "operation sucessful":
+ is_certificate_required = True
+ self.status = "success"
+
+ if self.result['changed'] is True:
+ self.log("The task with task id '{0}' is successfully executed".format(task_id), "DEBUG")
+ break
+
+ self.log("Progress set to {0} for taskid: {1}".format(task_details.get('progress'), task_id), "DEBUG")
+
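+ # For Cisco ISE servers, accept the server certificate, wait for the configured integration time and then verify the ISE integration state.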
+ if is_ise_server and is_certificate_required:
+ trusted_server = self.want.get("trusted_server")
+ self.accept_cisco_ise_server_certificate(ipAddress, trusted_server)
+ ise_integration_wait_time = self.want.get("ise_integration_wait_time")
+ time.sleep(ise_integration_wait_time)
+ response = self.dnac._exec(
+ family="system_settings",
+ function='get_authentication_and_policy_servers',
+ params={"is_ise_enabled": True}
+ )
+ response = response.get("response")
+ if response is None:
+ self.msg = "Failed to retrieve the information from the API 'get_authentication_and_policy_servers' of {0}." \
+ .format(ipAddress)
+ self.status = "failed"
+ return
+
+ ise_server_details = get_dict_result(response, "ipAddress", ipAddress)
+ ise_state_set = {"FAILED", "INPROGRESS"}
+ state = ise_server_details.get("state")
+ if state in ise_state_set:
+ self.msg = "The Cisco ISE server '{0}' integration is not successful. The state is '{1}'. ".format(ipAddress, state) + \
+ "Expected states for successful integration are not in {0}.".format(ise_state_set)
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return
+
+ self.log("Successfully created Authentication and Policy Server '{0}'."
+ .format(ipAddress), "INFO")
+ result_auth_server.get("response").get(ipAddress) \
+ .update({
+ "authenticationPolicyServer Details": self.want
+ .get("authenticationPolicyServer")
+ })
+ result_auth_server.get("msg").update({
+ ipAddress: "Authentication and Policy Server Created Successfully"
+ })
+ return
+
+ # Authentication and Policy Server exists, check update is required
+ # Edit API not working, remove this
+ self.format_payload_for_update(self.have.get("authenticationPolicyServer").get("details"),
+ self.want.get("authenticationPolicyServer")).check_return_status()
+ is_ise_server_enabled = self.have.get("authenticationPolicyServer").get("details").get("isIseEnabled")
+ if not (is_ise_server_enabled or self.requires_update(self.have.get("authenticationPolicyServer").get("details"),
+ self.want.get("authenticationPolicyServer"),
+ self.authentication_policy_server_obj_params)):
+ self.log("Authentication and Policy Server '{0}' doesn't require an update"
+ .format(ipAddress), "INFO")
+ result_auth_server.get("response").get(ipAddress).update({
+ "Cisco Catalyst Center params":
+ self.have.get("authenticationPolicyServer").get("details")
+ })
+ result_auth_server.get("response").get(ipAddress).update({
+ "Id": self.have.get("authenticationPolicyServer").get("id")
+ })
+ result_auth_server.get("msg").update({
+ ipAddress: "Authentication and Policy Server doesn't require an update"
+ })
+ return
+
+ self.log("Authentication and Policy Server requires update", "DEBUG")
+
+ # Authentication and Policy Server exists
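+ # Build the edit payload from the desired state and attach the existing server's instance UUID before calling the edit API.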
+ auth_server_params = copy.deepcopy(self.want.get("authenticationPolicyServer"))
+ auth_server_params.update({"id": self.have.get("authenticationPolicyServer").get("id")})
+ self.log("Desired State for Authentication and Policy Server (want): {0}"
+ .format(auth_server_params), "DEBUG")
+ self.log("Current State for Authentication and Policy Server (have): {0}"
+ .format(self.have.get("authenticationPolicyServer").get("details")), "DEBUG")
+ response = self.dnac._exec(
+ family="system_settings",
+ function="edit_authentication_and_policy_server_access_configuration",
+ params=auth_server_params,
+ )
+ validation_string = "successfully updated aaa settings"
+ self.check_task_response_status(response, validation_string, "edit_authentication_and_policy_server_access_configuration").check_return_status()
+ self.log("Authentication and Policy Server '{0}' updated successfully"
+ .format(ipAddress), "INFO")
+ result_auth_server.get("response").get(ipAddress) \
+ .update({"Id": self.have.get("authenticationPolicyServer").get("id")})
+ result_auth_server.get("msg").update({
+ ipAddress: "Authentication and Policy Server Updated Successfully"
+ })
+ return
+
+ def get_diff_merged(self, config):
+ """
+ Update or create Authentication and Policy Server in
+ Cisco Catalyst Center based on the playbook details.
+
+ Parameters:
+ config (list of dict) - Playbook details containing
+ Authentication and Policy Server information.
+
+ Returns:
+ self
+ """
+
+ if config.get("authentication_policy_server") is not None:
+ ipAddress = config.get("authentication_policy_server").get("server_ip_address")
+ self.update_auth_policy_server(ipAddress)
+
+ return self
+
+ def delete_auth_policy_server(self, ipAddress):
+ """
+ Delete an Authentication and Policy Server by server IP address in Cisco Catalyst Center.
+
+ Parameters:
+ ipAddress (str) - The Ip address of the Authentication and Policy Server to be deleted.
+
+ Returns:
+ self
+ """
+
+ auth_server_exists = self.have.get("authenticationPolicyServer").get("exists")
+ result_auth_server = self.result.get("response")[0].get("authenticationPolicyServer")
+ if not auth_server_exists:
+ result_auth_server.get("response").update({
+ ipAddress: "Authentication and Policy Server not found"
+ })
+ self.msg = "Authentication and Policy Server not found."
+ self.status = "success"
+ return self
+
+ response = self.dnac._exec(
+ family="system_settings",
+ function="delete_authentication_and_policy_server_access_configuration",
+ params={"id": self.have.get("authenticationPolicyServer").get("id")},
+ )
+
+ self.log("Received API response for 'delete_authentication_and_"
+ "policy_server_access_configuration': {0}".format(response), "DEBUG")
+
+ # Check the task status
+ validation_string = "successfully deleted aaa settings"
+ self.check_task_response_status(response, validation_string, "delete_authentication_and_policy_server_access_configuration").check_return_status()
+ taskid = response.get("response").get("taskId")
+
+ # Update result information
+ result_auth_server.get("response").update({ipAddress: {}})
+ result_auth_server.get("response").get(ipAddress).update({"Task Id": taskid})
+ result_auth_server.get("msg").update({
+ ipAddress: "Authentication and Policy Server deleted successfully."
+ })
+ self.msg = "Authentication and Policy Server - {0} deleted successfully.".format(ipAddress)
+ self.status = "success"
+ return self
+
+ def get_diff_deleted(self, config):
+ """
+ Delete Authentication and Policy Server from the Cisco Catalyst Center.
+
+ Parameters:
+ config (list of dict) - Playbook details
+
+ Returns:
+ self
+ """
+
+ if config.get("authentication_policy_server") is not None:
+ ipAddress = config.get("authentication_policy_server").get("server_ip_address")
+ self.delete_auth_policy_server(ipAddress).check_return_status()
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Validating the Cisco Catalyst Center configuration with the playbook details
+ when state is merged (Create/Update).
+
+ Parameters:
+ config (dict) - Playbook details containing
+ Authentication and Policy Server configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Requested State (want): {0}".format(self.want), "INFO")
+ if config.get("authentication_policy_server") is not None:
+ self.log("Desired State of Authentication and Policy Server (want): {0}"
+ .format(self.want.get("authenticationPolicyServer")), "DEBUG")
+ self.log("Current State of Authentication and Policy Server (have): {0}"
+ .format(self.have.get("authenticationPolicyServer")
+ .get("details")), "DEBUG")
+ check_list = ["isIseEnabled", "ipAddress", "pxgridEnabled",
+ "useDnacCertForPxgrid", "port", "protocol",
+ "retries", "role", "timeoutSeconds", "encryptionScheme"]
+ auth_server_have = self.have.get("authenticationPolicyServer").get("details")
+ auth_server_want = self.want.get("authenticationPolicyServer")
+ for item in check_list:
+ if auth_server_have.get(item) and auth_server_want.get(item) and \
+ auth_server_have.get(item) != auth_server_want.get(item):
+ self.msg = "Authentication and Policy Server " + \
+ "Config is not applied to the Cisco Catalyst Center."
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated Authentication and Policy Server '{0}'."
+ .format(self.want.get("authenticationPolicyServer").get("ipAddress")), "INFO")
+ self.result.get("response")[0].get("authenticationPolicyServer").update({
+ "Validation": "Success"
+ })
+
+ self.msg = "Successfully validated the Authentication and Policy Server."
+ self.status = "success"
+ return self
+
+ def verify_diff_deleted(self, config):
+ """
+ Validating the Cisco Catalyst Center configuration with the playbook details
+ when state is deleted (delete).
+
+ Parameters:
+ config (dict) - Playbook details containing
+ Authentication and Policy Server configuration.
+
+ Returns:
+ self
+ """
+
+ self.get_have(config)
+ ipAddress = config.get("authentication_policy_server").get("server_ip_address")
+ self.log("Current State (have): {0}".format(self.have), "INFO")
+ self.log("Authentication and Policy Server deleted from the Cisco Catalyst Center: {0}"
+ .format(ipAddress), "INFO")
+ if config.get("authentication_policy_server") is not None:
+ auth_server_exists = self.have.get("authenticationPolicyServer").get("exists")
+ if auth_server_exists:
+ self.msg = "Authentication and Policy Server " + \
+ "Config is not applied to the Cisco Catalyst Center."
+ self.status = "failed"
+ return self
+
+ self.log("Successfully validated absence of Authentication and Policy Server '{0}'."
+ .format(config.get("authentication_policy_server").get("ip_address")), "INFO")
+ self.result.get("response")[0].get("authenticationPolicyServer").update({
+ "Validation": "Success"
+ })
+
+ self.msg = "Successfully validated the absence of Authentication and Policy Server."
+ self.status = "success"
+ return self
+
+ def reset_values(self):
+ """
+ Reset all necessary attributes to default values
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+
+ self.have.clear()
+ self.want.clear()
+ return
+
+
+def main():
+ """main entry point for module execution"""
+
+ # Define the specification for module arguments
+ element_spec = {
+ "dnac_host": {"type": 'str', "required": True},
+ "dnac_port": {"type": 'str', "default": '443'},
+ "dnac_username": {"type": 'str', "default": 'admin', "aliases": ['user']},
+ "dnac_password": {"type": 'str', "no_log": True},
+ "dnac_verify": {"type": 'bool', "default": 'True'},
+ "dnac_version": {"type": 'str', "default": '2.2.3.3'},
+ "dnac_debug": {"type": 'bool', "default": False},
+ "dnac_log": {"type": 'bool', "default": False},
+ "dnac_log_level": {"type": 'str', "default": 'WARNING'},
+ "dnac_log_file_path": {"type": 'str', "default": 'dnac.log'},
+ "dnac_log_append": {"type": 'bool', "default": True},
+ "config_verify": {"type": 'bool', "default": False},
+ 'dnac_api_task_timeout': {'type': 'int', "default": 1200},
+ 'dnac_task_poll_interval': {'type': 'int', "default": 2},
+ "config": {"type": 'list', "required": True, "elements": 'dict'},
+ "state": {"default": 'merged', "choices": ['merged', 'deleted']},
+ "validate_response_schema": {"type": 'bool', "default": True},
+ }
+
+ # Create an AnsibleModule object with argument specifications
+ module = AnsibleModule(argument_spec=element_spec, supports_check_mode=False)
+ ccc_ise_radius = IseRadiusIntegration(module)
+ state = ccc_ise_radius.params.get("state")
+ config_verify = ccc_ise_radius.params.get("config_verify")
+ if state not in ccc_ise_radius.supported_states:
+ ccc_ise_radius.status = "invalid"
+ ccc_ise_radius.msg = "State {0} is invalid".format(state)
+ ccc_ise_radius.check_return_status()
+
+ ccc_ise_radius.validate_input().check_return_status()
+
+ for config in ccc_ise_radius.config:
+ ccc_ise_radius.reset_values()
+ ccc_ise_radius.get_have(config).check_return_status()
+ if state != "deleted":
+ ccc_ise_radius.get_want(config).check_return_status()
+ ccc_ise_radius.get_diff_state_apply[state](config).check_return_status()
+ if config_verify:
+ ccc_ise_radius.verify_diff_state_apply[state](config).check_return_status()
+
+ module.exit_json(**ccc_ise_radius.result)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/cisco/dnac/plugins/modules/issues_enrichment_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/issues_enrichment_details_info.py
index 58d5c787c..8cb48abac 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/issues_enrichment_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/issues_enrichment_details_info.py
@@ -22,8 +22,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Issues GetIssueEnrichmentDetails
description: Complete reference of the GetIssueEnrichmentDetails API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/issues_info.py b/ansible_collections/cisco/dnac/plugins/modules/issues_info.py
index f6131d2b8..af77d42c4 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/issues_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/issues_info.py
@@ -24,11 +24,11 @@ options:
startTime:
description:
- StartTime query parameter. Starting epoch time in milliseconds of query time window.
- type: int
+ type: float
endTime:
description:
- EndTime query parameter. Ending epoch time in milliseconds of query time window.
- type: int
+ type: float
siteId:
description:
- SiteId query parameter. Assurance UUID value of the site in the issue content.
@@ -44,22 +44,22 @@ options:
priority:
description:
- >
- Priority query parameter. The issue's priority value (One of P1, P2, P3, or P4)(Use only when macAddress and
- deviceId are not provided).
+ Priority query parameter. The issue's priority value P1, P2, P3, or P4 (case insensitive) (Use only when
+ macAddress and deviceId are not provided).
type: str
- aiDriven:
+ issueStatus:
description:
- - >
- AiDriven query parameter. The issue's AI driven value (Yes or No)(Use only when macAddress and deviceId are
- not provided).
+ - IssueStatus query parameter. The issue's status value ACTIVE, IGNORED, RESOLVED (case insensitive).
type: str
- issueStatus:
+ aiDriven:
description:
- - IssueStatus query parameter. The issue's status value (One of ACTIVE, IGNORED, RESOLVED).
+ - >
+ AiDriven query parameter. The issue's AI driven value YES or NO (case insensitive) (Use only when macAddress
+ and deviceId are not provided).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Issues Issues
description: Complete reference of the Issues API.
@@ -90,12 +90,11 @@ EXAMPLES = r"""
deviceId: string
macAddress: string
priority: string
- aiDriven: string
issueStatus: string
+ aiDriven: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -104,7 +103,7 @@ dnac_response:
sample: >
{
"version": "string",
- "totalCount": 0,
+ "totalCount": "string",
"response": [
{
"issueId": "string",
@@ -112,7 +111,7 @@ dnac_response:
"siteId": "string",
"deviceId": "string",
"deviceRole": "string",
- "aiDriven": true,
+ "aiDriven": "string",
"clientMac": "string",
"issue_occurence_count": 0,
"status": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/itsm_cmdb_sync_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/itsm_cmdb_sync_status_info.py
index 2b3a47ea2..1fe2aa876 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/itsm_cmdb_sync_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/itsm_cmdb_sync_status_info.py
@@ -34,8 +34,8 @@ options:
- Date query parameter. Provide date in "YYYY-MM-DD" format.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for ITSM GetCMDBSyncStatus
description: Complete reference of the GetCMDBSyncStatus API.
@@ -65,7 +65,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_failed_info.py b/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_failed_info.py
index c2a6dca63..23a095aab 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_failed_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_failed_info.py
@@ -24,8 +24,8 @@ options:
- InstanceId query parameter. Instance Id of the failed event as in the Runtime Dashboard.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for ITSM GetFailedITSMEvents
description: Complete reference of the GetFailedITSMEvents API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_retry.py b/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_retry.py
index 64c97fe80..6293a8573 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_retry.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/itsm_integration_events_retry.py
@@ -24,8 +24,8 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for ITSM RetryIntegrationEvents
description: Complete reference of the RetryIntegrationEvents API.
@@ -53,7 +53,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_count_info.py
index e275eaf2c..bb30e950d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for LAN Automation LANAutomationSessionCount
description: Complete reference of the LANAutomationSessionCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_create.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_create.py
index b9c04f38d..c82c12240 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_create.py
@@ -63,8 +63,8 @@ options:
type: bool
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for LAN Automation LANAutomationStart
description: Complete reference of the LANAutomationStart API.
@@ -104,7 +104,6 @@ EXAMPLES = r"""
redistributeIsisToBgp: true
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_delete.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_delete.py
index d31484c67..c6bb435c5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_delete.py
@@ -20,8 +20,8 @@ options:
description: Id path parameter. LAN Automation id can be obtained from /dna/intent/api/v1/lan-automation/status.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for LAN Automation LANAutomationStop
description: Complete reference of the LANAutomationStop API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_by_serial_number_info.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_by_serial_number_info.py
index fa75bec89..913926bbd 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_by_serial_number_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_by_serial_number_info.py
@@ -13,7 +13,7 @@ description:
- >
Invoke this API to get the LAN Automation session logs for individual devices based on the given LAN Automation
session id and device serial number.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -37,8 +37,8 @@ options:
the remaining logs, please leave the query parameter blank.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for LAN Automation LANAutomationLogsForIndividualDevices
description: Complete reference of the LANAutomationLogsForIndividualDevices API.
@@ -69,7 +69,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_info.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_info.py
index 6a8aa1291..46daf2802 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_log_info.py
@@ -24,18 +24,18 @@ options:
offset:
description:
- Offset query parameter. Starting index of the LAN Automation session. Minimum value is 1.
- type: int
+ type: float
limit:
description:
- Limit query parameter. Number of LAN Automation sessions to be retrieved. Limit value can range between 1 to 10.
- type: int
+ type: float
id:
description:
- Id path parameter. LAN Automation session identifier.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for LAN Automation LANAutomationLog
description: Complete reference of the LANAutomationLog API.
@@ -83,7 +83,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_sessions_info.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_sessions_info.py
new file mode 100644
index 000000000..7401b7d39
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_sessions_info.py
@@ -0,0 +1,68 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: lan_automation_sessions_info
+short_description: Information module for Lan Automation Sessions
+description:
+- Get all Lan Automation Sessions.
+- Invoke this API to get the LAN Automation active session information.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for LAN Automation LANAutomationActiveSessions
+ description: Complete reference of the LANAutomationActiveSessions API.
+ link: https://developer.cisco.com/docs/dna-center/#!l-an-automation-active-sessions
+notes:
+ - SDK Method used are
+ lan_automation.LanAutomation.lan_automation_active_sessions,
+
+ - Paths used are
+ get /dna/intent/api/v1/lan-automation/sessions,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Lan Automation Sessions
+ cisco.dnac.lan_automation_sessions_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "maxSupportedCount": "string",
+ "activeSessions": "string",
+ "activeSessionIds": [
+ "string"
+ ]
+ },
+ "version": "string"
+ }
+"""
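
Outside of a playbook, the active-sessions lookup above corresponds to the SDK method named in the module notes, lan_automation.lan_automation_active_sessions. A hedged sketch follows, assuming a reachable Catalyst Center, illustrative credentials, and the attribute-style response access that dnacentersdk typically provides.

from dnacentersdk import api

dnac = api.DNACenterAPI(
    base_url="https://sandboxdnac.cisco.com:443",  # assumed lab host
    username="admin",                              # assumed credentials
    password="password",
    verify=False,
)

# GET /dna/intent/api/v1/lan-automation/sessions (method name from the notes above)
sessions = dnac.lan_automation.lan_automation_active_sessions()

# The response shape is assumed to mirror the documented sample above.
print(sessions.response.activeSessionIds)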
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_status_info.py
index 3680a97eb..f6e71c7ac 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_status_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_status_info.py
@@ -24,18 +24,18 @@ options:
offset:
description:
- Offset query parameter. Starting index of the LAN Automation session. Minimum value is 1.
- type: int
+ type: float
limit:
description:
- Limit query parameter. Number of LAN Automation sessions to be retrieved. Limit value can range between 1 to 10.
- type: int
+ type: float
id:
description:
- Id path parameter. LAN Automation session identifier.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for LAN Automation LANAutomationStatus
description: Complete reference of the LANAutomationStatus API.
@@ -83,7 +83,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -96,7 +95,7 @@ dnac_response:
"id": "string",
"discoveredDeviceSiteNameHierarchy": "string",
"primaryDeviceManagmentIPAddress": "string",
- "ipPoolList": [
+ "ipPools": [
{
"ipPoolName": "string",
"ipPoolRole": "string"
@@ -120,7 +119,12 @@ dnac_response:
]
}
],
- "redistributeIsisToBgp": true
+ "redistributeIsisToBgp": true,
+ "discoveryLevel": 0,
+ "discoveryTimeout": 0,
+ "discoveryDevices": [
+ {}
+ ]
}
],
"version": "string"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update.py
new file mode 100644
index 000000000..fa4209d7c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: lan_automation_update
+short_description: Resource module for Lan Automation Update
+description:
+- Manage operation update of the resource Lan Automation Update.
+- Invoke this API to stop LAN Automation and update the Loopback0 IP address of devices discovered in the current session.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. LAN Automation id can be obtained from /dna/intent/api/v1/lan-automation/status.
+ type: str
+ payload:
+ description: Lan Automation Update's payload.
+ elements: dict
+ suboptions:
+ deviceManagementIPAddress:
+ description: Device Management IP Address.
+ type: str
+ newLoopback0IPAddress:
+ description: New Loopback0 IP Address from LAN pool of Device Discovery Site.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for LAN Automation LANAutomationStopAndUpdateDevices
+ description: Complete reference of the LANAutomationStopAndUpdateDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!l-an-automation-stop-and-update-devices
+notes:
+ - SDK Method used are
+ lan_automation.LanAutomation.lan_automation_stop_and_update_devices,
+
+ - Paths used are
+ put /dna/intent/api/v1/lan-automation/{id},
+
+"""
+
+EXAMPLES = r"""
+- name: Update by id
+ cisco.dnac.lan_automation_update:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ id: string
+ payload:
+ - deviceManagementIPAddress: string
+ newLoopback0IPAddress: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_device.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_device.py
new file mode 100644
index 000000000..bf33ed6d0
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_device.py
@@ -0,0 +1,122 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: lan_automation_update_device
+short_description: Resource module for Lan Automation Updatedevice
+description:
+- Manage operation update of the resource Lan Automation Updatedevice.
+- >
+ Invoke this API to perform a DAY-N update on LAN Automation-related devices. Supported features include Loopback0
+ IP update, hostname update, link addition, and link deletion.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ feature:
+ description: Feature query parameter. Feature ID for the update. Supported feature
+ IDs include LOOPBACK0_IPADDRESS_UPDATE, HOSTNAME_UPDATE, LINK_ADD, and LINK_DELETE.
+ type: str
+ hostnameUpdateDevices:
+ description: Lan Automation Update Device's hostnameUpdateDevices.
+ elements: dict
+ suboptions:
+ deviceManagementIPAddress:
+ description: Device Management IP Address.
+ type: str
+ newHostName:
+ description: New hostname for the device.
+ type: str
+ type: list
+ linkUpdate:
+ description: Lan Automation Update Device's linkUpdate.
+ suboptions:
+ destinationDeviceInterfaceName:
+ description: Destination Device Interface Name.
+ type: str
+ destinationDeviceManagementIPAddress:
+ description: Destination Device Management IP Address.
+ type: str
+ ipPoolName:
+      description: Name of the IP LAN Pool; required for Link Add and it should be from the
+        discovery site of the source and destination devices.
+ type: str
+ sourceDeviceInterfaceName:
+ description: Source Device Interface Name.
+ type: str
+ sourceDeviceManagementIPAddress:
+ description: Source Device Management IP Address.
+ type: str
+ type: dict
+ loopbackUpdateDeviceList:
+ description: Lan Automation Update Device's loopbackUpdateDeviceList.
+ elements: dict
+ suboptions:
+ deviceManagementIPAddress:
+ description: Device Management IP Address.
+ type: str
+ newLoopback0IPAddress:
+      description: New Loopback0 IP Address from LAN Pool of Device Discovery Site (Shared
+ pool should not be used).
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for LAN Automation LANAutomationDeviceUpdate
+ description: Complete reference of the LANAutomationDeviceUpdate API.
+ link: https://developer.cisco.com/docs/dna-center/#!l-an-automation-device-update
+notes:
+ - SDK Method used are
+ lan_automation.LanAutomation.lan_automation_device_update,
+
+ - Paths used are
+ put /dna/intent/api/v1/lan-automation/updateDevice,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+  cisco.dnac.lan_automation_update_device:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ feature: string
+ hostnameUpdateDevices:
+ - deviceManagementIPAddress: string
+ newHostName: string
+ linkUpdate:
+ destinationDeviceInterfaceName: string
+ destinationDeviceManagementIPAddress: string
+ ipPoolName: string
+ sourceDeviceInterfaceName: string
+ sourceDeviceManagementIPAddress: string
+ loopbackUpdateDeviceList:
+ - deviceManagementIPAddress: string
+ newLoopback0IPAddress: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
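
The feature parameter selects which DAY-N operation the payload describes; for a hostname change only hostnameUpdateDevices is consumed. A hedged sketch using the SDK method named in the notes (lan_automation.lan_automation_device_update); the host, credentials, address, and hostname are illustrative assumptions.

from dnacentersdk import api

dnac = api.DNACenterAPI(
    base_url="https://sandboxdnac.cisco.com:443",  # assumed lab host
    username="admin",                              # assumed credentials
    password="password",
    verify=False,
)

# DAY-N hostname update: feature picks one of the documented operations
# (LOOPBACK0_IPADDRESS_UPDATE, HOSTNAME_UPDATE, LINK_ADD, LINK_DELETE).
result = dnac.lan_automation.lan_automation_device_update(
    feature="HOSTNAME_UPDATE",
    hostnameUpdateDevices=[
        {
            "deviceManagementIPAddress": "192.168.10.2",  # illustrative address
            "newHostName": "edge-sw-01",                  # illustrative hostname
        }
    ],
)
# Task tracking fields are assumed to follow the documented sample above.
print(result.response.taskId)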
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_v2.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_v2.py
new file mode 100644
index 000000000..2514d93f9
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_update_v2.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: lan_automation_update_v2
+short_description: Resource module for Lan Automation Update V2
+description:
+- Manage operation update of the resource Lan Automation Update V2.
+- >
+ Invoke this API to stop LAN Automation and update device parameters such as Loopback0 IP address and/or hostname
+ discovered in the current session.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. LAN Automation id can be obtained from /dna/intent/api/v1/lan-automation/status.
+ type: str
+ payload:
+ description: Lan Automation Update V2's payload.
+ elements: dict
+ suboptions:
+ deviceManagementIPAddress:
+ description: Device Management IP Address.
+ type: str
+ newHostName:
+ description: New hostname to be assigned to the device.
+ type: str
+ newLoopback0IPAddress:
+ description: New Loopback0 IP Address from LAN pool of Device Discovery Site.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for LAN Automation LANAutomationStopAndUpdateDevicesV2
+ description: Complete reference of the LANAutomationStopAndUpdateDevicesV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!l-an-automation-stop-and-update-devices-v-2
+notes:
+ - SDK Method used are
+ lan_automation.LanAutomation.lan_automation_stop_and_update_devices_v2,
+
+ - Paths used are
+ put /dna/intent/api/v2/lan-automation/{id},
+
+"""
+
+EXAMPLES = r"""
+- name: Update by id
+ cisco.dnac.lan_automation_update_v2:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ id: string
+ payload:
+ - deviceManagementIPAddress: string
+ newHostName: string
+ newLoopback0IPAddress: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/lan_automation_v2.py b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_v2.py
new file mode 100644
index 000000000..5c8b0bb55
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/lan_automation_v2.py
@@ -0,0 +1,166 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: lan_automation_v2
+short_description: Resource module for Lan Automation V2
+description:
+- Manage operation create of the resource Lan Automation V2.
+- >
+  Invoke the V2 LAN Automation Start API, which supports an optional auto-stop processing feature based on the provided
+ timeout or a specific device list, or both. The stop processing will be executed automatically when either of the
+ cases is satisfied, without specifically calling the stop API. The V2 API behaves similarly to V1 if no timeout or
+ device list is provided, and the user needs to call the stop API for LAN Automation stop processing. With the V2
+ API, the user can also specify the level up to which the devices can be LAN automated.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ payload:
+ description: Lan Automation V2's payload.
+ elements: dict
+ suboptions:
+ discoveredDeviceSiteNameHierarchy:
+ description: Discovered device site name.
+ type: str
+ discoveryDevices:
+ description: Lan Automation V2's discoveryDevices.
+ elements: dict
+ suboptions:
+ deviceHostName:
+ description: Hostname of the device.
+ type: str
+ deviceManagementIPAddress:
+ description: Management IP Address of the device.
+ type: str
+ deviceSerialNumber:
+ description: Serial number of the device.
+ type: str
+ deviceSiteNameHierarchy:
+ description: "Site name hierarchy for the device, must be a child site of\
+ \ the discoveredDeviceSiteNameHierarchy or same if it\u2019s not area\
+ \ type."
+ type: str
+ type: list
+ discoveryLevel:
+      description: Level below primary seed device up to which the new devices will
+ be LAN Automated by this session, level + seed = tier. Supported range for
+ level is 1-5, default level is 2.
+ type: int
+ discoveryTimeout:
+ description: Discovery timeout in minutes. Until this time, the stop processing
+ will not be triggered. Any device contacting after the provided discovery
+ timeout will not be processed, and a device reset and reload will be attempted
+ to bring it back to the PnP agent state before process completion. The supported
+ timeout range is in minutes 20-10080. If both timeout and discovery devices
+ list are provided, the stop processing will be attempted whichever happens
+ earlier. Users can always use the LAN Automation delete API to force stop
+ processing.
+ type: int
+ hostNameFileId:
+ description: Use /dna/intent/api/v1/file/namespace/nw_orch API to get the file
+ ID for the already uploaded file in the nw_orch namespace.
+ type: str
+ hostNamePrefix:
+ description: Host name prefix assigned to the discovered device.
+ type: str
+ ipPools:
+ description: Lan Automation V2's ipPools.
+ elements: dict
+ suboptions:
+ ipPoolName:
+ description: Name of the IP pool.
+ type: str
+ ipPoolRole:
+ description: Role of the IP pool. Supported roles are MAIN_POOL and PHYSICAL_LINK_POOL.
+ type: str
+ type: list
+ isisDomainPwd:
+ description: IS-IS domain password in plain text.
+ type: str
+ multicastEnabled:
+ description: Enable underlay native multicast.
+ type: bool
+ peerDeviceManagmentIPAddress:
+ description: Peer seed management IP address.
+ type: str
+ primaryDeviceInterfaceNames:
+ description: The list of interfaces on primary seed via which the discovered
+ devices are connected.
+ elements: str
+ type: list
+ primaryDeviceManagmentIPAddress:
+ description: Primary seed management IP address.
+ type: str
+ redistributeIsisToBgp:
+ description: Advertise LAN Automation summary route into BGP.
+ type: bool
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for LAN Automation LANAutomationStartV2
+ description: Complete reference of the LANAutomationStartV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!l-an-automation-start-v-2
+notes:
+ - SDK Method used are
+ lan_automation.LanAutomation.lan_automation_start_v2,
+
+ - Paths used are
+ post /dna/intent/api/v2/lan-automation,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.lan_automation_v2:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ payload:
+ - discoveredDeviceSiteNameHierarchy: string
+ discoveryDevices:
+ - deviceHostName: string
+ deviceManagementIPAddress: string
+ deviceSerialNumber: string
+ deviceSiteNameHierarchy: string
+ discoveryLevel: 0
+ discoveryTimeout: 0
+ hostNameFileId: string
+ hostNamePrefix: string
+ ipPools:
+ - ipPoolName: string
+ ipPoolRole: string
+ isisDomainPwd: string
+ multicastEnabled: true
+ peerDeviceManagmentIPAddress: string
+ primaryDeviceInterfaceNames:
+ - string
+ primaryDeviceManagmentIPAddress: string
+ redistributeIsisToBgp: true
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
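
The auto-stop behaviour described above means stop processing fires on whichever condition is met first, the discovery timeout or discovery of every device in discoveryDevices, without a separate call to the stop API. A hedged sketch using the SDK method named in the notes (lan_automation.lan_automation_start_v2); the site hierarchy, pool, addresses, serial number, and the payload keyword are illustrative assumptions.

from dnacentersdk import api

dnac = api.DNACenterAPI(
    base_url="https://sandboxdnac.cisco.com:443",  # assumed lab host
    username="admin",                              # assumed credentials
    password="password",
    verify=False,
)

# Stop processing triggers on whichever comes first: the 60-minute timeout or
# discovery of the single listed device, with no explicit call to the stop API.
payload = [
    {
        "discoveredDeviceSiteNameHierarchy": "Global/Area/Building",   # illustrative
        "primaryDeviceManagmentIPAddress": "10.0.0.1",                 # illustrative
        "primaryDeviceInterfaceNames": ["GigabitEthernet1/0/1"],
        "ipPools": [{"ipPoolName": "underlay_pool", "ipPoolRole": "MAIN_POOL"}],
        "discoveryLevel": 2,
        "discoveryTimeout": 60,
        "discoveryDevices": [{"deviceSerialNumber": "FOC00000000"}],   # illustrative
    }
]
result = dnac.lan_automation.lan_automation_start_v2(payload=payload)
print(result.response.taskId)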
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_device_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_device_count_info.py
index 0ec3976ac..128a9cd38 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_device_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_device_count_info.py
@@ -40,15 +40,15 @@ options:
- Smart_account_id query parameter. Smart account id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses DeviceCountDetails2
- description: Complete reference of the DeviceCountDetails2 API.
- link: https://developer.cisco.com/docs/dna-center/#!device-count-details-2
+- name: Cisco DNA Center documentation for Licenses DeviceCountDetails
+ description: Complete reference of the DeviceCountDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!device-count-details
notes:
- SDK Method used are
- licenses.Licenses.device_count_details2,
+ licenses.Licenses.device_count_details,
- Paths used are
get /dna/intent/api/v1/licenses/device/count,
@@ -74,7 +74,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_device_deregistration.py b/ansible_collections/cisco/dnac/plugins/modules/license_device_deregistration.py
index ddf70df11..2623fe369 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_device_deregistration.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_device_deregistration.py
@@ -21,15 +21,15 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses DeviceDeregistration2
- description: Complete reference of the DeviceDeregistration2 API.
- link: https://developer.cisco.com/docs/dna-center/#!device-deregistration-2
+- name: Cisco DNA Center documentation for Licenses DeviceDeregistration
+ description: Complete reference of the DeviceDeregistration API.
+ link: https://developer.cisco.com/docs/dna-center/#!device-deregistration
notes:
- SDK Method used are
- licenses.Licenses.device_deregistration2,
+ licenses.Licenses.device_deregistration,
- Paths used are
put /dna/intent/api/v1/licenses/smartAccount/virtualAccount/deregister,
@@ -50,7 +50,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_device_license_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_device_license_details_info.py
index 965c577e3..a1d8a286c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_device_license_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_device_license_details_info.py
@@ -24,15 +24,15 @@ options:
- Device_uuid path parameter. Id of device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses DeviceLicenseDetails2
- description: Complete reference of the DeviceLicenseDetails2 API.
- link: https://developer.cisco.com/docs/dna-center/#!device-license-details-2
+- name: Cisco DNA Center documentation for Licenses DeviceLicenseDetails
+ description: Complete reference of the DeviceLicenseDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!device-license-details
notes:
- SDK Method used are
- licenses.Licenses.device_license_details2,
+ licenses.Licenses.device_license_details,
- Paths used are
get /dna/intent/api/v1/licenses/device/{device_uuid}/details,
@@ -54,70 +54,66 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
- elements: dict
+ type: dict
sample: >
- [
- {
- "device_uuid": "string",
- "site": "string",
- "model": "string",
- "license_mode": "string",
- "is_license_expired": true,
- "software_version": "string",
- "network_license": "string",
- "evaluation_license_expiry": "string",
- "device_name": "string",
- "device_type": "string",
- "dna_level": "string",
- "virtual_account_name": "string",
- "ip_address": "string",
- "mac_address": "string",
- "sntc_status": "string",
- "feature_license": [
- "string"
- ],
- "has_sup_cards": true,
- "udi": "string",
- "stacked_devices": [
+ {
+ "device_uuid": "string",
+ "site": "string",
+ "model": "string",
+ "license_mode": "string",
+ "is_license_expired": true,
+ "software_version": "string",
+ "network_license": "string",
+ "evaluation_license_expiry": "string",
+ "device_name": "string",
+ "device_type": "string",
+ "dna_level": "string",
+ "virtual_account_name": "string",
+ "ip_address": "string",
+ "mac_address": "string",
+ "sntc_status": "string",
+ "feature_license": [
+ "string"
+ ],
+ "has_sup_cards": true,
+ "udi": "string",
+ "stacked_devices": [
+ {
+ "mac_address": "string",
+ "id": 0,
+ "role": "string",
+ "serial_number": "string"
+ }
+ ],
+ "is_stacked_device": true,
+ "access_points": [
+ {
+ "ap_type": "string",
+ "count": "string"
+ }
+ ],
+ "chassis_details": {
+ "board_serial_number": "string",
+ "modules": [
{
- "mac_address": "string",
- "id": "string",
- "role": "string",
- "serial_number": "string"
+ "module_type": "string",
+ "module_name": "string",
+ "serial_number": "string",
+ "id": 0
}
],
- "is_stacked_device": true,
- "access_points": [
+ "supervisor_cards": [
{
- "ap_type": "string",
- "count": "string"
+ "serial_number": "string",
+ "supervisor_card_type": "string",
+ "status": "string"
}
],
- "chassis_details": {
- "board_serial_number": "string",
- "modules": [
- {
- "module_type": "string",
- "module_name": "string",
- "serial_number": "string",
- "id": "string"
- }
- ],
- "supervisor_cards": [
- {
- "serial_number": "string",
- "supervisor_card_type": "string",
- "status": "string"
- }
- ],
- "port": 0
- }
+ "port": 0
}
- ]
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_device_license_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_device_license_summary_info.py
index c2806d39a..38cf9ef15 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_device_license_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_device_license_summary_info.py
@@ -22,7 +22,7 @@ options:
page_number:
description:
- Page_number query parameter. Page number of response.
- type: int
+ type: float
order:
description:
- Order query parameter. Sorting order.
@@ -33,19 +33,22 @@ options:
type: str
dna_level:
description:
- - Dna_level query parameter. Device Cisco DNA license level.
+ - Dna_level query parameter. Device Cisco DNA license level. The valid values are Advantage, Essentials.
type: str
device_type:
description:
- - Device_type query parameter. Type of device.
+ - Device_type query parameter. Type of device. The valid values are Routers, Switches and Hubs, Wireless Controller.
type: str
limit:
description:
- Limit query parameter.
- type: int
+ type: float
registration_status:
description:
- - Registration_status query parameter. Smart license registration status of device.
+ - >
+ Registration_status query parameter. Smart license registration status of device. The valid values are
+ Unknown, NA, Unregistered, Registered, Registration_expired, Reservation_in_progress, Registered_slr,
+ Registered_plr, Registered_satellite.
type: str
virtual_account_name:
description:
@@ -54,21 +57,21 @@ options:
smart_account_id:
description:
- Smart_account_id query parameter. Id of smart account.
- type: int
+ type: str
device_uuid:
description:
- Device_uuid query parameter. Id of device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses DeviceLicenseSummary2
- description: Complete reference of the DeviceLicenseSummary2 API.
- link: https://developer.cisco.com/docs/dna-center/#!device-license-summary-2
+- name: Cisco DNA Center documentation for Licenses DeviceLicenseSummary
+ description: Complete reference of the DeviceLicenseSummary API.
+ link: https://developer.cisco.com/docs/dna-center/#!device-license-summary
notes:
- SDK Method used are
- licenses.Licenses.device_license_summary2,
+ licenses.Licenses.device_license_summary,
- Paths used are
get /dna/intent/api/v1/licenses/device/summary,
@@ -94,56 +97,57 @@ EXAMPLES = r"""
limit: 0
registration_status: string
virtual_account_name: string
- smart_account_id: 0
+ smart_account_id: string
device_uuid: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
- elements: dict
+ type: dict
sample: >
- [
- {
- "authorization_status": "string",
- "last_updated_time": "string",
- "is_performance_allowed": true,
- "sle_auth_code": "string",
- "throughput_level": "string",
- "hsec_status": "string",
- "device_uuid": "string",
- "site": "string",
- "total_access_point_count": 0,
- "model": "string",
- "is_wireless_capable": true,
- "registration_status": "string",
- "sle_state": "string",
- "performance_license": "string",
- "license_mode": "string",
- "is_license_expired": true,
- "software_version": "string",
- "reservation_status": "string",
- "is_wireless": true,
- "network_license": "string",
- "evaluation_license_expiry": "string",
- "wireless_capable_network_license": "string",
- "device_name": "string",
- "device_type": "string",
- "dna_level": "string",
- "virtual_account_name": "string",
- "last_successful_rum_usage_upload_time": "string",
- "ip_address": "string",
- "wireless_capable_dna_license": "string",
- "mac_address": "string",
- "customer_tag1": "string",
- "customer_tag2": "string",
- "customer_tag3": "string",
- "customer_tag4": "string",
- "smart_account_name": "string"
- }
- ]
+ {
+ "response": [
+ {
+ "authorization_status": "string",
+ "last_updated_time": "string",
+ "is_performance_allowed": true,
+ "sle_auth_code": "string",
+ "throughput_level": "string",
+ "hsec_status": "string",
+ "device_uuid": "string",
+ "site": "string",
+ "total_access_point_count": 0,
+ "model": "string",
+ "is_wireless_capable": true,
+ "registration_status": "string",
+ "sle_state": "string",
+ "performance_license": "string",
+ "license_mode": "string",
+ "is_license_expired": true,
+ "software_version": "string",
+ "reservation_status": "string",
+ "is_wireless": true,
+ "network_license": "string",
+ "evaluation_license_expiry": "string",
+ "wireless_capable_network_license": "string",
+ "device_name": "string",
+ "device_type": "string",
+ "dna_level": "string",
+ "virtual_account_name": "string",
+ "last_successful_rum_usage_upload_time": "string",
+ "ip_address": "string",
+ "wireless_capable_dna_license": "string",
+ "mac_address": "string",
+ "customer_tag1": "string",
+ "customer_tag2": "string",
+ "customer_tag3": "string",
+ "customer_tag4": "string",
+ "smart_account_name": "string"
+ }
+ ],
+ "version": "string"
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_device_registration.py b/ansible_collections/cisco/dnac/plugins/modules/license_device_registration.py
index de878f790..b963b5a79 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_device_registration.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_device_registration.py
@@ -24,15 +24,15 @@ options:
description: Virtual_account_name path parameter. Name of virtual account.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses DeviceRegistration2
- description: Complete reference of the DeviceRegistration2 API.
- link: https://developer.cisco.com/docs/dna-center/#!device-registration-2
+- name: Cisco DNA Center documentation for Licenses DeviceRegistration
+ description: Complete reference of the DeviceRegistration API.
+ link: https://developer.cisco.com/docs/dna-center/#!device-registration
notes:
- SDK Method used are
- licenses.Licenses.device_registration2,
+ licenses.Licenses.device_registration,
- Paths used are
put /dna/intent/api/v1/licenses/smartAccount/virtualAccount/{virtual_account_name}/register,
@@ -54,7 +54,6 @@ EXAMPLES = r"""
virtual_account_name: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_smart_account_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_smart_account_details_info.py
index 70685c2ed..b95e7ed7b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_smart_account_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_smart_account_details_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Licenses SmartAccountDetails
description: Complete reference of the SmartAccountDetails API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_term_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_term_details_info.py
index d0bf73d4d..190b1fa66 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_term_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_term_details_info.py
@@ -34,15 +34,15 @@ options:
- Device_type query parameter. Type of device like router, switch, wireless or ise.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses LicenseTermDetails2
- description: Complete reference of the LicenseTermDetails2 API.
- link: https://developer.cisco.com/docs/dna-center/#!license-term-details-2
+- name: Cisco DNA Center documentation for Licenses LicenseTermDetails
+ description: Complete reference of the LicenseTermDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!license-term-details
notes:
- SDK Method used are
- licenses.Licenses.license_term_details2,
+ licenses.Licenses.license_term_details,
- Paths used are
get /dna/intent/api/v1/licenses/term/smartAccount/{smart_account_id}/virtualAccount/{virtual_account_name},
@@ -66,7 +66,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_usage_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_usage_details_info.py
index 3fd4d5466..b60a19aaf 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_usage_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_usage_details_info.py
@@ -34,15 +34,15 @@ options:
- Device_type query parameter. Type of device like router, switch, wireless or ise.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses LicenseUsageDetails2
- description: Complete reference of the LicenseUsageDetails2 API.
- link: https://developer.cisco.com/docs/dna-center/#!license-usage-details-2
+- name: Cisco DNA Center documentation for Licenses LicenseUsageDetails
+ description: Complete reference of the LicenseUsageDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!license-usage-details
notes:
- SDK Method used are
- licenses.Licenses.license_usage_details2,
+ licenses.Licenses.license_usage_details,
- Paths used are
get /dna/intent/api/v1/licenses/usage/smartAccount/{smart_account_id}/virtualAccount/{virtual_account_name},
@@ -66,7 +66,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -109,6 +108,24 @@ dnac_response:
"license_count": 0
}
]
+ },
+ "purchased_ise_license": {
+ "total_license_count": 0,
+ "license_count_by_type": [
+ {
+ "license_type": "string",
+ "license_count": 0
+ }
+ ]
+ },
+ "used_ise_license": {
+ "total_license_count": 0,
+ "license_count_by_type": [
+ {
+ "license_type": "string",
+ "license_count": 0
+ }
+ ]
}
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_change.py b/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_change.py
index 762e01540..36ece0e11 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_change.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_change.py
@@ -27,15 +27,15 @@ options:
description: Virtual_account_name path parameter. Name of target virtual account.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses ChangeVirtualAccount2
- description: Complete reference of the ChangeVirtualAccount2 API.
- link: https://developer.cisco.com/docs/dna-center/#!change-virtual-account-2
+- name: Cisco DNA Center documentation for Licenses ChangeVirtualAccount
+ description: Complete reference of the ChangeVirtualAccount API.
+ link: https://developer.cisco.com/docs/dna-center/#!change-virtual-account
notes:
- SDK Method used are
- licenses.Licenses.change_virtual_account2,
+ licenses.Licenses.change_virtual_account,
- Paths used are
post /dna/intent/api/v1/licenses/smartAccount/{smart_account_id}/virtualAccount/{virtual_account_name}/device/transfer,
@@ -58,7 +58,6 @@ EXAMPLES = r"""
virtual_account_name: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_details_info.py
index 72a2f1041..a039a85c1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/license_virtual_account_details_info.py
@@ -24,15 +24,15 @@ options:
- Smart_account_id path parameter. Id of smart account.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Licenses VirtualAccountDetails2
- description: Complete reference of the VirtualAccountDetails2 API.
- link: https://developer.cisco.com/docs/dna-center/#!virtual-account-details-2
+- name: Cisco DNA Center documentation for Licenses VirtualAccountDetails
+ description: Complete reference of the VirtualAccountDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!virtual-account-details
notes:
- SDK Method used are
- licenses.Licenses.virtual_account_details2,
+ licenses.Licenses.virtual_account_details,
- Paths used are
get /dna/intent/api/v1/licenses/smartAccount/{smart_account_id}/virtualAccounts,
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/maps_export.py b/ansible_collections/cisco/dnac/plugins/modules/maps_export.py
new file mode 100644
index 000000000..e69633bd6
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/maps_export.py
@@ -0,0 +1,66 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: maps_export
+short_description: Resource module for Maps Export
+description:
+- Manage operation create of the resource Maps Export.
+- Allows exporting a Map archive in an XML interchange format along with the associated images.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ siteHierarchyUuid:
+ description: SiteHierarchyUuid path parameter. The site hierarchy element UUID to
+ export, all child elements starting at this hierarchy element will be included.
+ Limited to a hierarchy that contains 500 or fewer maps.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites ExportMapArchive
+ description: Complete reference of the ExportMapArchive API.
+ link: https://developer.cisco.com/docs/dna-center/#!export-map-archive
+notes:
+ - SDK Method used are
+ sites.Sites.export_map_archive,
+
+ - Paths used are
+ post /dna/intent/api/v1/maps/export/{siteHierarchyUuid},
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.maps_export:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ siteHierarchyUuid: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/maps_import.py b/ansible_collections/cisco/dnac/plugins/modules/maps_import.py
new file mode 100644
index 000000000..1d7872589
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/maps_import.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: maps_import
+short_description: Resource module for Maps Import
+description:
+- Manage operation delete of the resource Maps Import.
+- >
+  Cancels a previously initiated import, allowing the system to clean up cached resources about that import data, and
+ ensures the import cannot accidentally be performed / approved at a later time.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ importContextUuid:
+ description: ImportContextUuid path parameter. The unique import context UUID given
+ by a previous call to Start Import API.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites ImportMapArchiveCancelAnImport
+ description: Complete reference of the ImportMapArchiveCancelAnImport API.
+ link: https://developer.cisco.com/docs/dna-center/#!import-map-archive-cancel-an-import
+notes:
+ - SDK Method used are
+ sites.Sites.import_map_archive_cancel_an_import,
+
+ - Paths used are
+ delete /dna/intent/api/v1/maps/import/{importContextUuid},
+
+"""
+
+EXAMPLES = r"""
+- name: Delete by id
+ cisco.dnac.maps_import:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ importContextUuid: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {}
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/maps_import_perform.py b/ansible_collections/cisco/dnac/plugins/modules/maps_import_perform.py
new file mode 100644
index 000000000..bb0630483
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/maps_import_perform.py
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: maps_import_perform
+short_description: Resource module for Maps Import Perform
+description:
+- Manage operation create of the resource Maps Import Perform.
+- >
+  For a previously initiated import, approves the import to be performed, accepting that data loss may occur. A Map
+ import will fully replace existing Maps data for the sites defined in the archive. The Map Archive Import Status
+ API /maps/import/${contextUuid}/status should always be checked to validate the pre-import validation output prior
+ to performing the import.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ importContextUuid:
+ description: ImportContextUuid path parameter. The unique import context UUID given
+ by a previous call of Start Import API.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites ImportMapArchivePerformImport
+ description: Complete reference of the ImportMapArchivePerformImport API.
+ link: https://developer.cisco.com/docs/dna-center/#!import-map-archive-perform-import
+notes:
+ - SDK Method used are
+ sites.Sites.import_map_archive_perform_import,
+
+ - Paths used are
+ post /dna/intent/api/v1/maps/import/{importContextUuid}/perform,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.maps_import_perform:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ importContextUuid: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {}
+"""
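
Taken together, the maps import modules describe a three-step workflow: start an import, check its status to review the pre-import validation, then either perform or cancel it. A hedged sketch of that sequence using the SDK methods named in the module notes; the archive-upload arguments, keyword names, and context-UUID handling are assumptions about the dnacentersdk call signatures.

from dnacentersdk import api

dnac = api.DNACenterAPI(
    base_url="https://sandboxdnac.cisco.com:443",  # assumed lab host
    username="admin",                              # assumed credentials
    password="password",
    verify=False,
)

# 1. Start the import (arguments for uploading the tar.gz archive are omitted here
#    and depend on the SDK version). The documented return is a string that is
#    assumed to carry the import context UUID.
import_context_uuid = dnac.sites.import_map_archive_start_import()

# 2. Always review the pre-import validation before approving anything.
status = dnac.sites.import_map_archive_import_status(
    import_context_uuid=import_context_uuid  # keyword name is an assumption
)
print(status)

# 3. Approve the import (existing map data for the archived sites is replaced),
#    or call sites.import_map_archive_cancel_an_import to discard it instead.
dnac.sites.import_map_archive_perform_import(
    import_context_uuid=import_context_uuid
)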
diff --git a/ansible_collections/cisco/dnac/plugins/modules/maps_import_start.py b/ansible_collections/cisco/dnac/plugins/modules/maps_import_start.py
new file mode 100644
index 000000000..998350d70
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/maps_import_start.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: maps_import_start
+short_description: Resource module for Maps Import Start
+description:
+- Manage operation create of the resource Maps Import Start.
+- >
+ Initiates a map archive import of a tar.gz file. The archive must consist of one xmlDir/MapsImportExport.xml map
+ descriptor file, and 1 or more images for the map areas nested under /images folder.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options: {}
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites ImportMapArchiveStartImport
+ description: Complete reference of the ImportMapArchiveStartImport API.
+ link: https://developer.cisco.com/docs/dna-center/#!import-map-archive-start-import
+notes:
+ - SDK Method used are
+ sites.Sites.import_map_archive_start_import,
+
+ - Paths used are
+ post /dna/intent/api/v1/maps/import/start,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.maps_import_start:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: str
+ sample: >
+ "'string'"
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/maps_import_status_info.py b/ansible_collections/cisco/dnac/plugins/modules/maps_import_status_info.py
new file mode 100644
index 000000000..34cb2ce35
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/maps_import_status_info.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: maps_import_status_info
+short_description: Information module for Maps Import Status
+description:
+- Get all Maps Import Status.
+- >
+ Gets the status of a map archive import operation. For a map archive import that has just been initiated, will
+ provide the result of validation of the archive and a pre-import preview of what will be performed if the import
+ is performed. Once an import is requested to be performed, this API will give the status of the import and upon
+ completion a post-import summary of what was performed by the operation.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ importContextUuid:
+ description:
+ - >
+ ImportContextUuid path parameter. The unique import context UUID given by a previous and recent call to
+ maps/import/start API.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites ImportMapArchiveImportStatus
+ description: Complete reference of the ImportMapArchiveImportStatus API.
+ link: https://developer.cisco.com/docs/dna-center/#!import-map-archive-import-status
+notes:
+ - SDK Method used are
+ sites.Sites.import_map_archive_import_status,
+
+ - Paths used are
+ get /dna/intent/api/v1/maps/import/{importContextUuid}/status,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Maps Import Status
+ cisco.dnac.maps_import_status_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ importContextUuid: string
+ register: result
+
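+# Illustrative sketch, not part of the generated module docs: poll the import status until it
+# leaves the in-progress state. The registered variable, the retry/delay values, and the exact
+# status string compared against are assumptions; consult the API response for the real values.
+- name: Wait for the map archive import to finish processing (illustrative)
+ cisco.dnac.maps_import_status_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ importContextUuid: string
+ register: import_status
+ until: import_status.dnac_response.status is defined and import_status.dnac_response.status != "IN_PROGRESS"
+ retries: 10
+ delay: 30
+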
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "auditLog": {
+ "children": [
+ {}
+ ],
+ "entitiesCount": [
+ {
+ "key": 0
+ }
+ ],
+ "entityName": "string",
+ "entityType": "string",
+ "errorEntitiesCount": [
+ {
+ "key": 0
+ }
+ ],
+ "errors": [
+ {
+ "message": "string"
+ }
+ ],
+ "infos": [
+ {
+ "message": "string"
+ }
+ ],
+ "matchingEntitiesCount": [
+ {
+ "key": 0
+ }
+ ],
+ "subTasksRootTaskId": "string",
+ "successfullyImportedFloors": [
+ "string"
+ ],
+ "warnings": [
+ {
+ "message": "string"
+ }
+ ]
+ },
+ "status": "string",
+ "uuid": {
+ "leastSignificantBits": 0,
+ "mostSignificantBits": 0
+ }
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/maps_supported_access_points_info.py b/ansible_collections/cisco/dnac/plugins/modules/maps_supported_access_points_info.py
new file mode 100644
index 000000000..75aada233
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/maps_supported_access_points_info.py
@@ -0,0 +1,72 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: maps_supported_access_points_info
+short_description: Information module for Maps Supported Access Points
+description:
+- Get all Maps Supported Access Points.
+- Gets the list of supported access point types as well as valid antenna pattern names that can be used for each.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites MapsSupportedAccessPoints
+ description: Complete reference of the MapsSupportedAccessPoints API.
+ link: https://developer.cisco.com/docs/dna-center/#!maps-supported-access-points
+notes:
+ - SDK Method used are
+ sites.Sites.maps_supported_access_points,
+
+ - Paths used are
+ get /dna/intent/api/v1/maps/supported-access-points,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Maps Supported Access Points
+ cisco.dnac.maps_supported_access_points_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "antennaPatterns": [
+ {
+ "band": "string",
+ "names": [
+ "string"
+ ]
+ }
+ ],
+ "apType": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/netconf_credential.py b/ansible_collections/cisco/dnac/plugins/modules/netconf_credential.py
index d75908aa6..0fe864287 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/netconf_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/netconf_credential.py
@@ -18,29 +18,30 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
comments:
- description: Netconf Credential's comments.
+ description: Comments to identify the netconf credential.
type: str
credentialType:
- description: Netconf Credential's credentialType.
+ description: Credential type to identify the application that uses the netconf credential.
type: str
description:
- description: Netconf Credential's description.
+ description: Description for Netconf Credentials.
type: str
id:
- description: Netconf Credential's id.
+ description: Id of the Netconf Credential in UUID format.
type: str
instanceTenantId:
- description: Netconf Credential's instanceTenantId.
+ description: Deprecated.
type: str
instanceUuid:
- description: Netconf Credential's instanceUuid.
+ description: Deprecated.
type: str
netconfPort:
- description: Netconf Credential's netconfPort.
+ description: Netconf port on the device. Valid port should be in the range of 1
+ to 65535.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateNetconfCredentials
description: Complete reference of the CreateNetconfCredentials API.
@@ -98,7 +99,6 @@ EXAMPLES = r"""
netconfPort: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_compliance_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/network_compliance_workflow_manager.py
new file mode 100644
index 000000000..e696231d3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_compliance_workflow_manager.py
@@ -0,0 +1,1454 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""Ansible module to perform Network Compliance Operations on devices in Cisco Catalyst Center."""
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+__author__ = ("Rugvedi Kapse, Madhan Sankaranarayanan")
+
+DOCUMENTATION = r"""
+---
+module: network_compliance_workflow_manager
+short_description: Network Compliance module for managing network compliance tasks on reachable device(s) in Cisco Catalyst Center.
+description:
+- Perform compliance checks or sync configurations on reachable devices using IP Address(s) or Site.
+- API to perform full compliance checks or specific category checks on reachable device(s).
+- API to sync device configuration on device(s).
+version_added: "6.14.0"
+extends_documentation_fragment:
+ - cisco.dnac.workflow_manager_params
+author:
+- Rugvedi Kapse (@rukapse)
+- Madhan Sankaranarayanan (@madhansansel)
+options:
+ config_verify:
+ description: Set to True to verify the Cisco Catalyst Center config after applying the playbook config.
+ type: bool
+ default: False
+ state:
+ description: State of Cisco Catalyst Center after module completion.
+ type: str
+ choices: [ merged ]
+ default: merged
+ config:
+ description: List of device details for running a compliance check or synchronizing device configuration.
+ type: list
+ elements: dict
+ required: True
+ suboptions:
+ ip_address_list:
+ description: List of IP addresses of devices to run a compliance check on or synchronize device configurations.
+ Either "ip_address_list" or "site_name" is required for module to execute.
+ If both "site_name" and "ip_address_list" are provided, operations are performed on devices that are present in both the
+ "ip_address_list" and the specified site.
+ (e.g. ["204.1.2.2", "204.1.2.5", "204.1.2.4"])
+ type: list
+ elements: str
+ site_name:
+ description: When "site_name" is specified, the module executes the operation on all the devices located within the specified site.
+ This is a string value that should represent the complete hierarchical path of the site.
+ Either "site_name" or "ip_address_list" is required for module to execute.
+ If both "site_name" and "ip_address_list" are provided, operations are performed on devices that are present in both the
+ "ip_address_list" and the specified site.
+ (e.g. "Global/USA/San Francisco/Building_2/floor_1")
+ type: str
+ run_compliance:
+ description: Determines if a full compliance check should be triggered on the devices specified in the "ip_address_list" and/or "site_name".
+ If it is True, compliance will be triggered for all categories.
+ If it is False, compliance will not be triggered even if "run_compliance_categories" are provided.
+ Note - This operation cannot be performed on Access Points (APs) and if APs are provided, they will be skipped.
+ type: bool
+ default: True
+ run_compliance_categories:
+ description: Specifying compliance categories allows you to trigger compliance checks only for the mentioned categories.
+ Category can have one or more values from among the options "INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT", "EOX", "NETWORK_SETTINGS".
+ Category "INTENT" is mapped to compliance types "NETWORK_SETTINGS", "NETWORK_PROFILE", "WORKFLOW", "FABRIC", "APPLICATION_VISIBILITY".
+ If "run_compliance" is False then compliance will be not be triggered even if "run_compliance_categories" are provided.
+ (e.g. ["INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT", "EOX", "NETWORK_SETTINGS"])
+ type: list
+ elements: str
+ sync_device_config:
+ description: Determines whether to synchronize the device configuration on the devices specified in the "ip_address_list" and/or "site_name".
+ Sync device configuration primarily addresses the status of the `RUNNING_CONFIG` compliance type.
+ If set to True and the `RUNNING_CONFIG` status is non-compliant, this operation commits the device running configuration
+ to startup by issuing "write memory" on the device.
+ Note - This operation cannot be performed on Access Points (APs) and if APs are provided, they will be skipped.
+ type: bool
+ default: False
+
+requirements:
+- dnacentersdk == 2.7.0
+- python >= 3.9
+notes:
+ - SDK Methods used are
+ compliance.Compliance.run_compliance
+ compliance.Compliance.commit_device_configuration
+ task.Task.get_task_by_id
+ task.Task.get_task_tree
+ compliance.Compliance.get_compliance_detail
+
+ - Paths used are
+ post /dna/intent/api/v1/compliance/
+ post /dna/intent/api/v1/network-device-config/write-memory
+ get /dna/intent/api/v1/task/{taskId}
+ get /dna/intent/api/v1/task/{taskId}/tree
+ get /dna/intent/api/v1/compliance/detail
+"""
+
+EXAMPLES = r"""
+- name: Run Compliance check on device(s) using IP address list (run_compliance by default is True)
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+
+- name: Run Compliance check on device(s) using IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+ run_compliance: True
+
+- name: Run Compliance check on device(s) using Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+
+- name: Run Compliance check on device(s) using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+
+- name: Run Compliance check with specific categories on device(s) using IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+ run_compliance: True
+ run_compliance_categories: ["INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT"]
+
+- name: Run Compliance check with specific categories on device(s) using Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+ run_compliance_categories: ["INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT"]
+
+- name: Run Compliance check with specific categories on device(s) using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+ run_compliance_categories: ["INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT"]
+
+- name: Sync Device Configuration on device(s) using IP address list
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - site_name: "Global"
+ sync_device_config: True
+ run_compliance: False
+
+- name: Sync Device Configuration on device(s) using Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ sync_device_config: True
+ run_compliance: False
+
+- name: Sync Device Configuration on device(s) using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ sync_device_config: True
+ run_compliance: False
+
+- name: Run Compliance and Sync Device Configuration using both IP address list and Site
+ cisco.dnac.network_compliance_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log_level: "{{dnac_log_level}}"
+ dnac_log: False
+ config:
+ - ip_address_list: ["204.1.2.2", "204.1.2.5", "204.1.2.4"]
+ site_name: "Global/USA/San Francisco/Building_1/floor_1"
+ run_compliance: True
+ run_compliance_categories: ["INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT"]
+ sync_device_config: True
+"""
+
+RETURN = r"""
+#Case_1: Response when the Run Compliance operation is performed successfully on device(s).
+sample_response_1:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "status": "string",
+ "changed": bool,
+ "msg": "string"
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "data": dict,
+ "version": "string"
+ }
+
+#Case_2: Response when the Sync Device Configuration operation is performed successfully on device(s).
+sample_response_2:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "status": "string",
+ "changed": bool,
+ "msg": "string"
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+
+#Case_3: Response when an error occurs while performing the Run Compliance or Sync Device Configuration operation on device(s).
+sample_response_3:
+ description: A dictionary with the response returned by the Cisco Catalyst Center Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "changed": bool,
+ "msg": "string"
+ }
+"""
+
+import time
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
+ DnacBase,
+ validate_list_of_dicts
+)
+
+
+class NetworkCompliance(DnacBase):
+ """Class containing member attributes for network_compliance_workflow_manager module"""
+
+ def __init__(self, module):
+ """
+ Initialize an instance of the class.
+ Parameters:
+ - module: The module associated with the class instance.
+ Returns:
+ The method does not return a value.
+ """
+
+ super().__init__(module)
+
+ def validate_input(self):
+ """
+ Validate the fields provided in the playbook against a predefined specification
+ to ensure they adhere to the expected structure and data types.
+ Parameters:
+ state (optional): A state parameter that can be used to customize validation
+ based on different conditions.
+ Returns:
+ object: An instance of the class with updated attributes:
+ - self.msg: A message describing the validation result.
+ - self.status: The status of the validation (either "success" or "failed").
+ - self.validated_config: If successful, a validated version of the "config" parameter.
+ Description:
+ This method validates the fields provided in the playbook against a predefined specification.
+ It checks if the required fields are present and if their data types match the expected types.
+ If any parameter is found to be invalid, it logs an error message and sets the validation status to "failed".
+ If the validation is successful, it logs a success message and returns an instance of the class
+ with the validated configuration.
+ """
+
+ if not self.config:
+ self.msg = "config not available in playbook for validation"
+ self.status = "success"
+ self.log(self.msg, "ERROR")
+ return self
+
+ temp_spec = {
+ "ip_address_list": {"type": "list", "elements": "str", "required": False},
+ "site_name": {"type": "str", "required": False},
+ "run_compliance": {"type": "bool", "required": False, "default": True},
+ "run_compliance_categories": {"type": "list", "elements": "str", "required": False},
+ "sync_device_config": {"type": "bool", "required": False, "default": False},
+ }
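+
+ # Illustrative example (placeholder values, shown here for documentation only) of one config
+ # entry that satisfies this spec:
+ # {"ip_address_list": ["204.1.2.2"], "site_name": "Global/USA/San Francisco/Building_1/floor_1",
+ # "run_compliance": True, "run_compliance_categories": ["INTENT"], "sync_device_config": False}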
+
+ # Validate device params
+ valid_temp, invalid_params = validate_list_of_dicts(
+ self.config, temp_spec
+ )
+
+ if invalid_params:
+ self.msg = "Invalid parameters in playbook: {0}".format(invalid_params)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ self.validated_config = valid_temp
+
+ self.msg = "Successfully validated playbook configuration parameters using 'validated_input': {0}".format(str(valid_temp))
+ self.log(self.msg, "INFO")
+ self.status = "success"
+
+ return self
+
+ def validate_ip4_address_list(self, ip_address_list):
+ """
+ Validates the list of IPv4 addresses provided in the playbook.
+ Parameters:
+ ip_address_list (list): A list of IPv4 addresses to be validated.
+ Description:
+ This method iterates through each IP address in the list and checks if it is a valid IPv4 address.
+ If any address is found to be invalid, it logs an error message and fails.
+ After validating all IP addresses, it logs a success message.
+ """
+
+ for ip in ip_address_list:
+ if not self.is_valid_ipv4(ip):
+ self.msg = "IP address {0} is not valid".format(ip)
+ self.log(self.msg, "ERROR")
+ self.module.fail_json(self.msg)
+
+ self.log("Successfully validated the IP address/es: {0}".format(ip_address_list), "DEBUG")
+
+ def validate_run_compliance_paramters(self, mgmt_ip_instance_id_map, run_compliance, run_compliance_categories):
+ """
+ Validate and prepare parameters for running compliance checks.
+ Parameters:
+ - mgmt_ip_instance_id_map (dict): A dictionary mapping management IP addresses to device instance IDs.
+ - run_compliance (bool or None): A boolean indicating whether to run compliance checks.
+ - run_compliance_categories (list): A list of compliance categories to check.
+ Returns:
+ tuple: A tuple containing two dictionaries:
+ - run_compliance_params: Parameters for running compliance checks.
+ - compliance_detail_params: Parameters for compliance detail.
+ Notes:
+ - This method prepares parameters for running compliance checks based on the provided inputs.
+ - If invalid categories are provided in `run_compliance_categories`, the module fails with an error.
+ - If `run_compliance_categories` is provided but `run_compliance` is False, a warning is logged and the
+ compliance check is skipped.
+ - If `run_compliance` is set and `run_compliance_categories` is not, full compliance checks are triggered.
+ - If both `run_compliance` and `run_compliance_categories` are set, compliance checks are triggered for specific categories.
+ """
+ # Initializing empty dicts/lists
+ run_compliance_params = {}
+ compliance_detail_params = {}
+ valid_categories = ["INTENT", "RUNNING_CONFIG", "IMAGE", "PSIRT", "EOX", "NETWORK_SETTINGS"]
+
+ if run_compliance_categories:
+ # Validate the categories provided
+ if not all(category.upper() in valid_categories for category in run_compliance_categories):
+ msg = "Invalid category provided. Valid categories are {0}.".format(valid_categories)
+ self.log(msg, "ERROR")
+ self.module.fail_json(msg)
+
+ if run_compliance:
+ # run_compliance_params
+ run_compliance_params["deviceUuids"] = list(mgmt_ip_instance_id_map.values())
+ run_compliance_params["triggerFull"] = False
+ categories_copy = run_compliance_categories.copy()
+ run_compliance_params["categories"] = categories_copy
+
+ # compliance_detail_params
+ compliance_detail_params["deviceUuids"] = ",".join(list(mgmt_ip_instance_id_map.values()))
+ compliance_types = run_compliance_categories
+ if "INTENT" in compliance_types:
+ compliance_types.remove("INTENT")
+ compliance_types.extend(["NETWORK_PROFILE", "APPLICATION_VISIBILITY", "WORKFLOW", "FABRIC", "NETWORK_SETTINGS"])
+ compliance_types = list(set(compliance_types))
+ compliance_detail_params["complianceType"] = "', '".join(compliance_types)
+ compliance_detail_params["complianceType"] = "'" + compliance_detail_params['complianceType'] + "'"
+ # Case when run_compliance_categories provided but run_compliance = False
+ else:
+ msg = "Since run_compliance is set to {0}, even though run_compliance_categories are provided {1}, ".format(
+ run_compliance, run_compliance_categories)
+ msg += "Run Compliance Check will not be executed."
+ self.log(msg, "WARNING")
+
+ elif run_compliance:
+ # run_compliance_params
+ run_compliance_params["deviceUuids"] = list(mgmt_ip_instance_id_map.values())
+ run_compliance_params["triggerFull"] = True
+
+ # compliance_detail_params
+ compliance_detail_params["deviceUuids"] = ",".join(list(mgmt_ip_instance_id_map.values()))
+
+ # Check for devices with Compliance Status of "IN_PROGRESS" and update parameters accordingly
+ if run_compliance_params:
+ device_in_progress = []
+
+ response = self.get_compliance_detail(compliance_detail_params)
+ if not response:
+ msg = (
+ "Error occurred when retrieving Compliance Report to identify if there are "
+ "devices with 'IN_PROGRESS' status. This is required on device(s): {0}"
+ .format(list(mgmt_ip_instance_id_map.keys()))
+ )
+ self.log(msg)
+ self.module.fail_json(msg)
+
+ # Iterate through the response to identify devices with 'IN_PROGRESS' status
+ for device in response:
+ if device["status"] == "IN_PROGRESS":
+ device_in_progress.append(device["deviceUuid"])
+ self.log("Devices currently with a Compliance Status of 'IN_PROGRESS': {0}".format(device_in_progress), "DEBUG")
+
+ if device_in_progress:
+ # Update run_compliance_params to exclude devices with 'IN_PROGRESS' status
+ run_compliance_params["deviceUuids"] = [device_id for device_id in mgmt_ip_instance_id_map.values() if device_id not in device_in_progress]
+ msg = "Excluding 'IN_PROGRESS' devices from compliance check. Updated run_compliance_params: {0}".format(run_compliance_params)
+ self.log(msg, "DEBUG")
+
+ return run_compliance_params, compliance_detail_params
+
+ def site_exists(self, site_name):
+ """
+ Checks the existence of a site in Cisco Catalyst Center.
+ Parameters:
+ site_name (str): The name of the site to be checked.
+ Returns:
+ tuple: A tuple containing two values:
+ - site_exists (bool): Indicates whether the site exists (True) or not (False).
+ - site_id (str or None): The ID of the site if it exists, or None if the site is not found.
+ Description:
+ This method queries Cisco Catalyst Center to determine if a site with the provided name exists.
+ If the site is found, it sets "site_exists" to True and retrieves the site's ID.
+ If the site does not exist, "site_exists" is set to False, and "site_id" is None.
+ If an exception occurs during the site lookup, an error message is logged, and the module fails.
+ """
+
+ site_exists = False
+ site_id = None
+ response = None
+
+ # Attempt to retrieve site information from Catalyst Center
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function="get_site",
+ op_modifies=True,
+ params={"name": site_name},
+ )
+ self.log("Response received post 'get_site' API call: {0}".format(str(response)), "DEBUG")
+
+ # Process the response if available
+ if response:
+ site = response.get("response")
+ site_id = site[0].get("id")
+ site_exists = True
+ else:
+ self.log("No response received from the 'get_site' API call.", "ERROR")
+
+ except Exception as e:
+ # Log an error message and fail if an exception occurs
+ self.log("An error occurred while retrieving site details for Site '{0}' using 'get_site' API call: {1}".format(site_name, str(e)), "ERROR")
+
+ if not site_exists:
+ msg = "An error occurred while retrieving site details for Site '{0}'. Please verify that the site exists.".format(site_name)
+ self.log(msg, "ERROR")
+ self.module.fail_json(msg=msg)
+
+ return (site_exists, site_id)
+
+ def get_device_ids_from_ip(self, ip_address_list):
+ """
+ Retrieves the device IDs based on the provided list of IP addresses from Cisco Catalyst Center.
+ Parameters:
+ ip_address_list (list): A list of IP addresses of devices for which you want to retrieve the device IDs.
+ Returns:
+ dict: A dictionary mapping management IP addresses to their instance UUIDs.
+ Description:
+ This method queries Cisco Catalyst Center for device information using the provided IP addresses.
+ For each IP address in the list, it attempts to fetch the device information using the "get_device_list" API.
+ If the device is found and reachable, it extracts the device ID and maps it to the corresponding IP address.
+ If any error occurs during the process, it logs an error message and continues to the next IP address.
+ """
+
+ mgmt_ip_instance_id_map = {}
+
+ # Iterate through the provided list of IP addresses
+ for device_ip in ip_address_list:
+ try:
+ # Query Cisco Catalyst Center for device information using the IP address
+ response = self.dnac._exec(
+ family="devices",
+ function="get_device_list",
+ op_modifies=True,
+ params={"managementIpAddress": device_ip}
+ )
+ self.log("Response received post 'get_device_list' API call: {0}".format(str(response)), "DEBUG")
+
+ # Check if a valid response is received
+ if response.get("response"):
+ response = response.get("response")
+ if not response:
+ continue
+ for device_info in response:
+ if device_info["reachabilityStatus"] == "Reachable":
+ if device_info["family"] != "Unified AP":
+ device_id = device_info["id"]
+ mgmt_ip_instance_id_map[device_ip] = device_id
+ else:
+ msg = "Skipping device {0} as its family is {1}.".format(device_ip, device_info["family"])
+ self.log(msg, "INFO")
+ else:
+ msg = "Skipping device {0} as its status is {2}.".format(device_ip, device_info["reachabilityStatus"])
+ self.log(msg, "INFO")
+ else:
+ # If unable to retrieve device information, log an error message
+ self.log("Unable to retrieve device information for {0}. Please ensure that the device exists and is reachable.".format(device_ip), "ERROR")
+
+ except Exception as e:
+ # Log an error message if any exception occurs during the process
+ self.log("Error while fetching device ID for device: '{0}' from Cisco Catalyst Center: {1}".format(device_ip, str(e)), "ERROR")
+ if not mgmt_ip_instance_id_map:
+ self.msg = "Reachable devices not found in the IP Address List: {0}".format(ip_address_list)
+ self.update_result("ok", False, self.msg, "INFO")
+ self.module.exit_json(**self.result)
+
+ return mgmt_ip_instance_id_map
+
+ def get_device_ids_from_site(self, site_name, site_id):
+ """
+ Retrieves the management IP addresses and their corresponding instance UUIDs of devices associated with a specific site in Cisco Catalyst Center.
+
+ Parameters:
+ site_name (str): The name of the site whose devices" information is to be retrieved.
+ site_id (str): The unique identifier of the site.
+
+ Returns:
+ dict: A dictionary mapping management IP addresses to their instance UUIDs.
+
+ Description:
+ This method queries Cisco Catalyst Center to fetch the list of devices associated with the provided site.
+ It then extracts the management IP addresses and their instance UUIDs from the response.
+ Devices that are not reachable, or that are Access Points (APs), are skipped and logged.
+ If no reachable devices are found for the specified site, the module exits without performing any operation.
+
+ """
+ mgmt_ip_instance_id_map = {}
+
+ site_params = {
+ "site_id": site_id,
+ }
+
+ # Attempt to retrieve device information associated with the site
+ try:
+ response = self.dnac._exec(
+ family="sites",
+ function="get_membership",
+ op_modifies=True,
+ params=site_params,
+ )
+ self.log("Response received post 'get_membership' API Call: {0} ".format(str(response)), "DEBUG")
+
+ # Process the response if available
+ if response:
+ response = response["device"]
+ # Iterate over the devices in the site membership
+ for item in response:
+ if item["response"]:
+ for item_dict in item["response"]:
+ # Check if the device is reachable
+ if item_dict["reachabilityStatus"] == "Reachable":
+ if item_dict["family"] != "Unified AP":
+ mgmt_ip_instance_id_map[item_dict["managementIpAddress"]] = item_dict["instanceUuid"]
+ else:
+ msg = "Skipping device {0} in site {1} as its family is {2}".format(
+ item_dict["managementIpAddress"], site_name, item_dict["family"])
+ self.log(msg, "INFO")
+ else:
+ msg = "Skipping device {0} in site {1} as its status is {2}".format(
+ item_dict["managementIpAddress"], site_name, item_dict["reachabilityStatus"])
+ self.log(msg, "WARNING")
+ else:
+ # If unable to retrieve device information, log an error message
+ self.log("No response received from API call to get membership information for site. {0}".format(site_name), "ERROR")
+
+ except Exception as e:
+ # Log an error message if any exception occurs during the process
+ self.log("Unable to fetch the device(s) associated to the site '{0}' due to {1}".format(site_name, str(e)), "ERROR")
+
+ if not mgmt_ip_instance_id_map:
+ self.msg = "Reachable devices not found at Site: {0}".format(site_name)
+ self.update_result("ok", False, self.msg, "INFO")
+ self.module.exit_json(**self.result)
+
+ return mgmt_ip_instance_id_map
+
+ def get_device_id_list(self, ip_address_list, site_name):
+ """
+ Get the list of unique device IDs for a specified list of management IP addresses or devices associated with a site
+ in Cisco Catalyst Center.
+ Parameters:
+ ip_address_list (list): The management IP addresses of devices for which you want to retrieve the device IDs.
+ site_name (str): The name of the site for which you want to retrieve the device IDs.
+ Returns:
+ dict: A dictionary mapping management IP addresses to device IDs for the specified devices.
+ Description:
+ This method queries Cisco Catalyst Center to retrieve the unique device IDs associated with devices having the
+ specified IP addresses or belonging to the specified site.
+ """
+
+ # Initialize a dictionary to store management IP addresses and their corresponding device IDs
+ mgmt_ip_instance_id_map = {}
+
+ # Check if both site name and IP address list are provided
+ if site_name:
+ (site_exists, site_id) = self.site_exists(site_name)
+ if site_exists:
+ # Retrieve device IDs associated with devices in the site
+ site_mgmt_ip_instance_id_map = self.get_device_ids_from_site(site_name, site_id)
+ mgmt_ip_instance_id_map.update(site_mgmt_ip_instance_id_map)
+
+ if ip_address_list:
+ # Retrieve device IDs associated with devices having specified IP addresses
+ iplist_mgmt_ip_instance_id_map = self.get_device_ids_from_ip(ip_address_list)
+ mgmt_ip_instance_id_map.update(iplist_mgmt_ip_instance_id_map)
+
+ return mgmt_ip_instance_id_map
+
+ def is_sync_required(self, modified_response, mgmt_ip_instance_id_map):
+ """
+ Determine if synchronization of device configurations is required.
+
+ Args:
+ modified_response (dict): A dictionary containing modified responses for each device.
+ mgmt_ip_instance_id_map (dict): A dictionary mapping management IP addresses to instance IDs.
+
+ Returns:
+ tuple: A tuple containing a boolean indicating whether synchronization is required
+ and a message explaining the result.
+
+ Note:
+ This method categorizes devices based on compliance status ("COMPLIANT", "NON_COMPLIANT", "OTHER")
+ and checks if synchronization is necessary. Synchronization is not required only when all devices are
+ already "COMPLIANT" with the RUNNING_CONFIG compliance type; otherwise it is required.
+ """
+ task_name = "Sync Device Configuration"
+ required = True
+ msg = ""
+
+ # Validate if sync is required
+ self.log("Compliance Report for {0} operation for device(s) {1} : {2}".format(
+ task_name, list(mgmt_ip_instance_id_map.keys()), modified_response), "INFO")
+
+ # Categorize the devices based on status - "COMPLIANT", "NON_COMPLIANT", "OTHER"(status other than COMPLIANT and NON_COMPLIANT)
+ categorized_devices = {"COMPLIANT": {}, "NON_COMPLIANT": {}, "OTHER": {}}
+ for ip_address, compliance_type in modified_response.items():
+ status = compliance_type[0]["status"]
+ if status == "NON_COMPLIANT":
+ categorized_devices["NON_COMPLIANT"][ip_address] = compliance_type
+ elif status == "COMPLIANT":
+ categorized_devices["COMPLIANT"][ip_address] = compliance_type
+ else:
+ categorized_devices["OTHER"][ip_address] = compliance_type
+
+ self.log("Devices Categorized based on Compliance status: {0}".format(categorized_devices), "INFO")
+
+ # Validate if all devices are "COMPLIANT" - then sync not required
+ if len(categorized_devices["COMPLIANT"]) == len(mgmt_ip_instance_id_map):
+ msg = "Device(s) {0} are already compliant with the RUNNING_CONFIG compliance type. Therefore, {1} is not required.".format(
+ list(mgmt_ip_instance_id_map.keys()), task_name)
+ required = False
+
+ return required, msg, categorized_devices
+
+ def get_want(self, config):
+ """
+ Determines the desired state based on the provided configuration.
+ Parameters:
+ config (dict): The configuration specifying the desired state.
+ Returns:
+ dict: A dictionary containing the desired state parameters.
+ Description:
+ This method processes the provided configuration to determine the desired state. It validates the presence of
+ either "ip_address_list" or "site_name" and constructs parameters for running compliance checks and syncing
+ device configurations based on the provided configuration. It also logs the desired state for reference.
+ """
+
+ # Initialize parameters
+ run_compliance_params = {}
+ sync_device_config_params = {}
+ compliance_detail_params = {}
+ compliance_detail_params_sync = {}
+ compliance_details = {}
+
+ # Store input parameters
+ ip_address_list = config.get("ip_address_list")
+ site_name = config.get("site_name")
+ run_compliance = config.get("run_compliance")
+ run_compliance_categories = config.get("run_compliance_categories")
+ sync_device_config = config.get("sync_device_config")
+
+ # Validate either ip_address_list OR site_name is present
+ if not any([ip_address_list, site_name]):
+ msg = "ip_address_list is {0} and site_name is {1}. Either the ip_address_list or the site_name must be provided.".format(
+ ip_address_list, site_name)
+ self.log(msg, "ERROR")
+ self.module.fail_json(msg=msg)
+
+ # Validate if a network compliance operation is present
+ if not any([run_compliance, run_compliance_categories, sync_device_config]):
+ msg = "No actions were requested. This network compliance module can perform the following tasks: Run Compliance Check or Sync Device Config."
+ self.log(msg, "ERROR")
+ self.module.fail_json(msg)
+ return self
+
+ # Validate valid ip_addresses
+ if ip_address_list:
+ self.validate_ip4_address_list(ip_address_list)
+ # Remove Duplicates from list
+ ip_address_list = list(set(ip_address_list))
+
+ # Retrieve device ID list
+ mgmt_ip_instance_id_map = self.get_device_id_list(ip_address_list, site_name)
+ if not mgmt_ip_instance_id_map:
+ # Log an error message if mgmt_ip_instance_id_map is empty
+ msg = ("No device UUIDs were fetched for network compliance operations with the provided IP addresses: {0} "
+ "or site name: {1}. This could be due to Unreachable devices or access points (APs).").format(ip_address_list, site_name)
+ self.log(msg, "ERROR")
+ self.module.fail_json(msg)
+
+ # Run Compliance Parameters
+ run_compliance_params, compliance_detail_params = self.validate_run_compliance_paramters(
+ mgmt_ip_instance_id_map, run_compliance, run_compliance_categories)
+
+ # Sync Device Configuration Parameters
+ if sync_device_config:
+ sync_device_config_params = {
+ "deviceId": list(mgmt_ip_instance_id_map.values())
+ }
+
+ compliance_detail_params_sync = {
+ "deviceUuid": ",".join(list(mgmt_ip_instance_id_map.values())),
+ "complianceType": "RUNNING_CONFIG"
+ }
+
+ # Validate if Sync Device Configuration is required on the device(s)
+ response = self.get_compliance_detail(compliance_detail_params_sync)
+ if not response:
+ msg = "Error occurred when retrieving Compliance Report to identify if Sync Device Config Operation "
+ msg += "is required on device(s): {0}".format(list(mgmt_ip_instance_id_map.keys()))
+ self.log(msg)
+ self.module.fail_json(msg)
+
+ compliance_details = self.modify_compliance_response(response, mgmt_ip_instance_id_map)
+ required, self.msg, categorized_devices = self.is_sync_required(compliance_details, mgmt_ip_instance_id_map)
+ self.log("Is Sync Requied: {0} {1}".format(required, self.msg), "DEBUG")
+ if not required:
+ self.update_result("ok", False, self.msg, "INFO")
+ self.module.exit_json(**self.result)
+
+ # Get the device IDs of devices in the "OTHER" category and "COMPLIANT" category
+ other_device_ids = categorized_devices.get("OTHER", {}).keys()
+ compliant_device_ids = categorized_devices.get("COMPLIANT", {}).keys()
+ excluded_device_ids = set(other_device_ids) | set(compliant_device_ids)
+
+ if excluded_device_ids:
+ # Exclude devices in the "OTHER" category from sync_device_config_params
+ sync_device_config_params["deviceId"] = [device_id for device_id in mgmt_ip_instance_id_map.values() if device_id not in excluded_device_ids]
+ msg = "Skipping these devices because their compliance status is not 'NON_COMPLIANT': {0}".format(excluded_device_ids)
+ self.log(msg, "WARNING")
+
+ # Construct the "want" dictionary containing the desired state parameters
+ want = {}
+ want = dict(
+ ip_address_list=ip_address_list,
+ site_name=site_name,
+ mgmt_ip_instance_id_map=mgmt_ip_instance_id_map,
+ run_compliance_params=run_compliance_params,
+ sync_device_config_params=sync_device_config_params,
+ compliance_detail_params=compliance_detail_params,
+ compliance_detail_params_sync=compliance_detail_params_sync,
+ compliance_details=compliance_details
+ )
+ self.want = want
+ self.log("Desired State (want): {0}".format(str(self.want)), "INFO")
+
+ return self
+
+ def get_compliance_detail(self, compliance_detail_params):
+ """
+ Execute the GET compliance detail operation.
+ Args:
+ compliance_detail_params (dict): A dictionary containing parameters for the compliance detail operation.
+ Returns:
+ dict: A dictionary containing details of the compliance detail response.
+ Returns None if there is an error.
+ """
+ # Execute the GET compliance detail operation
+ try:
+ response = self.dnac_apply["exec"](
+ family="compliance",
+ function="get_compliance_detail",
+ params=compliance_detail_params,
+ op_modifies=True
+ )
+ self.log("Response received post 'get_compliance_detail' API call: {0}".format(str(response)), "DEBUG")
+
+ if response:
+ response = response.response
+ else:
+ self.log("No response received from the 'get_compliance_detail' API call.", "ERROR")
+ return response
+
+ # Log and handle any exceptions that occur during the execution
+ except Exception as e:
+ self.msg = "An error occurred while retrieving Compliance Details using 'get_compliance_detail' API call "
+ self.msg += "for {0}: {1}".format(compliance_detail_params, str(e))
+ self.update_result("failed", False, self.msg, "ERROR")
+ self.check_return_status()
+
+ def modify_compliance_response(self, response, mgmt_ip_instance_id_map):
+ """
+ Modifies the compliance response by mapping device UUIDs to management IP addresses.
+ Parameters:
+ response (list of dict): The original compliance response.
+ mgmt_ip_instance_id_map (dict): Mapping of management IP addresses to instance IDs.
+ Returns:
+ dict: Modified compliance response with management IP addresses as keys.
+ Description:
+ This method takes the original compliance response and maps device UUIDs to their corresponding management
+ IP addresses using the provided mapping. It then constructs a modified response where each IP address is a key
+ associated with a list of compliance items related to that device.
+ """
+ modified_response = {}
+ ip_address = None
+
+ for item in response:
+ device_uuid = item.get("deviceUuid")
+
+ # Find the corresponding management IP address for the device UUID
+ for ip, uuid in mgmt_ip_instance_id_map.items():
+ if uuid == device_uuid:
+ ip_address = ip
+ break
+
+ # If the IP address is found, add the item to the modified response
+ # If ip_address and item.get("status")!= "NOT_APPLICABLE":
+ if ip_address:
+ if ip_address not in modified_response:
+ modified_response[ip_address] = []
+ modified_response[ip_address].append(item)
+
+ return modified_response
+
+ def run_compliance(self, run_compliance_params):
+ """
+ Executes a compliance check operation in Cisco DNA Center.
+ Parameters:
+ run_compliance_params (dict): Parameters for running the compliance check.
+ Returns:
+ str or None: Task ID of the API task created, or None if unsuccessful.
+ Description:
+ This method initiates a compliance check operation in Cisco DNA Center by calling the "run_compliance" function
+ from the "compliance" family of APIs. It passes the provided parameters and updates the result accordingly.
+ """
+
+ # Execute the compliance check operation
+ try:
+ response = self.dnac_apply["exec"](
+ family="compliance",
+ function="run_compliance",
+ params=run_compliance_params,
+ op_modifies=True,
+ )
+ self.log("Response received post 'run_compliancee' API call is {0}".format(str(response)), "DEBUG")
+
+ if response:
+ self.result.update(dict(response=response["response"]))
+ self.log("Task Id for the 'run_compliance' task is {0}".format(response.response.get("taskId")), "INFO")
+ return response.response.get("taskId")
+ else:
+ self.log("No response received from the 'run_compliance' API call.", "ERROR")
+ return None
+
+ # Log and handle any exceptions that occur during the execution
+ except Exception as e:
+ self.msg = "An error occurred while executing the 'run_compliance' operation for {0}: {1}".format(run_compliance_params, str(e))
+ self.update_result("failed", False, self.msg, "ERROR")
+ self.check_return_status()
+
+ def sync_device_config(self, sync_device_config_params):
+ """
+ Synchronize the device configuration using the specified parameters.
+ Parameters:
+ - sync_device_config_params (dict): Parameters for synchronizing the device configuration.
+ Returns:
+ task_id (str): The ID of the task created for the synchronization operation.
+ Note:
+ This method initiates the synchronization of device configurations by making an API call to the Cisco DNA Center.
+ It logs the response received from the API call and extracts the task ID from the response for further monitoring.
+ If an error occurs during the API call, it will be caught and logged.
+ """
+ # Make an API call to synchronize device configuration
+ try:
+ response = self.dnac_apply["exec"](
+ family="compliance",
+ function="commit_device_configuration",
+ params=sync_device_config_params,
+ op_modifies=True,
+ )
+ self.log("Response received post 'commit_device_configuration' API call is {0}".format(str(response)), "DEBUG")
+
+ if response:
+ self.result.update(dict(response=response["response"]))
+ self.log("Task Id for the 'commit_device_configuration' task is {0}".format(response.response.get("taskId")), "INFO")
+ # Return the task ID
+ return response.response.get("taskId")
+ else:
+ self.log("No response received from the 'commit_device_configuration' API call.", "ERROR")
+ return None
+
+ # Log the error if an exception occurs during the API call
+ except Exception as e:
+ self.msg = "Error occurred while synchronizing device configuration for {0}: {1}".format(sync_device_config_params, str(e))
+ self.update_result("failed", False, self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_task_status(self, task_id, task_name):
+ """
+ Retrieve the status of a task by its ID.
+ Parameters:
+ - task_id (str): The ID of the task whose status is to be retrieved.
+ - task_name (str): The name of the task.
+ Returns:
+ response (dict): The response containing the status of the task.
+ Note:
+ This method makes an API call to retrieve the task status and logs the status information.
+ If an error occurs during the API call, it will be caught and logged.
+ """
+
+ # Make an API call to retrieve the task tree
+ try:
+ response = self.dnac_apply["exec"](
+ family="task",
+ function="get_task_by_id",
+ params=dict(task_id=task_id),
+ op_modifies=True,
+ )
+ self.log("Response received post 'get_task_by_id' API Call for the Task {0} with Task id {1} "
+ "is {2}".format(task_name, str(task_id), str(response)), "DEBUG")
+
+ if response:
+ response = response.response
+ else:
+ self.log("No response received from the 'get_task_by_id' API call.", "CRITICAL")
+ return response
+
+ # Log the error if an exception occurs during the API call
+ except Exception as e:
+ self.msg = "Error occurred while retrieving 'get_task_by_id' for Task {0} with Task id {1}: {2}".format(task_name, task_id, str(e))
+ self.update_result("failed", False, self.msg, "ERROR")
+ self.check_return_status()
+
+ def get_task_tree(self, task_id, task_name):
+ """
+ Retrieve the tree of a task by its ID.
+ Parameters:
+ - task_id (str): The ID of the task whose status is to be retrieved.
+ - task_name (str): The name of the task.
+ Returns:
+ response (dict): The response containing the status of the task.
+ Note:
+ This method makes an API call to retrieve the task status and logs the status information.
+ If an error occurs during the API call, it will be caught and logged.
+ """
+
+ # Make an API call to retrieve the task status
+ try:
+ response = self.dnac_apply["exec"](
+ family="task",
+ function="get_task_tree",
+ params=dict(task_id=task_id),
+ op_modifies=True,
+ )
+ self.log("Response received post 'get_task_tree' API call for the Task {0} with Task id {1} "
+ "is {2}".format(task_name, str(task_id), str(response)), "DEBUG")
+ if response:
+ response = response.response
+ else:
+ self.log("No response received from the 'get_task_tree' API call.", "CRITICAL")
+ return response
+
+ # Log the error if an exception occurs during the API call
+ except Exception as e:
+ self.msg = "Error occurred while retrieving 'get_task_tree' for Task {0} with task id {1}: {2}".format(task_name, task_id, str(e))
+ self.update_result("failed", False, self.msg, "ERROR")
+ self.check_return_status()
+
+ def update_result(self, status, changed, msg, log_level, data=None):
+ """
+ Update the result of the operation with the provided status, message, and log level.
+ Parameters:
+ - status (str): The status of the operation ("success" or "failed").
+ - changed (bool): Indicates whether the operation caused changes.
+ - msg (str): The message describing the result of the operation.
+ - log_level (str): The log level at which the message should be logged ("INFO", "ERROR", "CRITICAL", etc.).
+ - data (dict, optional): Additional data related to the operation result.
+ Returns:
+ self (object): An instance of the class.
+ Note:
+ - If the status is "failed", the "failed" key in the result dictionary will be set to True.
+ - If data is provided, it will be included in the result dictionary.
+ """
+
+ # Update the result attributes with the provided values
+ self.status = status
+ self.result["status"] = status
+ self.result["msg"] = msg
+ self.result["changed"] = changed
+
+ # Log the message at the specified log level
+ self.log(msg, log_level)
+
+ # If the status is "failed", set the "failed" key to True
+ if status == "failed":
+ self.result["failed"] = True
+
+ # If additional data is provided, include it in the result dictionary
+ if data:
+ self.result["data"] = data
+
+ return self
+
+ def exit_while_loop(self, start_time, task_id, task_name, response):
+ """
+ Check if the elapsed time exceeds the specified timeout period and exit the while loop if it does.
+ Parameters:
+ - start_time (float): The time when the while loop started.
+ - task_id (str): ID of the task being monitored.
+ - task_name (str): Name of the task being monitored.
+ - response (dict): Response received from the task status check.
+ Returns:
+ bool: True if the elapsed time exceeds the timeout period, False otherwise.
+ """
+
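+ # The timeout used below is fixed at 360 seconds; once exceeded, the task is reported as failed
+ # and the calling loop breaks out.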
+ # If the elapsed time exceeds the timeout period
+ if time.time() - start_time > 360:
+ if response.get("data"):
+ # If there is data in the response, include it in the error message
+ self.msg = "Task {0} with task id {1} has not completed within the timeout period. Task Status: {2} ".format(
+ task_name, task_id, response.get("data"))
+ else:
+ # If there is no data in the response, generate a generic error message
+ self.msg = "Task {0} with task id {1} has not completed within the timeout period.".format(
+ task_name, task_id)
+
+ # Update the result with failure status and log the error message
+ self.update_result("failed", False, self.msg, "ERROR")
+ return True
+
+ return False
+
+ def handle_error(self, task_name, mgmt_ip_instance_id_map, failure_reason=None):
+ """
+ Handle error encountered during task execution.
+ Parameters:
+ - task_name (str): Name of the task being performed.
+ - mgmt_ip_instance_id_map (dict): Mapping of management IP addresses to instance IDs.
+ - failure_reason (str, optional): Reason for the failure, if available.
+ Returns:
+ self (object): An instance of the class used for interacting with Cisco Catalyst Center.
+ """
+
+ # If failure reason is provided, include it in the error message
+ if failure_reason:
+ self.msg = "An error occurred while performing {0} on device(s): {1}. The operation failed due to the following reason: {2}".format(
+ task_name, list(mgmt_ip_instance_id_map.keys()), failure_reason)
+ # If no failure reason is provided, generate a generic error message
+ else:
+ self.msg = "An error occurred while performing {0} on device(s): {1}".format(
+ task_name, list(mgmt_ip_instance_id_map.keys()))
+
+ # Update the result with failure status and log the error message
+ self.update_result("failed", False, self.msg, "ERROR")
+
+ return self
+
+ def get_compliance_task_status(self, task_id, mgmt_ip_instance_id_map):
+ """
+ This function retrieves the status of compliance check tasks in Cisco Catalyst Center.
+ Parameters:
+ - task_id: The ID of the compliance check task.
+ - mgmt_ip_instance_id_map: A mapping of management IP addresses to instance IDs.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This function continuously checks the status of a compliance check task until completion.
+ It handles various scenarios such as task completion, task failure, or errors during execution.
+ Upon successful completion, it logs the modified compliance response and updates the result accordingly.
+ """
+
+ task_name = "Run Compliance Check"
+ start_time = time.time()
+
+ while True:
+ response = self.get_task_status(task_id, task_name)
+
+ # Check if response returned
+ if not response:
+ self.msg = "Error retrieving Task status for {0} with Task Id: {1}".format(task_name, task_id)
+ self.update_result("failed", False, self.msg, "ERROR")
+ break
+
+ # Check if the elapsed time exceeds the timeout
+ if self.exit_while_loop(start_time, task_id, task_name, response):
+ break
+
+ # Handle error if task execution encounters an error
+ if response.get("isError"):
+ failure_reason = response.get("failureReason")
+ self.handle_error(task_name, mgmt_ip_instance_id_map, failure_reason)
+ break
+
+ # Check if task completed successfully
+ elif not response.get("isError") and "success" in response.get("progress").lower():
+ # Task completed successfully
+ self.msg = "{0} has completed successfully on device(s): {1}".format(task_name, list(mgmt_ip_instance_id_map.keys()))
+
+ # Retrieve and modify compliance check details
+ response = self.get_compliance_detail(self.want.get("compliance_detail_params"))
+ if not response:
+                    self.msg = "Error occurred while retrieving the Compliance Report after {0} with Task Id {1} for device(s) {2}".format(
+                        task_name, task_id, list(mgmt_ip_instance_id_map.keys()))
+ self.update_result("failed", False, self.msg, "ERROR")
+ break
+
+ modified_response = self.modify_compliance_response(response, mgmt_ip_instance_id_map)
+ self.log("Compliance Report for {0} operation for device(s) {1} : {2}".format(
+ task_name, list(mgmt_ip_instance_id_map.keys()), modified_response), "INFO")
+
+ # Update result with modified response
+ self.update_result("success", True, self.msg, "INFO", modified_response)
+ break
+
+ # Check if task failed
+ elif "failed" in response.get("progress").lower():
+ self.msg = "Failed to {0} on the following device(s): {1}".format(task_name, list(mgmt_ip_instance_id_map.keys()))
+ self.update_result("failed", False, self.msg, "CRITICAL")
+ break
+
+ return self
+
+ def get_sync_config_task_status(self, task_id, mgmt_ip_instance_id_map):
+ """
+ This function manages the status of device configuration synchronization tasks in Cisco Catalyst Center.
+ Parameters:
+ - task_id: ID of the synchronization task
+ - mgmt_ip_instance_id_map: Mapping of management IP addresses to instance IDs
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ It validates if synchronization is required, categorizes devices based on compliance status, and checks task completion status.
+ If all devices are already compliant, it logs a success message. If some devices have unexpected statuses, it logs an error.
+ It continuously checks the task status until completion, updating the result accordingly.
+ """
+
+ task_name = "Sync Device Configuration"
+ start_time = time.time()
+
+ while True:
+ success_devices = []
+ failed_devices = []
+
+ response = self.get_task_tree(task_id, task_name)
+
+ # Check if response returned
+ if not response:
+                self.msg = "Error retrieving the Task Tree for {0} with Task Id: {1}".format(task_name, task_id)
+ self.update_result("failed", False, self.msg, "ERROR")
+ break
+
+ # Check if the elapsed time exceeds the timeout
+ if self.exit_while_loop(start_time, task_id, task_name, response):
+ break
+
+ # Handle error if task execution encounters an error
+ if response[0].get("isError"):
+                failure_reason = response[0].get("failureReason")
+ self.handle_error(task_name, mgmt_ip_instance_id_map, failure_reason)
+ break
+
+ for item in response[1:]:
+ progress = item["progress"]
+ for ip, device_id in mgmt_ip_instance_id_map.items():
+ if device_id in progress and "copy_Running_To_Startup=Success" in progress:
+ success_devices.append(ip)
+ elif device_id in progress and "copy_Running_To_Startup=Failed" in progress:
+ failed_devices.append(ip)
+
+ success_devices = set(success_devices)
+ failed_devices = set(failed_devices)
+
+            # Check the completion status and update the result accordingly
+            if len(success_devices) == len(mgmt_ip_instance_id_map):
+ self.msg = "{0} has completed successfully on device(s): {1}".format(task_name, success_devices)
+ self.update_result("success", True, self.msg, "INFO")
+ break
+ elif (failed_devices and
+ len(success_devices) < len(mgmt_ip_instance_id_map) and
+ len(failed_devices) + len(success_devices) == len(mgmt_ip_instance_id_map)):
+ self.msg = "{0} task has failed on device(s): {1} and succeeded on device(s): {2}".format(
+ task_name, failed_devices, success_devices)
+ self.update_result("failed", True, self.msg, "CRITICAL")
+ break
+ elif len(failed_devices) == len(mgmt_ip_instance_id_map):
+ self.msg = "{0} task has failed on device(s): {1}".format(task_name, failed_devices)
+ self.update_result("failed", False, self.msg, "CRITICAL")
+ break
+
+ return self
+
+ def get_diff_merged(self):
+ """
+        This method performs network compliance actions in Cisco Catalyst Center.
+ Parameters: None
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+        This method orchestrates the compliance check and device configuration synchronization tasks specified in the playbook.
+ It ensures all required tasks are present, executes them, and checks their status, facilitating smooth playbook execution.
+ """
+
+ # Action map for different network compliance operations
+ action_map = {
+ "run_compliance_params": (self.run_compliance, self.get_compliance_task_status),
+ "sync_device_config_params": (self.sync_device_config, self.get_sync_config_task_status)
+ }
+
+ # Iterate through the action map and execute specified actions
+ for action_param, (action_func, status_func) in action_map.items():
+
+ # Execute the action and check its status
+ if self.want.get(action_param):
+ result_task_id = action_func(self.want.get(action_param))
+ self.log("Performing {0}".format(action_func.__name__), "DEBUG")
+ if not result_task_id:
+ self.msg = "An error occurred while retrieving the task_id of the {0} operation.".format(action_func.__name__)
+ self.update_result("failed", False, self.msg, "CRITICAL")
+ else:
+ status_func(result_task_id, self.want.get("mgmt_ip_instance_id_map")).check_return_status()
+
+ return self
+
+ def verify_diff_merged(self, config):
+ """
+ Verify the success of the "Sync Device Configuration" operation.
+ Parameters:
+ config (dict): A dictionary containing the configuration details.
+ Returns:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Description:
+ This method verifies the success of the "Sync Device Configuration" operation in the context of network compliance management.
+ It checks if the configuration includes the option to synchronize device configurations (`sync_device_config`).
+ If this option is present, the function proceeds to compare compliance details before and after executing the synchronization operation.
+ It logs relevant information at each step and concludes by determining whether the synchronization was successful.
+ """
+ if config.get("sync_device_config"):
+ # Get compliance details before running sync_device_config
+ compliance_details_before = self.want.get("compliance_details")
+ self.log("Compliance details before running sync_device_config: {0}".format(compliance_details_before), "INFO")
+
+ # Get compliance details after running sync_device_config
+ response = self.get_compliance_detail(self.want.get("compliance_detail_params_sync"))
+ if not response:
+                self.msg = "Error occurred while retrieving Compliance Details after Sync Device Configuration for verification."
+                self.update_result("failed", False, self.msg, "ERROR")
+ self.check_return_status()
+
+ compliance_details_after = self.modify_compliance_response(response, self.want.get("mgmt_ip_instance_id_map"))
+ self.log("Compliance details after running sync_device_config: {0}.".format(compliance_details_after), "INFO")
+
+ all_statuses_before = []
+ all_statuses_after = []
+ for ip_address, compliance_type in compliance_details_before.items():
+ status = compliance_type[0]["status"]
+ all_statuses_before.append(status)
+
+ if len(set(all_statuses_before)) == 1 and all_statuses_before[0] == "NON_COMPLIANT":
+ for ip_address, compliance_type in compliance_details_after.items():
+ status = compliance_type[0]["status"]
+ all_statuses_after.append(status)
+ if len(set(all_statuses_after)) == 1 and all_statuses_after[0] == "COMPLIANT":
+                    self.log("Verified the success of the Sync Device Configuration operation.", "INFO")
+ else:
+ self.log(
+ "Sync Device Configuration operation may have been unsuccessful "
+ "since not all devices have 'COMPLIANT' status after the operation.",
+ "WARNING"
+ )
+ else:
+ self.log("Sync_device_config may not have been performed since devices have status other than 'NON_COMPLIANT'.", "WARNING")
+ else:
+ self.log("Verification of configuration is not required.", "INFO")
+ return self
+
+
+def main():
+ """
+ main entry point for module execution
+ """
+
+    # Define the specification for the module's arguments
+ element_spec = {"dnac_host": {"required": True, "type": "str"},
+ "dnac_port": {"type": "str", "default": "443"},
+ "dnac_username": {"type": "str", "default": "admin", "aliases": ["user"]},
+ "dnac_password": {"type": "str", "no_log": True},
+ "dnac_verify": {"type": "bool", "default": "True"},
+ "dnac_version": {"type": "str", "default": "2.2.3.3"},
+ "dnac_debug": {"type": "bool", "default": False},
+ "dnac_log_level": {"type": "str", "default": "WARNING"},
+ "dnac_log_file_path": {"type": "str", "default": "dnac.log"},
+ "dnac_log_append": {"type": "bool", "default": True},
+ "dnac_log": {"type": "bool", "default": False},
+ "validate_response_schema": {"type": "bool", "default": True},
+ "config_verify": {"type": "bool", "default": False},
+ "dnac_api_task_timeout": {"type": "int", "default": 1200},
+ "dnac_task_poll_interval": {"type": "int", "default": 2},
+ "config": {"required": True, "type": "list", "elements": "dict"},
+ "state": {"default": "merged", "choices": ["merged"]}
+ }
+
+ # Initialize the Ansible module with the provided argument specifications
+ module = AnsibleModule(argument_spec=element_spec,
+ supports_check_mode=False)
+
+ # Initialize the NetworkCompliance object with the module
+ ccc_network_compliance = NetworkCompliance(module)
+
+ # Get the state parameter from the provided parameters
+ state = ccc_network_compliance.params.get("state")
+
+ # Check if the state is valid
+ if state not in ccc_network_compliance.supported_states:
+ ccc_network_compliance.status = "invalid"
+ ccc_network_compliance.msg = "State {0} is invalid".format(state)
+ ccc_network_compliance.check_return_status()
+
+ # Validate the input parameters and check the return status
+ ccc_network_compliance.validate_input().check_return_status()
+
+ # Get the config_verify parameter from the provided parameters
+ config_verify = ccc_network_compliance.params.get("config_verify")
+
+ # Iterate over the validated configuration parameters
+ for config in ccc_network_compliance.validated_config:
+ ccc_network_compliance.get_want(config).check_return_status()
+ ccc_network_compliance.get_diff_state_apply[state]().check_return_status()
+ if config_verify:
+ ccc_network_compliance.verify_diff_state_apply[state](config).check_return_status()
+
+ # Exit with the result obtained from the NetworkCompliance object
+ module.exit_json(**ccc_network_compliance.result)
+
+
+if __name__ == "__main__":
+ main()
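
The task-status helpers above (get_compliance_task_status and get_sync_config_task_status) share one bounded-polling pattern: query the task in a loop, stop on an error or on a success/failure marker in the progress string, and abort once the fixed timeout in exit_while_loop elapses. The stand-alone sketch below illustrates that pattern outside the module; fetch_task_status is a hypothetical stand-in for the module's get_task_status call, and the 360-second limit mirrors the hard-coded value in exit_while_loop.

    import time

    def fetch_task_status(task_id):
        # Hypothetical stand-in for the module's get_task_status();
        # a real caller would query the Catalyst Center task API here.
        return {"isError": False, "progress": "Task completed successfully"}

    def poll_task(task_id, timeout=360, interval=2):
        """Poll a task until it succeeds, fails, or the timeout elapses."""
        start_time = time.time()
        while True:
            response = fetch_task_status(task_id)
            if not response:
                return "failed", "No status returned for task {0}".format(task_id)
            if time.time() - start_time > timeout:
                return "failed", "Task {0} did not complete within {1}s".format(task_id, timeout)
            if response.get("isError"):
                return "failed", response.get("failureReason", "Unknown error")
            progress = (response.get("progress") or "").lower()
            if "success" in progress:
                return "success", progress
            if "failed" in progress:
                return "failed", progress
            time.sleep(interval)

    print(poll_task("task-123"))

Keeping the timeout and poll interval as parameters, as dnac_api_task_timeout and dnac_task_poll_interval already are in the argument spec, rather than hard-coding 360 seconds would let playbooks tune long-running compliance runs.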
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_create.py b/ansible_collections/cisco/dnac/plugins/modules/network_create.py
index 8bf553aba..4862b91b8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_create.py
@@ -78,7 +78,7 @@ options:
type: str
port:
description: Port for NetFlow Collector (eg; 443).
- type: int
+ type: float
type: dict
network_aaa:
description: Network Create's network_aaa.
@@ -134,8 +134,8 @@ options:
the network settings.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings CreateNetwork
description: Complete reference of the CreateNetwork API.
@@ -199,7 +199,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device.py b/ansible_collections/cisco/dnac/plugins/modules/network_device.py
index 2fca16035..b0355e10c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device.py
@@ -12,7 +12,9 @@ description:
- Manage operations create, update and delete of the resource Network Device.
- Adds the device with given credential.
- Deletes the network device for the given Id.
-- Sync the devices provided as input.
+- >
+ Update the credentials, management IP address of a given device or a set of devices in Catalyst Center and trigger
+ an inventory sync.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -23,102 +25,106 @@ options:
type: bool
version_added: 4.0.0
cliTransport:
- description: Network Device's cliTransport.
+ description: CLI transport. Supported values telnet, ssh2.
type: str
computeDevice:
- description: ComputeDevice flag.
+ description: Compute Device or not. Options are TRUE / FALSE.
type: bool
enablePassword:
- description: Network Device's enablePassword.
+ description: CLI enable password of the device.
type: str
extendedDiscoveryInfo:
- description: Network Device's extendedDiscoveryInfo.
+ description: This field holds that info as whether to add device with canned data
+ or not. Supported values DISCOVER_WITH_CANNED_DATA.
type: str
httpPassword:
- description: Network Device's httpPassword.
+ description: HTTP password of the device.
type: str
httpPort:
- description: Network Device's httpPort.
+ description: HTTP port of the device.
type: str
httpSecure:
- description: HttpSecure flag.
+ description: Flag to select HTTP / HTTPS protocol. Options are TRUE / FALSE. TRUE
+ for HTTPS and FALSE for HTTP.
type: bool
httpUserName:
- description: Network Device's httpUserName.
+ description: HTTP Username of the device.
type: str
id:
description: Id path parameter. Device ID.
type: str
ipAddress:
- description: Network Device's ipAddress.
+ description: IP Address of the device.
elements: str
type: list
merakiOrgId:
- description: Network Device's merakiOrgId.
+ description: Selected meraki organization for which the devices needs to be imported.
elements: str
type: list
netconfPort:
- description: Network Device's netconfPort.
+ description: Netconf Port of the device.
type: str
password:
- description: Network Device's password.
+ description: CLI Password of the device.
type: str
serialNumber:
- description: Network Device's serialNumber.
+ description: Serial Number of the Device.
type: str
snmpAuthPassphrase:
- description: Network Device's snmpAuthPassphrase.
+ description: SNMPV3 auth passphrase of the device.
type: str
snmpAuthProtocol:
- description: Network Device's snmpAuthProtocol.
+ description: SNMPV3 auth protocol. Supported values sha, md5.
type: str
snmpMode:
- description: Network Device's snmpMode.
+ description: SNMPV3 mode. Supported values noAuthnoPriv, authNoPriv, authPriv.
type: str
snmpPrivPassphrase:
- description: Network Device's snmpPrivPassphrase.
+ description: SNMPV3 priv passphrase.
type: str
snmpPrivProtocol:
- description: Network Device's snmpPrivProtocol.
+ description: SNMPV3 priv protocol. Supported values AES128.
type: str
snmpROCommunity:
- description: Network Device's snmpROCommunity.
+ description: SNMP Read Community of the device.
type: str
snmpRWCommunity:
- description: Network Device's snmpRWCommunity.
+ description: SNMP Write Community of the device.
type: str
snmpRetry:
- description: Network Device's snmpRetry.
+ description: SNMP retry count. Max value supported is 3. Default is Global SNMP
+ retry (if exists) or 3.
type: int
snmpTimeout:
- description: Network Device's snmpTimeout.
+ description: SNMP timeout in seconds. Max value supported is 300. Default is Global
+ SNMP timeout (if exists) or 5.
type: int
snmpUserName:
- description: Network Device's snmpUserName.
+ description: SNMPV3 user name of the device.
type: str
snmpVersion:
- description: Network Device's snmpVersion.
+ description: SNMP version. Values supported v2, v3. Default is v2.
type: str
type:
- description: Network Device's type.
+ description: Type of device being added.
type: str
updateMgmtIPaddressList:
description: Network Device's updateMgmtIPaddressList.
elements: dict
suboptions:
existMgmtIpAddress:
- description: Network Device's existMgmtIpAddress.
+ description: ExistMgmtIpAddress IP Address of the device.
type: str
newMgmtIpAddress:
- description: Network Device's newMgmtIpAddress.
+ description: New IP Address to be Updated.
type: str
type: list
userName:
- description: Network Device's userName.
+ description: CLI user name of the device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices AddDevice2
description: Complete reference of the AddDevice2 API.
@@ -126,9 +132,9 @@ seealso:
- name: Cisco DNA Center documentation for Devices DeleteDeviceById
description: Complete reference of the DeleteDeviceById API.
link: https://developer.cisco.com/docs/dna-center/#!delete-device-by-id
-- name: Cisco DNA Center documentation for Devices SyncDevices2
- description: Complete reference of the SyncDevices2 API.
- link: https://developer.cisco.com/docs/dna-center/#!sync-devices
+- name: Cisco DNA Center documentation for Devices UpdateDeviceDetails
+ description: Complete reference of the UpdateDeviceDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-device-details
notes:
- SDK Method used are
devices.Devices.add_device,
@@ -181,9 +187,6 @@ EXAMPLES = r"""
snmpUserName: string
snmpVersion: string
type: string
- updateMgmtIPaddressList:
- - existMgmtIpAddress: string
- newMgmtIpAddress: string
userName: string
- name: Update all
@@ -242,7 +245,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_by_ip_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_by_ip_info.py
index 502db8ba4..53e62255e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_by_ip_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_by_ip_info.py
@@ -24,8 +24,8 @@ options:
- IpAddress path parameter. Device IP address.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetNetworkDeviceByIP
description: Complete reference of the GetNetworkDeviceByIP API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -101,7 +100,19 @@ dnac_response:
"tunnelUdpPort": "string",
"type": "string",
"upTime": "string",
- "waasDeviceMode": "string"
+ "waasDeviceMode": "string",
+ "dnsResolvedManagementAddress": "string",
+ "apEthernetMacAddress": "string",
+ "vendor": "string",
+ "reasonsForPendingSyncRequests": "string",
+ "pendingSyncRequestsCount": "string",
+ "reasonsForDeviceResync": "string",
+ "lastDeviceResyncStartTime": "string",
+ "uptimeSeconds": 0,
+ "managedAtleastOnce": true,
+ "deviceSupportLevel": "string",
+ "managementState": "string",
+ "description": "string"
},
"version": "string"
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_by_serial_number_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_by_serial_number_info.py
index d40497b17..280393913 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_by_serial_number_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_by_serial_number_info.py
@@ -10,7 +10,7 @@ module: network_device_by_serial_number_info
short_description: Information module for Network Device By Serial Number
description:
- Get Network Device By Serial Number by id.
-- Returns the network device if the given serial number matches with any of the serial numbers collected.
+- Returns the network device with given serial number.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -24,8 +24,8 @@ options:
- SerialNumber path parameter. Device serial number.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceBySerialNumber
description: Complete reference of the GetDeviceBySerialNumber API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -101,7 +100,19 @@ dnac_response:
"tunnelUdpPort": "string",
"type": "string",
"upTime": "string",
- "waasDeviceMode": "string"
+ "waasDeviceMode": "string",
+ "dnsResolvedManagementAddress": "string",
+ "apEthernetMacAddress": "string",
+ "vendor": "string",
+ "reasonsForPendingSyncRequests": "string",
+ "pendingSyncRequestsCount": "string",
+ "reasonsForDeviceResync": "string",
+ "lastDeviceResyncStartTime": "string",
+ "uptimeSeconds": 0,
+ "managedAtleastOnce": true,
+ "deviceSupportLevel": "string",
+ "managementState": "string",
+ "description": "string"
},
"version": "string"
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_chassis_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_chassis_details_info.py
index c5b34a498..13f84df1f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_chassis_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_chassis_details_info.py
@@ -24,8 +24,8 @@ options:
- DeviceId path parameter. Device ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetChassisDetailsForDevice
description: Complete reference of the GetChassisDetailsForDevice API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_config_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_count_info.py
index 3d2c900b0..04fb4487d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_config_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceConfigCount
description: Complete reference of the GetDeviceConfigCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_config_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_info.py
index 6810fd6db..4a67e42ab 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_config_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_info.py
@@ -11,7 +11,9 @@ short_description: Information module for Network Device Config
description:
- Get all Network Device Config.
- Get Network Device Config by id.
-- Returns the config for all devices.
+- >
+ Returns the config for all devices. This API has been deprecated and will not be available in a Cisco Catalyst
+ Center release after Nov 1st 2024 23 59 59 GMT.
- Returns the device config by specified device ID.
version_added: '3.1.0'
extends_documentation_fragment:
@@ -26,8 +28,8 @@ options:
- NetworkDeviceId path parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceConfigById
description: Complete reference of the GetDeviceConfigById API.
@@ -73,7 +75,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_config_task_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_task_info.py
new file mode 100644
index 000000000..a55acc49d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_task_info.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_config_task_info
+short_description: Information module for Network Device Config Task
+description:
+- Get all Network Device Config Task.
+- Returns a config task result details by specified id.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ parentTaskId:
+ description:
+ - ParentTaskId query parameter. Task Id.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Configuration Archive GetConfigTaskDetails
+ description: Complete reference of the GetConfigTaskDetails API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-config-task-details
+notes:
+ - SDK Method used are
+ configuration_archive.ConfigurationArchive.get_config_task_details,
+
+ - Paths used are
+ get /dna/intent/api/v1/network-device-config/task,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Network Device Config Task
+ cisco.dnac.network_device_config_task_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ parentTaskId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "version": "string",
+ "response": [
+ {
+ "startTime": 0,
+ "errorCode": "string",
+ "deviceId": "string",
+ "taskId": "string",
+ "taskStatus": "string",
+ "parentTaskId": "string",
+ "deviceIpAddress": "string",
+ "detailMessage": "string",
+ "failureMessage": "string",
+ "taskType": "string",
+ "completionTime": 0,
+ "hostName": "string"
+ }
+ ]
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_config_write_memory.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_write_memory.py
new file mode 100644
index 000000000..eb9bff43c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_config_write_memory.py
@@ -0,0 +1,66 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_config_write_memory
+short_description: Resource module for Network Device Config Write Memory
+description:
+- Manage operation create of the resource Network Device Config Write Memory.
+- This operation would commit device running configuration to startup by issuing "write memory" to device.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ deviceId:
+ description: UUID of the device.
+ elements: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Configuration Archive CommitDeviceConfiguration
+ description: Complete reference of the CommitDeviceConfiguration API.
+ link: https://developer.cisco.com/docs/dna-center/#!commit-device-configuration
+notes:
+ - SDK Method used are
+ configuration_archive.ConfigurationArchive.commit_device_configuration,
+
+ - Paths used are
+ post /dna/intent/api/v1/network-device-config/write-memory,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.network_device_config_write_memory:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ deviceId:
+ - string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "version": "string",
+ "response": {
+ "url": "string",
+ "taskId": "string"
+ }
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_count_info.py
index cdcf87b7c..b4ad7e000 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_count_info.py
@@ -27,16 +27,36 @@ options:
description:
- DeviceId path parameter. Device ID.
type: str
+ hostname:
+ description:
+ - Hostname query parameter.
+ elements: str
+ type: list
+ managementIpAddress:
+ description:
+ - ManagementIpAddress query parameter.
+ elements: str
+ type: list
+ macAddress:
+ description:
+ - MacAddress query parameter.
+ elements: str
+ type: list
+ locationName:
+ description:
+ - LocationName query parameter.
+ elements: str
+ type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceCount2
description: Complete reference of the GetDeviceCount2 API.
link: https://developer.cisco.com/docs/dna-center/#!get-device-count
-- name: Cisco DNA Center documentation for Devices GetDeviceInterfaceCount2
- description: Complete reference of the GetDeviceInterfaceCount2 API.
- link: https://developer.cisco.com/docs/dna-center/#!get-device-interface-count-2
+- name: Cisco DNA Center documentation for Devices GetDeviceInterfaceCount
+ description: Complete reference of the GetDeviceInterfaceCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-device-interface-count
notes:
- SDK Method used are
devices.Devices.get_device_count,
@@ -59,6 +79,10 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
+ hostname: []
+ managementIpAddress: []
+ macAddress: []
+ locationName: []
register: result
- name: Get Network Device Count by id
@@ -75,7 +99,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt.py
index 38fbc4eaa..b847e5b19 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt.py
@@ -11,7 +11,7 @@ short_description: Resource module for Network Device Custom Prompt
description:
- Manage operation create of the resource Network Device Custom Prompt.
- >
- Save custom prompt added by user in Cisco DNA Center. API will always override the existing prompts. User should
+ Save custom prompt added by user in Catalyst Center. API will always override the existing prompts. User should
provide all the custom prompt in case of any update.
version_added: '6.0.0'
extends_documentation_fragment:
@@ -19,21 +19,21 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
passwordPrompt:
- description: Password Prompt.
+ description: Password for Custom Prompt.
type: str
usernamePrompt:
- description: Username Prompt.
+ description: Username for Custom Prompt.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for System Settings CustomPromptPOSTAPI
description: Complete reference of the CustomPromptPOSTAPI API.
link: https://developer.cisco.com/docs/dna-center/#!custom-prompt-postapi
notes:
- SDK Method used are
- system_settings.SystemSettings.custom_prompt_post_api,
+ system_settings.SystemSettings.custom_prompt_p_o_s_t_api,
- Paths used are
post /dna/intent/api/v1/network-device/custom-prompt,
@@ -54,7 +54,6 @@ EXAMPLES = r"""
usernamePrompt: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt_info.py
index cd3d02bdd..9637b4d67 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_custom_prompt_info.py
@@ -10,7 +10,7 @@ module: network_device_custom_prompt_info
short_description: Information module for Network Device Custom Prompt Info
description:
- Get all Network Device Custom Prompt Info.
-- Returns supported custom prompts by Cisco DNA Center.
+- Returns supported custom prompts by Catalyst Center.
version_added: '6.0.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -20,15 +20,15 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for System Settings CustomPromptSupportGETAPI
description: Complete reference of the CustomPromptSupportGETAPI API.
link: https://developer.cisco.com/docs/dna-center/#!custom-prompt-support-getapi
notes:
- SDK Method used are
- system_settings.SystemSettings.custom_prompt_support_get_api,
+ system_settings.SystemSettings.custom_prompt_support_g_e_t_api,
- Paths used are
get /dna/intent/api/v1/network-device/custom-prompt,
@@ -37,7 +37,7 @@ notes:
EXAMPLES = r"""
- name: Get all Network Device Custom Prompt Info
- cisco.dnac.network_device_custom_prompt_info_info:
+ cisco.dnac.network_device_custom_prompt_info:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
dnac_password: "{{dnac_password}}"
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_equipment_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_equipment_info.py
index 7bc3a6733..3c50a396f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_equipment_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_equipment_info.py
@@ -10,7 +10,9 @@ module: network_device_equipment_info
short_description: Information module for Network Device Equipment
description:
- Get all Network Device Equipment.
-- Return PowerSupply/ Fan details for the Given device.
+- >
+ Return all types of equipment details like PowerSupply, Fan, Chassis, Backplane, Module, PROCESSOR, Other and SFP
+ for the Given device.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -30,15 +32,15 @@ options:
If no type is mentioned, All equipments are fetched for the device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Devices ReturnPowerSupplyFanDetailsForTheGivenDevice
- description: Complete reference of the ReturnPowerSupplyFanDetailsForTheGivenDevice API.
- link: https://developer.cisco.com/docs/dna-center/#!return-power-supply-fan-details-for-the-given-device
+- name: Cisco DNA Center documentation for Devices GetTheDetailsOfPhysicalComponentsOfTheGivenDevice
+ description: Complete reference of the GetTheDetailsOfPhysicalComponentsOfTheGivenDevice API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-the-details-of-physical-components-of-the-given-device
notes:
- SDK Method used are
- devices.Devices.return_power_supply_fan_details_for_the_given_device,
+ devices.Devices.get_the_details_of_physical_components_of_the_given_device,
- Paths used are
get /dna/intent/api/v1/network-device/{deviceUuid}/equipment,
@@ -61,7 +63,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -77,7 +78,8 @@ dnac_response:
"vendorEquipmentType": "string",
"description": "string",
"instanceUuid": "string",
- "name": "string"
+ "name": "string",
+ "manufacturer": "string"
}
],
"version": "string"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_export.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_export.py
index c1bf8d245..aef915f24 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_export.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_export.py
@@ -17,25 +17,22 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
deviceUuids:
- description: Network Device Export's deviceUuids.
+ description: List of device uuids.
elements: str
type: list
- id:
- description: Network Device Export's id.
- type: str
operationEnum:
- description: Network Device Export's operationEnum.
+ description: 0 to export Device Credential Details Or 1 to export Device Details.
type: str
parameters:
- description: Network Device Export's parameters.
+ description: List of device parameters that needs to be exported to file.
elements: str
type: list
password:
- description: Network Device Export's password.
+ description: Password is required when the operationEnum value is 0.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices ExportDeviceList
description: Complete reference of the ExportDeviceList API.
@@ -61,14 +58,12 @@ EXAMPLES = r"""
dnac_debug: "{{dnac_debug}}"
deviceUuids:
- string
- id: string
operationEnum: string
parameters:
- string
password: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_functional_capability_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_functional_capability_info.py
index 1d88047ab..85776d6d5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_functional_capability_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_functional_capability_info.py
@@ -37,8 +37,8 @@ options:
- Id path parameter. Functional Capability UUID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetFunctionalCapabilityById
description: Complete reference of the GetFunctionalCapabilityById API.
@@ -86,7 +86,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_global_polling_interval_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_global_polling_interval_info.py
index 41bced4d1..205e00e64 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_global_polling_interval_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_global_polling_interval_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetPollingIntervalForAllDevices
description: Complete reference of the GetPollingIntervalForAllDevices API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_info.py
index 7dae65a3f..cddce9d30 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_info.py
@@ -190,8 +190,8 @@ options:
- Limit query parameter. 1 <= limit <= 500 max. No. Of devices to be returned in the result.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceByID
description: Complete reference of the GetDeviceByID API.
@@ -271,7 +271,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -318,7 +317,19 @@ dnac_response:
"tunnelUdpPort": "string",
"type": "string",
"upTime": "string",
- "waasDeviceMode": "string"
+ "waasDeviceMode": "string",
+ "dnsResolvedManagementAddress": "string",
+ "apEthernetMacAddress": "string",
+ "vendor": "string",
+ "reasonsForPendingSyncRequests": "string",
+ "pendingSyncRequestsCount": "string",
+ "reasonsForDeviceResync": "string",
+ "lastDeviceResyncStartTime": "string",
+ "uptimeSeconds": 0,
+ "managedAtleastOnce": true,
+ "deviceSupportLevel": "string",
+ "managementState": "string",
+ "description": "string"
},
"version": "string"
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_insight_device_link_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_insight_device_link_info.py
new file mode 100644
index 000000000..bcc7bbd2f
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_insight_device_link_info.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_insight_device_link_info
+short_description: Information module for Network Device Insight Device Link
+description:
+- Get all Network Device Insight Device Link.
+- Find all devices with link mismatch speed / vlan.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ siteId:
+ description:
+ - SiteId path parameter.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Row Number. Default value is 1.
+ type: int
+ limit:
+ description:
+ - Limit query parameter. Default value is 500.
+ type: int
+ category:
+ description:
+ - Category query parameter. Links mismatch category. Value can be speed-duplex or vlan.
+ type: str
+ sortBy:
+ description:
+ - SortBy query parameter. Sort By.
+ type: str
+ order:
+ description:
+ - Order query parameter. Order. Value can be asc or desc. Default value is asc.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Devices InventoryInsightDeviceLinkMismatch
+ description: Complete reference of the InventoryInsightDeviceLinkMismatch API.
+ link: https://developer.cisco.com/docs/dna-center/#!inventory-insight-device-link-mismatch
+notes:
+ - SDK Method used are
+ devices.Devices.inventory_insight_device_link_mismatch,
+
+ - Paths used are
+ get /dna/intent/api/v1/network-device/insight/{siteId}/device-link,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Network Device Insight Device Link
+ cisco.dnac.network_device_insight_device_link_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ offset: 0
+ limit: 0
+ category: string
+ sortBy: string
+ order: string
+ siteId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "endPortAllowedVlanIds": "string",
+ "endPortNativeVlanId": "string",
+ "startPortAllowedVlanIds": "string",
+ "startPortNativeVlanId": "string",
+ "linkStatus": "string",
+ "endDeviceHostName": "string",
+ "endDeviceId": "string",
+ "endDeviceIpAddress": "string",
+ "endPortAddress": "string",
+ "endPortDuplex": "string",
+ "endPortId": "string",
+ "endPortMask": "string",
+ "endPortName": "string",
+ "endPortPepId": "string",
+ "endPortSpeed": "string",
+ "startDeviceHostName": "string",
+ "startDeviceId": "string",
+ "startDeviceIpAddress": "string",
+ "startPortAddress": "string",
+ "startPortDuplex": "string",
+ "startPortId": "string",
+ "startPortMask": "string",
+ "startPortName": "string",
+ "startPortPepId": "string",
+ "startPortSpeed": "string",
+ "lastUpdated": "string",
+ "numUpdates": 0,
+ "avgUpdateFrequency": 0,
+ "type": "string",
+ "instanceUuid": "string",
+ "instanceTenantId": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_neighbor_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_neighbor_info.py
index da4550a9e..4ba87d218 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_neighbor_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_neighbor_info.py
@@ -28,8 +28,8 @@ options:
- InterfaceUuid path parameter. Instanceuuid of interface.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetConnectedDeviceDetail
description: Complete reference of the GetConnectedDeviceDetail API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_poe_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_poe_info.py
index 2c525456f..c0744f110 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_poe_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_interface_poe_info.py
@@ -31,8 +31,8 @@ options:
- InterfaceNameList query parameter. Comma seperated interface names.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices ReturnsPOEInterfaceDetailsForTheDevice
description: Complete reference of the ReturnsPOEInterfaceDetailsForTheDevice API.
@@ -62,7 +62,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_inventory_insight_link_mismatch_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_inventory_insight_link_mismatch_info.py
index ee282e771..42b8569d6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_inventory_insight_link_mismatch_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_inventory_insight_link_mismatch_info.py
@@ -44,8 +44,8 @@ options:
- Order query parameter. Order. Value can be asc or desc. Default value is asc.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices InventoryInsightDeviceLinkMismatchAPI
description: Complete reference of the InventoryInsightDeviceLinkMismatchAPI API.
@@ -79,7 +79,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_ip_address_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_ip_address_info.py
new file mode 100644
index 000000000..b7aadd31d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_ip_address_info.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_ip_address_info
+short_description: Information module for Network Device Ip Address
+description:
+- Get Network Device Ip Address by id.
+- Returns the network device by specified IP address.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ ipAddress:
+ description:
+ - IpAddress path parameter. Device IP address.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Devices GetNetworkDeviceByIP
+ description: Complete reference of the GetNetworkDeviceByIP API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-network-device-by-ip
+notes:
+ - SDK Method used are
+ devices.Devices.get_network_device_by_ip,
+
+ - Paths used are
+ get /dna/intent/api/v1/network-device/ip-address/{ipAddress},
+
+"""
+
+EXAMPLES = r"""
+- name: Get Network Device Ip Address by id
+ cisco.dnac.network_device_ip_address_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ ipAddress: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "apManagerInterfaceIp": "string",
+ "associatedWlcIp": "string",
+ "bootDateTime": "string",
+ "collectionInterval": "string",
+ "collectionStatus": "string",
+ "errorCode": "string",
+ "errorDescription": "string",
+ "family": "string",
+ "hostname": "string",
+ "id": "string",
+ "instanceTenantId": "string",
+ "instanceUuid": "string",
+ "interfaceCount": "string",
+ "inventoryStatusDetail": "string",
+ "lastUpdateTime": 0,
+ "lastUpdated": "string",
+ "lineCardCount": "string",
+ "lineCardId": "string",
+ "location": "string",
+ "locationName": "string",
+ "macAddress": "string",
+ "managementIpAddress": "string",
+ "memorySize": "string",
+ "platformId": "string",
+ "reachabilityFailureReason": "string",
+ "reachabilityStatus": "string",
+ "role": "string",
+ "roleSource": "string",
+ "serialNumber": "string",
+ "series": "string",
+ "snmpContact": "string",
+ "snmpLocation": "string",
+ "softwareType": "string",
+ "softwareVersion": "string",
+ "tagCount": "string",
+ "tunnelUdpPort": "string",
+ "type": "string",
+ "upTime": "string",
+ "waasDeviceMode": "string",
+ "dnsResolvedManagementAddress": "string",
+ "apEthernetMacAddress": "string",
+ "vendor": "string",
+ "reasonsForPendingSyncRequests": "string",
+ "pendingSyncRequestsCount": "string",
+ "reasonsForDeviceResync": "string",
+ "lastDeviceResyncStartTime": "string",
+ "uptimeSeconds": 0,
+ "managedAtleastOnce": true,
+ "deviceSupportLevel": "string",
+ "managementState": "string",
+ "description": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_lexicographically_sorted_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_lexicographically_sorted_info.py
index cd7473692..795c40edb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_lexicographically_sorted_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_lexicographically_sorted_info.py
@@ -108,8 +108,8 @@ options:
- Limit query parameter.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceValuesThatMatchFullyOrPartiallyAnAttribute
description: Complete reference of the GetDeviceValuesThatMatchFullyOrPartiallyAnAttribute API.
@@ -159,17 +159,11 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: dict
+ type: str
sample: >
- {
- "response": [
- "string"
- ],
- "version": "string"
- }
+ "string"
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_linecard_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_linecard_details_info.py
index 6ec120ded..8421e3116 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_linecard_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_linecard_details_info.py
@@ -24,8 +24,8 @@ options:
- DeviceUuid path parameter. Instanceuuid of device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetLinecardDetails
description: Complete reference of the GetLinecardDetails API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_management_address_update.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_management_address_update.py
new file mode 100644
index 000000000..d50ec5eba
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_management_address_update.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_management_address_update
+short_description: Resource module for Network Device Management Address Update
+description:
+- Manage operation update of the resource Network Device Management Address Update.
+- This is a simple PUT API to edit the management IP Address of the device.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ deviceid:
+ description: Deviceid path parameter. The UUID of the device whose management IP
+ address is to be updated.
+ type: str
+ newIP:
+ description: New IP Address of the device to be Updated.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Devices UpdateDeviceManagementAddress
+ description: Complete reference of the UpdateDeviceManagementAddress API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-device-management-address
+notes:
+ - SDK Method used are
+ devices.Devices.update_device_management_address,
+
+ - Paths used are
+ put /dna/intent/api/v1/network-device/{deviceid}/management-address,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.network_device_management_address_update:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ deviceid: string
+ newIP: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_meraki_organization_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_meraki_organization_info.py
index a8bc1ddeb..2437905df 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_meraki_organization_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_meraki_organization_info.py
@@ -21,11 +21,11 @@ options:
type: dict
id:
description:
- - Id path parameter.
+ - Id path parameter. Device Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetOrganizationListForMeraki
description: Complete reference of the GetOrganizationListForMeraki API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_module_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_module_count_info.py
index 5969b346e..30082ff4b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_module_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_module_count_info.py
@@ -44,8 +44,8 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetModuleCount
description: Complete reference of the GetModuleCount API.
@@ -78,7 +78,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_module_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_module_info.py
index be3bd1a55..8451c951b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_module_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_module_info.py
@@ -58,8 +58,8 @@ options:
- Id path parameter. Module id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetModuleInfoById
description: Complete reference of the GetModuleInfoById API.
@@ -112,7 +112,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_poe_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_poe_info.py
index 4f78d1e70..f6ca8918a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_poe_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_poe_info.py
@@ -21,11 +21,11 @@ options:
type: dict
deviceUuid:
description:
- - DeviceUuid path parameter. Uuid of the device.
+ - DeviceUuid path parameter. UUID of the device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices POEDetails
description: Complete reference of the POEDetails API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_polling_interval_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_polling_interval_info.py
index fc4707d26..ec5a9d43a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_polling_interval_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_polling_interval_info.py
@@ -24,8 +24,8 @@ options:
- Id path parameter. Device ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetPollingIntervalById
description: Complete reference of the GetPollingIntervalById API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_range_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_range_info.py
index 5ea4572f4..6c373ef86 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_range_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_range_info.py
@@ -30,8 +30,8 @@ options:
- RecordsToReturn path parameter. Number of records to return 1<= recordsToReturn <= 500.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetNetworkDeviceByPaginationRange
description: Complete reference of the GetNetworkDeviceByPaginationRange API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -109,7 +108,19 @@ dnac_response:
"tunnelUdpPort": "string",
"type": "string",
"upTime": "string",
- "waasDeviceMode": "string"
+ "waasDeviceMode": "string",
+ "dnsResolvedManagementAddress": "string",
+ "apEthernetMacAddress": "string",
+ "vendor": "string",
+ "reasonsForPendingSyncRequests": "string",
+ "pendingSyncRequestsCount": "string",
+ "reasonsForDeviceResync": "string",
+ "lastDeviceResyncStartTime": "string",
+ "uptimeSeconds": 0,
+ "managedAtleastOnce": true,
+ "deviceSupportLevel": "string",
+ "managementState": "string",
+ "description": "string"
}
],
"version": "string"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_register_for_wsa_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_register_for_wsa_info.py
index 29c483aa2..8ddd0b7e8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_register_for_wsa_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_register_for_wsa_info.py
@@ -30,8 +30,8 @@ options:
- Macaddress query parameter. Mac address of the device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDevicesRegisteredForWSANotification
description: Complete reference of the GetDevicesRegisteredForWSANotification API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_stack_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_stack_details_info.py
index 5cdaf55ad..8cf7afe37 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_stack_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_stack_details_info.py
@@ -24,8 +24,8 @@ options:
- DeviceId path parameter. Device ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetStackDetailsForDevice
description: Complete reference of the GetStackDetailsForDevice API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_summary_info.py
index 3096ba1cf..9ffa094cc 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_summary_info.py
@@ -24,8 +24,8 @@ options:
- Id path parameter. Device ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceSummary
description: Complete reference of the GetDeviceSummary API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_supervisor_card_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_supervisor_card_details_info.py
index f9db5bdd7..e75afeff3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_supervisor_card_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_supervisor_card_details_info.py
@@ -24,8 +24,8 @@ options:
- DeviceUuid path parameter. Instanceuuid of device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetSupervisorCardDetail
description: Complete reference of the GetSupervisorCardDetail API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_sync.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_sync.py
index 526b8d239..7cd157cb3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_sync.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_sync.py
@@ -24,11 +24,11 @@ options:
type: bool
payload:
description: Network Device Sync's payload.
- elements: dict
+ elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices SyncDevices
description: Complete reference of the SyncDevices API.
@@ -54,10 +54,9 @@ EXAMPLES = r"""
dnac_debug: "{{dnac_debug}}"
forceSync: true
payload:
- - {}
+ - string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_update_role.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_update_role.py
index 14ded7ec1..fd679f2a9 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_update_role.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_update_role.py
@@ -17,17 +17,17 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
id:
- description: Network Device Update Role's id.
+ description: DeviceId of the Device.
type: str
role:
- description: Network Device Update Role's role.
+ description: Role of device as ACCESS, CORE, DISTRIBUTION, BORDER ROUTER.
type: str
roleSource:
- description: Network Device Update Role's roleSource.
+ description: Role source as MANUAL / AUTO.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices UpdateDeviceRole
description: Complete reference of the UpdateDeviceRole API.
@@ -56,7 +56,6 @@ EXAMPLES = r"""
roleSource: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field.py
index 8d0ddc17e..31b3473b5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field.py
@@ -13,7 +13,7 @@ description:
- Creates a new global User Defined Field, which can be assigned to devices.
- Deletes an existing Global User-Defined-Field using its id.
- Updates an existing global User Defined Field, using its id.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -28,8 +28,8 @@ options:
description: Name of UDF.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices CreateUserDefinedField
description: Complete reference of the CreateUserDefinedField API.
@@ -94,7 +94,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_delete.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_delete.py
new file mode 100644
index 000000000..b7d6c40dc
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_delete.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_user_defined_field_delete
+short_description: Resource module for Network Device User Defined Field Delete
+description:
+- Manage operation delete of the resource Network Device User Defined Field Delete.
+- >
+ Remove a User-Defined-Field from device. Name of UDF has to be passed as the query parameter. Please note that
+ Global UDF will not be deleted by this operation.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ deviceId:
+ description: DeviceId path parameter. UUID of device from which UDF has to be removed.
+ type: str
+ name:
+ description: Name query parameter. Name of UDF to be removed.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Devices RemoveUserDefinedFieldFromDevice
+ description: Complete reference of the RemoveUserDefinedFieldFromDevice API.
+ link: https://developer.cisco.com/docs/dna-center/#!remove-user-defined-field-from-device
+notes:
+ - SDK Method used are
+ devices.Devices.remove_user_defined_field_from_device,
+
+ - Paths used are
+ delete /dna/intent/api/v1/network-device/{deviceId}/user-defined-field,
+
+"""
+
+EXAMPLES = r"""
+- name: Delete all
+ cisco.dnac.network_device_user_defined_field_delete:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ deviceId: string
+ name: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_info.py
index 967114272..3b0a31a25 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_info.py
@@ -12,8 +12,8 @@ description:
- Get all Network Device User Defined Field.
- >
Gets existing global User Defined Fields. If no input is given, it fetches ALL the Global UDFs. Filter/search is
- supported either by UDF Ids or by UDF names, but not both.
-version_added: '6.7.0'
+ supported by UDF Ids or UDF names or both.
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -30,8 +30,8 @@ options:
- Name query parameter. Comma-separated name(s) used for search/filtering.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetAllUserDefinedFields
description: Complete reference of the GetAllUserDefinedFields API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_update.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_update.py
new file mode 100644
index 000000000..b41391000
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_user_defined_field_update.py
@@ -0,0 +1,81 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: network_device_user_defined_field_update
+short_description: Resource module for Network Device User Defined Field Update
+description:
+- Manage operation update of the resource Network Device User Defined Field Update.
+- >
+ Assigns an existing Global User-Defined-Field to a device. If the UDF is already assigned to the specific device,
+ then it updates the device UDF value accordingly. Please note that the assigned UDF 'name' must be an existing
+ global UDF. Otherwise an error will be shown.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ deviceId:
+ description: DeviceId path parameter. UUID of device to which UDF has to be added.
+ type: str
+ payload:
+ description: Network Device User Defined Field Update's payload.
+ elements: dict
+ suboptions:
+ name:
+ description: Name of the User Defined Field.
+ type: str
+ value:
+ description: Value of the User Defined Field that will be assigned to the device.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Devices AddUserDefinedFieldToDevice
+ description: Complete reference of the AddUserDefinedFieldToDevice API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-user-defined-field-to-device
+notes:
+ - SDK Method used are
+ devices.Devices.add_user_defined_field_to_device,
+
+ - Paths used are
+ put /dna/intent/api/v1/network-device/{deviceId}/user-defined-field,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.network_device_user_defined_field_update:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ deviceId: string
+ payload:
+ - name: string
+ value: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_vlan_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_vlan_info.py
index 132c102c7..89c89c226 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_vlan_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_vlan_info.py
@@ -10,7 +10,7 @@ module: network_device_vlan_info
short_description: Information module for Network Device Vlan
description:
- Get all Network Device Vlan.
-- Returns Device Interface VLANs.
+- Returns Device Interface VLANs. If parameter value is null or empty, it won't return any value in response.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -25,11 +25,14 @@ options:
type: str
interfaceType:
description:
- - InterfaceType query parameter. Vlan assocaited with sub-interface.
+ - >
+ InterfaceType query parameter. Vlan associated with sub-interface. If no interfaceType is mentioned, it will
+ return all types of Vlan interfaces. If interfaceType is selected but not specified, it will take the
+ default value.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetDeviceInterfaceVLANs
description: Complete reference of the GetDeviceInterfaceVLANs API.
@@ -59,7 +62,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_wireless_lan_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_wireless_lan_info.py
index f08b1bcca..7fe38ba77 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_wireless_lan_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_wireless_lan_info.py
@@ -24,8 +24,8 @@ options:
- Id path parameter. Device ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetWirelessLanControllerDetailsById
description: Complete reference of the GetWirelessLanControllerDetailsById API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,22 +61,19 @@ dnac_response:
type: dict
sample: >
{
- "response": {
- "adminEnabledPorts": [
- 0
- ],
- "apGroupName": "string",
- "deviceId": "string",
- "ethMacAddress": "string",
- "flexGroupName": "string",
- "id": "string",
- "instanceTenantId": "string",
- "instanceUuid": "string",
- "lagModeEnabled": true,
- "netconfEnabled": true,
- "wirelessLicenseInfo": "string",
- "wirelessPackageInstalled": true
- },
- "version": "string"
+ "adminEnabledPorts": [
+ 0
+ ],
+ "apGroupName": "string",
+ "deviceId": "string",
+ "ethMacAddress": "string",
+ "flexGroupName": "string",
+ "id": "string",
+ "instanceTenantId": "string",
+ "instanceUuid": "string",
+ "lagModeEnabled": true,
+ "netconfEnabled": true,
+ "wirelessLicenseInfo": "string",
+ "wirelessPackageInstalled": true
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_device_with_snmp_v3_des_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_device_with_snmp_v3_des_info.py
index b3b6a55d6..81abfc812 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_device_with_snmp_v3_des_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_device_with_snmp_v3_des_info.py
@@ -42,7 +42,7 @@ options:
- Order query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Devices ReturnsDevicesAddedToCiscoDNACenterWithSnmpV3DES
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_info.py
index 0aa99bde5..bed1b9a19 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_info.py
@@ -24,8 +24,8 @@ options:
- SiteId query parameter. Site id to get the network settings associated with the site.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings GetNetwork
description: Complete reference of the GetNetwork API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py
index cdf7b0634..9cf3cc2a9 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_settings_workflow_manager.py
@@ -54,18 +54,23 @@ options:
elements: dict
type: list
suboptions:
- dhcp_server_ips:
+ name:
+ description:
+ - Specifies the name assigned to the Global IP Pool.
+ - Required for the operations in the Global IP Pool.
+ - Length should be less than or equal to 100.
+ - Only letters, numbers and -_./ characters are allowed.
+ type: str
+ pool_type:
description: >
- The DHCP server IPs responsible for automatically assigning IP addresses
- and network configuration parameters to devices on a local network.
- elements: str
- type: list
- dns_server_ips:
- description: Responsible for translating domain names into corresponding IP addresses.
- elements: str
- type: list
- gateway:
- description: Serves as an entry or exit point for data traffic between networks.
+ Includes both the Generic Ip Pool and Tunnel Ip Pool.
+ Generic - Used for general purpose within the network such as device
+ management or communication between the network devices.
+ Tunnel - Designated for the tunnel interfaces to encapsulate packets
+ within the network protocol. It is used in VPN connections,
+ GRE tunnels, or other types of overlay networks.
+ default: Generic
+ choices: [Generic, Tunnel]
type: str
ip_address_space:
description: IP address space either IPv4 or IPv6.
@@ -75,44 +80,75 @@ options:
Defines the IP pool's Classless Inter-Domain Routing block,
enabling systematic IP address distribution within a network.
type: str
+ gateway:
+ description: Serves as an entry or exit point for data traffic between networks.
+ type: str
+ dhcp_server_ips:
+ description: >
+ The DHCP server IPs responsible for automatically assigning IP addresses
+ and network configuration parameters to devices on a local network.
+ elements: str
+ type: list
+ dns_server_ips:
+ description: Responsible for translating domain names into corresponding IP addresses.
+ elements: str
+ type: list
prev_name:
description: >
The former identifier for the global pool. It should be used
exclusively when you need to update the global pool's name.
type: str
- name:
- description: Specifies the name assigned to the Global IP Pool.
- type: str
- pool_type:
- description: >
- Includes both the Generic Ip Pool and Tunnel Ip Pool.
- Generic - Used for general purpose within the network such as device
- management or communication between the network devices.
- Tunnel - Designated for the tunnel interfaces to encapsulate packets
- within the network protocol. It is used in VPN connections,
- GRE tunnels, or other types of overlay networks.
- default: Generic
- choices: [Generic, Tunnel]
- type: str
reserve_pool_details:
description: Reserved IP subpool details from the global pool.
type: dict
suboptions:
- ipv4_dhcp_servers:
- description: Specifies the IPv4 addresses for DHCP servers, for example, "1.1.1.1".
- elements: str
- type: list
- ipv4_dns_servers:
- description: Specifies the IPv4 addresses for DNS servers, for example, "4.4.4.4".
- elements: str
- type: list
- ipv4_gateway:
- description: Provides the gateway's IPv4 address, for example, "175.175.0.1".
+ site_name:
+ description: >
+ The name of the site provided as a path parameter, used
+ to specify where the IP sub-pool will be reserved.
type: str
- version_added: 4.0.0
+ name:
+ description:
+ - Name of the reserve IP subpool.
+ - Required for the operations in the Reserve IP Pool.
+ - Length should be less than or equal to 100.
+ - Only letters, numbers and -_./ characters are allowed.
+ type: str
+ pool_type:
+ description: Type of the reserve ip sub pool.
+ Generic - Used for general purpose within the network such as device
+ management or communication between the network devices.
+ LAN - Used for the devices and the resources within the Local Area Network
+ such as device connectivity, internal communication, or services.
+ Management - Used for the management purposes such as device management interfaces,
+ management access, or other administrative functions.
+ Service - Used for the network services and application such as DNS (Domain Name System),
+ DHCP (Dynamic Host Configuration Protocol), NTP (Network Time Protocol).
+ WAN - Used for the devices and resources with the Wide Area Network such as remote
+ sites interconnection with other network or services hosted within WAN.
+ default: Generic
+ choices: [Generic, LAN, Management, Service, WAN]
+ type: str
+ ipv6_address_space:
+ description: >
+ Determines whether both IPv6 and IPv4 inputs are required.
+ If set to false, only IPv4 inputs are required.
+ If set to true, both IPv6 and IPv4 inputs are required.
+ type: bool
ipv4_global_pool:
- description: IP v4 Global pool address with cidr, example 175.175.0.0/16.
+ description:
+ - IP v4 Global pool address with cidr, example 175.175.0.0/16.
+ - If both 'ipv6_global_pool' and 'ipv4_global_pool_name' are provided, the 'ipv4_global_pool' will be given priority.
+ type: str
+ ipv4_global_pool_name:
+ description:
+ - Specifies the name to be associated with the IPv4 Global IP Pool.
+ - If both 'ipv4_global_pool' and 'ipv4_global_pool_name' are provided, the 'ipv4_global_pool' will be given priority.
+ type: str
+ version_added: 6.14.0
+ ipv4_subnet:
+ description: Indicates the IPv4 subnet address, for example, "175.175.0.0".
type: str
ipv4_prefix:
description: ip4 prefix length is enabled or ipv4 total Host input is enabled
@@ -120,18 +156,21 @@ options:
ipv4_prefix_length:
description: The ipv4 prefix length is required when ipv4_prefix value is true.
type: int
- ipv4_subnet:
- description: Indicates the IPv4 subnet address, for example, "175.175.0.0".
- type: str
ipv4_total_host:
description: The total number of hosts for IPv4, required when the 'ipv4_prefix' is set to false.
type: int
- ipv6_address_space:
- description: >
- Determines whether both IPv6 and IPv4 inputs are required.
- If set to false, only IPv4 inputs are required.
- If set to true, both IPv6 and IPv4 inputs are required.
- type: bool
+ ipv4_gateway:
+ description: Provides the gateway's IPv4 address, for example, "175.175.0.1".
+ type: str
+ version_added: 4.0.0
+ ipv4_dhcp_servers:
+ description: Specifies the IPv4 addresses for DHCP servers, for example, "1.1.1.1".
+ elements: str
+ type: list
+ ipv4_dns_servers:
+ description: Specifies the IPv4 addresses for DNS servers, for example, "4.4.4.4".
+ elements: str
+ type: list
ipv6_dhcp_servers:
description: >
Specifies the IPv6 addresses for DHCP servers in the format.
@@ -150,91 +189,141 @@ options:
For example, "2001:0db8:0123:4567:89ab:cdef:0003:0003".
type: str
ipv6_global_pool:
- description: >
- IPv6 Global pool address with cidr this is required when ipv6_address_space
- value is true, example 2001 db8 85a3 /64.
+ description:
+ - The ipv6_global_pool is required when the ipv6_address_space is set to true.
+ - It specifies the global IPv6 address pool using CIDR notation, such as "2001:db8:85a3::/64".
+ - In cases where both ipv6_global_pool and ipv6_global_pool_name are specified, ipv6_global_pool will take precedence.
+ type: str
+ ipv6_global_pool_name:
+ description:
+ - Specifies the name assigned to the Ip v6 Global IP Pool.
+ - If both 'ipv6_global_pool' and 'ipv6_global_pool_name' are provided, the 'ipv6_global_pool' will be given priority.
+ type: str
+ version_added: 6.14.0
+ ipv6_subnet:
+ description: IPv6 Subnet address, example 2001:db8:85a3:0:100.
type: str
ipv6_prefix:
description: >
- Ipv6 prefix value is true, the ip6 prefix length input field is enabled,
- if it is false ipv6 total Host input is enable.
+ Determines whether to enable the 'ipv6_prefix_length' or 'ipv6_total_host' input field.
+ If the IPv6 prefix value is true, the IPv6 prefix length input field is required.
+ If it is false, the IPv6 total host input is required.
type: bool
ipv6_prefix_length:
- description: IPv6 prefix length is required when the ipv6_prefix value is true.
+ description: Specifies the IPv6 prefix length. Required when 'ipv6_prefix' is set to true.
type: int
- ipv6_subnet:
- description: IPv6 Subnet address, example 2001 db8 85a3 0 100.
- type: str
ipv6_total_host:
- description: The total number of hosts for IPv6 is required if the 'ipv6_prefix' is set to false.
+ description:
+ - Specifies the total number of IPv6 hosts. Required when 'ipv6_prefix' is set to false.
+ - Must specify a number of IPv6 IP addresses that is less than 256.
type: int
- name:
- description: Name of the reserve IP subpool.
- type: str
prev_name:
description: The former name associated with the reserved IP sub-pool.
type: str
- site_name:
- description: >
- The name of the site provided as a path parameter, used
- to specify where the IP sub-pool will be reserved.
- type: str
slaac_support:
description: >
Allows devices on IPv6 networks to self-configure their
IP addresses autonomously, eliminating the need for manual setup.
type: bool
- pool_type:
- description: Type of the reserve ip sub pool.
- Generic - Used for general purpose within the network such as device
- management or communication between the network devices.
- LAN - Used for the devices and the resources within the Local Area Network
- such as device connectivity, internal communication, or services.
- Management - Used for the management purposes such as device management interfaces,
- management access, or other administrative functions.
- Service - Used for the network services and application such as DNS (Domain Name System),
- DHCP (Dynamic Host Configuration Protocol), NTP (Network Time Protocol).
- WAN - Used for the devices and resources with the Wide Area Network such as remote
- sites interconnection with other network or services hosted within WAN.
- default: Generic
- choices: [Generic, LAN, Management, Service, WAN]
- type: str
network_management_details:
description: Set default network settings for the site
type: dict
suboptions:
+ site_name:
+ description: >
+ The name of the site provided as a path parameter, used
+ to specify where the IP sub-pool will be reserved. (eg Global/Chennai/Trill)
+ type: str
settings:
description: Network management details settings.
type: dict
suboptions:
- client_and_endpoint_aaa:
- description: Network V2's clientAndEndpoint_aaa.
+ network_aaa:
+ description: Manages AAA (Authentication Authorization Accounting) for network devices.
suboptions:
- ip_address:
- description: IP address for ISE serve (eg 1.1.1.4).
+ server_type:
+ description: Server type for managing AAA for network devices.
+ choices: [AAA, ISE]
+ default: ISE
type: str
- network:
- description: IP address for AAA or ISE server (eg 2.2.2.1).
+ protocol:
+ description: Protocol for AAA or ISE server.
+ choices: [RADIUS, TACACS]
+ default: RADIUS
+ type: str
+ pan_address:
+ description:
+ - PAN IP address for the ISE server.
+ - For example, 1.1.1.1.
+ type: str
+ version_added: 6.14.0
+ primary_server_address:
+ description:
+ - Primary IP address for the ISE/AAA server.
+ - For example, 1.1.1.2.
+ type: str
+ version_added: 6.14.0
+ secondary_server_address:
+ description:
+ - Secondary IP address for the AAA server.
+ - For example, 1.1.1.3.
+ type: str
+ version_added: 6.14.0
+ shared_secret:
+ description:
+ - Shared secret for ISE Server.
+ - Required when the server_type is set to ISE.
+ - Length of the shared secret should be at least 4 characters.
+ type: str
+ type: dict
+ client_and_endpoint_aaa:
+ description: Manages AAA (Authentication Authorization Accounting) for clients and endpoints.
+ suboptions:
+ server_type:
+ description:
+ - Server type for managing AAA for client and endpoints.
+ choices: [AAA, ISE]
+ default: ISE
type: str
protocol:
- description: Protocol for AAA or ISE serve (eg RADIUS).
+ description: Protocol for AAA or ISE server.
+ choices: [RADIUS, TACACS]
+ default: RADIUS
type: str
- servers:
- description: Server type AAA or ISE server (eg AAA).
+ pan_address:
+ description:
+ - PAN IP address for the ISE server.
+ - For example, 1.1.1.1.
type: str
+ version_added: 6.14.0
+ primary_server_address:
+ description:
+ - Primary IP address for the ISE/AAA server.
+ - For example, 1.1.1.2.
+ type: str
+ version_added: 6.14.0
+ secondary_server_address:
+ description:
+ - Secondary IP address for the AAA server.
+ - For example, 1.1.1.3.
+ type: str
+ version_added: 6.14.0
shared_secret:
- description: Shared secret for ISE server.
+ description:
+ - Shared secret for ISE Server.
+ - Required when the server_type is set to ISE.
+ - Length of the shared secret should be at least 4 characters.
type: str
type: dict
dhcp_server:
- description: DHCP Server IP (eg 1.1.1.1).
+ description: DHCP Server IP address (eg 1.1.1.4).
elements: str
type: list
dns_server:
- description: Network V2's dnsServer.
+ description: DNS server details of the network under a specific site.
suboptions:
domain_name:
- description: Domain Name of DHCP (eg; cisco).
+ description: Domain Name of DHCP (eg; cisco.com, cisco.net).
type: str
primary_ip_address:
description: Primary IP Address for DHCP (eg 2.2.2.2).
@@ -243,18 +332,25 @@ options:
description: Secondary IP Address for DHCP (eg 3.3.3.3).
type: str
type: dict
+ ntp_server:
+ description: IP address for NTP server under a specific site (eg 1.1.1.2).
+ elements: str
+ type: list
+ timezone:
+ description: Time zone of a specific site. (eg Africa/Abidjan/GMT).
+ type: str
message_of_the_day:
- description: Network V2's messageOfTheday.
+ description: Banner details under a specific site.
suboptions:
banner_message:
- description: Massage for Banner message (eg; Good day).
+ description: Message for the banner (eg; Good day).
type: str
retain_existing_banner:
- description: Retain existing Banner Message (eg "true" or "false").
- type: str
+ description: Retain existing banner message.
+ type: bool
type: dict
netflow_collector:
- description: Network V2's netflowcollector.
+ description: Netflow collector details under a specific site.
suboptions:
ip_address:
description: IP Address for NetFlow collector (eg 3.3.3.1).
@@ -263,31 +359,8 @@ options:
description: Port for NetFlow Collector (eg; 443).
type: int
type: dict
- network_aaa:
- description: Network V2's network_aaa.
- suboptions:
- ip_address:
- description: IP address for AAA and ISE server (eg 1.1.1.1).
- type: str
- network:
- description: IP Address for AAA or ISE server (eg 2.2.2.2).
- type: str
- protocol:
- description: Protocol for AAA or ISE serve (eg RADIUS).
- type: str
- servers:
- description: Server type for AAA Network (eg AAA).
- type: str
- shared_secret:
- description: Shared secret for ISE Server.
- type: str
- type: dict
- ntp_server:
- description: IP address for NTP server (eg 1.1.1.2).
- elements: str
- type: list
snmp_server:
- description: Network V2's snmpServer.
+ description: Snmp Server details under a specific site.
suboptions:
configure_dnac_ip:
description: Configuration Cisco Catalyst Center IP for SNMP Server (eg true).
@@ -298,7 +371,7 @@ options:
type: list
type: dict
syslog_server:
- description: Network V2's syslogServer.
+ description: syslog Server details under a specific site.
suboptions:
configure_dnac_ip:
description: Configuration Cisco Catalyst Center IP for syslog server (eg true).
@@ -308,16 +381,8 @@ options:
elements: str
type: list
type: dict
- timezone:
- description: Input for time zone (eg Africa/Abidjan).
- type: str
- site_name:
- description: >
- The name of the site provided as a path parameter, used
- to specify where the IP sub-pool will be reserved.
- type: str
requirements:
-- dnacentersdk == 2.4.5
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
@@ -341,7 +406,7 @@ notes:
"""
EXAMPLES = r"""
-- name: Create global pool, reserve an ip pool and network
+- name: Create global pool
cisco.dnac.network_settings_workflow_manager:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
@@ -359,55 +424,181 @@ EXAMPLES = r"""
settings:
ip_pool:
- name: string
- gateway: string
+ pool_type: Generic
ip_address_space: string
cidr: string
- pool_type: Generic
+ gateway: string
dhcp_server_ips: list
dns_server_ips: list
- reserve_pool_details:
+
+- name: Create reserve an ip pool
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - reserve_pool_details:
+ - site_name: string
+ name: string
+ pool_type: LAN
ipv6_address_space: True
ipv4_global_pool: string
ipv4_prefix: True
ipv4_prefix_length: 9
ipv4_subnet: string
- name: string
ipv6_prefix: True
ipv6_prefix_length: 64
ipv6_global_pool: string
ipv6_subnet: string
- site_name: string
slaac_support: True
+
+- name: Create reserve an ip pool using global pool name
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - reserve_pool_details:
+ - name: string
+ site_name: string
pool_type: LAN
- network_management_details:
+ ipv6_address_space: True
+ ipv4_global_pool_name: string
+ ipv4_prefix: True
+ ipv4_prefix_length: 9
+ ipv4_subnet: string
+ ipv6_prefix: True
+ ipv6_prefix_length: 64
+ ipv6_global_pool_name: string
+ ipv6_subnet: string
+ slaac_support: True
+
+- name: Delete reserved pool
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: deleted
+ config_verify: True
+ config:
+ - reserve_pool_details:
+ - site_name: string
+ name: string
+
+- name: Manage the network functions
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - network_management_details:
+ site_name: string
settings:
dhcp_server: list
dns_server:
domain_name: string
primary_ip_address: string
secondary_ip_address: string
- client_and_endpoint_aaa:
- network: string
- protocol: string
- servers: string
+ ntp_server: list
+ timezone: string
message_of_the_day:
banner_message: string
- retain_existing_banner: string
+ retain_existing_banner: bool
netflow_collector:
ip_address: string
port: 443
- network_aaa:
- network: string
- protocol: string
- servers: string
- ntp_server: list
snmp_server:
configure_dnac_ip: True
ip_addresses: list
syslog_server:
configure_dnac_ip: True
ip_addresses: list
+
+- name: Adding the network_aaa and client_and_endpoint_aaa AAA server
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - network_management_details:
+ site_name: string
+ settings:
+ network_aaa:
+ server_type: AAA
+ primary_server_address: string
+ secondary_server_address: string
+ protocol: string
+ client_and_endpoint_aaa:
+ server_type: AAA
+ primary_server_address: string
+ secondary_server_address: string
+ protocol: string
+
+- name: Adding the network_aaa and client_and_endpoint_aaa ISE server
+ cisco.dnac.network_settings_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ dnac_log_level: "{{ dnac_log_level }}"
+ state: merged
+ config_verify: True
+ config:
+ - network_management_details:
site_name: string
+ settings:
+ network_aaa:
+ server_type: ISE
+ pan_address: string
+ primary_server_address: string
+ protocol: string
+ client_and_endpoint_aaa:
+ server_type: ISE
+ pan_address: string
+ primary_server_address: string
+ protocol: string
"""
RETURN = r"""
@@ -449,6 +640,7 @@ response_3:
"""
import copy
+import re
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.dnac.plugins.module_utils.dnac import (
DnacBase,
@@ -499,6 +691,7 @@ class NetworkSettings(DnacBase):
"type": 'dict',
"ip_pool": {
"type": 'list',
+ "elements": 'dict',
"ip_address_space": {"type": 'string'},
"dhcp_server_ips": {"type": 'list'},
"dns_server_ips": {"type": 'list'},
@@ -511,7 +704,8 @@ class NetworkSettings(DnacBase):
}
},
"reserve_pool_details": {
- "type": 'dict',
+ "type": 'list',
+ "elements": 'dict',
"name": {"type": 'string'},
"prev_name": {"type": 'string'},
"ipv6_address_space": {"type": 'bool'},
@@ -573,18 +767,20 @@ class NetworkSettings(DnacBase):
},
"network_aaa": {
"type": 'dict',
- "servers": {"type": 'string', "choices": ["ISE", "AAA"]},
- "ip_address": {"type": 'string'},
- "network": {"type": 'string'},
+ "server_type": {"type": 'string', "choices": ["ISE", "AAA"]},
+ "pan_address": {"type": 'string'},
+ "primary_server_address": {"type": 'string'},
+ "secondary_server_address": {"type": 'string'},
"protocol": {"type": 'string', "choices": ["RADIUS", "TACACS"]},
"shared_secret": {"type": 'string'}
},
"client_and_endpoint_aaa": {
"type": 'dict',
- "servers": {"type": 'string', "choices": ["ISE", "AAA"]},
- "ip_address": {"type": 'string'},
- "network": {"type": 'string'},
+ "server_type": {"type": 'string', "choices": ["ISE", "AAA"]},
+ "pan_address": {"type": 'string'},
+ "primary_server_address": {"type": 'string'},
+ "secondary_server_address": {"type": 'string'},
"protocol": {"type": 'string', "choices": ["RADIUS", "TACACS"]},
"shared_secret": {"type": 'string'}
}
@@ -653,16 +849,17 @@ class NetworkSettings(DnacBase):
try:
if get_object == "GlobalPool":
obj_params = [
- ("settings", "settings"),
+ ("ipPoolName", "ipPoolName"),
+ ("IpAddressSpace", "IpAddressSpace"),
+ ("dhcpServerIps", "dhcpServerIps"),
+ ("dnsServerIps", "dnsServerIps"),
+ ("gateway", "gateway"),
]
elif get_object == "ReservePool":
obj_params = [
("name", "name"),
("type", "type"),
("ipv6AddressSpace", "ipv6AddressSpace"),
- ("ipv4GlobalPool", "ipv4GlobalPool"),
- ("ipv4Prefix", "ipv4Prefix"),
- ("ipv4PrefixLength", "ipv4PrefixLength"),
("ipv4GateWay", "ipv4GateWay"),
("ipv4DhcpServers", "ipv4DhcpServers"),
("ipv4DnsServers", "ipv4DnsServers"),
@@ -685,6 +882,48 @@ class NetworkSettings(DnacBase):
return obj_params
+ def is_server_exists(self, ip_address):
+ """
+ Finds if the provided Authentication and Policy Server with
+ the ip_address is available in the system or not.
+
+ Parameters:
+ ip_address (str) - IP Address of the Authentication and Policy Server.
+
+ Returns:
+ True or False - True if the Authentication and Policy Server is
+ available in the system. Else, False.
+ """
+
+ try:
+ response = self.dnac._exec(
+ family="system_settings",
+ function='get_authentication_and_policy_servers'
+ )
+ self.log("Received API response from 'get_authentication_and_policy_servers': {0}"
+ .format(response), "DEBUG")
+ if not response:
+ self.msg = "Failed to retrieve the Authentication and Policy Server details"
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self.check_return_status()
+
+ response = response.get("response")
+ server_details = get_dict_result(response, "ipAddress", ip_address)
+ if not server_details:
+ self.log("The server with IP Address '{0}' is not available in the system.".format(ip_address))
+ return False
+
+ self.log("Server details for the IP address '{0}': {1}".format(ip_address, server_details), "DEBUG")
+ except Exception as msg:
+ self.msg = "Exception occurred while retrieving server details from the IP Address '{0}': {1}" \
+ .format(ip_address, msg)
+ self.log(str(self.msg), "CRITICAL")
+ self.status = "failed"
+ return self.check_return_status()
+
+ return True
+
def get_site_id(self, site_name):
"""
Get the site id from the site name.
@@ -737,28 +976,24 @@ class NetworkSettings(DnacBase):
self.log("Global Pool Details: {0}".format(pool_info), "DEBUG")
global_pool = {
- "settings": {
- "ippool": [{
- "dhcpServerIps": pool_info.get("dhcpServerIps"),
- "dnsServerIps": pool_info.get("dnsServerIps"),
- "ipPoolCidr": pool_info.get("ipPoolCidr"),
- "ipPoolName": pool_info.get("ipPoolName"),
- "type": pool_info.get("ipPoolType").capitalize()
- }]
- }
+ "dhcpServerIps": pool_info.get("dhcpServerIps"),
+ "dnsServerIps": pool_info.get("dnsServerIps"),
+ "ipPoolCidr": pool_info.get("ipPoolCidr"),
+ "ipPoolName": pool_info.get("ipPoolName"),
+ "type": pool_info.get("ipPoolType").capitalize()
}
self.log("Formated global pool details: {0}".format(global_pool), "DEBUG")
- global_ippool = global_pool.get("settings").get("ippool")[0]
+ # global_ippool = global_pool.get("settings").get("ippool")[0]
if pool_info.get("ipv6") is False:
- global_ippool.update({"IpAddressSpace": "IPv4"})
+ global_pool.update({"IpAddressSpace": "IPv4"})
else:
- global_ippool.update({"IpAddressSpace": "IPv6"})
+ global_pool.update({"IpAddressSpace": "IPv6"})
- self.log("ip_address_space: {0}".format(global_ippool.get("IpAddressSpace")), "DEBUG")
+ self.log("ip_address_space: {0}".format(global_pool.get("IpAddressSpace")), "DEBUG")
if not pool_info["gateways"]:
- global_ippool.update({"gateway": ""})
+ global_pool.update({"gateway": ""})
else:
- global_ippool.update({"gateway": pool_info.get("gateways")[0]})
+ global_pool.update({"gateway": pool_info.get("gateways")[0]})
return global_pool
@@ -779,60 +1014,61 @@ class NetworkSettings(DnacBase):
"name": pool_info.get("groupName"),
"site_id": pool_info.get("siteId"),
}
- if len(pool_info.get("ipPools")) == 1:
+ pool_info_ippools = pool_info.get("ipPools")
+ pool_info_length = len(pool_info_ippools)
+
+ # If the reserved pool has only IPv4, pool_info_length will be 1.
+ # If the reserved pool has both IPv4 and IPv6, pool_info_length will be 2.
+ if pool_info_length == 1:
reserve_pool.update({
- "ipv4DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps"),
- "ipv4DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
+ "ipv4DhcpServers": pool_info_ippools[0].get("dhcpServerIps"),
+ "ipv4DnsServers": pool_info_ippools[0].get("dnsServerIps"),
"ipv6AddressSpace": "False"
})
- if pool_info.get("ipPools")[0].get("gateways") != []:
- reserve_pool.update({"ipv4GateWay": pool_info.get("ipPools")[0].get("gateways")[0]})
+ if pool_info_ippools[0].get("gateways") != []:
+ reserve_pool.update({"ipv4GateWay": pool_info_ippools[0].get("gateways")[0]})
else:
reserve_pool.update({"ipv4GateWay": ""})
reserve_pool.update({"ipv6AddressSpace": "False"})
- elif len(pool_info.get("ipPools")) == 2:
- if not pool_info.get("ipPools")[0].get("ipv6"):
- reserve_pool.update({
- "ipv4DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps"),
- "ipv4DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
- "ipv6AddressSpace": "True",
- "ipv6DhcpServers": pool_info.get("ipPools")[1].get("dhcpServerIps"),
- "ipv6DnsServers": pool_info.get("ipPools")[1].get("dnsServerIps"),
-
- })
+ else:
- if pool_info.get("ipPools")[0].get("gateways") != []:
- reserve_pool.update({"ipv4GateWay":
- pool_info.get("ipPools")[0].get("gateways")[0]})
- else:
- reserve_pool.update({"ipv4GateWay": ""})
+ # If the ipv6 flag is set in the second element, ipv4_index will be 0 and ipv6_index will be 1.
+ # If the ipv6 flag is set in the first element, ipv4_index will be 1 and ipv6_index will be 0.
+ if not pool_info_ippools[0].get("ipv6"):
+ ipv4_index = 0
+ ipv6_index = 1
+ else:
+ ipv4_index = 1
+ ipv6_index = 0
- if pool_info.get("ipPools")[1].get("gateways") != []:
- reserve_pool.update({"ipv6GateWay":
- pool_info.get("ipPools")[1].get("gateways")[0]})
- else:
- reserve_pool.update({"ipv6GateWay": ""})
+ reserve_pool.update({
+ "ipv4DhcpServers": pool_info_ippools[ipv4_index].get("dhcpServerIps"),
+ "ipv4DnsServers": pool_info_ippools[ipv4_index].get("dnsServerIps"),
+ "ipv6AddressSpace": "True",
+ "ipv6Prefix": "True",
+ "ipv6DnsServers": pool_info_ippools[ipv6_index].get("dnsServerIps"),
+ "ipv6DhcpServers": pool_info_ippools[ipv6_index].get("dhcpServerIps")
+ })
+ if pool_info_ippools[ipv4_index].get("gateways") != []:
+ reserve_pool.update({"ipv4GateWay":
+ pool_info_ippools[ipv4_index].get("gateways")[0]})
+ else:
+ reserve_pool.update({"ipv4GateWay": None})
- elif not pool_info.get("ipPools")[1].get("ipv6"):
+ if pool_info_ippools[ipv6_index].get("gateways") != []:
reserve_pool.update({
- "ipv4DhcpServers": pool_info.get("ipPools")[1].get("dhcpServerIps"),
- "ipv4DnsServers": pool_info.get("ipPools")[1].get("dnsServerIps"),
- "ipv6AddressSpace": "True",
- "ipv6DnsServers": pool_info.get("ipPools")[0].get("dnsServerIps"),
- "ipv6DhcpServers": pool_info.get("ipPools")[0].get("dhcpServerIps")
+ "ipv6GateWay": pool_info_ippools[ipv6_index].get("gateways")[0]
})
- if pool_info.get("ipPools")[1].get("gateways") != []:
- reserve_pool.update({"ipv4GateWay":
- pool_info.get("ipPools")[1].get("gateways")[0]})
- else:
- reserve_pool.update({"ipv4GateWay": ""})
+ else:
+ reserve_pool.update({"ipv6GateWay": ""})
+
+ ippools_info = pool_info_ippools[ipv6_index].get("context")
+ slaac_support_info = get_dict_result(ippools_info, "contextKey", "slaacSupport")
+ if slaac_support_info is None or slaac_support_info.get("contextValue") == "false":
+ reserve_pool.update({"slaacSupport": False})
+ else:
+ reserve_pool.update({"slaacSupport": True})
- if pool_info.get("ipPools")[0].get("gateways") != []:
- reserve_pool.update({"ipv6GateWay":
- pool_info.get("ipPools")[0].get("gateways")[0]})
- else:
- reserve_pool.update({"ipv6GateWay": ""})
- reserve_pool.update({"slaacSupport": True})
self.log("Formatted reserve pool details: {0}".format(reserve_pool), "DEBUG")
return reserve_pool
@@ -893,10 +1129,6 @@ class NetworkSettings(DnacBase):
"configureDnacIP": syslog_details.get("value")[0].get("configureDnacIP"),
"ipAddresses": syslog_details.get("value")[0].get("ipAddresses"),
},
- "netflowcollector": {
- "ipAddress": netflow_details.get("value")[0].get("ipAddress"),
- "port": netflow_details.get("value")[0].get("port")
- },
"timezone": timezone_details.get("value")[0],
}
}
@@ -920,6 +1152,19 @@ class NetworkSettings(DnacBase):
else:
network_settings.update({"ntpServer": [""]})
+ netflow_collector_values = netflow_details.get("value")[0]
+ ip_address = netflow_collector_values.get("ipAddress")
+ port = netflow_collector_values.get("port")
+ if port is None:
+ port = "null"
+
+ network_settings.update({
+ "netflowcollector": {
+ "ipAddress": ip_address,
+ "port": port,
+ }
+ })
+
if messageoftheday_details is not None:
network_settings.update({
"messageOfTheday": {
@@ -952,9 +1197,9 @@ class NetworkSettings(DnacBase):
else:
network_settings.update({
"network_aaa": {
- "network": aaa_value.get("ipAddress"),
+ "network": aaa_pan_value,
"protocol": aaa_value.get("protocol"),
- "ipAddress": aaa_pan_value,
+ "ipAddress": aaa_value.get("ipAddress"),
"servers": "ISE"
}
})
@@ -974,13 +1219,21 @@ class NetworkSettings(DnacBase):
else:
network_settings.update({
"clientAndEndpoint_aaa": {
- "network": aaa_value.get("ipAddress"),
+ "network": aaa_pan_value,
"protocol": aaa_value.get("protocol"),
- "ipAddress": aaa_pan_value,
+ "ipAddress": aaa_value.get("ipAddress"),
"servers": "ISE"
}
})
+ network_settings_snmp = network_settings.get("snmpServer")
+ if not network_settings_snmp.get("ipAddresses"):
+ network_settings_snmp.update({"ipAddresses": []})
+
+ network_settings_syslog = network_settings.get("syslogServer")
+ if not network_settings_syslog.get("ipAddresses"):
+ network_settings_syslog.update({"ipAddresses": []})
+
self.log("Formatted playbook network details: {0}".format(network_details), "DEBUG")
return network_details
@@ -1003,25 +1256,33 @@ class NetworkSettings(DnacBase):
"details": None,
"id": None
}
- response = self.dnac._exec(
- family="network_settings",
- function="get_global_pool",
- )
- if not isinstance(response, dict):
- self.log("Failed to retrieve the global pool details - "
- "Response is not a dictionary", "CRITICAL")
- return global_pool
-
- all_global_pool_details = response.get("response")
- global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", name)
- self.log("Global ip pool name: {0}".format(name), "DEBUG")
- self.log("Global pool details: {0}".format(global_pool_details), "DEBUG")
- if not global_pool_details:
- self.log("Global pool {0} does not exist".format(name), "INFO")
- return global_pool
- global_pool.update({"exists": True})
- global_pool.update({"id": global_pool_details.get("id")})
- global_pool["details"] = self.get_global_pool_params(global_pool_details)
+ value = 1
+ while True:
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_global_pool",
+ params={"offset": value}
+ )
+ if not isinstance(response, dict):
+ self.msg = "Failed to retrieve the global pool details - Response is not a dictionary"
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self.check_return_status()
+
+ all_global_pool_details = response.get("response")
+ if not all_global_pool_details:
+ self.log("Global pool '{0}' does not exist".format(name), "INFO")
+ return global_pool
+
+ global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", name)
+ if global_pool_details:
+ self.log("Global pool found with name '{0}': {1}".format(name, global_pool_details), "INFO")
+ global_pool.update({"exists": True})
+ global_pool.update({"id": global_pool_details.get("id")})
+ global_pool["details"] = self.get_global_pool_params(global_pool_details)
+ break
+
+ value += 25
self.log("Formatted global pool details: {0}".format(global_pool), "DEBUG")
return global_pool
@@ -1056,52 +1317,62 @@ class NetworkSettings(DnacBase):
self.status = "failed"
return reserve_pool
- response = self.dnac._exec(
- family="network_settings",
- function="get_reserve_ip_subpool",
- op_modifies=True,
- params={"siteId": site_id}
- )
- if not isinstance(response, dict):
- reserve_pool.update({"success": False})
- self.msg = "Error in getting reserve pool - Response is not a dictionary"
- self.status = "exited"
- return reserve_pool
-
- all_reserve_pool_details = response.get("response")
- reserve_pool_details = get_dict_result(all_reserve_pool_details, "groupName", name)
- if not reserve_pool_details:
- self.log("Reserved pool {0} does not exist in the site {1}"
- .format(name, site_name), "DEBUG")
- return reserve_pool
+ value = 1
+ while True:
+ self.log(str(value))
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_reserve_ip_subpool",
+ op_modifies=True,
+ params={
+ "site_id": site_id,
+ "offset": value
+ }
+ )
+ if not isinstance(response, dict):
+ reserve_pool.update({"success": False})
+ self.msg = "Error in getting reserve pool - Response is not a dictionary"
+ self.log(self.msg, "CRITICAL")
+ self.status = "exited"
+ return self.check_return_status()
- reserve_pool.update({"exists": True})
- reserve_pool.update({"id": reserve_pool_details.get("id")})
- reserve_pool.update({"details": self.get_reserve_pool_params(reserve_pool_details)})
+ all_reserve_pool_details = response.get("response")
+ self.log(str(all_reserve_pool_details))
+ if not all_reserve_pool_details:
+ self.log("Reserved pool {0} does not exist in the site {1}"
+ .format(name, site_name), "DEBUG")
+ return reserve_pool
+
+ reserve_pool_details = get_dict_result(all_reserve_pool_details, "groupName", name)
+ self.log(str(reserve_pool_details))
+ if reserve_pool_details:
+ self.log("Reserve pool found with name '{0}' in the site '{1}': {2}"
+ .format(name, site_name, reserve_pool_details), "INFO")
+ reserve_pool.update({"exists": True})
+ reserve_pool.update({"id": reserve_pool_details.get("id")})
+ reserve_pool.update({"details": self.get_reserve_pool_params(reserve_pool_details)})
+ break
+
+ value += 25
self.log("Reserved pool details: {0}".format(reserve_pool.get("details")), "DEBUG")
self.log("Reserved pool id: {0}".format(reserve_pool.get("id")), "DEBUG")
return reserve_pool
- def get_have_global_pool(self, config):
+ def get_have_global_pool(self, global_pool_details):
"""
Get the current Global Pool information from
Cisco Catalyst Center based on the provided playbook details.
check this API using check_return_status.
Parameters:
- config (dict) - Playbook details containing Global Pool configuration.
+ global_pool_details (dict) - Playbook details containing Global Pool configuration.
Returns:
self - The current object with updated information.
"""
- global_pool = {
- "exists": False,
- "details": None,
- "id": None
- }
- global_pool_settings = config.get("global_pool_details").get("settings")
+ global_pool_settings = global_pool_details.get("settings")
if global_pool_settings is None:
self.msg = "settings in global_pool_details is missing in the playbook"
self.status = "failed"
@@ -1113,124 +1384,163 @@ class NetworkSettings(DnacBase):
self.status = "failed"
return self
- name = global_pool_ippool[0].get("name")
- if name is None:
- self.msg = "Mandatory Parameter name required"
- self.status = "failed"
- return self
+ global_pool = []
+ global_pool_index = 0
+ for pool_details in global_pool_ippool:
+ name = pool_details.get("name")
+ if name is None:
+ self.msg = "Missing required parameter 'name' in global_pool_details"
+ self.status = "failed"
+ return self
- # If the Global Pool doesn't exist and a previous name is provided
- # Else try using the previous name
- global_pool = self.global_pool_exists(name)
- self.log("Global pool details: {0}".format(global_pool), "DEBUG")
- prev_name = global_pool_ippool[0].get("prev_name")
- if global_pool.get("exists") is False and \
- prev_name is not None:
- global_pool = self.global_pool_exists(prev_name)
- if global_pool.get("exists") is False:
- self.msg = "Prev name {0} doesn't exist in global_pool_details".format(prev_name)
+ name_length = len(name)
+ if name_length > 100:
+                self.msg = "The length of the 'name' in global_pool_details should be less than or equal to 100."
+ self.status = "failed"
+ return self
+
+ if " " in name:
+ self.msg = "The 'name' in global_pool_details should not contain any spaces."
+ self.status = "failed"
+ return self
+
+ pattern = r'^[\w\-./]+$'
+ if not re.match(pattern, name):
+ self.msg = "The 'name' in global_pool_details should contain only letters, numbers and -_./ characters."
self.status = "failed"
return self
- self.log("Global pool exists: {0}".format(global_pool.get("exists")), "DEBUG")
- self.log("Current Site: {0}".format(global_pool.get("details")), "DEBUG")
+ # If the Global Pool doesn't exist and a previous name is provided
+ # Else try using the previous name
+ global_pool.append(self.global_pool_exists(name))
+ self.log("Global pool details of '{0}': {1}".format(name, global_pool[global_pool_index]), "DEBUG")
+ prev_name = pool_details.get("prev_name")
+ if global_pool[global_pool_index].get("exists") is False and \
+ prev_name is not None:
+ global_pool.pop()
+ global_pool.append(self.global_pool_exists(prev_name))
+ if global_pool[global_pool_index].get("exists") is False:
+ self.msg = "Prev name {0} doesn't exist in global_pool_details".format(prev_name)
+ self.status = "failed"
+ return self
+
+ global_pool[global_pool_index].update({"prev_name": name})
+ global_pool_index += 1
+
+ self.log("Global pool details: {0}".format(global_pool), "DEBUG")
self.have.update({"globalPool": global_pool})
self.msg = "Collecting the global pool details from the Cisco Catalyst Center"
self.status = "success"
return self
- def get_have_reserve_pool(self, config):
+ def get_have_reserve_pool(self, reserve_pool_details):
"""
Get the current Reserved Pool information from Cisco Catalyst Center
based on the provided playbook details.
Check this API using check_return_status
Parameters:
- config (list of dict) - Playbook details containing Reserved Pool configuration.
+ reserve_pool_details (list of dict) - Playbook details containing Reserved Pool configuration.
Returns:
self - The current object with updated information.
"""
- reserve_pool = {
- "exists": False,
- "details": None,
- "id": None
- }
- reserve_pool_details = config.get("reserve_pool_details")
- name = reserve_pool_details.get("name")
- if name is None:
- self.msg = "Mandatory Parameter name required in reserve_pool_details\n"
- self.status = "failed"
- return self
+ reserve_pool = []
+ reserve_pool_index = 0
+ for item in reserve_pool_details:
+ name = item.get("name")
+ if name is None:
+ self.msg = "Missing required parameter 'name' in reserve_pool_details."
+ self.status = "failed"
+ return self
- site_name = reserve_pool_details.get("site_name")
- self.log("Site Name: {0}".format(site_name), "DEBUG")
- if site_name is None:
- self.msg = "Missing parameter 'site_name' in reserve_pool_details"
- self.status = "failed"
- return self
+ name_length = len(name)
+ if name_length > 100:
+                self.msg = "The length of the 'name' in reserve_pool_details should be less than or equal to 100."
+ self.status = "failed"
+ return self
- # Check if the Reserved Pool exists in Cisco Catalyst Center
- # based on the provided name and site name
- reserve_pool = self.reserve_pool_exists(name, site_name)
- if not reserve_pool.get("success"):
- return self.check_return_status()
- self.log("Reserved pool details: {0}".format(reserve_pool), "DEBUG")
+ if " " in name:
+ self.msg = "The 'name' in reserve_pool_details should not contain any spaces."
+ self.status = "failed"
+ return self
- # If the Reserved Pool doesn't exist and a previous name is provided
- # Else try using the previous name
- prev_name = reserve_pool_details.get("prev_name")
- if reserve_pool.get("exists") is False and \
- prev_name is not None:
- reserve_pool = self.reserve_pool_exists(prev_name, site_name)
- if not reserve_pool.get("success"):
- return self.check_return_status()
+ pattern = r'^[\w\-./]+$'
+ if not re.match(pattern, name):
+ self.msg = "The 'name' in reserve_pool_details should contain only letters, numbers and -_./ characters."
+ self.status = "failed"
+ return self
- # If the previous name doesn't exist in Cisco Catalyst Center, return with error
- if reserve_pool.get("exists") is False:
- self.msg = "Prev name {0} doesn't exist in reserve_pool_details".format(prev_name)
+ site_name = item.get("site_name")
+ self.log("Site Name: {0}".format(site_name), "DEBUG")
+ if site_name is None:
+ self.msg = "Missing parameter 'site_name' in reserve_pool_details"
self.status = "failed"
return self
- self.log("Reserved pool exists: {0}".format(reserve_pool.get("exists")), "DEBUG")
- self.log("Reserved pool: {0}".format(reserve_pool.get("details")), "DEBUG")
+ # Check if the Reserved Pool exists in Cisco Catalyst Center
+ # based on the provided name and site name
+ reserve_pool.append(self.reserve_pool_exists(name, site_name))
+ if not reserve_pool[reserve_pool_index].get("success"):
+ return self.check_return_status()
+ self.log("Reserved pool details for '{0}': {1}".format(name, reserve_pool[reserve_pool_index]), "DEBUG")
+
+ # If the Reserved Pool doesn't exist and a previous name is provided
+ # Else try using the previous name
+ prev_name = item.get("prev_name")
+ if reserve_pool[reserve_pool_index].get("exists") is False and \
+ prev_name is not None:
+ reserve_pool.pop()
+ reserve_pool.append(self.reserve_pool_exists(prev_name, site_name))
+ if not reserve_pool[reserve_pool_index].get("success"):
+ return self.check_return_status()
+
+ # If the previous name doesn't exist in Cisco Catalyst Center, return with error
+ if reserve_pool[reserve_pool_index].get("exists") is False:
+ self.msg = "Prev name {0} doesn't exist in reserve_pool_details".format(prev_name)
+ self.status = "failed"
+ return self
- # If reserve pool exist, convert ipv6AddressSpace to the required format (boolean)
- if reserve_pool.get("exists"):
- reserve_pool_details = reserve_pool.get("details")
- if reserve_pool_details.get("ipv6AddressSpace") == "False":
- reserve_pool_details.update({"ipv6AddressSpace": False})
- else:
- reserve_pool_details.update({"ipv6AddressSpace": True})
+ self.log("Reserved pool exists: {0}".format(reserve_pool[reserve_pool_index].get("exists")), "DEBUG")
+ self.log("Reserved pool: {0}".format(reserve_pool[reserve_pool_index].get("details")), "DEBUG")
+
+ # If reserve pool exist, convert ipv6AddressSpace to the required format (boolean)
+ if reserve_pool[reserve_pool_index].get("exists"):
+ reserve_pool_info = reserve_pool[reserve_pool_index].get("details")
+ if reserve_pool_info.get("ipv6AddressSpace") == "False":
+ reserve_pool_info.update({"ipv6AddressSpace": False})
+ else:
+ reserve_pool_info.update({"ipv6AddressSpace": True})
+
+ reserve_pool_index += 1
self.log("Reserved pool details: {0}".format(reserve_pool), "DEBUG")
self.have.update({"reservePool": reserve_pool})
- self.msg = "Collecting the reserve pool details from the Cisco Catalyst Center"
+ self.msg = "Collected the reserve pool details from the Cisco Catalyst Center"
self.status = "success"
return self
- def get_have_network(self, config):
+ def get_have_network(self, network_details):
"""
Get the current Network details from Cisco Catalyst
Center based on the provided playbook details.
Parameters:
- config (dict) - Playbook details containing Network Management configuration.
+ network_details (dict) - Playbook details containing Network Management configuration.
Returns:
self - The current object with updated Network information.
"""
network = {}
- site_name = config.get("network_management_details").get("site_name")
+ site_name = network_details.get("site_name")
if site_name is None:
- self.msg = "Mandatory Parameter 'site_name' missing"
- self.status = "failed"
- return self
+ site_name = "Global"
+ network_details.update({"site_name": site_name})
site_id = self.get_site_id(site_name)
if site_id is None:
- self.msg = "Failed to get site id from {0}".format(site_name)
+ self.msg = "The site with the name '{0}' is not available in the Catalyst Center".format(site_name)
self.status = "failed"
return self
@@ -1255,20 +1565,73 @@ class NetworkSettings(DnacBase):
Reserved Pool, and Network information.
"""
- if config.get("global_pool_details") is not None:
- self.get_have_global_pool(config).check_return_status()
+ global_pool_details = config.get("global_pool_details")
+ if global_pool_details is not None:
+ self.get_have_global_pool(global_pool_details).check_return_status()
- if config.get("reserve_pool_details") is not None:
- self.get_have_reserve_pool(config).check_return_status()
+ reserve_pool_details = config.get("reserve_pool_details")
+ if reserve_pool_details is not None:
+ self.get_have_reserve_pool(reserve_pool_details).check_return_status()
- if config.get("network_management_details") is not None:
- self.get_have_network(config).check_return_status()
+ network_details = config.get("network_management_details")
+ if network_details is not None:
+ self.get_have_network(network_details).check_return_status()
self.log("Current State (have): {0}".format(self.have), "INFO")
self.msg = "Successfully retrieved the details from the Cisco Catalyst Center"
self.status = "success"
return self
+ def get_global_pool_cidr(self, global_pool_cidr, global_pool_name):
+ """
+        Get the IPv4 or IPv6 global pool CIDR from the global pool name.
+
+ Parameters:
+            global_pool_cidr (str) - Global pool CIDR value of the current item.
+            global_pool_name (str) - Global pool name of the current item.
+
+ Returns:
+            global_pool_cidr (str) - Global pool CIDR value of the current item.
+ """
+
+ if global_pool_cidr:
+ return global_pool_cidr
+
+ if not global_pool_name:
+            self.msg = "Missing parameter: either the Global Pool CIDR or the Global Pool name is required under reserve_pool_details."
+ self.status = "failed"
+ return self.check_return_status()
+
+ value = 1
+ while True:
+ response = self.dnac._exec(
+ family="network_settings",
+ function="get_global_pool",
+ params={"offset": value}
+ )
+ value += 25
+ if not isinstance(response, dict):
+ self.msg = "Failed to retrieve the global pool details - Response is not a dictionary"
+ self.log(self.msg, "CRITICAL")
+ self.status = "failed"
+ return self.check_return_status()
+
+ all_global_pool_details = response.get("response")
+ if not all_global_pool_details:
+ self.log("Invalid global_pool_name '{0}' under reserve_pool_details".format(global_pool_name), "ERROR")
+ self.msg = "No information found for the global pool named '{0}'".format(global_pool_name)
+ self.status = "failed"
+ return self.check_return_status()
+
+ global_pool_details = get_dict_result(all_global_pool_details, "ipPoolName", global_pool_name)
+ if global_pool_details:
+ global_pool_cidr = global_pool_details.get("ipPoolCidr")
+ self.log("Global pool found with name '{0}': {1}".format(global_pool_name, global_pool_details), "INFO")
+ break
+
+ self.log("Global Pool '{0}' cidr: {1}".format(global_pool_name, global_pool_cidr), "INFO")
+ return global_pool_cidr
+
def get_want_global_pool(self, global_ippool):
"""
Get all the Global Pool information from playbook
@@ -1286,47 +1649,62 @@ class NetworkSettings(DnacBase):
# Initialize the desired Global Pool configuration
want_global = {
"settings": {
- "ippool": [{
- "IpAddressSpace": global_ippool.get("ip_address_space"),
- "dhcpServerIps": global_ippool.get("dhcp_server_ips"),
- "dnsServerIps": global_ippool.get("dns_server_ips"),
- "ipPoolName": global_ippool.get("name"),
- "ipPoolCidr": global_ippool.get("cidr"),
- "gateway": global_ippool.get("gateway"),
- "type": global_ippool.get("pool_type"),
- }]
+ "ippool": []
}
}
- want_ippool = want_global.get("settings").get("ippool")[0]
-
- # Converting to the required format based on the existing Global Pool
- if not self.have.get("globalPool").get("exists"):
- if want_ippool.get("dhcpServerIps") is None:
- want_ippool.update({"dhcpServerIps": []})
- if want_ippool.get("dnsServerIps") is None:
- want_ippool.update({"dnsServerIps": []})
- if want_ippool.get("IpAddressSpace") is None:
- want_ippool.update({"IpAddressSpace": ""})
- if want_ippool.get("gateway") is None:
- want_ippool.update({"gateway": ""})
- if want_ippool.get("type") is None:
- want_ippool.update({"type": "Generic"})
- else:
- have_ippool = self.have.get("globalPool").get("details") \
- .get("settings").get("ippool")[0]
-
- # Copy existing Global Pool information if the desired configuration is not provided
- want_ippool.update({
- "IpAddressSpace": have_ippool.get("IpAddressSpace"),
- "type": have_ippool.get("type"),
- "ipPoolCidr": have_ippool.get("ipPoolCidr")
- })
- want_ippool.update({})
- want_ippool.update({})
+ want_ippool = want_global.get("settings").get("ippool")
+ global_pool_index = 0
+ for pool_details in global_ippool:
+ pool_values = {
+ "dhcpServerIps": pool_details.get("dhcp_server_ips"),
+ "dnsServerIps": pool_details.get("dns_server_ips"),
+ "ipPoolName": pool_details.get("name"),
+ "ipPoolCidr": pool_details.get("cidr"),
+ "gateway": pool_details.get("gateway"),
+ "type": pool_details.get("pool_type"),
+ }
+ ip_address_space = pool_details.get("ip_address_space")
+ if not ip_address_space:
+ self.msg = "Missing required parameter 'ip_address_space' under global_pool_details."
+ self.status = "failed"
+ return self
+
+ ip_address_space_list = ["IPv4", "IPv6"]
+ if ip_address_space not in ip_address_space_list:
+ self.msg = "The 'ip_address_space' under global_pool_details should be in the list: {0}" \
+ .format(ip_address_space_list)
+ self.status = "failed"
+ return self
- for key in ["dhcpServerIps", "dnsServerIps", "gateway"]:
- if want_ippool.get(key) is None and have_ippool.get(key) is not None:
- want_ippool[key] = have_ippool[key]
+ pool_values.update({"IpAddressSpace": ip_address_space})
+
+ # Converting to the required format based on the existing Global Pool
+ if not self.have.get("globalPool")[global_pool_index].get("exists"):
+ if pool_values.get("dhcpServerIps") is None:
+ pool_values.update({"dhcpServerIps": []})
+ if pool_values.get("dnsServerIps") is None:
+ pool_values.update({"dnsServerIps": []})
+ if pool_values.get("IpAddressSpace") is None:
+ pool_values.update({"IpAddressSpace": ""})
+ if pool_values.get("gateway") is None:
+ pool_values.update({"gateway": ""})
+ if pool_values.get("type") is None:
+ pool_values.update({"type": "Generic"})
+ else:
+ have_ippool = self.have.get("globalPool")[global_pool_index].get("details")
+
+ # Copy existing Global Pool information if the desired configuration is not provided
+ pool_values.update({
+ "IpAddressSpace": have_ippool.get("IpAddressSpace"),
+ "type": have_ippool.get("type"),
+ "ipPoolCidr": have_ippool.get("ipPoolCidr"),
+ "id": self.have.get("globalPool")[global_pool_index].get("id")
+ })
+ for key in ["dhcpServerIps", "dnsServerIps", "gateway"]:
+ if pool_values.get(key) is None and have_ippool.get(key) is not None:
+ pool_values[key] = have_ippool[key]
+ want_ippool.append(pool_values)
+ global_pool_index += 1
self.log("Global pool playbook details: {0}".format(want_global), "DEBUG")
self.want.update({"wantGlobal": want_global})
@@ -1348,101 +1726,116 @@ class NetworkSettings(DnacBase):
self - The current object with updated desired Reserved Pool information.
"""
- want_reserve = {
- "name": reserve_pool.get("name"),
- "type": reserve_pool.get("pool_type"),
- "ipv6AddressSpace": reserve_pool.get("ipv6_address_space"),
- "ipv4GlobalPool": reserve_pool.get("ipv4_global_pool"),
- "ipv4Prefix": reserve_pool.get("ipv4_prefix"),
- "ipv4PrefixLength": reserve_pool.get("ipv4_prefix_length"),
- "ipv4GateWay": reserve_pool.get("ipv4_gateway"),
- "ipv4DhcpServers": reserve_pool.get("ipv4_dhcp_servers"),
- "ipv4DnsServers": reserve_pool.get("ipv4_dns_servers"),
- "ipv4Subnet": reserve_pool.get("ipv4_subnet"),
- "ipv6GlobalPool": reserve_pool.get("ipv6_global_pool"),
- "ipv6Prefix": reserve_pool.get("ipv6_prefix"),
- "ipv6PrefixLength": reserve_pool.get("ipv6_prefix_length"),
- "ipv6GateWay": reserve_pool.get("ipv6_gateway"),
- "ipv6DhcpServers": reserve_pool.get("ipv6_dhcp_servers"),
- "ipv6Subnet": reserve_pool.get("ipv6_subnet"),
- "ipv6DnsServers": reserve_pool.get("ipv6_dns_servers"),
- "ipv4TotalHost": reserve_pool.get("ipv4_total_host"),
- "ipv6TotalHost": reserve_pool.get("ipv6_total_host")
- }
-
- # Check for missing mandatory parameters in the playbook
- if not want_reserve.get("name"):
- self.msg = "Missing mandatory parameter 'name' in reserve_pool_details"
- self.status = "failed"
- return self
-
- if want_reserve.get("ipv4Prefix") is True:
- if want_reserve.get("ipv4Subnet") is None and \
- want_reserve.get("ipv4TotalHost") is None:
- self.msg = "missing parameter 'ipv4_subnet' or 'ipv4TotalHost' \
- while adding the ipv4 in reserve_pool_details"
+ want_reserve = []
+ reserve_pool_index = 0
+ for item in reserve_pool:
+ pool_values = {
+ "name": item.get("name"),
+ "type": item.get("pool_type"),
+ "ipv6AddressSpace": item.get("ipv6_address_space"),
+ "ipv4GlobalPool": self.get_global_pool_cidr(item.get("ipv4_global_pool"),
+ item.get("ipv4_global_pool_name")),
+ "ipv4Prefix": item.get("ipv4_prefix"),
+ "ipv4PrefixLength": item.get("ipv4_prefix_length"),
+ "ipv4GateWay": item.get("ipv4_gateway"),
+ "ipv4DhcpServers": item.get("ipv4_dhcp_servers"),
+ "ipv4DnsServers": item.get("ipv4_dns_servers"),
+ "ipv4Subnet": item.get("ipv4_subnet"),
+ "ipv6Prefix": item.get("ipv6_prefix"),
+ "ipv6PrefixLength": item.get("ipv6_prefix_length"),
+ "ipv6GateWay": item.get("ipv6_gateway"),
+ "ipv6DhcpServers": item.get("ipv6_dhcp_servers"),
+ "ipv6Subnet": item.get("ipv6_subnet"),
+ "ipv6DnsServers": item.get("ipv6_dns_servers"),
+ "ipv4TotalHost": item.get("ipv4_total_host"),
+ "ipv6TotalHost": item.get("ipv6_total_host"),
+ "slaacSupport": item.get("slaac_support")
+ }
+ # Check for missing required parameters in the playbook
+ if pool_values.get("ipv6AddressSpace") is True:
+ pool_values.update({
+ "ipv6GlobalPool": self.get_global_pool_cidr(item.get("ipv6_global_pool"),
+ item.get("ipv6_global_pool_name"))})
+
+ if not pool_values.get("name"):
+                self.msg = "Missing required parameter 'name' in element '{0}' of reserve_pool_details" \
+ .format(reserve_pool_index + 1)
self.status = "failed"
return self
- if want_reserve.get("ipv6Prefix") is True:
- if want_reserve.get("ipv6Subnet") is None and \
- want_reserve.get("ipv6TotalHost") is None:
- self.msg = "missing parameter 'ipv6_subnet' or 'ipv6TotalHost' \
- while adding the ipv6 in reserve_pool_details"
- self.status = "failed"
- return self
+ if pool_values.get("ipv4Prefix") is True:
+ if pool_values.get("ipv4Subnet") is None and \
+ pool_values.get("ipv4TotalHost") is None:
+ self.msg = "Failed to add IPv4 in reserve_pool_details '{0}'. ".format(reserve_pool_index + 1) + \
+ "Required parameters 'ipv4_subnet' or 'ipv4_total_host' are missing."
+ self.status = "failed"
+ return self
- self.log("Reserved IP pool playbook details: {0}".format(want_reserve), "DEBUG")
+ if pool_values.get("ipv6Prefix") is True:
+ if pool_values.get("ipv6Subnet") is None and \
+ pool_values.get("ipv6TotalHost") is None:
+ self.msg = "Failed to add IPv6 in reserve_pool_details '{0}'. ".format(reserve_pool_index + 1) + \
+ "Required parameters 'ipv6_subnet' or 'ipv6_total_host' are missing."
+ self.status = "failed"
+ return self
- # If there are no existing Reserved Pool details, validate and set defaults
- if not self.have.get("reservePool").get("details"):
- if not want_reserve.get("ipv4GlobalPool"):
- self.msg = "missing parameter 'ipv4GlobalPool' in reserve_pool_details"
- self.status = "failed"
- return self
+ self.log("Reserved IP pool playbook details: {0}".format(pool_values), "DEBUG")
- if not want_reserve.get("ipv4PrefixLength"):
- self.msg = "missing parameter 'ipv4_prefix_length' in reserve_pool_details"
- self.status = "failed"
- return self
+ # If there are no existing Reserved Pool details, validate and set defaults
+ if not self.have.get("reservePool")[reserve_pool_index].get("details"):
+ if not pool_values.get("ipv4GlobalPool"):
+                    self.msg = "Missing parameter 'ipv4GlobalPool' in element '{0}' of reserve_pool_details" \
+ .format(reserve_pool_index + 1)
+ self.status = "failed"
+ return self
- if want_reserve.get("type") is None:
- want_reserve.update({"type": "Generic"})
- if want_reserve.get("ipv4GateWay") is None:
- want_reserve.update({"ipv4GateWay": ""})
- if want_reserve.get("ipv4DhcpServers") is None:
- want_reserve.update({"ipv4DhcpServers": []})
- if want_reserve.get("ipv4DnsServers") is None:
- want_reserve.update({"ipv4DnsServers": []})
- if want_reserve.get("ipv6AddressSpace") is None:
- want_reserve.update({"ipv6AddressSpace": False})
- if want_reserve.get("slaacSupport") is None:
- want_reserve.update({"slaacSupport": True})
- if want_reserve.get("ipv4TotalHost") is None:
- del want_reserve['ipv4TotalHost']
- if want_reserve.get("ipv6AddressSpace") is True:
- want_reserve.update({"ipv6Prefix": True})
+ if not pool_values.get("ipv4PrefixLength"):
+                    self.msg = "Missing parameter 'ipv4_prefix_length' in element '{0}' of reserve_pool_details" \
+ .format(reserve_pool_index + 1)
+ self.status = "failed"
+ return self
+
+ if pool_values.get("type") is None:
+ pool_values.update({"type": "Generic"})
+ if pool_values.get("ipv4DhcpServers") is None:
+ pool_values.update({"ipv4DhcpServers": []})
+ if pool_values.get("ipv4DnsServers") is None:
+ pool_values.update({"ipv4DnsServers": []})
+ if pool_values.get("ipv6AddressSpace") is None:
+ pool_values.update({"ipv6AddressSpace": False})
+ if pool_values.get("slaacSupport") is None:
+ pool_values.update({"slaacSupport": True})
+ if pool_values.get("ipv4TotalHost") is None:
+ del pool_values['ipv4TotalHost']
+ if pool_values.get("ipv6AddressSpace") is True:
+ pool_values.update({"ipv6Prefix": True})
+ else:
+ del pool_values['ipv6Prefix']
+
+ if not pool_values.get("ipv6AddressSpace"):
+ keys_to_check = ['ipv6PrefixLength', 'ipv6GateWay', 'ipv6DhcpServers',
+ 'ipv6DnsServers', 'ipv6TotalHost']
+ for key in keys_to_check:
+ if pool_values.get(key) is None:
+ del pool_values[key]
else:
- del want_reserve['ipv6Prefix']
-
- if not want_reserve.get("ipv6AddressSpace"):
- keys_to_check = ['ipv6GlobalPool', 'ipv6PrefixLength',
- 'ipv6GateWay', 'ipv6DhcpServers',
- 'ipv6DnsServers', 'ipv6TotalHost']
- for key in keys_to_check:
- if want_reserve.get(key) is None:
- del want_reserve[key]
- else:
- keys_to_delete = ['type', 'ipv4GlobalPool',
- 'ipv4Prefix', 'ipv4PrefixLength',
- 'ipv4TotalHost', 'ipv4Subnet']
- for key in keys_to_delete:
- if key in want_reserve:
- del want_reserve[key]
+ keys_to_delete = ['type', 'ipv4GlobalPool', 'ipv4Prefix', 'ipv4PrefixLength',
+ 'ipv4TotalHost', 'ipv4Subnet', 'slaacSupport']
+ for key in keys_to_delete:
+ if key in pool_values:
+ del pool_values[key]
+
+ copy_pool_values = copy.deepcopy(pool_values)
+ for item in copy_pool_values:
+ if pool_values.get(item) is None:
+ del pool_values[item]
+
+ want_reserve.append(pool_values)
+ reserve_pool_index += 1
self.want.update({"wantReserve": want_reserve})
- self.log("Desired State (want): {0}".format(self.want), "INFO")
- self.msg = "Collecting the reserve pool details from the playbook"
+ self.log("Reserved Pool details: {0}".format(want_reserve), "INFO")
+ self.msg = "Collected the reserved pool details from the playbook"
self.status = "success"
return self
@@ -1490,13 +1883,14 @@ class NetworkSettings(DnacBase):
else:
del want_network_settings["ntpServer"]
+ have_timezone = self.have.get("network").get("net_details").get("settings").get("timezone")
if network_management_details.get("timezone") is not None:
want_network_settings["timezone"] = \
network_management_details.get("timezone")
+ elif have_timezone is not None:
+ want_network_settings["timezone"] = have_timezone
else:
- self.msg = "missing parameter timezone in network"
- self.status = "failed"
- return self
+ want_network_settings["timezone"] = "GMT"
dnsServer = network_management_details.get("dns_server")
if dnsServer is not None:
@@ -1568,119 +1962,194 @@ class NetworkSettings(DnacBase):
"bannerMessage":
messageOfTheday.get("banner_message")
})
- if messageOfTheday.get("retain_existing_banner") is not None:
- want_network_settings.get("messageOfTheday").update({
- "retainExistingBanner":
- messageOfTheday.get("retain_existing_banner")
- })
+ retain_existing_banner = messageOfTheday.get("retain_existing_banner")
+ if retain_existing_banner is not None:
+ if retain_existing_banner is True:
+ want_network_settings.get("messageOfTheday").update({
+ "retainExistingBanner": "true"
+ })
+ else:
+ want_network_settings.get("messageOfTheday").update({
+ "retainExistingBanner": "false"
+ })
else:
del want_network_settings["messageOfTheday"]
+ server_types = ["AAA", "ISE"]
+ protocol_types = ["RADIUS", "TACACS"]
network_aaa = network_management_details.get("network_aaa")
if network_aaa:
- if network_aaa.get("ip_address"):
+ server_type = network_aaa.get("server_type")
+ if server_type:
want_network_settings.get("network_aaa").update({
- "ipAddress":
- network_aaa.get("ip_address")
+ "servers": server_type
})
else:
- if network_aaa.get("servers") == "ISE":
- self.msg = "missing parameter ip_address in network_aaa, server ISE is set"
- self.status = "failed"
- return self
-
- if network_aaa.get("network"):
want_network_settings.get("network_aaa").update({
- "network": network_aaa.get("network")
+ "servers": "ISE"
})
- else:
- self.msg = "missing parameter network in network_aaa"
+
+ if server_type not in server_types:
+ self.msg = "The 'server_type' in the network_aaa should be in {0}".format(server_types)
self.status = "failed"
return self
- if network_aaa.get("protocol"):
+ primary_server_address = network_aaa.get("primary_server_address")
+ if primary_server_address:
+ if not self.is_server_exists(primary_server_address):
+ self.msg = "The 'primary_server_address' - '{0}' under 'network_aaa' is not found in the system." \
+ .format(primary_server_address)
+ self.status = "failed"
+ return self
+
want_network_settings.get("network_aaa").update({
- "protocol":
- network_aaa.get("protocol")
+ "network": primary_server_address
})
else:
- self.msg = "missing parameter protocol in network_aaa"
+ self.msg = "Missing required parameter 'primary_server_address' in network_aaa."
self.status = "failed"
return self
- if network_aaa.get("servers"):
+ if server_type == "ISE":
+ pan_address = network_aaa.get("pan_address")
+ if pan_address:
+ if not self.is_server_exists(pan_address):
+ self.msg = "The 'pan_address' - '{0}' under 'network_aaa' is not found in the system." \
+ .format(pan_address)
+ self.status = "failed"
+ return self
+
+ want_network_settings.get("network_aaa").update({
+ "ipAddress": pan_address
+ })
+ else:
+ self.msg = "Missing required parameter 'pan_address' for ISE server in network_aaa."
+ self.status = "failed"
+ return self
+ else:
+ secondary_server_address = network_aaa.get("secondary_server_address")
+ if secondary_server_address:
+ if not self.is_server_exists(secondary_server_address):
+ self.msg = "The 'secondary_server_address' - '{0}' under 'network_aaa' is not found in the system." \
+ .format(secondary_server_address)
+ self.status = "failed"
+ return self
+
+ want_network_settings.get("network_aaa").update({
+ "ipAddress": secondary_server_address
+ })
+
+ protocol = network_aaa.get("protocol")
+ if protocol:
want_network_settings.get("network_aaa").update({
- "servers":
- network_aaa.get("servers")
+ "protocol": protocol
})
else:
- self.msg = "missing parameter servers in network_aaa"
+ want_network_settings.get("network_aaa").update({
+ "protocol": "RADIUS"
+ })
+
+ if protocol not in protocol_types:
+ self.msg = "The 'protocol' in the network_aaa should be in {0}".format(protocol_types)
self.status = "failed"
return self
- if network_aaa.get("shared_secret"):
+ shared_secret = network_aaa.get("shared_secret")
+ if shared_secret:
want_network_settings.get("network_aaa").update({
- "sharedSecret":
- network_aaa.get("shared_secret")
+ "sharedSecret": shared_secret
})
else:
del want_network_settings["network_aaa"]
clientAndEndpoint_aaa = network_management_details.get("client_and_endpoint_aaa")
if clientAndEndpoint_aaa:
- if clientAndEndpoint_aaa.get("ip_address"):
+ server_type = clientAndEndpoint_aaa.get("server_type")
+ if server_type:
want_network_settings.get("clientAndEndpoint_aaa").update({
- "ipAddress":
- clientAndEndpoint_aaa.get("ip_address")
+ "servers": server_type
})
else:
- if clientAndEndpoint_aaa.get("servers") == "ISE":
- self.msg = "missing parameter ip_address in clientAndEndpoint_aaa, \
- server ISE is set"
- self.status = "failed"
- return self
-
- if clientAndEndpoint_aaa.get("network"):
want_network_settings.get("clientAndEndpoint_aaa").update({
- "network":
- clientAndEndpoint_aaa.get("network")
+ "servers": "ISE"
})
- else:
- self.msg = "missing parameter network in clientAndEndpoint_aaa"
+
+ if server_type not in server_types:
+ self.msg = "The 'server_type' in the client_and_endpoint_aaa should be in {0}".format(server_types)
self.status = "failed"
return self
- if clientAndEndpoint_aaa.get("protocol"):
+ primary_server_address = clientAndEndpoint_aaa.get("primary_server_address")
+ if primary_server_address:
+ if not self.is_server_exists(primary_server_address):
+ self.msg = "The 'primary_server_address' - '{0}' under 'clientAndEndpoint_aaa' is not found in the system." \
+ .format(primary_server_address)
+ self.status = "failed"
+ return self
+
want_network_settings.get("clientAndEndpoint_aaa").update({
- "protocol":
- clientAndEndpoint_aaa.get("protocol")
+ "network": primary_server_address
})
else:
- self.msg = "missing parameter protocol in clientAndEndpoint_aaa"
+ self.msg = "Missing required parameter 'primary_server_address' in client_and_endpoint_aaa."
self.status = "failed"
return self
- if clientAndEndpoint_aaa.get("servers"):
+ if server_type == "ISE":
+ pan_address = clientAndEndpoint_aaa.get("pan_address")
+ if pan_address:
+ if not self.is_server_exists(pan_address):
+ self.msg = "The 'pan_address' - '{0}' under 'clientAndEndpoint_aaa' is not found in the system." \
+ .format(pan_address)
+ self.status = "failed"
+ return self
+
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "ipAddress": pan_address
+ })
+ else:
+ self.msg = "Missing required parameter 'pan_address' for ISE server in client_and_endpoint_aaa."
+ self.status = "failed"
+ return self
+ else:
+ secondary_server_address = clientAndEndpoint_aaa.get("secondary_server_address")
+ if secondary_server_address:
+ if not self.is_server_exists(secondary_server_address):
+ self.msg = "The 'secondary_server_address' - '{0}' under 'clientAndEndpoint_aaa' is not found in the system." \
+ .format(secondary_server_address)
+ self.status = "failed"
+ return self
+
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "ipAddress": secondary_server_address
+ })
+
+ protocol = clientAndEndpoint_aaa.get("protocol")
+ if protocol:
want_network_settings.get("clientAndEndpoint_aaa").update({
- "servers":
- clientAndEndpoint_aaa.get("servers")
+ "protocol": protocol
})
else:
- self.msg = "missing parameter servers in clientAndEndpoint_aaa"
+ want_network_settings.get("clientAndEndpoint_aaa").update({
+ "protocol": "RADIUS"
+ })
+
+ if protocol not in protocol_types:
+ self.msg = "The 'protocol' in the client_and_endpoint_aaa should be in {0}".format(protocol_types)
self.status = "failed"
return self
- if clientAndEndpoint_aaa.get("shared_secret"):
+ shared_secret = clientAndEndpoint_aaa.get("shared_secret")
+ if shared_secret:
want_network_settings.get("clientAndEndpoint_aaa").update({
- "sharedSecret":
- clientAndEndpoint_aaa.get("shared_secret")
+ "sharedSecret": shared_secret
})
else:
del want_network_settings["clientAndEndpoint_aaa"]
self.log("Network playbook details: {0}".format(want_network), "DEBUG")
self.want.update({"wantNetwork": want_network})
- self.msg = "Collecting the network details from the playbook"
+ self.msg = "Collected the network details from the playbook"
self.status = "success"
return self
@@ -1696,7 +2165,7 @@ class NetworkSettings(DnacBase):
"""
if config.get("global_pool_details"):
- global_ippool = config.get("global_pool_details").get("settings").get("ip_pool")[0]
+ global_ippool = config.get("global_pool_details").get("settings").get("ip_pool")
self.get_want_global_pool(global_ippool).check_return_status()
if config.get("reserve_pool_details"):
@@ -1713,160 +2182,181 @@ class NetworkSettings(DnacBase):
self.status = "success"
return self
- def update_global_pool(self, config):
+ def update_global_pool(self, global_pool):
"""
Update/Create Global Pool in Cisco Catalyst Center with fields provided in playbook
Parameters:
- config (list of dict) - Playbook details
+ global_pool (list of dict) - Global Pool playbook details
Returns:
None
"""
- name = config.get("global_pool_details") \
- .get("settings").get("ip_pool")[0].get("name")
+ create_global_pool = []
+ update_global_pool = []
+ global_pool_index = 0
result_global_pool = self.result.get("response")[0].get("globalPool")
- result_global_pool.get("response").update({name: {}})
+ want_global_pool = self.want.get("wantGlobal").get("settings").get("ippool")
+ self.log("Global pool playbook details: {0}".format(global_pool), "DEBUG")
+ for item in self.have.get("globalPool"):
+ result_global_pool.get("msg") \
+ .update({want_global_pool[global_pool_index].get("ipPoolName"): {}})
+ if item.get("exists") is True:
+ update_global_pool.append(want_global_pool[global_pool_index])
+ else:
+ create_global_pool.append(want_global_pool[global_pool_index])
- # Check pool exist, if not create and return
- if not self.have.get("globalPool").get("exists"):
- pool_params = self.want.get("wantGlobal")
- self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
+ global_pool_index += 1
+
+ # Check create_global_pool; if yes, create the global pool
+ if create_global_pool:
+ self.log("Global pool(s) details to be created: {0}".format(create_global_pool), "INFO")
+ pool_params = {
+ "settings": {
+ "ippool": copy.deepcopy(create_global_pool)
+ }
+ }
response = self.dnac._exec(
family="network_settings",
function="create_global_pool",
op_modifies=True,
params=pool_params,
)
- self.check_execution_response_status(response).check_return_status()
- self.log("Successfully created global pool '{0}'.".format(name), "INFO")
- result_global_pool.get("response").get(name) \
- .update({"globalPool Details": self.want.get("wantGlobal")})
- result_global_pool.get("msg").update({name: "Global Pool Created Successfully"})
- return
-
- # Pool exists, check update is required
- if not self.requires_update(self.have.get("globalPool").get("details"),
- self.want.get("wantGlobal"), self.global_pool_obj_params):
- self.log("Global pool '{0}' doesn't require an update".format(name), "INFO")
- result_global_pool.get("response").get(name).update({
- "Cisco Catalyst Center params":
- self.have.get("globalPool").get("details").get("settings").get("ippool")[0]
- })
- result_global_pool.get("response").get(name).update({
- "Id": self.have.get("globalPool").get("id")
- })
- result_global_pool.get("msg").update({
- name: "Global pool doesn't require an update"
- })
- return
-
- self.log("Global pool requires update", "DEBUG")
- # Pool Exists
- pool_params = copy.deepcopy(self.want.get("wantGlobal"))
- pool_params_ippool = pool_params.get("settings").get("ippool")[0]
- pool_params_ippool.update({"id": self.have.get("globalPool").get("id")})
- self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
- keys_to_remove = ["IpAddressSpace", "ipPoolCidr", "type"]
- for key in keys_to_remove:
- del pool_params["settings"]["ippool"][0][key]
-
- have_ippool = self.have.get("globalPool").get("details").get("settings").get("ippool")[0]
- keys_to_update = ["dhcpServerIps", "dnsServerIps", "gateway"]
- for key in keys_to_update:
- if pool_params_ippool.get(key) is None:
- pool_params_ippool[key] = have_ippool.get(key)
-
- self.log("Desired global pool details (want): {0}".format(pool_params), "DEBUG")
- response = self.dnac._exec(
- family="network_settings",
- function="update_global_pool",
- op_modifies=True,
- params=pool_params,
- )
-
- self.check_execution_response_status(response).check_return_status()
- self.log("Global pool '{0}' updated successfully".format(name), "INFO")
- result_global_pool.get("response").get(name) \
- .update({"Id": self.have.get("globalPool").get("details").get("id")})
- result_global_pool.get("msg").update({name: "Global Pool Updated Successfully"})
+ self.check_execution_response_status(response, "create_global_pool").check_return_status()
+            self.log("Global pool(s) created successfully.", "INFO")
+ for item in pool_params.get("settings").get("ippool"):
+ name = item.get("ipPoolName")
+ self.log("Global pool '{0}' created successfully.".format(name), "INFO")
+ result_global_pool.get("response").update({"created": pool_params})
+ result_global_pool.get("msg").update({name: "Global Pool Created Successfully"})
+
+ if update_global_pool:
+ final_update_global_pool = []
+ # Pool exists, check update is required
+ for item in update_global_pool:
+ name = item.get("ipPoolName")
+ for pool_value in self.have.get("globalPool"):
+ if pool_value.get("exists") and (pool_value.get("details").get("ipPoolName") == name or pool_value.get("prev_name") == name):
+ if not self.requires_update(pool_value.get("details"), item, self.global_pool_obj_params):
+ self.log("Global pool '{0}' doesn't require an update".format(name), "INFO")
+ result_global_pool.get("msg").update({name: "Global pool doesn't require an update"})
+ elif item not in final_update_global_pool:
+ final_update_global_pool.append(item)
+
+ if final_update_global_pool:
+ self.log("Global pool requires update", "INFO")
+
+ # Pool(s) needs update
+ pool_params = {
+ "settings": {
+ "ippool": copy.deepcopy(final_update_global_pool)
+ }
+ }
+ self.log("Desired State for global pool (want): {0}".format(pool_params), "DEBUG")
+ keys_to_remove = ["IpAddressSpace", "ipPoolCidr", "type"]
+ for item in pool_params["settings"]["ippool"]:
+ for key in keys_to_remove:
+ del item[key]
+
+ self.log("Desired global pool details (want): {0}".format(pool_params), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="update_global_pool",
+ op_modifies=True,
+ params=pool_params,
+ )
+
+ self.check_execution_response_status(response, "update_global_pool").check_return_status()
+ for item in pool_params.get("settings").get("ippool"):
+ name = item.get("ipPoolName")
+                    self.log("Global pool '{0}' updated successfully.".format(name), "INFO")
+ result_global_pool.get("response").update({"globalPool Details": pool_params})
+ result_global_pool.get("msg").update({name: "Global Pool Updated Successfully"})
+
+ self.log("Global pool configuration operations completed successfully.", "INFO")
return
- def update_reserve_pool(self, config):
+ def update_reserve_pool(self, reserve_pool):
"""
Update or Create a Reserve Pool in Cisco Catalyst Center based on the provided configuration.
This method checks if a reserve pool with the specified name exists in Cisco Catalyst Center.
If it exists and requires an update, it updates the pool. If not, it creates a new pool.
Parameters:
- config (list of dict) - Playbook details containing Reserve Pool information.
+ reserve_pool (list of dict) - Playbook details containing Reserve Pool information.
Returns:
None
"""
- name = config.get("reserve_pool_details").get("name")
- result_reserve_pool = self.result.get("response")[1].get("reservePool")
- result_reserve_pool.get("response").update({name: {}})
- self.log("Current reserved pool details in Catalyst Center: {0}"
- .format(self.have.get("reservePool").get("details")), "DEBUG")
- self.log("Desired reserved pool details in Catalyst Center: {0}"
- .format(self.want.get("wantReserve")), "DEBUG")
-
- # Check pool exist, if not create and return
- self.log("IPv4 global pool: {0}"
- .format(self.want.get("wantReserve").get("ipv4GlobalPool")), "DEBUG")
- site_name = config.get("reserve_pool_details").get("site_name")
- reserve_params = self.want.get("wantReserve")
- site_id = self.get_site_id(site_name)
- reserve_params.update({"site_id": site_id})
- if not self.have.get("reservePool").get("exists"):
- self.log("Desired reserved pool details (want): {0}".format(reserve_params), "DEBUG")
+ reserve_pool_index = -1
+ for item in reserve_pool:
+ reserve_pool_index += 1
+ name = item.get("name")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+ self.log("Current reserved pool '{0}' details in Catalyst Center: {1}"
+ .format(name, self.have.get("reservePool")[reserve_pool_index].get("details")), "DEBUG")
+ self.log("Desired reserved pool '{0}' details in Catalyst Center: {1}"
+ .format(name, self.want.get("wantReserve")[reserve_pool_index]), "DEBUG")
+
+            # Check if the pool exists; if not, create it and continue
+            self.log("IPv4 global pool of reserved pool '{0}': {1}"
+ .format(name, self.want.get("wantReserve")[reserve_pool_index].get("ipv4GlobalPool")), "DEBUG")
+ site_name = item.get("site_name")
+ reserve_params = self.want.get("wantReserve")[reserve_pool_index]
+ site_id = self.get_site_id(site_name)
+ reserve_params.update({"site_id": site_id})
+ if not self.have.get("reservePool")[reserve_pool_index].get("exists"):
+ self.log("Desired reserved pool '{0}' details (want): {1}"
+ .format(name, reserve_params), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="reserve_ip_subpool",
+ op_modifies=True,
+ params=reserve_params,
+ )
+ self.check_execution_response_status(response, "reserve_ip_subpool").check_return_status()
+ self.log("Successfully created IP subpool reservation '{0}'.".format(name), "INFO")
+ result_reserve_pool.get("response") \
+ .update({name: self.want.get("wantReserve")[reserve_pool_index]})
+ result_reserve_pool.get("msg") \
+ .update({name: "Ip Subpool Reservation Created Successfully"})
+ continue
+
+ # Check update is required
+ if not self.requires_update(self.have.get("reservePool")[reserve_pool_index].get("details"),
+ self.want.get("wantReserve")[reserve_pool_index],
+ self.reserve_pool_obj_params):
+ self.log("Reserved ip subpool '{0}' doesn't require an update".format(name), "INFO")
+ result_reserve_pool.get("msg") \
+ .update({name: "Reserved ip subpool doesn't require an update"})
+ continue
+
+ self.log("Reserved ip pool '{0}' requires an update".format(name), "DEBUG")
+
+ # Pool Exists
+ self.log("Current reserved ip pool '{0}' details in Catalyst Center: {1}"
+ .format(name, self.have.get("reservePool")), "DEBUG")
+ self.log("Desired reserved ip pool '{0}' details: {1}"
+ .format(name, self.want.get("wantReserve")), "DEBUG")
+ reserve_params.update({"id": self.have.get("reservePool")[reserve_pool_index].get("id")})
response = self.dnac._exec(
family="network_settings",
- function="reserve_ip_subpool",
+ function="update_reserve_ip_subpool",
op_modifies=True,
params=reserve_params,
)
- self.check_execution_response_status(response).check_return_status()
- self.log("Successfully created IP subpool reservation '{0}'.".format(name), "INFO")
- result_reserve_pool.get("response").get(name) \
- .update({"reservePool Details": self.want.get("wantReserve")})
- result_reserve_pool.get("msg") \
- .update({name: "Ip Subpool Reservation Created Successfully"})
- return
-
- # Check update is required
- if not self.requires_update(self.have.get("reservePool").get("details"),
- self.want.get("wantReserve"), self.reserve_pool_obj_params):
- self.log("Reserved ip subpool '{0}' doesn't require an update".format(name), "INFO")
- result_reserve_pool.get("response").get(name) \
- .update({"Cisco Catalyst Center params": self.have.get("reservePool").get("details")})
+ self.check_execution_response_status(response, "update_reserve_ip_subpool").check_return_status()
+ self.log("Reserved ip subpool '{0}' updated successfully.".format(name), "INFO")
+ result_reserve_pool.get("response") \
+ .update({name: reserve_params})
result_reserve_pool.get("response").get(name) \
- .update({"Id": self.have.get("reservePool").get("id")})
+ .update({"Id": self.have.get("reservePool")[reserve_pool_index].get("id")})
result_reserve_pool.get("msg") \
- .update({name: "Reserve ip subpool doesn't require an update"})
- return
+ .update({name: "Reserved Ip Subpool updated successfully."})
- self.log("Reserved ip pool '{0}' requires an update".format(name), "DEBUG")
- # Pool Exists
- self.log("Current reserved ip pool '{0}' details in Catalyst Center: {1}"
- .format(name, self.have.get("reservePool")), "DEBUG")
- self.log("Desired reserved ip pool '{0}' details: {1}"
- .format(name, self.want.get("wantReserve")), "DEBUG")
- reserve_params.update({"id": self.have.get("reservePool").get("id")})
- response = self.dnac._exec(
- family="network_settings",
- function="update_reserve_ip_subpool",
- op_modifies=True,
- params=reserve_params,
- )
- self.check_execution_response_status(response).check_return_status()
- self.log("Reserved ip subpool '{0}' updated successfully.".format(name), "INFO")
- result_reserve_pool['msg'] = "Reserved Ip Subpool Updated Successfully"
- result_reserve_pool.get("response").get(name) \
- .update({"Reservation details": self.have.get("reservePool").get("details")})
+ self.log("Updated reserved IP subpool successfully", "INFO")
return
def update_network(self, config):
@@ -1912,7 +2402,7 @@ class NetworkSettings(DnacBase):
)
self.log("Received API response of 'update_network_v2': {0}".format(response), "DEBUG")
validation_string = "desired common settings operation successful"
- self.check_task_response_status(response, validation_string).check_return_status()
+ self.check_task_response_status(response, validation_string, "update_network_v2").check_return_status()
self.log("Network has been changed successfully", "INFO")
result_network.get("msg") \
.update({site_name: "Network Updated successfully"})
@@ -1933,95 +2423,106 @@ class NetworkSettings(DnacBase):
self
"""
- if config.get("global_pool_details") is not None:
- self.update_global_pool(config)
+ global_pool = config.get("global_pool_details")
+ if global_pool is not None:
+ self.update_global_pool(global_pool)
- if config.get("reserve_pool_details") is not None:
- self.update_reserve_pool(config)
+ reserve_pool = config.get("reserve_pool_details")
+ if reserve_pool is not None:
+ self.update_reserve_pool(reserve_pool)
if config.get("network_management_details") is not None:
self.update_network(config)
return self
- def delete_reserve_pool(self, name):
+ def delete_reserve_pool(self, reserve_pool_details):
"""
Delete a Reserve Pool by name in Cisco Catalyst Center
Parameters:
- name (str) - The name of the Reserve Pool to be deleted.
+            reserve_pool_details (list of dict) - Reserved pool playbook details.
Returns:
self
"""
- reserve_pool_exists = self.have.get("reservePool").get("exists")
- result_reserve_pool = self.result.get("response")[1].get("reservePool")
-
- if not reserve_pool_exists:
- result_reserve_pool.get("response").update({name: "Reserve Pool not found"})
- self.msg = "Reserved Ip Subpool Not Found"
- self.status = "success"
- return self
+ reserve_pool_index = -1
+ for item in reserve_pool_details:
+ reserve_pool_index += 1
+ name = item.get("name")
+ reserve_pool_exists = self.have.get("reservePool")[reserve_pool_index].get("exists")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+
+ if not reserve_pool_exists:
+ result_reserve_pool.get("msg").update({name: "Reserve Pool not found"})
+ self.log("Reserved Ip Subpool '{0}' not found".format(name), "INFO")
+ continue
+
+ self.log("Reserved IP pool scheduled for deletion: {0}"
+ .format(self.have.get("reservePool")[reserve_pool_index].get("name")), "INFO")
+ _id = self.have.get("reservePool")[reserve_pool_index].get("id")
+ self.log("Reserved pool '{0}' id: {1}".format(name, _id), "DEBUG")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="release_reserve_ip_subpool",
+ op_modifies=True,
+ params={"id": _id},
+ )
+ self.check_execution_response_status(response, "release_reserve_ip_subpool").check_return_status()
+ executionid = response.get("executionId")
+ result_reserve_pool = self.result.get("response")[1].get("reservePool")
+ result_reserve_pool.get("response").update({name: {}})
+ result_reserve_pool.get("response").get(name) \
+ .update({"Execution Id": executionid})
+ result_reserve_pool.get("msg") \
+ .update({name: "Ip subpool reservation released successfully"})
- self.log("Reserved IP pool scheduled for deletion: {0}"
- .format(self.have.get("reservePool").get("name")), "INFO")
- _id = self.have.get("reservePool").get("id")
- self.log("Reserved pool {0} id: {1}".format(name, _id), "DEBUG")
- response = self.dnac._exec(
- family="network_settings",
- function="release_reserve_ip_subpool",
- op_modifies=True,
- params={"id": _id},
- )
- self.check_execution_response_status(response).check_return_status()
- executionid = response.get("executionId")
- result_reserve_pool = self.result.get("response")[1].get("reservePool")
- result_reserve_pool.get("response").update({name: {}})
- result_reserve_pool.get("response").get(name) \
- .update({"Execution Id": executionid})
- result_reserve_pool.get("msg") \
- .update({name: "Ip subpool reservation released successfully"})
- self.msg = "Reserved pool - {0} released successfully".format(name)
+ self.msg = "Reserved pool(s) released successfully"
self.status = "success"
return self
- def delete_global_pool(self, name):
+ def delete_global_pool(self, global_pool_details):
"""
Delete a Global Pool by name in Cisco Catalyst Center
Parameters:
- name (str) - The name of the Global Pool to be deleted.
+ global_pool_details (dict) - Global pool details from the playbook.
Returns:
self
"""
- global_pool_exists = self.have.get("globalPool").get("exists")
result_global_pool = self.result.get("response")[0].get("globalPool")
- if not global_pool_exists:
- result_global_pool.get("response").update({name: "Global Pool not found"})
- self.msg = "Global pool Not Found"
- self.status = "success"
- return self
+ global_pool_index = 0
+ for item in self.have.get("globalPool"):
+ global_pool_exists = item.get("exists")
+ name = global_pool_details.get("settings").get("ip_pool")[global_pool_index].get("name")
+ global_pool_index += 1
+ if not global_pool_exists:
+ result_global_pool.get("msg").update({name: "Global Pool not found"})
+ self.log("Global pool '{0}' not found".format(name), "INFO")
+ continue
+
+ id = item.get("id")
+ response = self.dnac._exec(
+ family="network_settings",
+ function="delete_global_ip_pool",
+ op_modifies=True,
+ params={"id": id},
+ )
- response = self.dnac._exec(
- family="network_settings",
- function="delete_global_ip_pool",
- op_modifies=True,
- params={"id": self.have.get("globalPool").get("id")},
- )
+ # Check the execution status
+ self.check_execution_response_status(response, "delete_global_ip_pool").check_return_status()
+ executionid = response.get("executionId")
- # Check the execution status
- self.check_execution_response_status(response).check_return_status()
- executionid = response.get("executionId")
+ # Update result information
+ result_global_pool = self.result.get("response")[0].get("globalPool")
+ result_global_pool.get("response").update({name: {}})
+ result_global_pool.get("response").get(name).update({"Execution Id": executionid})
+ result_global_pool.get("msg").update({name: "Global pool deleted successfully"})
- # Update result information
- result_global_pool = self.result.get("response")[0].get("globalPool")
- result_global_pool.get("response").update({name: {}})
- result_global_pool.get("response").get(name).update({"Execution Id": executionid})
- result_global_pool.get("msg").update({name: "Pool deleted successfully"})
- self.msg = "Global pool - {0} deleted successfully".format(name)
+ self.msg = "Global pools deleted successfully"
self.status = "success"
return self
@@ -2036,14 +2537,13 @@ class NetworkSettings(DnacBase):
self
"""
- if config.get("reserve_pool_details") is not None:
- name = config.get("reserve_pool_details").get("name")
- self.delete_reserve_pool(name).check_return_status()
+ reserve_pool_details = config.get("reserve_pool_details")
+ if reserve_pool_details is not None:
+ self.delete_reserve_pool(reserve_pool_details).check_return_status()
- if config.get("global_pool_details") is not None:
- name = config.get("global_pool_details") \
- .get("settings").get("ip_pool")[0].get("name")
- self.delete_global_pool(name).check_return_status()
+ global_pool_details = config.get("global_pool_details")
+ if global_pool_details is not None:
+ self.delete_global_pool(global_pool_details).check_return_status()
return self
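A matching deletion sketch, assuming the workflow module name and the deleted state value shown here; only the pool names are needed, since delete_global_pool and delete_reserve_pool look each entry up by index in the gathered state.

# Hypothetical deletion task; module name and state value are assumptions from context
- name: Release reserved subpools and delete global pools
  cisco.dnac.network_settings_intent:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    dnac_verify: "{{ dnac_verify }}"
    state: deleted
    config:
      - global_pool_details:
          settings:
            ip_pool:
              - name: Global_Pool_1
              - name: Global_Pool_2
        reserve_pool_details:
          - name: Reserve_Pool_A
          - name: Reserve_Pool_B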
@@ -2064,33 +2564,49 @@ class NetworkSettings(DnacBase):
self.log("Current State (have): {0}".format(self.have), "INFO")
self.log("Requested State (want): {0}".format(self.want), "INFO")
if config.get("global_pool_details") is not None:
+ global_pool_index = 0
self.log("Desired State of global pool (want): {0}"
.format(self.want.get("wantGlobal")), "DEBUG")
self.log("Current State of global pool (have): {0}"
- .format(self.have.get("globalPool").get("details")), "DEBUG")
- if self.requires_update(self.have.get("globalPool").get("details"),
- self.want.get("wantGlobal"), self.global_pool_obj_params):
- self.msg = "Global Pool Config is not applied to the Cisco Catalyst Center"
- self.status = "failed"
- return self
+ .format(self.have.get("globalPool")), "DEBUG")
+ for item in self.want.get("wantGlobal").get("settings").get("ippool"):
+ global_pool_details = self.have.get("globalPool")[global_pool_index].get("details")
+ if not global_pool_details:
+ self.msg = "The global pool is not created with the config: {0}".format(item)
+ self.status = "failed"
+ return self
+
+ if self.requires_update(global_pool_details, item, self.global_pool_obj_params):
+ self.msg = "Global Pool Config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ global_pool_index += 1
- self.log("Successfully validated global pool '{0}'.".format(self.want
- .get("wantGlobal").get("settings").get("ippool")[0].get("ipPoolName")), "INFO")
+ self.log("Successfully validated global pool(s).", "INFO")
self.result.get("response")[0].get("globalPool").update({"Validation": "Success"})
if config.get("reserve_pool_details") is not None:
- if self.requires_update(self.have.get("reservePool").get("details"),
- self.want.get("wantReserve"), self.reserve_pool_obj_params):
- self.log("Desired State for reserve pool (want): {0}"
- .format(self.want.get("wantReserve")), "DEBUG")
- self.log("Current State for reserve pool (have): {0}"
- .format(self.have.get("reservePool").get("details")), "DEBUG")
- self.msg = "Reserved Pool Config is not applied to the Cisco Catalyst Center"
- self.status = "failed"
- return self
+ reserve_pool_index = 0
+ self.log("Desired State for reserve pool (want): {0}"
+ .format(self.want.get("wantReserve")), "DEBUG")
+ self.log("Current State for reserve pool (have): {0}"
+ .format(self.have.get("reservePool")), "DEBUG")
+ for item in self.want.get("wantReserve"):
+ reserve_pool_details = self.have.get("reservePool")[reserve_pool_index].get("details")
+ if not reserve_pool_details:
+ self.msg = "The reserve pool is not created with the config: {0}".format(item)
+ self.status = "failed"
+ return self
+
+ if self.requires_update(reserve_pool_details, item, self.reserve_pool_obj_params):
+ self.msg = "Reserved Pool Config is not applied to the Cisco Catalyst Center"
+ self.status = "failed"
+ return self
+
+ reserve_pool_index += 1
- self.log("Successfully validated the reserved pool '{0}'."
- .format(self.want.get("wantReserve").get("name")), "INFO")
+ self.log("Successfully validated the reserved pool(s)", "INFO")
self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
if config.get("network_management_details") is not None:
@@ -2104,8 +2620,7 @@ class NetworkSettings(DnacBase):
.format(config.get("network_management_details").get("site_name")), "INFO")
self.result.get("response")[2].get("network").update({"Validation": "Success"})
- self.msg = "Successfully validated the Global Pool, Reserve Pool \
- and the Network Functions."
+ self.msg = "Successfully validated the Global Pool, Reserve Pool and the Network Functions."
self.status = "success"
return self
@@ -2126,27 +2641,36 @@ class NetworkSettings(DnacBase):
self.log("Current State (have): {0}".format(self.have), "INFO")
self.log("Desired State (want): {0}".format(self.want), "INFO")
if config.get("global_pool_details") is not None:
- global_pool_exists = self.have.get("globalPool").get("exists")
- if global_pool_exists:
- self.msg = "Global Pool Config is not applied to the Cisco Catalyst Center"
- self.status = "failed"
- return self
+ global_pool_index = 0
+ global_pool_details = self.have.get("globalPool")
+ for item in global_pool_details:
+ global_pool_exists = item.get("exists")
+ name = config.get("global_pool_details").get("settings") \
+ .get("ip_pool")[global_pool_index].get("name")
+ if global_pool_exists:
+ self.msg = "Global Pool Config '{0}' is not applied to the Cisco Catalyst Center" \
+ .format(name)
+ self.status = "failed"
+ return self
- self.log("Successfully validated absence of Global Pool '{0}'."
- .format(config.get("global_pool_details")
- .get("settings").get("ip_pool")[0].get("name")), "INFO")
+ self.log("Successfully validated absence of Global Pool '{0}'.".format(name), "INFO")
+ global_pool_index += 1
self.result.get("response")[0].get("globalPool").update({"Validation": "Success"})
if config.get("reserve_pool_details") is not None:
- reserve_pool_exists = self.have.get("reservePool").get("exists")
- if reserve_pool_exists:
- self.msg = "Reserved Pool Config is not applied to the Catalyst Center"
- self.status = "failed"
- return self
+ reserve_pool_index = 0
+ reserve_pool_details = self.have.get("reservePool")
+ for item in reserve_pool_details:
+ reserve_pool_exists = item.get("exists")
+ name = config.get("reserve_pool_details")[reserve_pool_index].get("name")
+ if reserve_pool_exists:
+ self.msg = "Reserved Pool Config '{0}' is not applied to the Catalyst Center" \
+ .format(name)
+ self.status = "failed"
+ return self
- self.log("Successfully validated the absence of Reserve Pool '{0}'."
- .format(config.get("reserve_pool_details").get("name")), "INFO")
- self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
+ self.log("Successfully validated the absence of Reserve Pool '{0}'.".format(name), "INFO")
+ self.result.get("response")[1].get("reservePool").update({"Validation": "Success"})
self.msg = "Successfully validated the absence of Global Pool/Reserve Pool"
self.status = "success"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_update.py b/ansible_collections/cisco/dnac/plugins/modules/network_update.py
index 42b2eec0f..65e626500 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_update.py
@@ -131,7 +131,7 @@ options:
is associated with the site.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Network Settings UpdateNetwork
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_v2.py b/ansible_collections/cisco/dnac/plugins/modules/network_v2.py
index faa1b85c9..0bbe2869a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_v2.py
@@ -134,7 +134,7 @@ options:
the network settings.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Network Settings CreateNetworkV2
diff --git a/ansible_collections/cisco/dnac/plugins/modules/network_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/network_v2_info.py
index 096b27bbd..715d1f6de 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/network_v2_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/network_v2_info.py
@@ -24,7 +24,7 @@ options:
- SiteId query parameter. Site Id to get the network settings associated with the site.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Network Settings GetNetworkV2
diff --git a/ansible_collections/cisco/dnac/plugins/modules/nfv_profile.py b/ansible_collections/cisco/dnac/plugins/modules/nfv_profile.py
index 37ce59101..42809830c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/nfv_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/nfv_profile.py
@@ -147,7 +147,7 @@ options:
description: Name of the profile to create NFV profile.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Site Design CreateNFVProfile
diff --git a/ansible_collections/cisco/dnac/plugins/modules/nfv_profile_info.py b/ansible_collections/cisco/dnac/plugins/modules/nfv_profile_info.py
index 7deb4f7be..2a4880742 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/nfv_profile_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/nfv_profile_info.py
@@ -36,7 +36,7 @@ options:
- Name query parameter. Name of network profile to be retrieved.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Site Design GetNFVProfile
diff --git a/ansible_collections/cisco/dnac/plugins/modules/nfv_provision.py b/ansible_collections/cisco/dnac/plugins/modules/nfv_provision.py
index e242de7f5..7c913e63b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/nfv_provision.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/nfv_provision.py
@@ -375,7 +375,7 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Site Design ProvisionNFV
diff --git a/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_detail_info.py b/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_detail_info.py
index 2a0f85a5a..65b9281db 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_detail_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_detail_info.py
@@ -24,7 +24,7 @@ options:
- DeviceIp query parameter. Device to which the provisioning detail has to be retrieved.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Site Design GetDeviceDetailsByIP
diff --git a/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_details.py b/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_details.py
index af9792e72..552fa12b3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_details.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/nfv_provision_details.py
@@ -23,7 +23,7 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Site Design NFVProvisioningDetail
diff --git a/ansible_collections/cisco/dnac/plugins/modules/path_trace.py b/ansible_collections/cisco/dnac/plugins/modules/path_trace.py
index 055db2d56..e843b24ac 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/path_trace.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/path_trace.py
@@ -26,7 +26,7 @@ options:
description: Destination IP address.
type: str
destPort:
- description: Destination Port.
+ description: Destination Port, range 1-65535.
type: str
flowAnalysisId:
description: FlowAnalysisId path parameter. Flow analysis request id.
@@ -40,17 +40,17 @@ options:
description: Periodic refresh of path for every 30 sec.
type: bool
protocol:
- description: Protocol.
+ description: Protocol, one of TCP or UDP; both are checked when left blank.
type: str
sourceIP:
description: Source IP address.
type: str
sourcePort:
- description: Source Port.
+ description: Source Port, range 1-65535.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Path Trace InitiateANewPathtrace
description: Complete reference of the InitiateANewPathtrace API.
@@ -103,7 +103,6 @@ EXAMPLES = r"""
flowAnalysisId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
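The clarified option descriptions above translate into a task like this minimal sketch; values are illustrative, and the state handling follows the usual cisco.dnac resource-module convention.

- name: Initiate a TCP path trace between two endpoints
  cisco.dnac.path_trace:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    dnac_verify: "{{ dnac_verify }}"
    dnac_port: "{{ dnac_port }}"
    dnac_version: "{{ dnac_version }}"
    dnac_debug: "{{ dnac_debug }}"
    state: present
    sourceIP: 10.10.10.10
    destIP: 10.20.20.20
    protocol: TCP          # one of TCP or UDP; both are checked when left blank
    sourcePort: "49152"    # ports are strings in this module, range 1-65535
    destPort: "443"
    periodicRefresh: false
  register: path_trace_result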
diff --git a/ansible_collections/cisco/dnac/plugins/modules/path_trace_info.py b/ansible_collections/cisco/dnac/plugins/modules/path_trace_info.py
index d305183c1..a51331b4f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/path_trace_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/path_trace_info.py
@@ -31,24 +31,24 @@ options:
type: str
destIP:
description:
- - DestIP query parameter. Destination IP adress.
+ - DestIP query parameter. Destination IP address.
type: str
sourcePort:
description:
- SourcePort query parameter. Source port.
- type: str
+ type: float
destPort:
description:
- DestPort query parameter. Destination port.
- type: str
+ type: float
gtCreateTime:
description:
- GtCreateTime query parameter. Analyses requested after this time.
- type: str
+ type: float
ltCreateTime:
description:
- LtCreateTime query parameter. Analyses requested before this time.
- type: str
+ type: float
protocol:
description:
- Protocol query parameter.
@@ -64,15 +64,15 @@ options:
lastUpdateTime:
description:
- LastUpdateTime query parameter. Last update time.
- type: str
+ type: float
limit:
description:
- Limit query parameter. Number of resources returned.
- type: int
+ type: float
offset:
description:
- Offset query parameter. Start index of resources returned (1-based).
- type: int
+ type: float
order:
description:
- Order query parameter. Order by this field.
@@ -86,19 +86,19 @@ options:
- FlowAnalysisId path parameter. Flow analysis request id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
+- name: Cisco DNA Center documentation for Path Trace RetrievesAllPreviousPathtracesSummary
+ description: Complete reference of the RetrievesAllPreviousPathtracesSummary API.
+ link: https://developer.cisco.com/docs/dna-center/#!retrieves-all-previous-pathtraces-summary
- name: Cisco DNA Center documentation for Path Trace RetrievesPreviousPathtrace
description: Complete reference of the RetrievesPreviousPathtrace API.
link: https://developer.cisco.com/docs/dna-center/#!retrieves-previous-pathtrace
-- name: Cisco DNA Center documentation for Path Trace RetrivesAllPreviousPathtracesSummary
- description: Complete reference of the RetrivesAllPreviousPathtracesSummary API.
- link: https://developer.cisco.com/docs/dna-center/#!retrives-all-previous-pathtraces-summary
notes:
- SDK Method used are
+ path_trace.PathTrace.retrieves_all_previous_pathtraces_summary,
path_trace.PathTrace.retrieves_previous_pathtrace,
- path_trace.PathTrace.retrives_all_previous_pathtraces_summary,
- Paths used are
get /dna/intent/api/v1/flow-analysis,
@@ -120,14 +120,14 @@ EXAMPLES = r"""
periodicRefresh: True
sourceIP: string
destIP: string
- sourcePort: string
- destPort: string
- gtCreateTime: string
- ltCreateTime: string
+ sourcePort: 0
+ destPort: 0
+ gtCreateTime: 0
+ ltCreateTime: 0
protocol: string
status: string
taskId: string
- lastUpdateTime: string
+ lastUpdateTime: 0
limit: 0
offset: 0
order: string
@@ -148,7 +148,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -1068,7 +1067,8 @@ dnac_response:
"protocol": "string",
"sourceIP": "string",
"sourcePort": "string",
- "status": "string"
+ "status": "string",
+ "previousFlowAnalysisId": "string"
}
},
"version": "string"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/planned_access_points.py b/ansible_collections/cisco/dnac/plugins/modules/planned_access_points.py
new file mode 100644
index 000000000..bce622994
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/planned_access_points.py
@@ -0,0 +1,313 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: planned_access_points
+short_description: Resource module for Planned Access Points
+description:
+- Manage operations create, update and delete of the resource Planned Access Points.
+- >
+ Allows creation of a new planned access point on an existing floor map including its planned radio and antenna
+ details. Use the Get variant of this API to fetch any existing planned access points for the floor. The payload to
+ create a planned access point uses the same format as that API's response, but as a single object instead of a list.
+- >
+ Allows deletion of a planned access point from an existing floor map, including its planned radio and antenna details.
+ Use the Get variant of this API to fetch the existing planned access points for the floor. The instanceUUID listed
+ in each of the planned access point attributes acts as the path param input to this API to delete that specific
+ instance.
+- >
+ Allows updating a planned access point on an existing floor map including its planned radio and antenna details.
+ Use the Get variant of this API to fetch the existing planned access points for the floor. The payload to update a
+ planned access point uses the same format as that API's response, but as a single object instead of a list.
+version_added: '6.0.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ attributes:
+ description: Planned Access Points's attributes.
+ suboptions:
+ createDate:
+ description: Planned Access Points's createDate.
+ type: int
+ domain:
+ description: Planned Access Points's domain.
+ type: str
+ heirarchyName:
+ description: Planned Access Points's heirarchyName.
+ type: str
+ id:
+ description: Planned Access Points's id.
+ type: float
+ instanceUuid:
+ description: Planned Access Points's instanceUuid.
+ type: str
+ macaddress:
+ description: Planned Access Points's macaddress.
+ type: str
+ name:
+ description: Planned Access Points's name.
+ type: str
+ source:
+ description: Planned Access Points's source.
+ type: str
+ typeString:
+ description: Planned Access Points's typeString.
+ type: str
+ type: dict
+ floorId:
+ description: FloorId path parameter. The instance UUID of the floor hierarchy element.
+ type: str
+ isSensor:
+ description: IsSensor flag.
+ type: bool
+ location:
+ description: Planned Access Points's location.
+ suboptions:
+ altitude:
+ description: Planned Access Points's altitude.
+ type: float
+ lattitude:
+ description: Planned Access Points's lattitude.
+ type: float
+ longtitude:
+ description: Planned Access Points's longtitude.
+ type: float
+ type: dict
+ plannedAccessPointUuid:
+ description: PlannedAccessPointUuid path parameter. The instance UUID of the planned
+ access point to delete.
+ type: str
+ position:
+ description: Planned Access Points's position.
+ suboptions:
+ x:
+ description: Planned Access Points's x.
+ type: float
+ y:
+ description: Planned Access Points's y.
+ type: float
+ z:
+ description: Planned Access Points's z.
+ type: float
+ type: dict
+ radioCount:
+ description: Planned Access Points's radioCount.
+ type: int
+ radios:
+ description: Planned Access Points's radios.
+ elements: dict
+ suboptions:
+ antenna:
+ description: Planned Access Points's antenna.
+ suboptions:
+ azimuthAngle:
+ description: Planned Access Points's azimuthAngle.
+ type: float
+ elevationAngle:
+ description: Planned Access Points's elevationAngle.
+ type: float
+ gain:
+ description: Planned Access Points's gain.
+ type: float
+ mode:
+ description: Planned Access Points's mode.
+ type: str
+ name:
+ description: Planned Access Points's name.
+ type: str
+ type:
+ description: Planned Access Points's type.
+ type: str
+ type: dict
+ attributes:
+ description: Planned Access Points's attributes.
+ suboptions:
+ channel:
+ description: Planned Access Points's channel.
+ type: float
+ channelString:
+ description: Planned Access Points's channelString.
+ type: str
+ id:
+ description: Planned Access Points's id.
+ type: float
+ ifMode:
+ description: Planned Access Points's ifMode.
+ type: str
+ ifTypeString:
+ description: Planned Access Points's ifTypeString.
+ type: str
+ ifTypeSubband:
+ description: Planned Access Points's ifTypeSubband.
+ type: str
+ instanceUuid:
+ description: Planned Access Points's instanceUuid.
+ type: str
+ slotId:
+ description: Planned Access Points's slotId.
+ type: float
+ type: dict
+ isSensor:
+ description: IsSensor flag.
+ type: bool
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Devices CreatePlannedAccessPointForFloor
+ description: Complete reference of the CreatePlannedAccessPointForFloor API.
+ link: https://developer.cisco.com/docs/dna-center/#!create-planned-access-point-for-floor
+- name: Cisco DNA Center documentation for Devices DeletePlannedAccessPointForFloor
+ description: Complete reference of the DeletePlannedAccessPointForFloor API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-planned-access-point-for-floor
+- name: Cisco DNA Center documentation for Devices UpdatePlannedAccessPointForFloor
+ description: Complete reference of the UpdatePlannedAccessPointForFloor API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-planned-access-point-for-floor
+notes:
+ - SDK Method used are
+ devices.Devices.create_planned_access_point_for_floor,
+ devices.Devices.delete_planned_access_point_for_floor,
+ devices.Devices.update_planned_access_point_for_floor,
+
+ - Paths used are
+ post /dna/intent/api/v1/floors/{floorId}/planned-access-points,
+ delete /dna/intent/api/v1/floors/{floorId}/planned-access-points/{plannedAccessPointUuid},
+ put /dna/intent/api/v1/floors/{floorId}/planned-access-points,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.planned_access_points:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ attributes:
+ createDate: 0
+ domain: string
+ heirarchyName: string
+ id: 0
+ instanceUuid: string
+ macaddress: string
+ name: string
+ source: string
+ typeString: string
+ floorId: string
+ isSensor: true
+ location:
+ altitude: 0
+ lattitude: 0
+ longtitude: 0
+ position:
+ x: 0
+ y: 0
+ z: 0
+ radioCount: 0
+ radios:
+ - antenna:
+ azimuthAngle: 0
+ elevationAngle: 0
+ gain: 0
+ mode: string
+ name: string
+ type: string
+ attributes:
+ channel: 0
+ channelString: string
+ id: 0
+ ifMode: string
+ ifTypeString: string
+ ifTypeSubband: string
+ instanceUuid: string
+ slotId: 0
+ isSensor: true
+
+- name: Create
+ cisco.dnac.planned_access_points:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ attributes:
+ createDate: 0
+ domain: string
+ heirarchyName: string
+ id: 0
+ instanceUuid: string
+ macaddress: string
+ name: string
+ source: string
+ typeString: string
+ floorId: string
+ isSensor: true
+ location:
+ altitude: 0
+ lattitude: 0
+ longtitude: 0
+ position:
+ x: 0
+ y: 0
+ z: 0
+ radioCount: 0
+ radios:
+ - antenna:
+ azimuthAngle: 0
+ elevationAngle: 0
+ gain: 0
+ mode: string
+ name: string
+ type: string
+ attributes:
+ channel: 0
+ channelString: string
+ id: 0
+ ifMode: string
+ ifTypeString: string
+ ifTypeSubband: string
+ instanceUuid: string
+ slotId: 0
+ isSensor: true
+
+- name: Delete by id
+ cisco.dnac.planned_access_points:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ floorId: string
+ plannedAccessPointUuid: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/planned_access_points_info.py b/ansible_collections/cisco/dnac/plugins/modules/planned_access_points_info.py
index bed5d8656..5105c817a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/planned_access_points_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/planned_access_points_info.py
@@ -26,18 +26,18 @@ options:
limit:
description:
- Limit query parameter.
- type: int
+ type: float
offset:
description:
- Offset query parameter.
- type: int
+ type: float
radios:
description:
- Radios query parameter. Include planned radio details.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Devices GetPlannedAccessPointsForFloor
description: Complete reference of the GetPlannedAccessPointsForFloor API.
@@ -69,7 +69,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/platform_nodes_configuration_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/platform_nodes_configuration_summary_info.py
index d06ecb029..b64fd731b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/platform_nodes_configuration_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/platform_nodes_configuration_summary_info.py
@@ -23,8 +23,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Platform Configuration CiscoDNACenterNodesConfigurationSummary
description: Complete reference of the CiscoDNACenterNodesConfigurationSummary API.
@@ -52,7 +52,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/platform_release_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/platform_release_summary_info.py
index b37caefc0..15e079801 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/platform_release_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/platform_release_summary_info.py
@@ -22,8 +22,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Platform Configuration CiscoDNACenterReleaseSummary
description: Complete reference of the CiscoDNACenterReleaseSummary API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device.py
index a3451edeb..19650316e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device.py
@@ -18,813 +18,119 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
- _id:
- description: Pnp Device's _id.
- type: str
deviceInfo:
description: Pnp Device's deviceInfo.
suboptions:
- aaaCredentials:
- description: Pnp Device's aaaCredentials.
- suboptions:
- password:
- description: Pnp Device's password.
- type: str
- username:
- description: Pnp Device's username.
- type: str
- type: dict
- addedOn:
- description: Pnp Device's addedOn.
- type: int
- addnMacAddrs:
- description: Pnp Device's addnMacAddrs.
- elements: str
- type: list
- agentType:
- description: Pnp Device's agentType.
- type: str
- authStatus:
- description: Pnp Device's authStatus.
- type: str
- authenticatedSudiSerialNo:
- description: Pnp Device's authenticatedSudiSerialNo.
- type: str
- capabilitiesSupported:
- description: Pnp Device's capabilitiesSupported.
- elements: str
- type: list
- cmState:
- description: Pnp Device's cmState.
- type: str
description:
- description: Pnp Device's description.
+ description: Description.
type: str
deviceSudiSerialNos:
- description: Pnp Device's deviceSudiSerialNos.
- elements: str
- type: list
- deviceType:
- description: Pnp Device's deviceType.
- type: str
- featuresSupported:
- description: Pnp Device's featuresSupported.
+ description: Device Sudi Serial Nos.
elements: str
type: list
- fileSystemList:
- description: Pnp Device's fileSystemList.
- elements: dict
- suboptions:
- freespace:
- description: Pnp Device's freespace.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- readable:
- description: Readable flag.
- type: bool
- size:
- description: Pnp Device's size.
- type: int
- type:
- description: Pnp Device's type.
- type: str
- writeable:
- description: Writeable flag.
- type: bool
- type: list
- firstContact:
- description: Pnp Device's firstContact.
- type: int
hostname:
- description: Pnp Device's hostname.
+ description: Hostname.
type: str
- httpHeaders:
- description: Pnp Device's httpHeaders.
- elements: dict
- suboptions:
- key:
- description: Pnp Device's key.
- type: str
- value:
- description: Pnp Device's value.
- type: str
- type: list
- imageFile:
- description: Pnp Device's imageFile.
- type: str
- imageVersion:
- description: Pnp Device's imageVersion.
- type: str
- ipInterfaces:
- description: Pnp Device's ipInterfaces.
- elements: dict
- suboptions:
- ipv4Address:
- description: Pnp Device's ipv4Address.
- type: dict
- ipv6AddressList:
- description: Pnp Device's ipv6AddressList.
- elements: dict
- type: list
- macAddress:
- description: Pnp Device's macAddress.
- type: str
- name:
- description: Pnp Device's name.
- type: str
- status:
- description: Pnp Device's status.
- type: str
- type: list
- lastContact:
- description: Pnp Device's lastContact.
- type: int
- lastSyncTime:
- description: Pnp Device's lastSyncTime.
- type: int
- lastUpdateOn:
- description: Pnp Device's lastUpdateOn.
- type: int
- location:
- description: Pnp Device's location.
- suboptions:
- address:
- description: Pnp Device's address.
- type: str
- altitude:
- description: Pnp Device's altitude.
- type: str
- latitude:
- description: Pnp Device's latitude.
- type: str
- longitude:
- description: Pnp Device's longitude.
- type: str
- siteId:
- description: Pnp Device's siteId.
- type: str
- type: dict
+ isSudiRequired:
+ description: Is Sudi Required.
+ type: bool
macAddress:
- description: Pnp Device's macAddress.
- type: str
- mode:
- description: Pnp Device's mode.
- type: str
- name:
- description: Pnp Device's name.
- type: str
- neighborLinks:
- description: Pnp Device's neighborLinks.
- elements: dict
- suboptions:
- localInterfaceName:
- description: Pnp Device's localInterfaceName.
- type: str
- localMacAddress:
- description: Pnp Device's localMacAddress.
- type: str
- localShortInterfaceName:
- description: Pnp Device's localShortInterfaceName.
- type: str
- remoteDeviceName:
- description: Pnp Device's remoteDeviceName.
- type: str
- remoteInterfaceName:
- description: Pnp Device's remoteInterfaceName.
- type: str
- remoteMacAddress:
- description: Pnp Device's remoteMacAddress.
- type: str
- remotePlatform:
- description: Pnp Device's remotePlatform.
- type: str
- remoteShortInterfaceName:
- description: Pnp Device's remoteShortInterfaceName.
- type: str
- remoteVersion:
- description: Pnp Device's remoteVersion.
- type: str
- type: list
- onbState:
- description: Pnp Device's onbState.
+ description: Mac Address.
type: str
pid:
- description: Pnp Device's pid.
- type: str
- pnpProfileList:
- description: Pnp Device's pnpProfileList.
- elements: dict
- suboptions:
- createdBy:
- description: Pnp Device's createdBy.
- type: str
- discoveryCreated:
- description: DiscoveryCreated flag.
- type: bool
- primaryEndpoint:
- description: Pnp Device's primaryEndpoint.
- suboptions:
- certificate:
- description: Pnp Device's certificate.
- type: str
- fqdn:
- description: Pnp Device's fqdn.
- type: str
- ipv4Address:
- description: Pnp Device's ipv4Address.
- type: dict
- ipv6Address:
- description: Pnp Device's ipv6Address.
- type: dict
- port:
- description: Pnp Device's port.
- type: int
- protocol:
- description: Pnp Device's protocol.
- type: str
- type: dict
- profileName:
- description: Pnp Device's profileName.
- type: str
- secondaryEndpoint:
- description: Pnp Device's secondaryEndpoint.
- suboptions:
- certificate:
- description: Pnp Device's certificate.
- type: str
- fqdn:
- description: Pnp Device's fqdn.
- type: str
- ipv4Address:
- description: Pnp Device's ipv4Address.
- type: dict
- ipv6Address:
- description: Pnp Device's ipv6Address.
- type: dict
- port:
- description: Pnp Device's port.
- type: int
- protocol:
- description: Pnp Device's protocol.
- type: str
- type: dict
- type: list
- populateInventory:
- description: PopulateInventory flag.
- type: bool
- preWorkflowCliOuputs:
- description: Pnp Device's preWorkflowCliOuputs.
- elements: dict
- suboptions:
- cli:
- description: Pnp Device's cli.
- type: str
- cliOutput:
- description: Pnp Device's cliOutput.
- type: str
- type: list
- projectId:
- description: Pnp Device's projectId.
+ description: Pid.
type: str
- projectName:
- description: Pnp Device's projectName.
- type: str
- reloadRequested:
- description: ReloadRequested flag.
- type: bool
serialNumber:
- description: Pnp Device's serialNumber.
+ description: Serial Number.
type: str
- smartAccountId:
- description: Pnp Device's smartAccountId.
- type: str
- source:
- description: Pnp Device's source.
+ siteId:
+ description: Site Id.
type: str
stack:
- description: Stack flag.
+ description: Stack.
type: bool
stackInfo:
description: Pnp Device's stackInfo.
suboptions:
isFullRing:
- description: IsFullRing flag.
+ description: Is Full Ring.
type: bool
stackMemberList:
description: Pnp Device's stackMemberList.
elements: dict
suboptions:
hardwareVersion:
- description: Pnp Device's hardwareVersion.
+ description: Hardware Version.
type: str
licenseLevel:
- description: Pnp Device's licenseLevel.
+ description: License Level.
type: str
licenseType:
- description: Pnp Device's licenseType.
+ description: License Type.
type: str
macAddress:
- description: Pnp Device's macAddress.
+ description: Mac Address.
type: str
pid:
- description: Pnp Device's pid.
+ description: Pid.
type: str
priority:
- description: Pnp Device's priority.
- type: int
+ description: Priority.
+ type: float
role:
- description: Pnp Device's role.
+ description: Role.
type: str
serialNumber:
- description: Pnp Device's serialNumber.
+ description: Serial Number.
type: str
softwareVersion:
- description: Pnp Device's softwareVersion.
+ description: Software Version.
type: str
stackNumber:
- description: Pnp Device's stackNumber.
- type: int
+ description: Stack Number.
+ type: float
state:
- description: Pnp Device's state.
+ description: State.
type: str
sudiSerialNumber:
- description: Pnp Device's sudiSerialNumber.
+ description: Sudi Serial Number.
type: str
type: list
stackRingProtocol:
- description: Pnp Device's stackRingProtocol.
+ description: Stack Ring Protocol.
type: str
supportsStackWorkflows:
- description: SupportsStackWorkflows flag.
+ description: Supports Stack Workflows.
type: bool
totalMemberCount:
- description: Pnp Device's totalMemberCount.
- type: int
+ description: Total Member Count.
+ type: float
validLicenseLevels:
- description: Pnp Device's validLicenseLevels.
+ description: Valid License Levels.
elements: str
type: list
type: dict
- state:
- description: Pnp Device's state.
- type: str
- sudiRequired:
- description: SudiRequired flag.
- type: bool
- tags:
- description: Pnp Device's tags.
- type: dict
+ userMicNumbers:
+ description: User Mic Numbers.
+ elements: str
+ type: list
userSudiSerialNos:
- description: Pnp Device's userSudiSerialNos.
+ description: User Sudi Serial Nos.
elements: str
type: list
- virtualAccountId:
- description: Pnp Device's virtualAccountId.
- type: str
workflowId:
- description: Pnp Device's workflowId.
+ description: Workflow Id.
type: str
workflowName:
- description: Pnp Device's workflowName.
+ description: Workflow Name.
type: str
type: dict
id:
- description: Id path parameter.
+ description: Id.
type: str
- runSummaryList:
- description: Pnp Device's runSummaryList.
- elements: dict
- suboptions:
- details:
- description: Pnp Device's details.
- type: str
- errorFlag:
- description: ErrorFlag flag.
- type: bool
- historyTaskInfo:
- description: Pnp Device's historyTaskInfo.
- suboptions:
- addnDetails:
- description: Pnp Device's addnDetails.
- elements: dict
- suboptions:
- key:
- description: Pnp Device's key.
- type: str
- value:
- description: Pnp Device's value.
- type: str
- type: list
- name:
- description: Pnp Device's name.
- type: str
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type:
- description: Pnp Device's type.
- type: str
- workItemList:
- description: Pnp Device's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device's command.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- outputStr:
- description: Pnp Device's outputStr.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type: list
- type: dict
- timestamp:
- description: Pnp Device's timestamp.
- type: int
- type: list
- systemResetWorkflow:
- description: Pnp Device's systemResetWorkflow.
- suboptions:
- _id:
- description: Pnp Device's _id.
- type: str
- addToInventory:
- description: AddToInventory flag.
- type: bool
- addedOn:
- description: Pnp Device's addedOn.
- type: int
- configId:
- description: Pnp Device's configId.
- type: str
- currTaskIdx:
- description: Pnp Device's currTaskIdx.
- type: int
- description:
- description: Pnp Device's description.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- execTime:
- description: Pnp Device's execTime.
- type: int
- imageId:
- description: Pnp Device's imageId.
- type: str
- instanceType:
- description: Pnp Device's instanceType.
- type: str
- lastupdateOn:
- description: Pnp Device's lastupdateOn.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- tasks:
- description: Pnp Device's tasks.
- elements: dict
- suboptions:
- currWorkItemIdx:
- description: Pnp Device's currWorkItemIdx.
- type: int
- endTime:
- description: Pnp Device's endTime.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- taskSeqNo:
- description: Pnp Device's taskSeqNo.
- type: int
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type:
- description: Pnp Device's type.
- type: str
- workItemList:
- description: Pnp Device's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device's command.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- outputStr:
- description: Pnp Device's outputStr.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type: list
- type: list
- tenantId:
- description: Pnp Device's tenantId.
- type: str
- type:
- description: Pnp Device's type.
- type: str
- useState:
- description: Pnp Device's useState.
- type: str
- version:
- description: Pnp Device's version.
- type: int
- type: dict
- systemWorkflow:
- description: Pnp Device's systemWorkflow.
- suboptions:
- _id:
- description: Pnp Device's _id.
- type: str
- addToInventory:
- description: AddToInventory flag.
- type: bool
- addedOn:
- description: Pnp Device's addedOn.
- type: int
- configId:
- description: Pnp Device's configId.
- type: str
- currTaskIdx:
- description: Pnp Device's currTaskIdx.
- type: int
- description:
- description: Pnp Device's description.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- execTime:
- description: Pnp Device's execTime.
- type: int
- imageId:
- description: Pnp Device's imageId.
- type: str
- instanceType:
- description: Pnp Device's instanceType.
- type: str
- lastupdateOn:
- description: Pnp Device's lastupdateOn.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- tasks:
- description: Pnp Device's tasks.
- elements: dict
- suboptions:
- currWorkItemIdx:
- description: Pnp Device's currWorkItemIdx.
- type: int
- endTime:
- description: Pnp Device's endTime.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- taskSeqNo:
- description: Pnp Device's taskSeqNo.
- type: int
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type:
- description: Pnp Device's type.
- type: str
- workItemList:
- description: Pnp Device's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device's command.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- outputStr:
- description: Pnp Device's outputStr.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type: list
- type: list
- tenantId:
- description: Pnp Device's tenantId.
- type: str
- type:
- description: Pnp Device's type.
- type: str
- useState:
- description: Pnp Device's useState.
- type: str
- version:
- description: Pnp Device's version.
- type: int
- type: dict
- tenantId:
- description: Pnp Device's tenantId.
- type: str
- version:
- description: Pnp Device's version.
- type: int
- workflow:
- description: Pnp Device's workflow.
- suboptions:
- _id:
- description: Pnp Device's _id.
- type: str
- addToInventory:
- description: AddToInventory flag.
- type: bool
- addedOn:
- description: Pnp Device's addedOn.
- type: int
- configId:
- description: Pnp Device's configId.
- type: str
- currTaskIdx:
- description: Pnp Device's currTaskIdx.
- type: int
- description:
- description: Pnp Device's description.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- execTime:
- description: Pnp Device's execTime.
- type: int
- imageId:
- description: Pnp Device's imageId.
- type: str
- instanceType:
- description: Pnp Device's instanceType.
- type: str
- lastupdateOn:
- description: Pnp Device's lastupdateOn.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- tasks:
- description: Pnp Device's tasks.
- elements: dict
- suboptions:
- currWorkItemIdx:
- description: Pnp Device's currWorkItemIdx.
- type: int
- endTime:
- description: Pnp Device's endTime.
- type: int
- name:
- description: Pnp Device's name.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- taskSeqNo:
- description: Pnp Device's taskSeqNo.
- type: int
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type:
- description: Pnp Device's type.
- type: str
- workItemList:
- description: Pnp Device's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device's command.
- type: str
- endTime:
- description: Pnp Device's endTime.
- type: int
- outputStr:
- description: Pnp Device's outputStr.
- type: str
- startTime:
- description: Pnp Device's startTime.
- type: int
- state:
- description: Pnp Device's state.
- type: str
- timeTaken:
- description: Pnp Device's timeTaken.
- type: int
- type: list
- type: list
- tenantId:
- description: Pnp Device's tenantId.
- type: str
- type:
- description: Pnp Device's type.
- type: str
- useState:
- description: Pnp Device's useState.
- type: str
- version:
- description: Pnp Device's version.
- type: int
- type: dict
- workflowParameters:
- description: Pnp Device's workflowParameters.
- suboptions:
- configList:
- description: Pnp Device's configList.
- elements: dict
- suboptions:
- configId:
- description: Pnp Device's configId.
- type: str
- configParameters:
- description: Pnp Device's configParameters.
- elements: dict
- suboptions:
- key:
- description: Pnp Device's key.
- type: str
- value:
- description: Pnp Device's value.
- type: str
- type: list
- type: list
- licenseLevel:
- description: Pnp Device's licenseLevel.
- type: str
- licenseType:
- description: Pnp Device's licenseType.
- type: str
- topOfStackSerialNumber:
- description: Pnp Device's topOfStackSerialNumber.
- type: str
- type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) AddDevice
description: Complete reference of the AddDevice API.
@@ -859,99 +165,16 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
- _id: string
deviceInfo:
- aaaCredentials:
- password: string
- username: string
- addedOn: 0
- addnMacAddrs:
- - string
- agentType: string
- authStatus: string
- authenticatedSudiSerialNo: string
- capabilitiesSupported:
- - string
- cmState: string
description: string
deviceSudiSerialNos:
- string
- deviceType: string
- featuresSupported:
- - string
- fileSystemList:
- - freespace: 0
- name: string
- readable: true
- size: 0
- type: string
- writeable: true
- firstContact: 0
hostname: string
- httpHeaders:
- - key: string
- value: string
- imageFile: string
- imageVersion: string
- ipInterfaces:
- - ipv4Address: {}
- ipv6AddressList:
- - {}
- macAddress: string
- name: string
- status: string
- lastContact: 0
- lastSyncTime: 0
- lastUpdateOn: 0
- location:
- address: string
- altitude: string
- latitude: string
- longitude: string
- siteId: string
+ isSudiRequired: true
macAddress: string
- mode: string
- name: string
- neighborLinks:
- - localInterfaceName: string
- localMacAddress: string
- localShortInterfaceName: string
- remoteDeviceName: string
- remoteInterfaceName: string
- remoteMacAddress: string
- remotePlatform: string
- remoteShortInterfaceName: string
- remoteVersion: string
- onbState: string
pid: string
- pnpProfileList:
- - createdBy: string
- discoveryCreated: true
- primaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- profileName: string
- secondaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- populateInventory: true
- preWorkflowCliOuputs:
- - cli: string
- cliOutput: string
- projectId: string
- projectName: string
- reloadRequested: true
serialNumber: string
- smartAccountId: string
- source: string
+ siteId: string
stack: true
stackInfo:
isFullRing: true
@@ -973,148 +196,12 @@ EXAMPLES = r"""
totalMemberCount: 0
validLicenseLevels:
- string
- state: string
- sudiRequired: true
- tags: {}
+ userMicNumbers:
+ - string
userSudiSerialNos:
- string
- virtualAccountId: string
workflowId: string
workflowName: string
- runSummaryList:
- - details: string
- errorFlag: true
- historyTaskInfo:
- addnDetails:
- - key: string
- value: string
- name: string
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- timestamp: 0
- systemResetWorkflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- systemWorkflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- tenantId: string
- version: 0
- workflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- workflowParameters:
- configList:
- - configId: string
- configParameters:
- - key: string
- value: string
- licenseLevel: string
- licenseType: string
- topOfStackSerialNumber: string
- name: Update by id
cisco.dnac.pnp_device:
@@ -1126,263 +213,15 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
- _id: string
deviceInfo:
- aaaCredentials:
- password: string
- username: string
- addedOn: 0
- addnMacAddrs:
- - string
- agentType: string
- authStatus: string
- authenticatedSudiSerialNo: string
- capabilitiesSupported:
- - string
- cmState: string
- description: string
- deviceSudiSerialNos:
- - string
- deviceType: string
- featuresSupported:
- - string
- fileSystemList:
- - freespace: 0
- name: string
- readable: true
- size: 0
- type: string
- writeable: true
- firstContact: 0
hostname: string
- httpHeaders:
- - key: string
- value: string
- imageFile: string
- imageVersion: string
- ipInterfaces:
- - ipv4Address: {}
- ipv6AddressList:
- - {}
- macAddress: string
- name: string
- status: string
- lastContact: 0
- lastSyncTime: 0
- lastUpdateOn: 0
- location:
- address: string
- altitude: string
- latitude: string
- longitude: string
- siteId: string
- macAddress: string
- mode: string
- name: string
- neighborLinks:
- - localInterfaceName: string
- localMacAddress: string
- localShortInterfaceName: string
- remoteDeviceName: string
- remoteInterfaceName: string
- remoteMacAddress: string
- remotePlatform: string
- remoteShortInterfaceName: string
- remoteVersion: string
- onbState: string
pid: string
- pnpProfileList:
- - createdBy: string
- discoveryCreated: true
- primaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- profileName: string
- secondaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- populateInventory: true
- preWorkflowCliOuputs:
- - cli: string
- cliOutput: string
- projectId: string
- projectName: string
- reloadRequested: true
serialNumber: string
- smartAccountId: string
- source: string
stack: true
- stackInfo:
- isFullRing: true
- stackMemberList:
- - hardwareVersion: string
- licenseLevel: string
- licenseType: string
- macAddress: string
- pid: string
- priority: 0
- role: string
- serialNumber: string
- softwareVersion: string
- stackNumber: 0
- state: string
- sudiSerialNumber: string
- stackRingProtocol: string
- supportsStackWorkflows: true
- totalMemberCount: 0
- validLicenseLevels:
- - string
- state: string
sudiRequired: true
- tags: {}
- userSudiSerialNos:
- - string
- virtualAccountId: string
- workflowId: string
- workflowName: string
+ sudiSerialNos:
+ - {}
id: string
- runSummaryList:
- - details: string
- errorFlag: true
- historyTaskInfo:
- addnDetails:
- - key: string
- value: string
- name: string
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- timestamp: 0
- systemResetWorkflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- systemWorkflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- tenantId: string
- version: 0
- workflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- workflowParameters:
- configList:
- - configId: string
- configParameters:
- - key: string
- value: string
- licenseLevel: string
- licenseType: string
- topOfStackSerialNumber: string
- name: Delete by id
cisco.dnac.pnp_device:
@@ -1397,7 +236,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_authorize.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_authorize.py
index 2a033f504..c099ff52a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_authorize.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_authorize.py
@@ -11,7 +11,7 @@ short_description: Resource module for Pnp Device Authorize
description:
- Manage operation create of the resource Pnp Device Authorize.
- Authorizes one or more devices. A device can only be authorized if Authorization is set in Device Settings.
-version_added: '6.5.0'
+version_added: '6.0.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -21,15 +21,15 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for AuthorizeDevice
+- name: Cisco DNA Center documentation for Device Onboarding (PnP) AuthorizeDevice
description: Complete reference of the AuthorizeDevice API.
link: https://developer.cisco.com/docs/dna-center/#!authorize-device
notes:
- SDK Method used are
- ..authorize_device,
+ device_onboarding_pnp.DeviceOnboardingPnp.authorize_device,
- Paths used are
post /api/v1/onboarding/pnp-device/authorize,
@@ -50,7 +50,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
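
As a hedged illustration of the authorize operation documented above, a minimal playbook task might look like the following; the device id is a placeholder (typically taken from pnp_device_info output) and the connection variables are assumed to be defined elsewhere:

    - name: Authorize staged PnP devices (illustrative sketch)
      cisco.dnac.pnp_device_authorize:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: "{{ dnac_verify }}"
        deviceIdList:
          - "60b7f9a1c4b9e800c1a2d3e4"  # placeholder PnP device id
      register: result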
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim.py
index 4ce1b3389..795d77ab3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim.py
@@ -77,8 +77,8 @@ options:
description: Pnp Device Claim's workflowId.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) ClaimDevice
description: Complete reference of the ClaimDevice API.
@@ -122,7 +122,6 @@ EXAMPLES = r"""
workflowId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim_to_site.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim_to_site.py
index 2e3bdd0a5..9085bb26a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim_to_site.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_claim_to_site.py
@@ -10,7 +10,9 @@ module: pnp_device_claim_to_site
short_description: Resource module for Pnp Device Claim To Site
description:
- Manage operation create of the resource Pnp Device Claim To Site.
-- Claim a device based on DNA-C Site-based design process. Some required parameters differ based on device platform.
+- >
+  Claim a device based on the Catalyst Center Site-based design process. Some required parameters differ based on device
+ platform.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -18,13 +20,13 @@ author: Rafael Campos (@racampos)
options:
configInfo:
description: Pnp Device Claim To Site's configInfo.
- elements: dict
suboptions:
configId:
description: Config Id.
type: str
configParameters:
description: Pnp Device Claim To Site's configParameters.
+ elements: dict
suboptions:
key:
description: Key.
@@ -32,8 +34,8 @@ options:
value:
description: Value.
type: str
- type: dict
- type: list
+ type: list
+ type: dict
version_added: 4.2.0
deviceId:
description: Device Id.
@@ -53,9 +55,10 @@ options:
type: bool
type: dict
version_added: 4.2.0
- interfaceName:
+ ipInterfaceName:
description: For Catalyst 9800 WLC.
type: str
+ version_added: 6.4.0
rfProfile:
description: For Access Points.
type: str
@@ -76,12 +79,13 @@ options:
type:
description: Type.
type: str
- vlanID:
+ vlanId:
description: For Catalyst 9800 WLC.
type: str
+ version_added: 6.4.0
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) ClaimADeviceToASite
description: Complete reference of the ClaimADeviceToASite API.
@@ -106,26 +110,25 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
configInfo:
- - configId: string
+ configId: string
configParameters:
- key: string
+ - key: string
value: string
deviceId: string
gateway: string
imageInfo:
imageId: string
skip: true
- interfaceName: string
+ ipInterfaceName: string
rfProfile: string
sensorProfile: string
siteId: string
staticIP: string
subnetMask: string
type: string
- vlanID: string
+ vlanId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
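
To make the schema change above concrete, here is a hedged sketch of a claim task using the new shape: configInfo is now a single dict, configParameters carries the list, and the Catalyst 9800 WLC fields are ipInterfaceName/vlanId. All ids and values are placeholders, and the type value is an assumption to be checked against the ClaimADeviceToASite API reference:

    - name: Claim a device to a site (illustrative sketch)
      cisco.dnac.pnp_device_claim_to_site:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        deviceId: "placeholder-pnp-device-id"
        siteId: "placeholder-site-id"
        type: Default  # assumed value; see the API reference for the supported types
        configInfo:
          configId: "placeholder-template-id"
          configParameters:
            - key: HOSTNAME
              value: edge-sw-01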
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_config_preview.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_config_preview.py
index 24d1ae219..826c24829 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_config_preview.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_config_preview.py
@@ -26,8 +26,8 @@ options:
description: Pnp Device Config Preview's type.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) PreviewConfig
description: Complete reference of the PreviewConfig API.
@@ -56,7 +56,6 @@ EXAMPLES = r"""
type: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_count_info.py
index 94dc35fa2..33ae966d2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_count_info.py
@@ -34,11 +34,6 @@ options:
- OnbState query parameter. Device Onboarding State.
elements: str
type: list
- cmState:
- description:
- - CmState query parameter. Device Connection Manager State.
- elements: str
- type: list
name:
description:
- Name query parameter. Device Name.
@@ -54,21 +49,11 @@ options:
- Source query parameter. Device Source.
elements: str
type: list
- projectId:
- description:
- - ProjectId query parameter. Device Project Id.
- elements: str
- type: list
workflowId:
description:
- WorkflowId query parameter. Device Workflow Id.
elements: str
type: list
- projectName:
- description:
- - ProjectName query parameter. Device Project Name.
- elements: str
- type: list
workflowName:
description:
- WorkflowName query parameter. Device Workflow Name.
@@ -89,8 +74,8 @@ options:
- LastContact query parameter. Device Has Contacted lastContact > 0.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetDeviceCount
description: Complete reference of the GetDeviceCount API.
@@ -118,13 +103,10 @@ EXAMPLES = r"""
serialNumber: []
state_: []
onbState: []
- cmState: []
name: []
pid: []
source: []
- projectId: []
workflowId: []
- projectName: []
workflowName: []
smartAccountId: []
virtualAccountId: []
@@ -132,7 +114,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
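
As a hedged example of the trimmed filter set (cmState, projectId and projectName are no longer accepted), a count of unclaimed devices might be requested as below; the state value is assumed from the usual PnP onboarding states:

    - name: Count unclaimed PnP devices (illustrative sketch)
      cisco.dnac.pnp_device_count_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        state_:
          - Unclaimed  # assumed onboarding state value
      register: result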
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_history_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_history_info.py
index 781a2f035..156402efd 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_history_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_history_info.py
@@ -33,8 +33,8 @@ options:
- SortOrder query parameter. Sort Order Ascending (asc) or Descending (des).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetDeviceHistory
description: Complete reference of the GetDeviceHistory API.
@@ -65,7 +65,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_import.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_import.py
index ed0802a80..35b4e6fad 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_import.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_import.py
@@ -21,810 +21,119 @@ options:
elements: dict
suboptions:
_id:
- description: Pnp Device Import's _id.
+ description: Id.
type: str
deviceInfo:
description: Pnp Device Import's deviceInfo.
suboptions:
- aaaCredentials:
- description: Pnp Device Import's aaaCredentials.
- suboptions:
- password:
- description: Pnp Device Import's password.
- type: str
- username:
- description: Pnp Device Import's username.
- type: str
- type: dict
- addedOn:
- description: Pnp Device Import's addedOn.
- type: int
- addnMacAddrs:
- description: Pnp Device Import's addnMacAddrs.
- elements: str
- type: list
- agentType:
- description: Pnp Device Import's agentType.
- type: str
- authStatus:
- description: Pnp Device Import's authStatus.
- type: str
- authenticatedSudiSerialNo:
- description: Pnp Device Import's authenticatedSudiSerialNo.
- type: str
- capabilitiesSupported:
- description: Pnp Device Import's capabilitiesSupported.
- elements: str
- type: list
- cmState:
- description: Pnp Device Import's cmState.
- type: str
description:
- description: Pnp Device Import's description.
+ description: Description.
type: str
deviceSudiSerialNos:
- description: Pnp Device Import's deviceSudiSerialNos.
- elements: str
- type: list
- deviceType:
- description: Pnp Device Import's deviceType.
- type: str
- featuresSupported:
- description: Pnp Device Import's featuresSupported.
+ description: Device Sudi Serial Nos.
elements: str
type: list
- fileSystemList:
- description: Pnp Device Import's fileSystemList.
- elements: dict
- suboptions:
- freespace:
- description: Pnp Device Import's freespace.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- readable:
- description: Readable flag.
- type: bool
- size:
- description: Pnp Device Import's size.
- type: int
- type:
- description: Pnp Device Import's type.
- type: str
- writeable:
- description: Writeable flag.
- type: bool
- type: list
- firstContact:
- description: Pnp Device Import's firstContact.
- type: int
hostname:
- description: Pnp Device Import's hostname.
- type: str
- httpHeaders:
- description: Pnp Device Import's httpHeaders.
- elements: dict
- suboptions:
- key:
- description: Pnp Device Import's key.
- type: str
- value:
- description: Pnp Device Import's value.
- type: str
- type: list
- imageFile:
- description: Pnp Device Import's imageFile.
+ description: Hostname.
type: str
- imageVersion:
- description: Pnp Device Import's imageVersion.
- type: str
- ipInterfaces:
- description: Pnp Device Import's ipInterfaces.
- elements: dict
- suboptions:
- ipv4Address:
- description: Pnp Device Import's ipv4Address.
- type: dict
- ipv6AddressList:
- description: Pnp Device Import's ipv6AddressList.
- elements: dict
- type: list
- macAddress:
- description: Pnp Device Import's macAddress.
- type: str
- name:
- description: Pnp Device Import's name.
- type: str
- status:
- description: Pnp Device Import's status.
- type: str
- type: list
- lastContact:
- description: Pnp Device Import's lastContact.
- type: int
- lastSyncTime:
- description: Pnp Device Import's lastSyncTime.
- type: int
- lastUpdateOn:
- description: Pnp Device Import's lastUpdateOn.
- type: int
- location:
- description: Pnp Device Import's location.
- suboptions:
- address:
- description: Pnp Device Import's address.
- type: str
- altitude:
- description: Pnp Device Import's altitude.
- type: str
- latitude:
- description: Pnp Device Import's latitude.
- type: str
- longitude:
- description: Pnp Device Import's longitude.
- type: str
- siteId:
- description: Pnp Device Import's siteId.
- type: str
- type: dict
+ isSudiRequired:
+ description: Is Sudi Required.
+ type: bool
macAddress:
- description: Pnp Device Import's macAddress.
- type: str
- mode:
- description: Pnp Device Import's mode.
- type: str
- name:
- description: Pnp Device Import's name.
- type: str
- neighborLinks:
- description: Pnp Device Import's neighborLinks.
- elements: dict
- suboptions:
- localInterfaceName:
- description: Pnp Device Import's localInterfaceName.
- type: str
- localMacAddress:
- description: Pnp Device Import's localMacAddress.
- type: str
- localShortInterfaceName:
- description: Pnp Device Import's localShortInterfaceName.
- type: str
- remoteDeviceName:
- description: Pnp Device Import's remoteDeviceName.
- type: str
- remoteInterfaceName:
- description: Pnp Device Import's remoteInterfaceName.
- type: str
- remoteMacAddress:
- description: Pnp Device Import's remoteMacAddress.
- type: str
- remotePlatform:
- description: Pnp Device Import's remotePlatform.
- type: str
- remoteShortInterfaceName:
- description: Pnp Device Import's remoteShortInterfaceName.
- type: str
- remoteVersion:
- description: Pnp Device Import's remoteVersion.
- type: str
- type: list
- onbState:
- description: Pnp Device Import's onbState.
+ description: Mac Address.
type: str
pid:
- description: Pnp Device Import's pid.
- type: str
- pnpProfileList:
- description: Pnp Device Import's pnpProfileList.
- elements: dict
- suboptions:
- createdBy:
- description: Pnp Device Import's createdBy.
- type: str
- discoveryCreated:
- description: DiscoveryCreated flag.
- type: bool
- primaryEndpoint:
- description: Pnp Device Import's primaryEndpoint.
- suboptions:
- certificate:
- description: Pnp Device Import's certificate.
- type: str
- fqdn:
- description: Pnp Device Import's fqdn.
- type: str
- ipv4Address:
- description: Pnp Device Import's ipv4Address.
- type: dict
- ipv6Address:
- description: Pnp Device Import's ipv6Address.
- type: dict
- port:
- description: Pnp Device Import's port.
- type: int
- protocol:
- description: Pnp Device Import's protocol.
- type: str
- type: dict
- profileName:
- description: Pnp Device Import's profileName.
- type: str
- secondaryEndpoint:
- description: Pnp Device Import's secondaryEndpoint.
- suboptions:
- certificate:
- description: Pnp Device Import's certificate.
- type: str
- fqdn:
- description: Pnp Device Import's fqdn.
- type: str
- ipv4Address:
- description: Pnp Device Import's ipv4Address.
- type: dict
- ipv6Address:
- description: Pnp Device Import's ipv6Address.
- type: dict
- port:
- description: Pnp Device Import's port.
- type: int
- protocol:
- description: Pnp Device Import's protocol.
- type: str
- type: dict
- type: list
- populateInventory:
- description: PopulateInventory flag.
- type: bool
- preWorkflowCliOuputs:
- description: Pnp Device Import's preWorkflowCliOuputs.
- elements: dict
- suboptions:
- cli:
- description: Pnp Device Import's cli.
- type: str
- cliOutput:
- description: Pnp Device Import's cliOutput.
- type: str
- type: list
- projectId:
- description: Pnp Device Import's projectId.
+ description: Pid.
type: str
- projectName:
- description: Pnp Device Import's projectName.
- type: str
- reloadRequested:
- description: ReloadRequested flag.
- type: bool
serialNumber:
- description: Pnp Device Import's serialNumber.
- type: str
- smartAccountId:
- description: Pnp Device Import's smartAccountId.
+ description: Serial Number.
type: str
- source:
- description: Pnp Device Import's source.
+ siteId:
+ description: Site Id.
type: str
stack:
- description: Stack flag.
+ description: Stack.
type: bool
stackInfo:
description: Pnp Device Import's stackInfo.
suboptions:
isFullRing:
- description: IsFullRing flag.
+ description: Is Full Ring.
type: bool
stackMemberList:
description: Pnp Device Import's stackMemberList.
elements: dict
suboptions:
hardwareVersion:
- description: Pnp Device Import's hardwareVersion.
+ description: Hardware Version.
type: str
licenseLevel:
- description: Pnp Device Import's licenseLevel.
+ description: License Level.
type: str
licenseType:
- description: Pnp Device Import's licenseType.
+ description: License Type.
type: str
macAddress:
- description: Pnp Device Import's macAddress.
+ description: Mac Address.
type: str
pid:
- description: Pnp Device Import's pid.
+ description: Pid.
type: str
priority:
- description: Pnp Device Import's priority.
- type: int
+ description: Priority.
+ type: float
role:
- description: Pnp Device Import's role.
+ description: Role.
type: str
serialNumber:
- description: Pnp Device Import's serialNumber.
+ description: Serial Number.
type: str
softwareVersion:
- description: Pnp Device Import's softwareVersion.
+ description: Software Version.
type: str
stackNumber:
- description: Pnp Device Import's stackNumber.
- type: int
+ description: Stack Number.
+ type: float
state:
- description: Pnp Device Import's state.
+ description: State.
type: str
sudiSerialNumber:
- description: Pnp Device Import's sudiSerialNumber.
+ description: Sudi Serial Number.
type: str
type: list
stackRingProtocol:
- description: Pnp Device Import's stackRingProtocol.
+ description: Stack Ring Protocol.
type: str
supportsStackWorkflows:
- description: SupportsStackWorkflows flag.
+ description: Supports Stack Workflows.
type: bool
totalMemberCount:
- description: Pnp Device Import's totalMemberCount.
- type: int
+ description: Total Member Count.
+ type: float
validLicenseLevels:
- description: Pnp Device Import's validLicenseLevels.
+ description: Valid License Levels.
elements: str
type: list
type: dict
- state:
- description: Pnp Device Import's state.
- type: str
- sudiRequired:
- description: SudiRequired flag.
- type: bool
- tags:
- description: Pnp Device Import's tags.
- type: dict
+ userMicNumbers:
+ description: User Mic Numbers.
+ elements: str
+ type: list
userSudiSerialNos:
- description: Pnp Device Import's userSudiSerialNos.
+ description: User Sudi Serial Nos.
elements: str
type: list
- virtualAccountId:
- description: Pnp Device Import's virtualAccountId.
- type: str
workflowId:
- description: Pnp Device Import's workflowId.
+ description: Workflow Id.
type: str
workflowName:
- description: Pnp Device Import's workflowName.
- type: str
- type: dict
- runSummaryList:
- description: Pnp Device Import's runSummaryList.
- elements: dict
- suboptions:
- details:
- description: Pnp Device Import's details.
- type: str
- errorFlag:
- description: ErrorFlag flag.
- type: bool
- historyTaskInfo:
- description: Pnp Device Import's historyTaskInfo.
- suboptions:
- addnDetails:
- description: Pnp Device Import's addnDetails.
- elements: dict
- suboptions:
- key:
- description: Pnp Device Import's key.
- type: str
- value:
- description: Pnp Device Import's value.
- type: str
- type: list
- name:
- description: Pnp Device Import's name.
- type: str
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type:
- description: Pnp Device Import's type.
- type: str
- workItemList:
- description: Pnp Device Import's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device Import's command.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- outputStr:
- description: Pnp Device Import's outputStr.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type: list
- type: dict
- timestamp:
- description: Pnp Device Import's timestamp.
- type: int
- type: list
- systemResetWorkflow:
- description: Pnp Device Import's systemResetWorkflow.
- suboptions:
- _id:
- description: Pnp Device Import's _id.
- type: str
- addToInventory:
- description: AddToInventory flag.
- type: bool
- addedOn:
- description: Pnp Device Import's addedOn.
- type: int
- configId:
- description: Pnp Device Import's configId.
- type: str
- currTaskIdx:
- description: Pnp Device Import's currTaskIdx.
- type: int
- description:
- description: Pnp Device Import's description.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- execTime:
- description: Pnp Device Import's execTime.
- type: int
- imageId:
- description: Pnp Device Import's imageId.
- type: str
- instanceType:
- description: Pnp Device Import's instanceType.
- type: str
- lastupdateOn:
- description: Pnp Device Import's lastupdateOn.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- tasks:
- description: Pnp Device Import's tasks.
- elements: dict
- suboptions:
- currWorkItemIdx:
- description: Pnp Device Import's currWorkItemIdx.
- type: int
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- taskSeqNo:
- description: Pnp Device Import's taskSeqNo.
- type: int
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type:
- description: Pnp Device Import's type.
- type: str
- workItemList:
- description: Pnp Device Import's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device Import's command.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- outputStr:
- description: Pnp Device Import's outputStr.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type: list
- type: list
- tenantId:
- description: Pnp Device Import's tenantId.
- type: str
- type:
- description: Pnp Device Import's type.
- type: str
- useState:
- description: Pnp Device Import's useState.
- type: str
- version:
- description: Pnp Device Import's version.
- type: int
- type: dict
- systemWorkflow:
- description: Pnp Device Import's systemWorkflow.
- suboptions:
- _id:
- description: Pnp Device Import's _id.
- type: str
- addToInventory:
- description: AddToInventory flag.
- type: bool
- addedOn:
- description: Pnp Device Import's addedOn.
- type: int
- configId:
- description: Pnp Device Import's configId.
- type: str
- currTaskIdx:
- description: Pnp Device Import's currTaskIdx.
- type: int
- description:
- description: Pnp Device Import's description.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- execTime:
- description: Pnp Device Import's execTime.
- type: int
- imageId:
- description: Pnp Device Import's imageId.
- type: str
- instanceType:
- description: Pnp Device Import's instanceType.
- type: str
- lastupdateOn:
- description: Pnp Device Import's lastupdateOn.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- tasks:
- description: Pnp Device Import's tasks.
- elements: dict
- suboptions:
- currWorkItemIdx:
- description: Pnp Device Import's currWorkItemIdx.
- type: int
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- taskSeqNo:
- description: Pnp Device Import's taskSeqNo.
- type: int
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type:
- description: Pnp Device Import's type.
- type: str
- workItemList:
- description: Pnp Device Import's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device Import's command.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- outputStr:
- description: Pnp Device Import's outputStr.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type: list
- type: list
- tenantId:
- description: Pnp Device Import's tenantId.
- type: str
- type:
- description: Pnp Device Import's type.
- type: str
- useState:
- description: Pnp Device Import's useState.
- type: str
- version:
- description: Pnp Device Import's version.
- type: int
- type: dict
- tenantId:
- description: Pnp Device Import's tenantId.
- type: str
- version:
- description: Pnp Device Import's version.
- type: int
- workflow:
- description: Pnp Device Import's workflow.
- suboptions:
- _id:
- description: Pnp Device Import's _id.
- type: str
- addToInventory:
- description: AddToInventory flag.
- type: bool
- addedOn:
- description: Pnp Device Import's addedOn.
- type: int
- configId:
- description: Pnp Device Import's configId.
- type: str
- currTaskIdx:
- description: Pnp Device Import's currTaskIdx.
- type: int
- description:
- description: Pnp Device Import's description.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- execTime:
- description: Pnp Device Import's execTime.
- type: int
- imageId:
- description: Pnp Device Import's imageId.
- type: str
- instanceType:
- description: Pnp Device Import's instanceType.
- type: str
- lastupdateOn:
- description: Pnp Device Import's lastupdateOn.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- tasks:
- description: Pnp Device Import's tasks.
- elements: dict
- suboptions:
- currWorkItemIdx:
- description: Pnp Device Import's currWorkItemIdx.
- type: int
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- name:
- description: Pnp Device Import's name.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- taskSeqNo:
- description: Pnp Device Import's taskSeqNo.
- type: int
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type:
- description: Pnp Device Import's type.
- type: str
- workItemList:
- description: Pnp Device Import's workItemList.
- elements: dict
- suboptions:
- command:
- description: Pnp Device Import's command.
- type: str
- endTime:
- description: Pnp Device Import's endTime.
- type: int
- outputStr:
- description: Pnp Device Import's outputStr.
- type: str
- startTime:
- description: Pnp Device Import's startTime.
- type: int
- state:
- description: Pnp Device Import's state.
- type: str
- timeTaken:
- description: Pnp Device Import's timeTaken.
- type: int
- type: list
- type: list
- tenantId:
- description: Pnp Device Import's tenantId.
- type: str
- type:
- description: Pnp Device Import's type.
- type: str
- useState:
- description: Pnp Device Import's useState.
- type: str
- version:
- description: Pnp Device Import's version.
- type: int
- type: dict
- workflowParameters:
- description: Pnp Device Import's workflowParameters.
- suboptions:
- configList:
- description: Pnp Device Import's configList.
- elements: dict
- suboptions:
- configId:
- description: Pnp Device Import's configId.
- type: str
- configParameters:
- description: Pnp Device Import's configParameters.
- elements: dict
- suboptions:
- key:
- description: Pnp Device Import's key.
- type: str
- value:
- description: Pnp Device Import's value.
- type: str
- type: list
- type: list
- licenseLevel:
- description: Pnp Device Import's licenseLevel.
- type: str
- licenseType:
- description: Pnp Device Import's licenseType.
- type: str
- topOfStackSerialNumber:
- description: Pnp Device Import's topOfStackSerialNumber.
+ description: Workflow Name.
type: str
type: dict
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) ImportDevicesInBulk
description: Complete reference of the ImportDevicesInBulk API.
@@ -851,97 +160,15 @@ EXAMPLES = r"""
payload:
- _id: string
deviceInfo:
- aaaCredentials:
- password: string
- username: string
- addedOn: 0
- addnMacAddrs:
- - string
- agentType: string
- authStatus: string
- authenticatedSudiSerialNo: string
- capabilitiesSupported:
- - string
- cmState: string
description: string
deviceSudiSerialNos:
- string
- deviceType: string
- featuresSupported:
- - string
- fileSystemList:
- - freespace: 0
- name: string
- readable: true
- size: 0
- type: string
- writeable: true
- firstContact: 0
hostname: string
- httpHeaders:
- - key: string
- value: string
- imageFile: string
- imageVersion: string
- ipInterfaces:
- - ipv4Address: {}
- ipv6AddressList:
- - {}
- macAddress: string
- name: string
- status: string
- lastContact: 0
- lastSyncTime: 0
- lastUpdateOn: 0
- location:
- address: string
- altitude: string
- latitude: string
- longitude: string
- siteId: string
+ isSudiRequired: true
macAddress: string
- mode: string
- name: string
- neighborLinks:
- - localInterfaceName: string
- localMacAddress: string
- localShortInterfaceName: string
- remoteDeviceName: string
- remoteInterfaceName: string
- remoteMacAddress: string
- remotePlatform: string
- remoteShortInterfaceName: string
- remoteVersion: string
- onbState: string
pid: string
- pnpProfileList:
- - createdBy: string
- discoveryCreated: true
- primaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- profileName: string
- secondaryEndpoint:
- certificate: string
- fqdn: string
- ipv4Address: {}
- ipv6Address: {}
- port: 0
- protocol: string
- populateInventory: true
- preWorkflowCliOuputs:
- - cli: string
- cliOutput: string
- projectId: string
- projectName: string
- reloadRequested: true
serialNumber: string
- smartAccountId: string
- source: string
+ siteId: string
stack: true
stackInfo:
isFullRing: true
@@ -963,151 +190,14 @@ EXAMPLES = r"""
totalMemberCount: 0
validLicenseLevels:
- string
- state: string
- sudiRequired: true
- tags: {}
+ userMicNumbers:
+ - string
userSudiSerialNos:
- string
- virtualAccountId: string
workflowId: string
workflowName: string
- runSummaryList:
- - details: string
- errorFlag: true
- historyTaskInfo:
- addnDetails:
- - key: string
- value: string
- name: string
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- timestamp: 0
- systemResetWorkflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- systemWorkflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- tenantId: string
- version: 0
- workflow:
- _id: string
- addToInventory: true
- addedOn: 0
- configId: string
- currTaskIdx: 0
- description: string
- endTime: 0
- execTime: 0
- imageId: string
- instanceType: string
- lastupdateOn: 0
- name: string
- startTime: 0
- state: string
- tasks:
- - currWorkItemIdx: 0
- endTime: 0
- name: string
- startTime: 0
- state: string
- taskSeqNo: 0
- timeTaken: 0
- type: string
- workItemList:
- - command: string
- endTime: 0
- outputStr: string
- startTime: 0
- state: string
- timeTaken: 0
- tenantId: string
- type: string
- useState: string
- version: 0
- workflowParameters:
- configList:
- - configId: string
- configParameters:
- - key: string
- value: string
- licenseLevel: string
- licenseType: string
- topOfStackSerialNumber: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -1117,7 +207,6 @@ dnac_response:
{
"successList": [
{
- "_id": "string",
"id": "string",
"deviceInfo": {
"source": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_info.py
index 06500ec52..5cd711114 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_info.py
@@ -13,8 +13,8 @@ description:
- Get Pnp Device by id.
- Returns device details specified by device id.
- >
- Returns list of devices based on filter crieteria. If a limit is not specified, it will default to return 50
- devices. Pagination and sorting are also supported by this endpoint.
+  Returns a list of devices from Plug & Play based on filter criteria. Returns 50 devices by default. This endpoint
+  supports pagination and sorting.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -55,11 +55,6 @@ options:
- OnbState query parameter. Device Onboarding State.
elements: str
type: list
- cmState:
- description:
- - CmState query parameter. Device Connection Manager State.
- elements: str
- type: list
name:
description:
- Name query parameter. Device Name.
@@ -75,21 +70,11 @@ options:
- Source query parameter. Device Source.
elements: str
type: list
- projectId:
- description:
- - ProjectId query parameter. Device Project Id.
- elements: str
- type: list
workflowId:
description:
- WorkflowId query parameter. Device Workflow Id.
elements: str
type: list
- projectName:
- description:
- - ProjectName query parameter. Device Project Name.
- elements: str
- type: list
workflowName:
description:
- WorkflowName query parameter. Device Workflow Name.
@@ -126,8 +111,8 @@ options:
- Id path parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetDeviceById
description: Complete reference of the GetDeviceById API.
@@ -164,13 +149,10 @@ EXAMPLES = r"""
serialNumber: []
state_: []
onbState: []
- cmState: []
name: []
pid: []
source: []
- projectId: []
workflowId: []
- projectName: []
workflowName: []
smartAccountId: []
virtualAccountId: []
@@ -194,7 +176,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
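
The pagination note above can be exercised as in this hedged sketch; limit and offset are assumed query parameters of the module (they are not shown in this hunk) and the values are arbitrary:

    - name: Get the second page of 50 PnP devices (illustrative sketch)
      cisco.dnac.pnp_device_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        limit: 50   # assumed pagination parameter
        offset: 50
      register: result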
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_reset.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_reset.py
index df5a7fe15..e1d5514ba 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_reset.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_reset.py
@@ -59,8 +59,8 @@ options:
description: Pnp Device Reset's workflowId.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) ResetDevice
description: Complete reference of the ResetDevice API.
@@ -98,7 +98,6 @@ EXAMPLES = r"""
workflowId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_unclaim.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_unclaim.py
index 81ac6a27b..2e3ba3caf 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_device_unclaim.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_device_unclaim.py
@@ -10,7 +10,7 @@ module: pnp_device_unclaim
short_description: Resource module for Pnp Device Unclaim
description:
- Manage operation create of the resource Pnp Device Unclaim.
-- Un-Claims one of more devices with specified workflow.
+- Un-Claims one or more devices with the specified workflow (deprecated).
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -21,8 +21,8 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) UnClaimDevice
description: Complete reference of the UnClaimDevice API.
@@ -50,7 +50,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings.py
index b7bd4f09e..c0a0d82ea 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings.py
@@ -16,150 +16,81 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
- _id:
- description: Pnp Global Settings's _id.
- type: str
- aaaCredentials:
- description: Pnp Global Settings's aaaCredentials.
- suboptions:
- password:
- description: Pnp Global Settings's password.
- type: str
- username:
- description: Pnp Global Settings's username.
- type: str
- type: dict
acceptEula:
- description: AcceptEula flag.
- type: bool
+ description: Accept Eula.
+ type: str
defaultProfile:
description: Pnp Global Settings's defaultProfile.
suboptions:
cert:
- description: Pnp Global Settings's cert.
+ description: Cert.
type: str
fqdnAddresses:
- description: Pnp Global Settings's fqdnAddresses.
+ description: Fqdn Addresses.
elements: str
type: list
ipAddresses:
- description: Pnp Global Settings's ipAddresses.
+ description: Ip Addresses.
elements: str
type: list
port:
- description: Pnp Global Settings's port.
- type: int
+ description: Port.
+ type: str
proxy:
- description: Proxy flag.
- type: bool
+ description: Proxy.
+ type: str
type: dict
+ id:
+ description: Id.
+ type: str
savaMappingList:
description: Pnp Global Settings's savaMappingList.
elements: dict
suboptions:
- autoSyncPeriod:
- description: Pnp Global Settings's autoSyncPeriod.
- type: int
ccoUser:
- description: Pnp Global Settings's ccoUser.
+ description: Cco User.
type: str
expiry:
- description: Pnp Global Settings's expiry.
- type: int
- lastSync:
- description: Pnp Global Settings's lastSync.
- type: int
+ description: Expiry.
+ type: str
profile:
description: Pnp Global Settings's profile.
suboptions:
addressFqdn:
- description: Pnp Global Settings's addressFqdn.
+ description: Address Fqdn.
type: str
addressIpV4:
- description: Pnp Global Settings's addressIpV4.
+ description: Address Ip V4.
type: str
cert:
- description: Pnp Global Settings's cert.
+ description: Cert.
type: str
makeDefault:
- description: MakeDefault flag.
- type: bool
+ description: Make Default.
+ type: str
name:
- description: Pnp Global Settings's name.
+ description: Name.
type: str
port:
- description: Pnp Global Settings's port.
- type: int
+ description: Port.
+ type: str
profileId:
- description: Pnp Global Settings's profileId.
+ description: Profile Id.
type: str
proxy:
- description: Proxy flag.
- type: bool
- type: dict
- smartAccountId:
- description: Pnp Global Settings's smartAccountId.
- type: str
- syncResult:
- description: Pnp Global Settings's syncResult.
- suboptions:
- syncList:
- description: Pnp Global Settings's syncList.
- elements: dict
- suboptions:
- deviceSnList:
- description: Pnp Global Settings's deviceSnList.
- elements: str
- type: list
- syncType:
- description: Pnp Global Settings's syncType.
- type: str
- type: list
- syncMsg:
- description: Pnp Global Settings's syncMsg.
+ description: Proxy.
type: str
type: dict
- syncResultStr:
- description: Pnp Global Settings's syncResultStr.
- type: str
- syncStartTime:
- description: Pnp Global Settings's syncStartTime.
- type: int
- syncStatus:
- description: Pnp Global Settings's syncStatus.
- type: str
- tenantId:
- description: Pnp Global Settings's tenantId.
- type: str
- token:
- description: Pnp Global Settings's token.
+ smartAccountId:
+ description: Smart Account Id.
type: str
virtualAccountId:
- description: Pnp Global Settings's virtualAccountId.
+ description: Virtual Account Id.
type: str
type: list
- taskTimeOuts:
- description: Pnp Global Settings's taskTimeOuts.
- suboptions:
- configTimeOut:
- description: Pnp Global Settings's configTimeOut.
- type: int
- generalTimeOut:
- description: Pnp Global Settings's generalTimeOut.
- type: int
- imageDownloadTimeOut:
- description: Pnp Global Settings's imageDownloadTimeOut.
- type: int
- type: dict
- tenantId:
- description: Pnp Global Settings's tenantId.
- type: str
- version:
- description: Pnp Global Settings's version.
- type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) UpdatePnPGlobalSettings
description: Complete reference of the UpdatePnPGlobalSettings API.
@@ -184,55 +115,32 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
- _id: string
- aaaCredentials:
- password: string
- username: string
- acceptEula: true
+ acceptEula: string
defaultProfile:
cert: string
fqdnAddresses:
- string
ipAddresses:
- string
- port: 0
- proxy: true
+ port: string
+ proxy: string
+ id: string
savaMappingList:
- - autoSyncPeriod: 0
- ccoUser: string
- expiry: 0
- lastSync: 0
+ - ccoUser: string
+ expiry: string
profile:
addressFqdn: string
addressIpV4: string
cert: string
- makeDefault: true
+ makeDefault: string
name: string
- port: 0
+ port: string
profileId: string
- proxy: true
+ proxy: string
smartAccountId: string
- syncResult:
- syncList:
- - deviceSnList:
- - string
- syncType: string
- syncMsg: string
- syncResultStr: string
- syncStartTime: 0
- syncStatus: string
- tenantId: string
- token: string
virtualAccountId: string
- taskTimeOuts:
- configTimeOut: 0
- generalTimeOut: 0
- imageDownloadTimeOut: 0
- tenantId: string
- version: 0
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -299,7 +207,6 @@ dnac_response:
},
"acceptEula": true,
"id": "string",
- "_id": "string",
"version": 0
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings_info.py
index e9c124ec5..f7dbdb8b1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_global_settings_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetPnPGlobalSettings
description: Complete reference of the GetPnPGlobalSettings API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -116,7 +115,6 @@ dnac_response:
},
"acceptEula": true,
"id": "string",
- "_id": "string",
"version": 0
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_server_profile_update.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_server_profile_update.py
index ee90a6978..0cc901a3d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_server_profile_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_server_profile_update.py
@@ -18,89 +18,46 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
- autoSyncPeriod:
- description: Pnp Server Profile Update's autoSyncPeriod.
- type: int
ccoUser:
- description: Pnp Server Profile Update's ccoUser.
+ description: Cco User.
type: str
- expiry:
- description: Pnp Server Profile Update's expiry.
- type: int
- lastSync:
- description: Pnp Server Profile Update's lastSync.
- type: int
profile:
description: Pnp Server Profile Update's profile.
suboptions:
addressFqdn:
- description: Pnp Server Profile Update's addressFqdn.
+ description: Address Fqdn.
type: str
addressIpV4:
- description: Pnp Server Profile Update's addressIpV4.
+ description: Address Ip V4.
type: str
cert:
- description: Pnp Server Profile Update's cert.
+ description: Cert.
type: str
makeDefault:
- description: MakeDefault flag.
+ description: Make Default.
type: bool
name:
- description: Pnp Server Profile Update's name.
+ description: Name.
type: str
port:
- description: Pnp Server Profile Update's port.
- type: int
+ description: Port.
+ type: float
profileId:
- description: Pnp Server Profile Update's profileId.
+ description: Profile Id.
type: str
proxy:
- description: Proxy flag.
+ description: Proxy.
type: bool
type: dict
smartAccountId:
- description: Pnp Server Profile Update's smartAccountId.
- type: str
- syncResult:
- description: Pnp Server Profile Update's syncResult.
- suboptions:
- syncList:
- description: Pnp Server Profile Update's syncList.
- elements: dict
- suboptions:
- deviceSnList:
- description: Pnp Server Profile Update's deviceSnList.
- elements: str
- type: list
- syncType:
- description: Pnp Server Profile Update's syncType.
- type: str
- type: list
- syncMsg:
- description: Pnp Server Profile Update's syncMsg.
- type: str
- type: dict
- syncResultStr:
- description: Pnp Server Profile Update's syncResultStr.
- type: str
- syncStartTime:
- description: Pnp Server Profile Update's syncStartTime.
- type: int
- syncStatus:
- description: Pnp Server Profile Update's syncStatus.
- type: str
- tenantId:
- description: Pnp Server Profile Update's tenantId.
- type: str
- token:
- description: Pnp Server Profile Update's token.
+ description: Smart Account Id.
type: str
virtualAccountId:
- description: Pnp Server Profile Update's virtualAccountId.
+ description: Virtual Account Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) UpdatePnPServerProfile
description: Complete reference of the UpdatePnPServerProfile API.
@@ -124,10 +81,7 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
- autoSyncPeriod: 0
ccoUser: string
- expiry: 0
- lastSync: 0
profile:
addressFqdn: string
addressIpV4: string
@@ -138,21 +92,9 @@ EXAMPLES = r"""
profileId: string
proxy: true
smartAccountId: string
- syncResult:
- syncList:
- - deviceSnList:
- - string
- syncType: string
- syncMsg: string
- syncResultStr: string
- syncStartTime: 0
- syncStatus: string
- tenantId: string
- token: string
virtualAccountId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_smart_account_domains_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_smart_account_domains_info.py
index bc612723a..5b37b30cb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_smart_account_domains_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_smart_account_domains_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetSmartAccountList
description: Complete reference of the GetSmartAccountList API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_add.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_add.py
index 7a3ceef87..11556524b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_add.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_add.py
@@ -100,8 +100,8 @@ options:
description: Pnp Virtual Account Add's virtualAccountId.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) AddVirtualAccount
description: Complete reference of the AddVirtualAccount API.
@@ -153,7 +153,6 @@ EXAMPLES = r"""
virtualAccountId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_deregister.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_deregister.py
index 06af57d3b..81c564f6f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_deregister.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_deregister.py
@@ -26,8 +26,8 @@ options:
description: Name query parameter. Virtual Account Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) DeregisterVirtualAccount
description: Complete reference of the DeregisterVirtualAccount API.
@@ -55,7 +55,6 @@ EXAMPLES = r"""
name: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_devices_sync.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_devices_sync.py
index 208bd3352..872ece31d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_devices_sync.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_devices_sync.py
@@ -12,7 +12,7 @@ description:
- Manage operation create of the resource Pnp Virtual Account Devices Sync.
- >
Synchronizes the device info from the given smart account & virtual account with the PnP database. The response
- payload returns a list of synced devices.
+  payload returns a list of synced devices (deprecated).
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -99,8 +99,8 @@ options:
description: Pnp Virtual Account Devices Sync's virtualAccountId.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) SyncVirtualAccountDevices
description: Complete reference of the SyncVirtualAccountDevices API.
@@ -152,7 +152,6 @@ EXAMPLES = r"""
virtualAccountId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_sync_result_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_sync_result_info.py
index 61a07cb79..2573d9c19 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_sync_result_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_account_sync_result_info.py
@@ -10,7 +10,7 @@ module: pnp_virtual_account_sync_result_info
short_description: Information module for Pnp Virtual Account Sync Result
description:
- Get all Pnp Virtual Account Sync Result.
-- Returns the summary of devices synced from the given smart account & virtual account with PnP.
+- Returns the summary of devices synced from the given smart account & virtual account with PnP (deprecated).
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -28,8 +28,8 @@ options:
- Name path parameter. Virtual Account Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetSyncResultForVirtualAccount
description: Complete reference of the GetSyncResultForVirtualAccount API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_accounts_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_accounts_info.py
index 635a189d5..4b57bd2e5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_accounts_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_virtual_accounts_info.py
@@ -24,8 +24,8 @@ options:
- Domain path parameter. Smart Account Domain.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetVirtualAccountList
description: Complete reference of the GetVirtualAccountList API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow.py
index d8658e0ea..ae22a151e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow.py
@@ -128,8 +128,8 @@ options:
description: Pnp Workflow's version.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) AddAWorkflow
description: Complete reference of the AddAWorkflow API.
@@ -258,7 +258,6 @@ EXAMPLES = r"""
version: 0
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_count_info.py
index 3c835c119..85d5187a3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_count_info.py
@@ -25,8 +25,8 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetWorkflowCount
description: Complete reference of the GetWorkflowCount API.
@@ -55,7 +55,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_info.py b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_info.py
index d91ea342b..44d0d11ea 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/pnp_workflow_info.py
@@ -55,8 +55,8 @@ options:
- Id path parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Device Onboarding (PnP) GetWorkflowById
description: Complete reference of the GetWorkflowById API.
@@ -108,7 +108,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_count_info.py
index 461adba26..a7c9f8dca 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_count_info.py
@@ -29,7 +29,7 @@ options:
- IncludeDeleted query parameter. Flag to indicate whether deleted rules should be part of the records fetched.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_in_bulk_create.py b/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_in_bulk_create.py
index ec2bfc306..5819da497 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_in_bulk_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/profiling_rules_in_bulk_create.py
@@ -121,7 +121,7 @@ options:
type: list
type: list
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/projects_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/projects_details_info.py
index 2185060fd..8b85f8b96 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/projects_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/projects_details_info.py
@@ -40,8 +40,8 @@ options:
- SortOrder query parameter. Sort Order Ascending (asc) or Descending (dsc).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates GetProjectsDetails
description: Complete reference of the GetProjectsDetails API.
@@ -74,7 +74,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py
index cd99f3de6..f709c952b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/provision_workflow_manager.py
@@ -39,14 +39,24 @@ options:
required: true
suboptions:
management_ip_address:
- description: Management Ip Address .
+ description: Management Ip Address of the device.
type: str
required: true
+ provisioning:
+ description:
+ - Specifies whether the user intends to perform site assignment only or full provisioning for a wired device.
+ - Set to 'False' to carry out site assignment only.
+ - Set to 'True' to proceed with provisioning to a site.
+ type: bool
+ required: false
+ default: true
site_name_hierarchy:
description: Name of site where the device needs to be added.
type: str
managed_ap_locations:
- description: Location of the sites allocated for the APs
+ description:
+ - Location of the sites allocated for the APs.
+ - This is mandatory for provisioning of wireless devices.
type: list
elements: str
dynamic_interfaces:
@@ -95,10 +105,13 @@ notes:
post /dna/intent/api/v1/business/sda/provision-device
post /dna/intent/api/v1/wireless/provision
+ - Added 'provisioning' option in v6.16.0
+ - Added provisioning and reprovisioning of wireless devices in v6.16.0
+
"""
EXAMPLES = r"""
-- name: Create/Modify a new provision
+- name: Provision a wireless device to a site
cisco.dnac.provision_workflow_manager:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
@@ -110,16 +123,64 @@ EXAMPLES = r"""
dnac_log: True
state: merged
config:
- - site_name_hierarchy: string
- management_ip_address: string
- managed_ap_locations: list
+ - site_name_hierarchy: Global/USA/San Francisco/BGL_18
+ management_ip_address: 204.192.3.40
+ managed_ap_locations:
+ - Global/USA/San Francisco/BGL_18/Test_Floor2
dynamic_interfaces:
- - vlan_id: integer
- interface_name: string
- interface_ip_address: string
- interface_gateway: string
- interface_netmask_in_c_i_d_r: integer
- lag_or_port_number: integer
+ - vlan_id: 1866
+ interface_name: Vlan1866
+ interface_ip_address: 204.192.6.200
+ interface_gateway: 204.192.6.1
+
+- name: Provision a wired device to a site
+ cisco.dnac.provision_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ state: merged
+ config:
+ - site_name_hierarchy: Global/USA/San Francisco/BGL_18
+ management_ip_address: 204.192.3.40
+
+- name: Assign a wired device to a site
+ cisco.dnac.provision_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ state: merged
+ config:
+ - site_name_hierarchy: Global/USA/San Francisco/BGL_18
+ management_ip_address: 204.192.3.40
+ provisioning: False
+
+- name: Provision a wireless device to a site
+ cisco.dnac.provision_workflow_manager:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ dnac_log: True
+ state: merged
+ config_verify: True
+ config:
+ - site_name_hierarchy: Global/USA/RTP/BLD11
+ management_ip_address: 204.192.12.201
+ managed_ap_locations:
+ - Global/USA/RTP/BLD11/BLD11_FLOOR1
"""
@@ -210,7 +271,8 @@ class Provision(DnacBase):
"managed_ap_locations": {'type': 'list', 'required': False,
'elements': 'str'},
"dynamic_interfaces": {'type': 'list', 'required': False,
- 'elements': 'dict'}
+ 'elements': 'dict'},
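+ # 'provisioning' defaults to True (full provisioning); False restricts the run to site assignment only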
+ "provisioning": {'type': 'bool', 'required': False, "default": True}
}
if state == "merged":
provision_spec["site_name_hierarchy"] = {'type': 'str', 'required': True}
@@ -247,13 +309,16 @@ class Provision(DnacBase):
Post creation of the validated input, this method gets the
type of the device.
"""
-
- dev_response = self.dnac_apply['exec'](
- family="devices",
- function='get_network_device_by_ip',
- params={"ip_address": self.validated_config[0]["management_ip_address"]},
- op_modifies=True
- )
+ try:
+ dev_response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"ip_address": self.validated_config[0]["management_ip_address"]},
+ op_modifies=True
+ )
+ except Exception as e:
+ self.log(str(e), "ERROR")
+ self.module.fail_json(msg=str(e))
self.log("The device response from 'get_network_device_by_ip' API is {0}".format(str(dev_response)), "DEBUG")
dev_dict = dev_response.get("response")
@@ -268,6 +333,76 @@ class Provision(DnacBase):
self.log("The device type is {0}".format(device_type), "INFO")
return device_type
+ def get_device_id(self):
+ """
+ Fetches the UUID of the device added in the inventory
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns the UUID of the device as a string.
+ Example:
+ After creating the validated input, this method retrieves the
+ UUID of the device.
+ """
+
+ dev_response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"ip_address": self.validated_config[0]["management_ip_address"]},
+ op_modifies=True
+ )
+
+ self.log("The device response from 'get_network_device_by_ip' API is {0}".format(str(dev_response)), "DEBUG")
+ dev_dict = dev_response.get("response")
+ device_id = dev_dict.get("id")
+
+ self.log("Device ID of the device with IP address {0} is {1}".format(self.validated_config[0]["management_ip_address"], device_id), "INFO")
+ return device_id
+
+ def get_serial_number(self):
+ """
+ Fetches the serial number of the device
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ The method returns the serial number of the device as a string. If it fails, it returns None.
+ Example:
+ After creating the validated input, this method retrieves the
+ serial number of the device.
+ """
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="devices",
+ function='get_network_device_by_ip',
+ params={"ip_address": self.validated_config[0]["management_ip_address"]},
+ op_modifies=True
+ )
+
+ except Exception as e:
+ self.log("An error occurred while fetching the serial number: {0}".format(str(e)), "ERROR")
+ return None
+
+ if not response or not response.get("response"):
+ self.log("No response received from 'get_network_device_by_ip' API or it's invalid.", "ERROR")
+ return None
+
+ self.log("The device response from 'get_network_device_by_ip' API is {0}".format(str(response)), "DEBUG")
+ dev_dict = response.get("response")
+ serial_number = dev_dict.get("serialNumber")
+
+ if not serial_number:
+ self.log("Serial number not found in the response.", "ERROR")
+ return None
+
+ self.log("Serial Number of the device is {0}".format(str(serial_number)), "INFO")
+
+ return serial_number
+
def get_task_status(self, task_id=None):
"""
Fetches the status of the task once any provision API is called
@@ -275,9 +410,10 @@ class Provision(DnacBase):
Parameters:
- self: The instance of the class containing the 'config' attribute
to be validated.
+ - task_id: Task_id of the provisioning task.
Returns:
- The method returns an instance of the class with updated attributes:
- - result: A dict indiacting wheter the task was succesful or not
+ The method returns the status of the task_id used to track provisioning.
+ Returns True if the task has not failed, otherwise returns False.
Example:
Post creation of the provision task, this method fetches the task
status.
@@ -294,7 +430,7 @@ class Provision(DnacBase):
)
self.log("Response collected from 'get_task_by_id' API is {0}".format(str(response)), "DEBUG")
response = response.response
- self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response)), "INFO")
+ self.log("Task status for the task id {0} is {1}".format(str(task_id), str(response.get("progress"))), "INFO")
if response.get('isError') or re.search(
'failed', response.get('progress'), flags=re.IGNORECASE
):
@@ -303,7 +439,7 @@ class Provision(DnacBase):
self.module.fail_json(msg=msg)
return False
- if response.get('progress') != 'In Progress':
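+ # Progress values "TASK_PROVISION" and "TASK_MODIFY_PUT" with isError False mark a completed provisioning task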
+ if response.get('progress') in ["TASK_PROVISION", "TASK_MODIFY_PUT"] and response.get("isError") is False:
result = True
break
@@ -311,6 +447,93 @@ class Provision(DnacBase):
self.result.update(dict(provision_task=response))
return result
+ def get_execution_status_site(self, execution_id=None):
+ """
+ Fetches the status of the BAPI once site assignment API is called
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ - execution_id: execution_id of the BAPI API.
+ Returns:
+ The method returns the status of the BAPI used to track site assignment.
+ Returns True if the status is not failed, otherwise returns False.
+ Example:
+ Post creation of the site assignment task, this method fetches the execution
+ status.
+
+ """
+ result = False
+ params = {"execution_id": execution_id}
+ while True:
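+ # Poll the BAPI execution details every 3 seconds until the status is SUCCESS or a failure is reported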
+ response = self.dnac_apply['exec'](
+ family="task",
+ function="get_business_api_execution_details",
+ params=params,
+ op_modifies=True
+ )
+ self.log("Response collected from 'get_business_api_execution_details' API is {0}".format(str(response)), "DEBUG")
+ self.log("Execution status for the execution id {0} is {1}".format(str(execution_id), str(response.get("status"))), "INFO")
+ if response.get('bapiError') or response.get("status") == "FAILURE":
+ msg = 'Assigning to site execution with id {0} has not completed - Reason: {1}'.format(
+ execution_id, response.get("bapiError"))
+ self.module.fail_json(msg=msg)
+ return False
+
+ if response.get('status') == 'SUCCESS':
+ result = True
+ break
+
+ time.sleep(3)
+ self.result.update(dict(assignment_task=response))
+ return result
+
+ def get_execution_status_wireless(self, execution_id=None):
+ """
+ Fetches the status of the BAPI once site wireless provision API is called
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ - execution_id: execution_id of the BAPI API.
+ Returns:
+ The method returns the status of the BAPI used to track wireless provisioning.
+ Returns True if the status is not failed, otherwise returns False.
+ Example:
+ Post creation of the wireless provisioning task, this method fetches the execution
+ status.
+
+ """
+ result = False
+ params = {"execution_id": execution_id}
+ while True:
+ response = self.dnac_apply['exec'](
+ family="task",
+ function="get_business_api_execution_details",
+ params=params,
+ op_modifies=True
+ )
+ self.log("Response collected from 'get_business_api_execution_details' API is {0}".format(str(response)), "DEBUG")
+ self.log("Execution status for the execution id {0} is {1}".format(str(execution_id), str(response.get("status"))), "INFO")
+ if response.get('bapiError') or response.get("status") == "FAILURE":
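+ # An 'already provisioned' bapiError is not treated as a failure; it triggers reprovisioning instead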
+ if response.get("bapiError") == "Device was already provisioned , please use provision update API to reprovision the device":
+ msg = "Performing reprovisioning of wireless device"
+ result = True
+ self.perform_wireless_reprovision()
+ break
+ msg = 'Wireless provisioning execution with id {0} has not completed - Reason: {1}'.format(
+ execution_id, response.get("bapiError"))
+ self.module.fail_json(msg=msg)
+ return False
+
+ if response.get('status') == 'SUCCESS':
+ result = True
+ break
+
+ time.sleep(3)
+ self.result.update(dict(assignment_task=response))
+ return result
+
def get_site_type(self, site_name_hierarchy=None):
"""
Fetches the type of site
@@ -318,12 +541,11 @@ class Provision(DnacBase):
Parameters:
- self: The instance of the class containing the 'config' attribute
to be validated.
+ - site_name_hierarchy: Name of the site collected from the input.
Returns:
- The method returns an instance of the class with updated attributes:
- - site_type: A string indicating the type of the
- site (area/building/floor).
+ - site_type: A string indicating the type of the site (area/building/floor).
Example:
- Post creation of the validated input, we this method gets the
+ Post creation of the validated input, this method gets the
type of the site.
"""
@@ -351,6 +573,84 @@ class Provision(DnacBase):
return site_type
+ def get_site_details(self, site_name_hierarchy=None):
+ """
+ Fetches the id and existence of the site
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ - site_name_hierarchy: Name of the site collected from the input.
+ Returns:
+ - site_exists: A boolean value indicating the existence of the site.
+ - site_id: A string indicating the id of the site.
+ Example:
+ Post creation of the validated input, this method gets the
+ id of the site.
+ """
+
+ site_exists = False
+ site_id = None
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_site',
+ params={"name": site_name_hierarchy},
+ op_modifies=True
+ )
+ except Exception:
+ self.log("Exception occurred as site '{0}' was not found".format(site_name_hierarchy), "CRITICAL")
+ self.module.fail_json(msg="Site not found", response=[])
+
+ if response:
+ self.log("Received site details for '{0}': {1}".format(site_name_hierarchy, str(response)), "DEBUG")
+ site = response.get("response")
+ site_additional_info = site[0].get("additionalInfo")
+ if len(site) == 1:
+ site_id = site[0].get("id")
+ site_exists = True
+ self.log("Site Name: {1}, Site ID: {0}".format(site_id, site_name_hierarchy), "INFO")
+
+ return (site_exists, site_id)
+
+ def get_site_assignment(self):
+ """
+ Fetches the details of devices assigned to a site
+
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ - boolean: True if any device is associated with the site, False if no device is associated with the site
+
+ Example:
+ Post creation of the validated input, this method tells whether devices are associated with a site.
+ """
+
+ site_name_hierarchy = self.validated_config[0].get("site_name_hierarchy")
+ site_exists, site_id = self.get_site_details(site_name_hierarchy=site_name_hierarchy)
+ serial_number = self.get_serial_number()
+ if site_exists:
+ site_response = self.dnac_apply['exec'](
+ family="sites",
+ function='get_membership',
+ params={"site_id": site_id,
+ "serial_number": serial_number},
+ op_modifies=True
+ )
+ self.log("Response collected from the 'get_membership' API is {0}".format(site_response), "DEBUG")
+ device_list = site_response.get("device")
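+ # Each entry in 'device' carries a 'response' list; all-empty lists mean the device is not assigned to this site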
+ if len(device_list) > 0:
+ if all(device.get("response") == [] for device in device_list):
+ return False
+ else:
+ return True
+ else:
+ return False
+ return False
+
def get_wired_params(self):
"""
Prepares the payload for provisioning of the wired devices
@@ -369,10 +669,29 @@ class Provision(DnacBase):
parameters in other APIs.
"""
- wired_params = {
- "deviceManagementIpAddress": self.validated_config[0]["management_ip_address"],
- "siteNameHierarchy": self.validated_config[0].get("site_name_hierarchy")
- }
+ site_name = self.validated_config[0].get("site_name_hierarchy")
+
+ (site_exists, site_id) = self.get_site_details(site_name_hierarchy=site_name)
+
+ if site_exists is False:
+ msg = "Site {0} doesn't exist".format(site_name)
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg)
+
+ if self.validated_config[0].get("provisioning") is True:
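+ # Full provisioning: payload for the 'provision_wired_device' API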
+ wired_params = {
+ "deviceManagementIpAddress": self.validated_config[0]["management_ip_address"],
+ "siteNameHierarchy": site_name
+ }
+ else:
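+ # Site assignment only: payload for the 'assign_devices_to_site' API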
+ wired_params = {
+ "device": [
+ {
+ "ip": self.validated_config[0]["management_ip_address"]
+ }
+ ],
+ "site_id": site_id
+ }
self.log("Parameters collected for the provisioning of wired device:{0}".format(wired_params), "INFO")
return wired_params
@@ -402,31 +721,39 @@ class Provision(DnacBase):
"managedAPLocations": self.validated_config[0].get("managed_ap_locations"),
}
]
- for ap_loc in wireless_params[0]["managedAPLocations"]:
+
+ if not (wireless_params[0].get("managedAPLocations") and isinstance(wireless_params[0].get("managedAPLocations"), list)):
+ msg = "Managed AP locations must be passed as a list of sites. For example, [Global/USA/RTP/BLD11/BLD11_FLOOR1,\
+ Global/USA/RTP/BLD11/BLD11_FLOOR2]"
+ self.log(msg, "CRITICAL")
+ self.module.fail_json(msg=msg, response=[])
+
+ for ap_loc in self.validated_config[0].get("managed_ap_locations"):
if self.get_site_type(site_name_hierarchy=ap_loc) != "floor":
self.log("Managed AP Location must be a floor", "CRITICAL")
self.module.fail_json(msg="Managed AP Location must be a floor", response=[])
wireless_params[0]["dynamicInterfaces"] = []
- for interface in self.validated_config[0].get("dynamic_interfaces"):
- interface_dict = {
- "interfaceIPAddress": interface.get("interface_ip_address"),
- "interfaceNetmaskInCIDR": interface.get("interface_netmask_in_c_i_d_r"),
- "interfaceGateway": interface.get("interface_gateway"),
- "lagOrPortNumber": interface.get("lag_or_port_number"),
- "vlanId": interface.get("vlan_id"),
- "interfaceName": interface.get("interface_name")
- }
- wireless_params[0]["dynamicInterfaces"].append(interface_dict)
+ if self.validated_config[0].get("dynamic_interfaces"):
+ for interface in self.validated_config[0].get("dynamic_interfaces"):
+ interface_dict = {
+ "interfaceIPAddress": interface.get("interface_ip_address"),
+ "interfaceNetmaskInCIDR": interface.get("interface_netmask_in_c_i_d_r"),
+ "interfaceGateway": interface.get("interface_gateway"),
+ "lagOrPortNumber": interface.get("lag_or_port_number"),
+ "vlanId": interface.get("vlan_id"),
+ "interfaceName": interface.get("interface_name")
+ }
+ wireless_params[0]["dynamicInterfaces"].append(interface_dict)
response = self.dnac_apply['exec'](
family="devices",
function='get_network_device_by_ip',
- params={"management_ip_address": self.validated_config[0]["management_ip_address"]},
+ params={"ip_address": self.validated_config[0]["management_ip_address"]},
op_modifies=True
)
self.log("Response collected from 'get_network_device_by_ip' is:{0}".format(str(response)), "DEBUG")
- wireless_params[0]["deviceName"] = response.get("response")[0].get("hostname")
+ wireless_params[0]["deviceName"] = response.get("response").get("hostname")
self.log("Parameters collected for the provisioning of wireless device:{0}".format(wireless_params), "INFO")
return wireless_params
@@ -462,6 +789,47 @@ class Provision(DnacBase):
self.status = "success"
return self
+ def perform_wireless_reprovision(self):
+ """
+ This method performs the reprovisioning of a wireless device. Since there is no
+ API to fetch provisioned wireless devices, reprovisioning is triggered when the
+ provisioning API reports that the device is already provisioned.
+ Parameters:
+ - self: The instance of the class containing the 'config' attribute
+ to be validated.
+ Returns:
+ object: An instance of the class with updated results and status
+ based on the processing of differences.
+ Example:
+ If wireless device is already provisioned, this method calls the provision update
+ API and handles it accordingly
+ """
+
+ try:
+ headers_payload = {"__persistbapioutput": "true"}
+ response = self.dnac_apply['exec'](
+ family="wireless",
+ function="provision_update",
+ op_modifies=True,
+ params={"payload": self.want.get("prov_params"),
+ "headers": headers_payload}
+ )
+ self.log("Wireless provisioning response collected from 'provision_update' API is: {0}".format(str(response)), "DEBUG")
+ execution_id = response.get("executionId")
+ provision_info = self.get_execution_status_wireless(execution_id=execution_id)
+ self.result["changed"] = True
+ self.result['msg'] = "Wireless device with IP address {0} got re-provisioned successfully".format(self.validated_config[0]["management_ip_address"])
+ self.result['diff'] = self.validated_config
+ self.result['response'] = execution_id
+ self.log(self.result['msg'], "INFO")
+ return self
+ except Exception as e:
+ self.log("Parameters are {0}".format(self.want))
+ self.msg = "Error in wireless re-provisioning of {0} due to {1}".format(self.validated_config[0]["management_ip_address"], e)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
def get_diff_merged(self):
"""
Add to provision database
@@ -479,48 +847,122 @@ class Provision(DnacBase):
device_type = self.want.get("device_type")
if device_type == "wired":
- status_response = self.dnac_apply['exec'](
- family="sda",
- function="get_provisioned_wired_device",
- op_modifies=True,
- params={
- "device_management_ip_address": self.validated_config[0]["management_ip_address"]
- },
- )
+ try:
+ status_response = self.dnac_apply['exec'](
+ family="sda",
+ function="get_provisioned_wired_device",
+ op_modifies=True,
+ params={
+ "device_management_ip_address": self.validated_config[0]["management_ip_address"]
+ },
+ )
+ except Exception:
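+ # If the lookup fails (e.g. the device is not provisioned yet), continue with an empty status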
+ status_response = {}
self.log("Wired device's status Response collected from 'get_provisioned_wired_device' API is:{0}".format(str(status_response)), "DEBUG")
status = status_response.get("status")
self.log("The provisioned status of the wired device is {0}".format(status), "INFO")
if status == "success":
- response = self.dnac_apply['exec'](
- family="sda",
- function="re_provision_wired_device",
- op_modifies=True,
- params=self.want["prov_params"],
- )
- self.log("Reprovisioning response collected from 're_provision_wired_device' API is: {0}".format(response), "DEBUG")
+ try:
+ response = self.dnac_apply['exec'](
+ family="sda",
+ function="re_provision_wired_device",
+ op_modifies=True,
+ params=self.want["prov_params"],
+ )
+ self.log("Reprovisioning response collected from 're_provision_wired_device' API is: {0}".format(response), "DEBUG")
+ task_id = response.get("taskId")
+ provision_info = self.get_task_status(task_id=task_id)
+ self.result["changed"] = True
+ self.result['msg'] = "Re-Provision done Successfully"
+ self.result['diff'] = self.validated_config
+ self.result['response'] = task_id
+ self.log(self.result['msg'], "INFO")
+ return self
+
+ except Exception as e:
+ self.msg = "Error in re-provisioning due to {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
else:
+ if self.validated_config[0].get("provisioning") is True:
+ try:
+ response = self.dnac_apply['exec'](
+ family="sda",
+ function="provision_wired_device",
+ op_modifies=True,
+ params=self.want["prov_params"],
+ )
+ self.log("Provisioning response collected from 'provision_wired_device' API is: {0}".format(response), "DEBUG")
+ except Exception as e:
+ self.msg = "Error in provisioning due to {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ else:
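+ # provisioning is False: only assign the device to the site (skipped if it is already assigned)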
+ if self.get_site_assignment() is True:
+ self.result["changed"] = False
+ self.result['msg'] = "Device is already assigned to the desired site"
+ self.result['diff'] = self.want
+ self.result['response'] = self.want.get("prov_params").get("site_id")
+ self.log(self.result['msg'], "INFO")
+ return self
+
+ try:
+ response = self.dnac_apply['exec'](
+ family="sites",
+ function="assign_devices_to_site",
+ op_modifies=True,
+ params={
+ "site_id": self.want.get("prov_params").get("site_id"),
+ "payload": self.want.get("prov_params")
+ },
+ )
+ self.log("Assignment response collected from 'assign_devices_to_site' API is: {0}".format(response), "DEBUG")
+ execution_id = response.get("executionId")
+ assignment_info = self.get_execution_status_site(execution_id=execution_id)
+ self.result["changed"] = True
+ self.result['msg'] = "Site assignment done successfully"
+ self.result['diff'] = self.validated_config
+ self.result['response'] = execution_id
+ self.log(self.result['msg'], "INFO")
+ return self
+ except Exception as e:
+ self.msg = "Error in site assignment due to {0}".format(str(e))
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
+
+ elif device_type == "wireless":
+ try:
response = self.dnac_apply['exec'](
- family="sda",
- function="provision_wired_device",
+ family="wireless",
+ function="provision",
op_modifies=True,
- params=self.want["prov_params"],
+ params={"payload": self.want.get("prov_params")}
)
- self.log("Provisioning response collected from 'provision_wired_device' API is: {0}".format(response), "DEBUG")
-
- elif device_type == "wireless":
- response = self.dnac_apply['exec'](
- family="wireless",
- function="provision",
- op_modifies=True,
- params=self.want["prov_params"],
- )
- self.log("Wireless provisioning response collected from 'provision' API is: {0}".format(response), "DEBUG")
+ self.log("Wireless provisioning response collected from 'provision' API is: {0}".format(str(response)), "DEBUG")
+ execution_id = response.get("executionId")
+ provision_info = self.get_execution_status_wireless(execution_id=execution_id)
+ self.result["changed"] = True
+ self.result['msg'] = "Wireless device with IP {0} got provisioned successfully".format(self.validated_config[0]["management_ip_address"])
+ self.result['diff'] = self.validated_config
+ self.result['response'] = execution_id
+ self.log(self.result['msg'], "INFO")
+ return self
+ except Exception as e:
+ self.log("Parameters are {0}".format(self.want))
+ self.msg = "Error in wireless provisioning of {0} due to {1}".format(self.validated_config[0]["management_ip_address"], e)
+ self.log(self.msg, "ERROR")
+ self.status = "failed"
+ return self
else:
self.result['msg'] = "Passed device is neither wired nor wireless"
self.log(self.result['msg'], "ERROR")
- self.result['response'] = self.want["prov_params"]
+ self.result['response'] = self.want.get("prov_params")
return self
task_id = response.get("taskId")
@@ -612,6 +1054,15 @@ class Provision(DnacBase):
# Code to validate Cisco Catalyst Center config for merged state
device_type = self.want.get("device_type")
+ provisioning = self.validated_config[0].get("provisioning")
+ site_name_hierarchy = self.validated_config[0].get("site_name_hierarchy")
+ if provisioning is False:
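+ # For assignment-only runs, verification only checks whether the device appears in the site membership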
+ if self.get_site_assignment() is True:
+ self.log("Requested device is already added to the site {0}".format(site_name_hierarchy), "INFO")
+ else:
+ self.log("Requested device is not added to the site {0}".format(site_name_hierarchy), "INFO")
+ return self
+
if device_type == "wired":
try:
status_response = self.dnac_apply['exec'](
@@ -635,7 +1086,7 @@ class Provision(DnacBase):
self.log("Requested wired device is not provisioned", "INFO")
else:
- self.log("Currently we don't have any API in the Cisco Catalyst Center to fetch the provisioning details of wired devices")
+ self.log("Currently we don't have any API in the Cisco Catalyst Center to fetch the provisioning details of wireless devices")
self.status = "success"
return self
@@ -681,7 +1132,7 @@ class Provision(DnacBase):
self.log("Requested wired device is unprovisioned", "INFO")
else:
- self.log("Currently we don't have any API in the Cisco Catalyst Center to fetch the provisioning details of wired devices")
+ self.log("Currently we don't have any API in the Cisco Catalyst Center to fetch the provisioning details of wireless devices")
self.status = "success"
return self
diff --git a/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface.py b/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface.py
index e9b67c760..f7df7c970 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface.py
@@ -70,8 +70,8 @@ options:
type: list
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy CreateQosDeviceInterfaceInfo
description: Complete reference of the CreateQosDeviceInterfaceInfo API.
@@ -159,7 +159,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info.py b/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info.py
index ad7bb2bbe..9e99d9445 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info.py
@@ -24,8 +24,8 @@ options:
- NetworkDeviceId query parameter. Network device id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetQosDeviceInterfaceInfo
description: Complete reference of the GetQosDeviceInterfaceInfo API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info_count_info.py
index f77c2ce31..e8a8ca1e1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/qos_device_interface_info_count_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Application Policy GetQosDeviceInterfaceInfoCount
description: Complete reference of the GetQosDeviceInterfaceInfoCount API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reports.py b/ansible_collections/cisco/dnac/plugins/modules/reports.py
index 3e7eab25c..1b1d0247a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reports.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reports.py
@@ -19,6 +19,9 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
+ dataCategory:
+ description: Category of viewgroup for the report.
+ type: str
deliveries:
description: Array of available delivery channels.
elements: dict
@@ -103,8 +106,8 @@ options:
description: Version of viewgroup for the report.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Reports CreateOrScheduleAReport
description: Complete reference of the CreateOrScheduleAReport API.
@@ -134,6 +137,7 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
+ dataCategory: string
deliveries:
- {}
name: string
@@ -173,7 +177,6 @@ EXAMPLES = r"""
reportId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reports_executions_info.py b/ansible_collections/cisco/dnac/plugins/modules/reports_executions_info.py
index f12cd7caa..a5010b469 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reports_executions_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reports_executions_info.py
@@ -44,8 +44,8 @@ options:
- The filename used to save the download file.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Reports DownloadReportContent
description: Complete reference of the DownloadReportContent API.
@@ -93,7 +93,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reports_info.py b/ansible_collections/cisco/dnac/plugins/modules/reports_info.py
index aed7e10df..6642cee5b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reports_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reports_info.py
@@ -34,8 +34,8 @@ options:
- ReportId path parameter. ReportId of report.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Reports GetAScheduledReport
description: Complete reference of the GetAScheduledReport API.
@@ -83,7 +83,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_info.py b/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_info.py
index 76b131718..88c85a1ac 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_info.py
@@ -28,8 +28,8 @@ options:
- ViewGroupId path parameter. ViewGroupId of viewgroup.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Reports GetAllViewGroups
description: Complete reference of the GetAllViewGroups API.
@@ -75,7 +75,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_view_info.py b/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_view_info.py
index 11889bac9..a91d5625c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_view_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reports_view_group_view_info.py
@@ -30,8 +30,8 @@ options:
- ViewId path parameter. View id of view.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Reports GetViewDetailsForAGivenViewGroup_View
description: Complete reference of the GetViewDetailsForAGivenViewGroup_View API.
@@ -61,7 +61,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool.py b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool.py
index 507b0e078..a61a8b305 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool.py
@@ -22,11 +22,11 @@ options:
description: Id path parameter. Id of reserve ip subpool to be deleted.
type: str
ipv4DhcpServers:
- description: IPv4 input for dhcp server ip example 1.1.1.1.
+ description: IPv4 input for dhcp server ip example "1.1.1.1".
elements: str
type: list
ipv4DnsServers:
- description: IPv4 input for dns server ip example 4.4.4.4.
+ description: IPv4 input for dns server ip example "4.4.4.4".
elements: str
type: list
ipv4GateWay:
@@ -44,21 +44,22 @@ options:
description: The ipv4 prefix length is required when ipv4prefix value is true.
type: int
ipv4Subnet:
- description: IPv4 Subnet address, example 175.175.0.0.
+ description: IPv4 Subnet address, example 175.175.0.0. Either ipv4Subnet or ipv4TotalHost
+ needs to be passed if creating IPv4 subpool.
type: str
ipv4TotalHost:
description: IPv4 total host is required when ipv4prefix value is false.
type: int
ipv6AddressSpace:
- description: If the value is false only ipv4 input are required, otherwise both
- ipv6 and ipv4 are required.
+ description: If the value is omitted or false only ipv4 input are required, otherwise
+ both ipv6 and ipv4 are required.
type: bool
ipv6DhcpServers:
- description: IPv6 format dhcp server as input example 2001 db8 1234.
+ description: IPv6 format dhcp server as input example "2001 db8 1234".
elements: str
type: list
ipv6DnsServers:
- description: IPv6 format dns server input example 2001 db8 1234.
+ description: IPv6 format dns server input example "2001 db8 1234".
elements: str
type: list
ipv6GateWay:
@@ -76,7 +77,8 @@ options:
description: IPv6 prefix length is required when the ipv6prefix value is true.
type: int
ipv6Subnet:
- description: IPv6 Subnet address, example 2001 db8 85a3 0 100.
+ description: IPv6 Subnet address, example 2001 db8 85a3 0 100. Either ipv6Subnet
+ or ipv6TotalHost needs to be passed if creating IPv6 subpool.
type: str
ipv6TotalHost:
description: IPv6 total host is required when ipv6prefix value is false.
@@ -94,8 +96,8 @@ options:
description: Type of the reserve ip sub pool.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings ReserveIPSubpool
description: Complete reference of the ReserveIPSubpool API.
@@ -200,7 +202,6 @@ EXAMPLES = r"""
slaacSupport: true
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_create.py b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_create.py
index 699e7c96c..4dfe76bee 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_create.py
@@ -17,11 +17,11 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
ipv4DhcpServers:
- description: IPv4 input for dhcp server ip example 1.1.1.1.
+ description: IPv4 input for dhcp server ip example "1.1.1.1".
elements: str
type: list
ipv4DnsServers:
- description: IPv4 input for dns server ip example 4.4.4.4.
+ description: IPv4 input for dns server ip example "4.4.4.4".
elements: str
type: list
ipv4GateWay:
@@ -38,21 +38,22 @@ options:
description: The ipv4 prefix length is required when ipv4prefix value is true.
type: int
ipv4Subnet:
- description: IPv4 Subnet address, example 175.175.0.0.
+ description: IPv4 Subnet address, example 175.175.0.0. Either ipv4Subnet or ipv4TotalHost
+ needs to be passed if creating IPv4 subpool.
type: str
ipv4TotalHost:
description: IPv4 total host is required when ipv4prefix value is false.
type: int
ipv6AddressSpace:
- description: If the value is false only ipv4 input are required, otherwise both
- ipv6 and ipv4 are required.
+ description: If the value is omitted or false only ipv4 input are required, otherwise
+ both ipv6 and ipv4 are required.
type: bool
ipv6DhcpServers:
- description: IPv6 format dhcp server as input example 2001 db8 1234.
+ description: IPv6 format dhcp server as input example "2001 db8 1234".
elements: str
type: list
ipv6DnsServers:
- description: IPv6 format dns server input example 2001 db8 1234.
+ description: IPv6 format dns server input example "2001 db8 1234".
elements: str
type: list
ipv6GateWay:
@@ -70,7 +71,8 @@ options:
description: IPv6 prefix length is required when the ipv6prefix value is true.
type: int
ipv6Subnet:
- description: IPv6 Subnet address, example 2001 db8 85a3 0 100.
+ description: IPv6 Subnet address, example 2001 db8 85a3 0 100. Either ipv6Subnet
+ or ipv6TotalHost needs to be passed if creating IPv6 subpool.
type: str
ipv6TotalHost:
description: IPv6 total host is required when ipv6prefix value is false.
@@ -88,8 +90,8 @@ options:
description: Type of the reserve ip sub pool.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings ReserveIPSubpool
description: Complete reference of the ReserveIPSubpool API.
@@ -140,7 +142,6 @@ EXAMPLES = r"""
type: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_delete.py b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_delete.py
index 65ecd4733..918c75c4d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_delete.py
@@ -20,8 +20,8 @@ options:
description: Id path parameter. Id of reserve ip subpool to be deleted.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings ReleaseReserveIPSubpool
description: Complete reference of the ReleaseReserveIPSubpool API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_info.py b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_info.py
index ec3c76d16..494d93385 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_info.py
@@ -21,19 +21,37 @@ options:
type: dict
siteId:
description:
- - SiteId query parameter. Site id to get the reserve ip associated with the site.
+ - >
+ SiteId query parameter. Site id of site from which to retrieve associated reserve pools. Either siteId (per
+ site queries) or ignoreInheritedGroups must be used. They can also be used together.
type: str
offset:
description:
- - Offset query parameter. Offset/starting row.
- type: int
+ - Offset query parameter. Offset/starting row. Indexed from 1.
+ type: float
limit:
description:
- - Limit query parameter. No of Global Pools to be retrieved.
- type: int
+ - >
+ Limit query parameter. Number of reserve pools to be retrieved. Default is 25 if not specified. Maximum
+ allowed limit is 500.
+ type: float
+ ignoreInheritedGroups:
+ description:
+ - >
+ IgnoreInheritedGroups query parameter. Ignores pools inherited from parent site. Either siteId or
+ ignoreInheritedGroups must be passed. They can also be used together.
+ type: str
+ poolUsage:
+ description:
+ - PoolUsage query parameter. Can take values empty, partially-full or empty-partially-full.
+ type: str
+ groupName:
+ description:
+ - GroupName query parameter. Name of the group.
+ type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings GetReserveIPSubpool
description: Complete reference of the GetReserveIPSubpool API.
@@ -61,10 +79,12 @@ EXAMPLES = r"""
siteId: string
offset: 0
limit: 0
+ ignoreInheritedGroups: string
+ poolUsage: string
+ groupName: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_update.py b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_update.py
index 4bf71c42c..dbacca0ac 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/reserve_ip_subpool_update.py
@@ -17,50 +17,51 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
id:
- description: Id query parameter. Id of subpool to be associated with the site.
+ description: Id query parameter. Id of subpool group.
type: str
ipv4DhcpServers:
- description: IPv4 input for dhcp server ip example 1.1.1.1.
+ description: IPv4 input for dhcp server ip example "1.1.1.1".
elements: str
type: list
ipv4DnsServers:
- description: IPv4 input for dns server ip example 4.4.4.4.
+ description: IPv4 input for dns server ip example "4.4.4.4".
elements: str
type: list
ipv4GateWay:
- description: Ipv4 Gate Way.
+ description: Gateway ip address details, example 175.175.0.1.
type: str
ipv6AddressSpace:
- description: If the value is false only ipv4 input are required, otherwise both
- ipv6 and ipv4 are required.
+ description: If the value is false only ipv4 input are required. NOTE if value is
+ false then any existing ipv6 subpool in the group will be removed.
type: bool
ipv6DhcpServers:
- description: IPv6 format dhcp server as input example 2001 db8 1234.
+ description: IPv6 format dhcp server as input example "2001 db8 1234".
elements: str
type: list
ipv6DnsServers:
- description: IPv6 format dns server input example 2001 db8 1234.
+ description: IPv6 format dns server input example "2001 db8 1234".
elements: str
type: list
ipv6GateWay:
description: Gateway ip address details, example 2001 db8 85a3 0 100 1.
type: str
ipv6GlobalPool:
- description: IP v6 Global pool address with cidr this is required when Ipv6AddressSpace
+ description: IPv6 Global pool address with cidr this is required when Ipv6AddressSpace
value is true, example 2001 db8 85a3 /64.
type: str
ipv6Prefix:
- description: IPv6 prefix value is true, the ip6 prefix length input field is enabled
- , if it is false ipv6 total Host input is enable.
+ description: Ipv6 prefix value is true, the ip6 prefix length input field is enabled,
+ if it is false ipv6 total Host input is enable.
type: bool
ipv6PrefixLength:
description: IPv6 prefix length is required when the ipv6prefix value is true.
type: int
ipv6Subnet:
- description: IPv6 Subnet address, example 2001 db8 85a3 0 100.
+ description: IPv6 Subnet address, example 2001 db8 85a3 0 100 .
type: str
ipv6TotalHost:
- description: IPv6 total host is required when ipv6prefix value is false.
+ description: Size of pool in terms of number of IPs. IPv6 total host is required
+ when ipv6prefix value is false.
type: int
name:
description: Name of the reserve ip sub pool.
@@ -72,8 +73,8 @@ options:
description: Slaac Support.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings UpdateReserveIPSubpool
description: Complete reference of the UpdateReserveIPSubpool API.
@@ -122,7 +123,6 @@ EXAMPLES = r"""
slaacSupport: true
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py b/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py
index 51957ed06..97e7c41ad 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/role_permissions_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Role Permissions
description:
- Get all Role Permissions.
- Get permissions for a role from Cisco DNA Center System.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -20,15 +20,15 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for User and Roles GetPermissionsAPI
description: Complete reference of the GetPermissionsAPI API.
link: https://developer.cisco.com/docs/dna-center/#!get-permissions-api
notes:
- SDK Method used are
- user_and_roles.UserandRoles.get_permissions_ap_i,
+ userand_roles.UserandRoles.get_permissions_api,
- Paths used are
get /dna/system/api/v1/role/permissions,
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/roles.py b/ansible_collections/cisco/dnac/plugins/modules/roles.py
new file mode 100644
index 000000000..b7a6183a5
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/roles.py
@@ -0,0 +1,127 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: roles
+short_description: Resource module for Roles
+description:
+- Manage operations create, update and delete of the resource Roles.
+- Add a new role in Cisco DNA Center System.
+- Delete a role in Cisco DNA Center System.
+- Update a role in Cisco DNA Center System.
+version_added: '3.1.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ description:
+ description: Description of role.
+ type: str
+ resourceTypes:
+ description: Roles's resourceTypes.
+ elements: dict
+ suboptions:
+ operations:
+ description: List of operations allowed for the application. Possible values
+ are "gRead", "gWrite", "gUpdate", "gDelete", or some combination of these.
+ elements: str
+ type: list
+ type:
+ description: Name of the application in Cisco DNA Center System.
+ type: str
+ type: list
+ role:
+ description: Name of the role.
+ type: str
+ roleId:
+ description: Id of the role.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for User and Roles AddRoleAPI
+ description: Complete reference of the AddRoleAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-role-api
+- name: Cisco DNA Center documentation for User and Roles DeleteRoleAPI
+ description: Complete reference of the DeleteRoleAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-role-api
+- name: Cisco DNA Center documentation for User and Roles UpdateRoleAPI
+ description: Complete reference of the UpdateRoleAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-role-api
+notes:
+ - SDK Method used are
+ userand_roles.UserandRoles.add_role_api,
+ userand_roles.UserandRoles.delete_role_api,
+ userand_roles.UserandRoles.update_role_api,
+
+ - Paths used are
+ post /dna/system/api/v1/role,
+ delete /dna/system/api/v1/role/{roleId},
+ put /dna/system/api/v1/role,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.roles:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ description: string
+ resourceTypes:
+ - operations:
+ - string
+ type: string
+ role: string
+
+- name: Update all
+ cisco.dnac.roles:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ description: string
+ resourceTypes:
+ - operations:
+ - string
+ type: string
+ roleId: string
+
+- name: Delete by id
+ cisco.dnac.roles:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ roleId: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "roleId": "string",
+ "message": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/roles_info.py b/ansible_collections/cisco/dnac/plugins/modules/roles_info.py
index a8ce30ddf..15a9ecf9c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/roles_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/roles_info.py
@@ -10,8 +10,8 @@ module: roles_info
short_description: Information module for Roles
description:
- Get all Roles.
-- Get all roles for the Cisco DNA Center system.
-version_added: '6.7.0'
+- Get all roles for the Cisco DNA Center System.
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -20,15 +20,15 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for User and Roles GetRolesAPI
description: Complete reference of the GetRolesAPI API.
link: https://developer.cisco.com/docs/dna-center/#!get-roles-api
notes:
- SDK Method used are
- user_and_roles.UserandRoles.get_roles_ap_i,
+ userand_roles.UserandRoles.get_roles_api,
- Paths used are
get /dna/system/api/v1/roles,
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways.py b/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways.py
new file mode 100644
index 000000000..84c11b6ce
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways.py
@@ -0,0 +1,203 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_anycast_gateways
+short_description: Resource module for Sda Anycastgateways
+description:
+- Manage operations create, update and delete of the resource Sda Anycastgateways.
+- Adds anycast gateways based on user input.
+- Deletes an anycast gateway based on id.
+- Updates anycast gateways based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. ID of the anycast gateway to be deleted.
+ type: str
+ payload:
+ description: Sda Anycast Gateways's payload.
+ elements: dict
+ suboptions:
+ fabricId:
+ description: ID of the fabric containing this anycast gateway. Updating anycast
+ gateways on fabric zones is not allowed; instead, update the corresponding
+ anycast gateway on the fabric site and the updates will be applied on all
+ applicable fabric zones (updating this field is not allowed).
+ type: str
+ id:
+ description: ID of the anycast gateway (updating this field is not allowed).
+ type: str
+ ipPoolName:
+ description: Name of the IP pool associated with the anycast gateway (updating
+ this field is not allowed).
+ type: str
+ isCriticalPool:
+ description: Enable/disable critical VLAN (not applicable to INFRA_VN; updating
+ this field is not allowed).
+ type: bool
+ isIntraSubnetRoutingEnabled:
+ description: Enable/disable Intra-Subnet Routing (not applicable to INFRA_VN;
+ updating this field is not allowed).
+ type: bool
+ isIpDirectedBroadcast:
+ description: Enable/disable IP-directed broadcast (not applicable to INFRA_VN).
+ type: bool
+ isLayer2FloodingEnabled:
+ description: Enable/disable layer 2 flooding (not applicable to INFRA_VN).
+ type: bool
+ isMultipleIpToMacAddresses:
+ description: Enable/disable multiple IP-to-MAC Addresses (Wireless Bridged-Network
+ Virtual Machine; not applicable to INFRA_VN).
+ type: bool
+ isSupplicantBasedExtendedNodeOnboarding:
+ description: Enable/disable Supplicant-Based Extended Node Onboarding (applicable
+ only to INFRA_VN; required when poolType is EXTENDED_NODE).
+ type: bool
+ isWirelessPool:
+ description: Enable/disable fabric-enabled wireless (not applicable to INFRA_VN).
+ type: bool
+ poolType:
+ description: The pool type of the anycast gateway (applicable only to INFRA_VN;
+ updating this field is not allowed).
+ type: str
+ securityGroupName:
+ description: Name of the associated Security Group (not applicable to INFRA_VN).
+ type: str
+ tcpMssAdjustment:
+ description: TCP maximum segment size adjustment.
+ type: int
+ trafficType:
+ description: The type of traffic the anycast gateway serves.
+ type: str
+ virtualNetworkName:
+ description: Name of the layer 3 virtual network associated with the anycast
+ gateway (updating this field is not allowed).
+ type: str
+ vlanId:
+ description: ID of the VLAN of the anycast gateway (updating this field is not
+ allowed).
+ type: int
+ vlanName:
+ description: Name of the VLAN of the anycast gateway (updating this field is
+ not allowed).
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddAnycastGateways
+ description: Complete reference of the AddAnycastGateways API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-anycast-gateways
+- name: Cisco DNA Center documentation for SDA DeleteAnycastGatewayById
+ description: Complete reference of the DeleteAnycastGatewayById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-anycast-gateway-by-id
+- name: Cisco DNA Center documentation for SDA UpdateAnycastGateways
+ description: Complete reference of the UpdateAnycastGateways API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-anycast-gateways
+notes:
+ - SDK Methods used are
+ sda.Sda.add_anycast_gateways,
+ sda.Sda.delete_anycast_gateway_by_id,
+ sda.Sda.update_anycast_gateways,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/anycastGateways,
+ delete /dna/intent/api/v1/sda/anycastGateways/{id},
+ put /dna/intent/api/v1/sda/anycastGateways,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.sda_anycast_gateways:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - fabricId: string
+ id: string
+ ipPoolName: string
+ isCriticalPool: true
+ isIntraSubnetRoutingEnabled: true
+ isIpDirectedBroadcast: true
+ isLayer2FloodingEnabled: true
+ isMultipleIpToMacAddresses: true
+ isSupplicantBasedExtendedNodeOnboarding: true
+ isWirelessPool: true
+ poolType: string
+ securityGroupName: string
+ tcpMssAdjustment: 0
+ trafficType: string
+ virtualNetworkName: string
+ vlanId: 0
+ vlanName: string
+
+- name: Create
+ cisco.dnac.sda_anycast_gateways:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - autoGenerateVlanName: true
+ fabricId: string
+ ipPoolName: string
+ isCriticalPool: true
+ isIntraSubnetRoutingEnabled: true
+ isIpDirectedBroadcast: true
+ isLayer2FloodingEnabled: true
+ isMultipleIpToMacAddresses: true
+ isSupplicantBasedExtendedNodeOnboarding: true
+ isWirelessPool: true
+ poolType: string
+ securityGroupName: string
+ tcpMssAdjustment: 0
+ trafficType: string
+ virtualNetworkName: string
+ vlanId: 0
+ vlanName: string
+
+- name: Delete by id
+ cisco.dnac.sda_anycast_gateways:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_count_info.py
new file mode 100644
index 000000000..7904cbe39
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_count_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_anycast_gateways_count_info
+short_description: Information module for Sda Anycastgateways Count
+description:
+- Get all Sda Anycastgateways Count.
+- Returns the count of anycast gateways that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. Only count anycast gateways within this fabric.
+ type: str
+ virtualNetworkName:
+ description:
+ - VirtualNetworkName query parameter. Only count anycast gateways associated with this virtual network.
+ type: str
+ ipPoolName:
+ description:
+ - IpPoolName query parameter. Only count anycast gateways associated with this IP pool.
+ type: str
+ vlanName:
+ description:
+ - VlanName query parameter. Only count anycast gateways associated with this VLAN name.
+ type: str
+ vlanId:
+ description:
+ - VlanId query parameter. Only count anycast gateways associated with this VLAN ID.
+ type: int
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetAnycastGatewayCount
+ description: Complete reference of the GetAnycastGatewayCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-anycast-gateway-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_anycast_gateway_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/anycastGateways/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Anycastgateways Count
+ cisco.dnac.sda_anycast_gateways_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ virtualNetworkName: string
+ ipPoolName: string
+ vlanName: string
+ vlanId: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_info.py
new file mode 100644
index 000000000..716cc46ad
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_anycast_gateways_info.py
@@ -0,0 +1,122 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_anycast_gateways_info
+short_description: Information module for Sda Anycastgateways
+description:
+- Get all Sda Anycastgateways.
+- Returns a list of anycast gateways that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ id:
+ description:
+ - Id query parameter. ID of the anycast gateway to search for in the database.
+ type: str
+ fabricId:
+ description:
+ - FabricId query parameter. Get anycast gateways within this fabric.
+ type: str
+ virtualNetworkName:
+ description:
+ - VirtualNetworkName query parameter. Get anycast gateways associated with this virtual network.
+ type: str
+ ipPoolName:
+ description:
+ - IpPoolName query parameter. Get anycast gateways associated with this IP pool.
+ type: str
+ vlanName:
+ description:
+ - VlanName query parameter. Get anycast gateways associated with this VLAN name.
+ type: str
+ vlanId:
+ description:
+ - VlanId query parameter. Get anycast gateways associated with this VLAN ID.
+ type: int
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: int
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: int
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetAnycastGateways
+ description: Complete reference of the GetAnycastGateways API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-anycast-gateways
+notes:
+ - SDK Methods used are
+ sda.Sda.get_anycast_gateways,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/anycastGateways,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Anycastgateways
+ cisco.dnac.sda_anycast_gateways_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ id: string
+ fabricId: string
+ virtualNetworkName: string
+ ipPoolName: string
+ vlanName: string
+ vlanId: 0
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "fabricId": "string",
+ "virtualNetworkName": "string",
+ "ipPoolName": "string",
+ "tcpMssAdjustment": 0,
+ "vlanName": "string",
+ "vlanId": 0,
+ "trafficType": "string",
+ "poolType": "string",
+ "securityGroupName": "string",
+ "isCriticalPool": true,
+ "isLayer2FloodingEnabled": true,
+ "isWirelessPool": true,
+ "isIpDirectedBroadcast": true,
+ "isIntraSubnetRoutingEnabled": true,
+ "isMultipleIpToMacAddresses": true,
+ "isSupplicantBasedExtendedNodeOnboarding": true
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles.py b/ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles.py
new file mode 100644
index 000000000..a48ac7f99
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles.py
@@ -0,0 +1,96 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_authentication_profiles
+short_description: Resource module for Sda Authenticationprofiles
+description:
+- Manage operation update of the resource Sda Authenticationprofiles.
+- Updates an authentication profile based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ payload:
+ description: Sda Authentication Profiles's payload.
+ elements: dict
+ suboptions:
+ authenticationOrder:
+ description: First authentication method.
+ type: str
+ authenticationProfileName:
+ description: The default host authentication template (updating this field is
+ not allowed).
+ type: str
+ dot1xToMabFallbackTimeout:
+ description: 802.1x Timeout.
+ type: int
+ fabricId:
+ description: ID of the fabric site/zone (updating this field is not allowed).
+ type: str
+ id:
+ description: ID of the authentication profile (updating this field is not allowed).
+ type: str
+ numberOfHosts:
+ description: Number of Hosts.
+ type: str
+ wakeOnLan:
+ description: Wake on LAN.
+ type: bool
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA UpdateAuthenticationProfile
+ description: Complete reference of the UpdateAuthenticationProfile API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-authentication-profile
+notes:
+ - SDK Methods used are
+ sda.Sda.update_authentication_profile,
+
+ - Paths used are
+ put /dna/intent/api/v1/sda/authenticationProfiles,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.sda_authentication_profiles:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticationOrder: string
+ authenticationProfileName: string
+ dot1xToMabFallbackTimeout: 0
+ fabricId: string
+ id: string
+ numberOfHosts: string
+ wakeOnLan: true
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles_info.py
new file mode 100644
index 000000000..1f71e297e
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_authentication_profiles_info.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_authentication_profiles_info
+short_description: Information module for Sda Authenticationprofiles
+description:
+- Get all Sda Authenticationprofiles.
+- Returns a list of authentication profiles that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric whose authentication profiles are to be returned.
+ type: str
+ authenticationProfileName:
+ description:
+ - >
+ AuthenticationProfileName query parameter. Return only the authentication profiles with this specified name.
+ Note that 'No Authentication' is not a valid option for this parameter.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetAuthenticationProfiles
+ description: Complete reference of the GetAuthenticationProfiles API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-authentication-profiles
+notes:
+ - SDK Methods used are
+ sda.Sda.get_authentication_profiles,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/authenticationProfiles,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Authenticationprofiles
+ cisco.dnac.sda_authentication_profiles_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ authenticationProfileName: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "fabricId": "string",
+ "authenticationProfileName": "string",
+ "authenticationOrder": "string",
+ "dot1xToMabFallbackTimeout": 0,
+ "wakeOnLan": true,
+ "numberOfHosts": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_count_info.py
index d942591c1..a8a2ce8af 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_count_info.py
@@ -20,7 +20,7 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_device_info.py
index 22029fe55..3c892d9d0 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_device_info.py
@@ -24,8 +24,8 @@ options:
- DeviceManagementIpAddress query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetDeviceInfoFromSDAFabric
description: Complete reference of the GetDeviceInfoFromSDAFabric API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_device_role_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_device_role_info.py
index b06a698a1..996e037be 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_device_role_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_device_role_info.py
@@ -24,8 +24,8 @@ options:
- DeviceManagementIpAddress query parameter. Device Management IP Address.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetDeviceRoleInSDAFabric
description: Complete reference of the GetDeviceRoleInSDAFabric API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,13 +61,10 @@ dnac_response:
type: dict
sample: >
{
- "response": {
- "status": "string",
- "description": "string",
- "roles": [
- "string"
- ]
- },
- "version": "string"
+ "roles": [
+ "string"
+ ],
+ "status": "string",
+ "description": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies.py b/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies.py
new file mode 100644
index 000000000..c8b2f408c
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies.py
@@ -0,0 +1,140 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_extranet_policies
+short_description: Resource module for Sda Extranetpolicies
+description:
+- Manage operations create, update and delete of the resource Sda Extranetpolicies.
+- Adds an extranet policy based on user input.
+- Deletes an extranet policy based on id.
+- Updates an extranet policy based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. ID of the extranet policy.
+ type: str
+ payload:
+ description: Sda Extranet Policies's payload.
+ elements: dict
+ suboptions:
+ extranetPolicyName:
+ description: Name of the existing extranet policy (updating this field is not
+ allowed).
+ type: str
+ fabricIds:
+ description: ID of the fabric sites/zones that associated with this extranet
+ policy.
+ elements: str
+ type: list
+ id:
+ description: ID of the existing extranet policy (updating this field is not
+ allowed).
+ type: str
+ providerVirtualNetworkName:
+ description: Name of the existing provider virtual network (updating this field
+ is not allowed).
+ type: str
+ subscriberVirtualNetworkNames:
+ description: Name of the subscriber virtual networks.
+ elements: str
+ type: list
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddExtranetPolicy
+ description: Complete reference of the AddExtranetPolicy API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-extranet-policy
+- name: Cisco DNA Center documentation for SDA DeleteExtranetPolicyById
+ description: Complete reference of the DeleteExtranetPolicyById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-extranet-policy-by-id
+- name: Cisco DNA Center documentation for SDA UpdateExtranetPolicy
+ description: Complete reference of the UpdateExtranetPolicy API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-extranet-policy
+notes:
+ - SDK Methods used are
+ sda.Sda.add_extranet_policy,
+ sda.Sda.delete_extranet_policy_by_id,
+ sda.Sda.update_extranet_policy,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/extranetPolicies,
+ delete /dna/intent/api/v1/sda/extranetPolicies/{id},
+ put /dna/intent/api/v1/sda/extranetPolicies,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.sda_extranet_policies:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - extranetPolicyName: string
+ fabricIds:
+ - string
+ id: string
+ providerVirtualNetworkName: string
+ subscriberVirtualNetworkNames:
+ - string
+
+- name: Create
+ cisco.dnac.sda_extranet_policies:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - extranetPolicyName: string
+ fabricIds:
+ - string
+ providerVirtualNetworkName: string
+ subscriberVirtualNetworkNames:
+ - string
+
+- name: Delete by id
+ cisco.dnac.sda_extranet_policies:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_count_info.py
new file mode 100644
index 000000000..d21a9b7e5
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_count_info.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_extranet_policies_count_info
+short_description: Information module for Sda Extranetpolicies Count
+description:
+- Get all Sda Extranetpolicies Count.
+- Returns the count of extranet policies that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetExtranetPolicyCount
+ description: Complete reference of the GetExtranetPolicyCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-extranet-policy-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_extranet_policy_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/extranetPolicies/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Extranetpolicies Count
+ cisco.dnac.sda_extranet_policies_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_info.py
new file mode 100644
index 000000000..bfd420b33
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_extranet_policies_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_extranet_policies_info
+short_description: Information module for Sda Extranetpolicies
+description:
+- Get all Sda Extranetpolicies.
+- Returns a list of extranet policies that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ extranetPolicyName:
+ description:
+ - ExtranetPolicyName query parameter. Name of the extranet policy.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetExtranetPolicies
+ description: Complete reference of the GetExtranetPolicies API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-extranet-policies
+notes:
+ - SDK Methods used are
+ sda.Sda.get_extranet_policies,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/extranetPolicies,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Extranetpolicies
+ cisco.dnac.sda_extranet_policies_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ extranetPolicyName: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "extranetPolicyName": "string",
+ "fabricIds": [
+ "string"
+ ],
+ "providerVirtualNetworkName": "string",
+ "subscriberVirtualNetworkNames": [
+ "string"
+ ]
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric.py
index f068b0e1e..9ab5c6c06 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric.py
@@ -21,7 +21,7 @@ options:
description: FabricName query parameter. Fabric Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile.py
index 925e28fa1..ce93c577d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile.py
@@ -34,8 +34,8 @@ options:
description: SiteNameHierarchy query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddDefaultAuthenticationTemplateInSDAFabric
description: Complete reference of the AddDefaultAuthenticationTemplateInSDAFabric API.
@@ -105,7 +105,6 @@ EXAMPLES = r"""
siteNameHierarchy: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile_info.py
index 619af5f01..5f4584f69 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_authentication_profile_info.py
@@ -29,8 +29,8 @@ options:
- AuthenticateTemplateName query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetDefaultAuthenticationProfileFromSDAFabric
description: Complete reference of the GetDefaultAuthenticationProfileFromSDAFabric API.
@@ -60,24 +60,21 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
- elements: dict
+ type: dict
sample: >
- [
- {
- "siteNameHierarchy": "string",
- "authenticateTemplateName": "string",
- "authenticationOrder": "string",
- "dot1xToMabFallbackTimeout": "string",
- "wakeOnLan": true,
- "numberOfHosts": "string",
- "status": "string",
- "description": "string"
- }
- ]
+ {
+ "siteNameHierarchy": "string",
+ "authenticateTemplateName": "string",
+ "authenticationOrder": "string",
+ "dot1xToMabFallbackTimeout": "string",
+ "wakeOnLan": true,
+ "numberOfHosts": "string",
+ "status": "string",
+ "description": "string",
+ "executionId": "string"
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device.py
index e2441daf8..7bdcd8356 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device.py
@@ -110,7 +110,7 @@ options:
description: External Domain Routing Protocol Name.
type: str
internalAutonomouSystemNumber:
- description: Internal Autonomouns System Number (e.g.,1-65535).
+ description: Internal Autonomous System Number.
type: str
routeDistributionProtocol:
description: Route Distribution Protocol for Control Plane Device. Allowed values
@@ -125,8 +125,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddBorderDeviceInSDAFabric
description: Complete reference of the AddBorderDeviceInSDAFabric API.
@@ -195,7 +195,6 @@ EXAMPLES = r"""
deviceManagementIpAddress: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device_info.py
index c4318cd73..0857f63a5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_border_device_info.py
@@ -25,8 +25,8 @@ options:
- DeviceManagementIpAddress query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetBorderDeviceDetailFromSDAFabric
description: Complete reference of the GetBorderDeviceDetailFromSDAFabric API.
@@ -55,7 +55,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device.py
index ff3ecf449..b4b7c3295 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device.py
@@ -31,8 +31,8 @@ options:
type: str
version_added: 4.0.0
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddControlPlaneDeviceInSDAFabric
description: Complete reference of the AddControlPlaneDeviceInSDAFabric API.
@@ -79,7 +79,6 @@ EXAMPLES = r"""
siteNameHierarchy: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device_info.py
index 9267a7014..4f4eae8e8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_control_plane_device_info.py
@@ -25,8 +25,8 @@ options:
- DeviceManagementIpAddress query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetControlPlaneDeviceFromSDAFabric
description: Complete reference of the GetControlPlaneDeviceFromSDAFabric API.
@@ -55,7 +55,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices.py
new file mode 100644
index 000000000..a06950863
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices.py
@@ -0,0 +1,215 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices
+short_description: Resource module for Sda Fabricdevices
+description:
+- Manage operations create, update and delete of the resource Sda Fabricdevices.
+- Adds fabric devices based on user input.
+- Deletes a fabric device based on id.
+- Deletes fabric devices based on user input.
+- Updates fabric devices based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ deviceRoles:
+ description: DeviceRoles query parameter. Device roles of the fabric device.
+ type: str
+ fabricId:
+ description: FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ id:
+ description: Id path parameter. ID of the fabric device.
+ type: str
+ networkDeviceId:
+ description: NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ payload:
+ description: Sda Fabric Devices's payload.
+ elements: dict
+ suboptions:
+ borderDeviceSettings:
+ description: Sda Fabric Devices's borderDeviceSettings.
+ suboptions:
+ borderTypes:
+ description: List of the border types of the fabric device. Allowed values
+ are LAYER_2, LAYER_3.
+ elements: str
+ type: list
+ layer3Settings:
+ description: Sda Fabric Devices's layer3Settings.
+ suboptions:
+ borderPriority:
+ description: Border priority of the fabric border device. Allowed range
+ is 1-9. A lower value indicates higher priority. E.g., a priority
+ of 1 takes precedence over 5. If not set, the priority defaults to 10.
+ type: int
+ importExternalRoutes:
+ description: Set this to import external routes from other routing protocols
+ (such as BGP) to the fabric control plane. (updating this field is
+ not allowed).
+ type: bool
+ isDefaultExit:
+ description: Set this to make the fabric border device the gateway of
+ last resort for this site. Any unknown traffic will be sent to this
+ fabric border device from edge nodes. (updating this field is not
+ allowed).
+ type: bool
+ localAutonomousSystemNumber:
+ description: BGP Local autonomous system number of the fabric border
+ device. Allowed range is 1 to 4294967295. (updating this field is
+ not allowed).
+ type: str
+ prependAutonomousSystemCount:
+ description: Prepend autonomous system count of the fabric border device.
+ Allowed range is 1 to 10.
+ type: int
+ type: dict
+ type: dict
+ deviceRoles:
+ description: List of the roles of the fabric device. Allowed values are CONTROL_PLANE_NODE,
+ EDGE_NODE, BORDER_NODE. (updating this field is not allowed).
+ elements: str
+ type: list
+ fabricId:
+ description: ID of the fabric site/zone of this fabric device. (updating this
+ field is not allowed).
+ type: str
+ id:
+ description: ID of the fabric device. (updating this field is not allowed).
+ type: str
+ networkDeviceId:
+ description: Network device ID of the fabric device. (updating this field is
+ not allowed).
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddFabricDevices
+ description: Complete reference of the AddFabricDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-fabric-devices
+- name: Cisco DNA Center documentation for SDA DeleteAFabricDeviceById
+ description: Complete reference of the DeleteAFabricDeviceById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-a-fabric-device-by-id
+- name: Cisco DNA Center documentation for SDA DeleteFabricDevices
+ description: Complete reference of the DeleteFabricDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-devices
+- name: Cisco DNA Center documentation for SDA UpdateFabricDevices
+ description: Complete reference of the UpdateFabricDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-fabric-devices
+notes:
+ - SDK Methods used are
+ sda.Sda.add_fabric_devices,
+ sda.Sda.delete_a_fabric_device_by_id,
+ sda.Sda.update_fabric_devices,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/fabricDevices,
+ delete /dna/intent/api/v1/sda/fabricDevices,
+ delete /dna/intent/api/v1/sda/fabricDevices/{id},
+ put /dna/intent/api/v1/sda/fabricDevices,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.sda_fabric_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - borderDeviceSettings:
+ borderTypes:
+ - string
+ layer3Settings:
+ borderPriority: 0
+ importExternalRoutes: true
+ isDefaultExit: true
+ localAutonomousSystemNumber: string
+ prependAutonomousSystemCount: 0
+ deviceRoles:
+ - string
+ fabricId: string
+ id: string
+ networkDeviceId: string
+
+- name: Delete all
+ cisco.dnac.sda_fabric_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ deviceRoles: string
+ fabricId: string
+ networkDeviceId: string
+
+- name: Create
+ cisco.dnac.sda_fabric_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - borderDeviceSettings:
+ borderTypes:
+ - string
+ layer3Settings:
+ borderPriority: 0
+ importExternalRoutes: true
+ isDefaultExit: true
+ localAutonomousSystemNumber: string
+ prependAutonomousSystemCount: 0
+ deviceRoles:
+ - string
+ fabricId: string
+ networkDeviceId: string
+
+- name: Delete by id
+ cisco.dnac.sda_fabric_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_count_info.py
new file mode 100644
index 000000000..8bb73b6b5
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_count_info.py
@@ -0,0 +1,79 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_count_info
+short_description: Information module for Sda Fabricdevices Count
+description:
+- Get all Sda Fabricdevices Count.
+- Returns the count of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ deviceRoles:
+ description:
+ - DeviceRoles query parameter. Device roles of the fabric device.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesCount
+ description: Complete reference of the GetFabricDevicesCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Count
+ cisco.dnac.sda_fabric_devices_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ deviceRoles: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_info.py
new file mode 100644
index 000000000..8a8d55081
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_info.py
@@ -0,0 +1,108 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_info
+short_description: Information module for Sda Fabricdevices
+description:
+- Get all Sda Fabricdevices.
+- Returns a list of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ deviceRoles:
+ description:
+ - DeviceRoles query parameter. Device roles of the fabric device.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevices
+ description: Complete reference of the GetFabricDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices
+ cisco.dnac.sda_fabric_devices_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ deviceRoles: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "networkDeviceId": "string",
+ "fabricId": "string",
+ "deviceRoles": [
+ "string"
+ ],
+ "borderDeviceSettings": {
+ "borderTypes": [
+ "string"
+ ],
+ "layer3Settings": {
+ "localAutonomousSystemNumber": "string",
+ "isDefaultExit": true,
+ "importExternalRoutes": true,
+ "borderPriority": 0,
+ "prependAutonomousSystemCount": 0
+ }
+ }
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs.py
new file mode 100644
index 000000000..cd356af68
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs.py
@@ -0,0 +1,134 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs
+short_description: Resource module for Sda Fabricdevices Layer2handoffs
+description:
+- Manage operations create and delete of the resource Sda Fabricdevices Layer2handoffs.
+- Adds layer 2 handoffs in fabric devices based on user input.
+- Deletes a layer 2 handoff of a fabric device based on id.
+- Deletes layer 2 handoffs of a fabric device based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ fabricId:
+ description: FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ id:
+ description: Id path parameter. ID of the layer 2 handoff of a fabric device.
+ type: str
+ networkDeviceId:
+ description: NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ payload:
+ description: Sda Fabric Devices Layer2 Handoffs's payload.
+ elements: dict
+ suboptions:
+ externalVlanId:
+ description: External VLAN number into which the fabric must be extended. Allowed
+ VLAN range is 2-4094 except for reserved VLANs (1, 1002-1005, 2046, 4094).
+ type: int
+ fabricId:
+ description: ID of the fabric this device belongs to.
+ type: str
+ interfaceName:
+ description: Interface name of the layer 2 handoff. E.g., GigabitEthernet1/0/4.
+ type: str
+ internalVlanId:
+ description: VLAN number associated with this fabric. Allowed VLAN range is
+ 2-4094 except for reserved VLANs (1, 1002-1005, 2046, 4094).
+ type: int
+ networkDeviceId:
+ description: Network device ID of the fabric device.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddFabricDevicesLayer2Handoffs
+ description: Complete reference of the AddFabricDevicesLayer2Handoffs API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-fabric-devices-layer-2-handoffs
+- name: Cisco DNA Center documentation for SDA DeleteFabricDeviceLayer2HandoffById
+ description: Complete reference of the DeleteFabricDeviceLayer2HandoffById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-device-layer-2-handoff-by-id
+- name: Cisco DNA Center documentation for SDA DeleteFabricDeviceLayer2Handoffs
+ description: Complete reference of the DeleteFabricDeviceLayer2Handoffs API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-device-layer-2-handoffs
+notes:
+ - SDK Methods used are
+ sda.Sda.add_fabric_devices_layer2_handoffs,
+ sda.Sda.delete_fabric_device_layer2_handoff_by_id,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/fabricDevices/layer2Handoffs,
+ delete /dna/intent/api/v1/sda/fabricDevices/layer2Handoffs,
+ delete /dna/intent/api/v1/sda/fabricDevices/layer2Handoffs/{id},
+
+"""
+
+EXAMPLES = r"""
+- name: Delete all
+ cisco.dnac.sda_fabric_devices_layer2_handoffs:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ fabricId: string
+ networkDeviceId: string
+
+- name: Create
+ cisco.dnac.sda_fabric_devices_layer2_handoffs:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - externalVlanId: 0
+ fabricId: string
+ interfaceName: string
+ internalVlanId: 0
+ networkDeviceId: string
+
+- name: Delete by id
+ cisco.dnac.sda_fabric_devices_layer2_handoffs:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_count_info.py
new file mode 100644
index 000000000..872e9fb65
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_count_info.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_count_info
+short_description: Information module for Sda Fabricdevices Layer2handoffs Count
+description:
+- Get all Sda Fabricdevices Layer2handoffs Count.
+- Returns the count of layer 2 handoffs of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesLayer2HandoffsCount
+ description: Complete reference of the GetFabricDevicesLayer2HandoffsCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-layer-2-handoffs-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_layer2_handoffs_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/layer2Handoffs/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Layer2handoffs Count
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_info.py
new file mode 100644
index 000000000..5d7a1eb66
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_info.py
@@ -0,0 +1,91 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_info
+short_description: Information module for Sda Fabricdevices Layer2handoffs
+description:
+- Get all Sda Fabricdevices Layer2handoffs.
+- Returns a list of layer 2 handoffs of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesLayer2Handoffs
+ description: Complete reference of the GetFabricDevicesLayer2Handoffs API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-layer-2-handoffs
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_layer2_handoffs,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/layer2Handoffs,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Layer2handoffs
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "networkDeviceId": "string",
+ "fabricId": "string",
+ "interfaceName": "string",
+ "internalVlanId": 0,
+ "externalVlanId": 0
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits.py
new file mode 100644
index 000000000..7d4dcd43b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits.py
@@ -0,0 +1,209 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_ip_transits
+short_description: Resource module for Sda Fabricdevices Layer2handoffs Iptransits
+description:
+- Manage operations create, update and delete of the resource Sda Fabricdevices Layer2handoffs Iptransits.
+- Adds layer 3 handoffs with ip transit in fabric devices based on user input.
+- Deletes a layer 3 handoff with ip transit of a fabric device by id.
+- Deletes layer 3 handoffs with ip transit of a fabric device based on user input.
+- Updates layer 3 handoffs with ip transit of fabric devices based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ fabricId:
+ description: FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ id:
+ description: Id path parameter. ID of the layer 3 handoff with ip transit of a fabric
+ device to be deleted.
+ type: str
+ networkDeviceId:
+ description: NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ payload:
+ description: Sda Fabric Devices Layer2 Handoffs Ip Transits' payload.
+ elements: dict
+ suboptions:
+ externalConnectivityIpPoolName:
+ description: The external connectivity ip pool is used by Catalyst Center to
+ allocate IP addresses for the connection between the border node and its peer.
+ type: str
+ fabricId:
+ description: ID of the fabric this device belongs to.
+ type: str
+ interfaceName:
+ description: Interface name of the layer 3 handoff ip transit.
+ type: str
+ localIpAddress:
+ description: Local ipv4 address for the selected virtual network. Enter the
+ IP address and prefix length in CIDR notation (IP address/prefix-length).
+ Not applicable if you have already provided an external connectivity ip pool
+ name.
+ type: str
+ localIpv6Address:
+ description: Local ipv6 address for the selected virtual network. Enter the
+ IP address and prefix length in CIDR notation (IP address/prefix-length).
+ Not applicable if you have already provided an external connectivity ip pool
+ name.
+ type: str
+ networkDeviceId:
+ description: Network device ID of the fabric device.
+ type: str
+ remoteIpAddress:
+ description: Remote ipv4 address for the selected virtual network. Enter the
+ IP address and prefix length in CIDR notation (IP address/prefix-length).
+ Not applicable if you have already provided an external connectivity ip pool
+ name.
+ type: str
+ remoteIpv6Address:
+ description: Remote ipv6 address for the selected virtual network. Enter the
+ IP address and prefix length in CIDR notation (IP address/prefix-length).
+ Not applicable if you have already provided an external connectivity ip pool
+ name.
+ type: str
+ tcpMssAdjustment:
+ description: TCP maximum segment size (mss) value for the layer 3 handoff. Allowed
+ range is 500-1440. TCP MSS Adjustment value is applicable for the TCP sessions
+ over both IPv4 and IPv6.
+ type: int
+ transitNetworkId:
+ description: ID of the transit network of the layer 3 handoff ip transit.
+ type: str
+ virtualNetworkName:
+ description: Name of the virtual network associated with this fabric site.
+ type: str
+ vlanId:
+ description: VLAN number for the Switch Virtual Interface (SVI) used to establish
+ BGP peering with the external domain for the virtual network. Allowed VLAN
+ range is 2-4094 except for reserved vlans (1, 1002-1005, 2046, 4094).
+ type: int
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddFabricDevicesLayer3HandoffsWithIpTransit
+ description: Complete reference of the AddFabricDevicesLayer3HandoffsWithIpTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-fabric-devices-layer-3-handoffs-with-ip-transit
+- name: Cisco DNA Center documentation for SDA DeleteFabricDeviceLayer3HandoffWithIpTransitById
+ description: Complete reference of the DeleteFabricDeviceLayer3HandoffWithIpTransitById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-device-layer-3-handoff-with-ip-transit-by-id
+- name: Cisco DNA Center documentation for SDA DeleteFabricDeviceLayer3HandoffsWithIpTransit
+ description: Complete reference of the DeleteFabricDeviceLayer3HandoffsWithIpTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-device-layer-3-handoffs-with-ip-transit
+- name: Cisco DNA Center documentation for SDA UpdateFabricDevicesLayer3HandoffsWithIpTransit
+ description: Complete reference of the UpdateFabricDevicesLayer3HandoffsWithIpTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-fabric-devices-layer-3-handoffs-with-ip-transit
+notes:
+ - SDK Methods used are
+ sda.Sda.add_fabric_devices_layer3_handoffs_with_ip_transit,
+ sda.Sda.delete_fabric_device_layer3_handoff_with_ip_transit_by_id,
+ sda.Sda.update_fabric_devices_layer3_handoffs_with_ip_transit,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/ipTransits,
+ delete /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/ipTransits,
+ delete /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/ipTransits/{id},
+ put /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/ipTransits,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_ip_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - externalConnectivityIpPoolName: string
+ fabricId: string
+ interfaceName: string
+ localIpAddress: string
+ localIpv6Address: string
+ networkDeviceId: string
+ remoteIpAddress: string
+ remoteIpv6Address: string
+ tcpMssAdjustment: 0
+ transitNetworkId: string
+ virtualNetworkName: string
+ vlanId: 0
+
+- name: Update all
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_ip_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - externalConnectivityIpPoolName: string
+ fabricId: string
+ id: string
+ interfaceName: string
+ localIpAddress: string
+ localIpv6Address: string
+ networkDeviceId: string
+ remoteIpAddress: string
+ remoteIpv6Address: string
+ tcpMssAdjustment: 0
+ transitNetworkId: string
+ virtualNetworkName: string
+ vlanId: 0
+
+- name: Delete all
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_ip_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ fabricId: string
+ networkDeviceId: string
+
+- name: Delete by id
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_ip_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py
new file mode 100644
index 000000000..224811e04
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_count_info.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_ip_transits_count_info
+short_description: Information module for Sda Fabricdevices Layer2handoffs Iptransits Count
+description:
+- Get all Sda Fabricdevices Layer2handoffs Iptransits Count.
+- Returns the count of layer 3 handoffs with ip transit of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesLayer3HandoffsWithIpTransitCount
+ description: Complete reference of the GetFabricDevicesLayer3HandoffsWithIpTransitCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-layer-3-handoffs-with-ip-transit-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_layer3_handoffs_with_ip_transit_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/ipTransits/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Layer2handoffs Iptransits Count
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_ip_transits_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_info.py
new file mode 100644
index 000000000..5a0d1934d
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_ip_transits_info.py
@@ -0,0 +1,98 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_ip_transits_info
+short_description: Information module for Sda Fabricdevices Layer2handoffs Iptransits
+description:
+- Get all Sda Fabricdevices Layer2handoffs Iptransits.
+- Returns a list of layer 3 handoffs with ip transit of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesLayer3HandoffsWithIpTransit
+ description: Complete reference of the GetFabricDevicesLayer3HandoffsWithIpTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-layer-3-handoffs-with-ip-transit
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_layer3_handoffs_with_ip_transit,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/ipTransits,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Layer2handoffs Iptransits
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_ip_transits_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "networkDeviceId": "string",
+ "fabricId": "string",
+ "transitNetworkId": "string",
+ "interfaceName": "string",
+ "externalConnectivityIpPoolName": "string",
+ "virtualNetworkName": "string",
+ "vlanId": 0,
+ "tcpMssAdjustment": 0,
+ "localIpAddress": "string",
+ "remoteIpAddress": "string",
+ "localIpv6Address": "string",
+ "remoteIpv6Address": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits.py
new file mode 100644
index 000000000..7ec186fb1
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits.py
@@ -0,0 +1,157 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_sda_transits
+short_description: Resource module for Sda Fabricdevices Layer2handoffs Sdatransits
+description:
+- Manage operations create, update and delete of the resource Sda Fabricdevices Layer2handoffs Sdatransits.
+- Adds layer 3 handoffs with sda transit in fabric devices based on user input.
+- Deletes layer 3 handoffs with sda transit of a fabric device based on user input.
+- Updates layer 3 handoffs with sda transit of fabric devices based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ fabricId:
+ description: FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description: NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ payload:
+ description: Sda Fabric Devices Layer2 Handoffs Sda Transits' payload.
+ elements: dict
+ suboptions:
+ affinityIdDecider:
+ description: Affinity id decider value of the border node. When the affinity
+ id prime value is the same on multiple devices, the affinity id decider value
+ is used as a tiebreaker. Allowed range is 0-2147483647. The lower the relative
+ value of affinity id decider, the higher the preference for a destination
+ border node.
+ type: int
+ affinityIdPrime:
+ description: Affinity id prime value of the border node. It supersedes the border
+ priority to determine border node preference. Allowed range is 0-2147483647.
+ The lower the relative value of affinity id prime, the higher the preference
+ for a destination border node.
+ type: int
+ connectedToInternet:
+ description: Set this to true to allow the associated site to provide internet
+ access to other sites through sd-access.
+ type: bool
+ fabricId:
+ description: ID of the fabric this device belongs to. (updating this field is
+ not allowed).
+ type: str
+ isMulticastOverTransitEnabled:
+ description: Set this to true to configure native multicast over multiple sites
+ that are connected to an sd-access transit.
+ type: bool
+ networkDeviceId:
+ description: Network device ID of the fabric device. (updating this field is
+ not allowed).
+ type: str
+ transitNetworkId:
+ description: ID of the transit network of the layer 3 handoff sda transit. (updating
+ this field is not allowed).
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddFabricDevicesLayer3HandoffsWithSdaTransit
+ description: Complete reference of the AddFabricDevicesLayer3HandoffsWithSdaTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-fabric-devices-layer-3-handoffs-with-sda-transit
+- name: Cisco DNA Center documentation for SDA DeleteFabricDeviceLayer3HandoffsWithSdaTransit
+ description: Complete reference of the DeleteFabricDeviceLayer3HandoffsWithSdaTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-device-layer-3-handoffs-with-sda-transit
+- name: Cisco DNA Center documentation for SDA UpdateFabricDevicesLayer3HandoffsWithSdaTransit
+ description: Complete reference of the UpdateFabricDevicesLayer3HandoffsWithSdaTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-fabric-devices-layer-3-handoffs-with-sda-transit
+notes:
+ - SDK Methods used are
+ sda.Sda.add_fabric_devices_layer3_handoffs_with_sda_transit,
+ sda.Sda.delete_fabric_device_layer3_handoffs_with_sda_transit,
+ sda.Sda.update_fabric_devices_layer3_handoffs_with_sda_transit,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/sdaTransits,
+ delete /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/sdaTransits,
+ put /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/sdaTransits,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_sda_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - affinityIdDecider: 0
+ affinityIdPrime: 0
+ connectedToInternet: true
+ fabricId: string
+ isMulticastOverTransitEnabled: true
+ networkDeviceId: string
+ transitNetworkId: string
+
+- name: Delete all
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_sda_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ fabricId: string
+ networkDeviceId: string
+
+- name: Create
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_sda_transits:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - affinityIdDecider: 0
+ affinityIdPrime: 0
+ connectedToInternet: true
+ fabricId: string
+ isMulticastOverTransitEnabled: true
+ networkDeviceId: string
+ transitNetworkId: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py
new file mode 100644
index 000000000..432ab802a
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_count_info.py
@@ -0,0 +1,74 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_sda_transits_count_info
+short_description: Information module for Sda Fabricdevices Layer2handoffs Sdatransits Count
+description:
+- Get all Sda Fabricdevices Layer2handoffs Sdatransits Count.
+- Returns the count of layer 3 handoffs with sda transit of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesLayer3HandoffsWithSdaTransitCount
+ description: Complete reference of the GetFabricDevicesLayer3HandoffsWithSdaTransitCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-layer-3-handoffs-with-sda-transit-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_layer3_handoffs_with_sda_transit_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/sdaTransits/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Layer2handoffs Sdatransits Count
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_sda_transits_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_info.py
new file mode 100644
index 000000000..84267bf38
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_devices_layer2_handoffs_sda_transits_info.py
@@ -0,0 +1,92 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_devices_layer2_handoffs_sda_transits_info
+short_description: Information module for Sda Fabricdevices Layer2handoffs Sdatransits
+description:
+- Get all Sda Fabricdevices Layer2handoffs Sdatransits.
+- Returns a list of layer 3 handoffs with sda transit of fabric devices that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the fabric device.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricDevicesLayer3HandoffsWithSdaTransit
+ description: Complete reference of the GetFabricDevicesLayer3HandoffsWithSdaTransit API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-devices-layer-3-handoffs-with-sda-transit
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_devices_layer3_handoffs_with_sda_transit,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricDevices/layer3Handoffs/sdaTransits,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricdevices Layer2handoffs Sdatransits
+ cisco.dnac.sda_fabric_devices_layer2_handoffs_sda_transits_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "networkDeviceId": "string",
+ "fabricId": "string",
+ "transitNetworkId": "string",
+ "affinityIdPrime": 0,
+ "affinityIdDecider": 0,
+ "connectedToInternet": true,
+ "isMulticastOverTransitEnabled": true
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device.py
index 9bcde0422..10d432dc2 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device.py
@@ -26,8 +26,8 @@ options:
type: str
version_added: 4.0.0
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddEdgeDeviceInSDAFabric
description: Complete reference of the AddEdgeDeviceInSDAFabric API.
@@ -73,7 +73,6 @@ EXAMPLES = r"""
deviceManagementIpAddress: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device_info.py
index a4f6eecc2..f9be7650e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_edge_device_info.py
@@ -24,8 +24,8 @@ options:
- DeviceManagementIpAddress query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetEdgeDeviceFromSDAFabric
description: Complete reference of the GetEdgeDeviceFromSDAFabric API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_info.py
index c44290025..99d7e1127 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_info.py
@@ -24,7 +24,7 @@ options:
- FabricName query parameter. Fabric Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site.py
index eed71ad48..7bdbf45f8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site.py
@@ -31,8 +31,8 @@ options:
description: SiteNameHierarchy query parameter. Site Name Hierarchy.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddSiteInSDAFabric
description: Complete reference of the AddSiteInSDAFabric API.
@@ -79,7 +79,6 @@ EXAMPLES = r"""
siteNameHierarchy: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site_info.py
index bd2e8c177..ceac72373 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_site_info.py
@@ -24,8 +24,8 @@ options:
- SiteNameHierarchy query parameter. Site Name Hierarchy.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetSiteFromSDAFabric
description: Complete reference of the GetSiteFromSDAFabric API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites.py
new file mode 100644
index 000000000..73000a818
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites.py
@@ -0,0 +1,123 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_sites
+short_description: Resource module for Sda Fabricsites
+description:
+- Manage operations create, update and delete of the resource Sda Fabricsites.
+- Adds a fabric site based on user input.
+- Deletes a fabric site based on id.
+- Updates a fabric site based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. ID of the fabric site to be deleted.
+ type: str
+ payload:
+ description: Sda Fabric Sites' payload.
+ elements: dict
+ suboptions:
+ authenticationProfileName:
+ description: Authentication profile used for this fabric.
+ type: str
+ isPubSubEnabled:
+ description: Specifies whether this fabric site will use pub/sub for control
+ nodes.
+ type: bool
+ siteId:
+ description: ID of the network hierarchy.
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddFabricSite
+ description: Complete reference of the AddFabricSite API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-fabric-site
+- name: Cisco DNA Center documentation for SDA DeleteFabricSiteById
+ description: Complete reference of the DeleteFabricSiteById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-site-by-id
+- name: Cisco DNA Center documentation for SDA UpdateFabricSite
+ description: Complete reference of the UpdateFabricSite API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-fabric-site
+notes:
+ - SDK Methods used are
+ sda.Sda.add_fabric_site,
+ sda.Sda.delete_fabric_site_by_id,
+ sda.Sda.update_fabric_site,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/fabricSites,
+ delete /dna/intent/api/v1/sda/fabricSites/{id},
+ put /dna/intent/api/v1/sda/fabricSites,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.sda_fabric_sites:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticationProfileName: string
+ isPubSubEnabled: true
+ siteId: string
+
+- name: Update all
+ cisco.dnac.sda_fabric_sites:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticationProfileName: string
+ id: string
+ isPubSubEnabled: true
+ siteId: string
+
+- name: Delete by id
+ cisco.dnac.sda_fabric_sites:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_count_info.py
new file mode 100644
index 000000000..f010d86e2
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_count_info.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_sites_count_info
+short_description: Information module for Sda Fabricsites Count
+description:
+- Get all Sda Fabricsites Count.
+- Returns the count of fabric sites that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricSiteCount
+ description: Complete reference of the GetFabricSiteCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-site-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_site_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricSites/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricsites Count
+ cisco.dnac.sda_fabric_sites_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_info.py
new file mode 100644
index 000000000..8d0a821a6
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_sites_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_sites_info
+short_description: Information module for Sda Fabricsites
+description:
+- Get all Sda Fabricsites.
+- Returns a list of fabric sites that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ id:
+ description:
+ - Id query parameter. ID of the fabric site to search for in the database.
+ type: str
+ siteId:
+ description:
+ - SiteId query parameter. Get the fabric site associated with this network hierarchy.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: int
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: int
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricSites
+ description: Complete reference of the GetFabricSites API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-sites
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_sites,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricSites,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabricsites
+ cisco.dnac.sda_fabric_sites_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ id: string
+ siteId: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "siteId": "string",
+ "authenticationProfileName": "string",
+ "isPubSubEnabled": true
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones.py
new file mode 100644
index 000000000..2f213aa97
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones.py
@@ -0,0 +1,120 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_zones
+short_description: Resource module for Sda Fabriczones
+description:
+- Manage operations create, update and delete of the resource Sda Fabriczones.
+- Adds a fabric zone based on user input.
+- Deletes a fabric zone based on id.
+- Updates a fabric zone based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. ID of the fabric zone to be deleted.
+ type: str
+ payload:
+ description: Sda Fabric Zones' payload.
+ elements: dict
+ suboptions:
+ authenticationProfileName:
+ description: Authentication profile used for this fabric.
+ type: str
+ id:
+ description: ID of the fabric zone (updating this field is not allowed).
+ type: str
+ siteId:
+ description: ID of the network hierarchy (updating this field is not allowed).
+ type: str
+ type: list
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddFabricZone
+ description: Complete reference of the AddFabricZone API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-fabric-zone
+- name: Cisco DNA Center documentation for SDA DeleteFabricZoneById
+ description: Complete reference of the DeleteFabricZoneById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-fabric-zone-by-id
+- name: Cisco DNA Center documentation for SDA UpdateFabricZone
+ description: Complete reference of the UpdateFabricZone API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-fabric-zone
+notes:
+ - SDK Methods used are
+ sda.Sda.add_fabric_zone,
+ sda.Sda.delete_fabric_zone_by_id,
+ sda.Sda.update_fabric_zone,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/fabricZones,
+ delete /dna/intent/api/v1/sda/fabricZones/{id},
+ put /dna/intent/api/v1/sda/fabricZones,
+
+"""
+
+EXAMPLES = r"""
+- name: Update all
+ cisco.dnac.sda_fabric_zones:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticationProfileName: string
+ id: string
+ siteId: string
+
+- name: Create
+ cisco.dnac.sda_fabric_zones:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticationProfileName: string
+ siteId: string
+
+- name: Delete by id
+ cisco.dnac.sda_fabric_zones:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_count_info.py
new file mode 100644
index 000000000..a22932e28
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_count_info.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_zones_count_info
+short_description: Information module for Sda Fabriczones Count
+description:
+- Get all Sda Fabriczones Count.
+- Returns the count of fabric zones that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricZoneCount
+ description: Complete reference of the GetFabricZoneCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-zone-count
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_zone_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricZones/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabriczones Count
+ cisco.dnac.sda_fabric_zones_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_info.py
new file mode 100644
index 000000000..42c4f421b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_fabric_zones_info.py
@@ -0,0 +1,88 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_fabric_zones_info
+short_description: Information module for Sda Fabriczones
+description:
+- Get all Sda Fabriczones.
+- Returns a list of fabric zones that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ id:
+ description:
+ - Id query parameter. ID of the fabric zone to search for in the database.
+ type: str
+ siteId:
+ description:
+ - SiteId query parameter. Get the fabric zone associated with this network hierarchy.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: int
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: int
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetFabricZones
+ description: Complete reference of the GetFabricZones API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-fabric-zones
+notes:
+ - SDK Methods used are
+ sda.Sda.get_fabric_zones,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/fabricZones,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Fabriczones
+ cisco.dnac.sda_fabric_zones_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ id: string
+ siteId: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "siteId": "string",
+ "authenticationProfileName": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_multicast.py b/ansible_collections/cisco/dnac/plugins/modules/sda_multicast.py
index 6e1045c5d..b8baa6b96 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_multicast.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_multicast.py
@@ -56,8 +56,8 @@ options:
description: Full path of sda Fabric Site.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddMulticastInSDAFabric
description: Complete reference of the AddMulticastInSDAFabric API.
@@ -113,7 +113,6 @@ EXAMPLES = r"""
siteNameHierarchy: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_multicast_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_multicast_info.py
index 2f4ae1218..702335f2e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_multicast_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_multicast_info.py
@@ -24,8 +24,8 @@ options:
- SiteNameHierarchy query parameter. Fabric site name hierarchy.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetMulticastDetailsFromSDAFabric
description: Complete reference of the GetMulticastDetailsFromSDAFabric API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point.py
index c8280407d..d75762405 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point.py
@@ -41,8 +41,8 @@ options:
type: str
version_added: 4.0.0
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddPortAssignmentForAccessPointInSDAFabric
description: Complete reference of the AddPortAssignmentForAccessPointInSDAFabric API.
@@ -93,7 +93,6 @@ EXAMPLES = r"""
interfaceName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point_info.py
index 3cdb7cd3a..140e4ac75 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_access_point_info.py
@@ -29,8 +29,8 @@ options:
- InterfaceName query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetPortAssignmentForAccessPointInSDAFabric
description: Complete reference of the GetPortAssignmentForAccessPointInSDAFabric API.
@@ -60,7 +60,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device.py
index 58664b8e2..cb609b216 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device.py
@@ -54,8 +54,8 @@ options:
type: str
version_added: 4.0.0
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddPortAssignmentForUserDeviceInSDAFabric
description: Complete reference of the AddPortAssignmentForUserDeviceInSDAFabric API.
@@ -110,7 +110,6 @@ EXAMPLES = r"""
voiceIpAddressPoolName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device_info.py
index 2fdbee046..2b0152bdd 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignment_for_user_device_info.py
@@ -28,8 +28,8 @@ options:
- InterfaceName query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetPortAssignmentForUserDeviceInSDAFabric
description: Complete reference of the GetPortAssignmentForUserDeviceInSDAFabric API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments.py
new file mode 100644
index 000000000..4bae27b70
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments.py
@@ -0,0 +1,188 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_port_assignments
+short_description: Resource module for Sda Portassignments
+description:
+- Manage operations create, update and delete of the resource Sda Portassignments.
+- Adds port assignments based on user input.
+- Deletes a port assignment based on id.
+- Deletes port assignments based on user input.
+- Updates port assignments based on user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ dataVlanName:
+ description: DataVlanName query parameter. Data VLAN name of the port assignment.
+ type: str
+ fabricId:
+ description: FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ id:
+ description: Id path parameter. ID of the port assignment.
+ type: str
+ interfaceName:
+ description: InterfaceName query parameter. Interface name of the port assignment.
+ type: str
+ networkDeviceId:
+ description: NetworkDeviceId query parameter. Network device ID of the port assignment.
+ type: str
+ payload:
+ description: Sda Port Assignments' payload.
+ elements: dict
+ suboptions:
+ authenticateTemplateName:
+ description: Authenticate template name of the port assignment.
+ type: str
+ connectedDeviceType:
+ description: Connected device type of the port assignment.
+ type: str
+ dataVlanName:
+ description: Data VLAN name of the port assignment.
+ type: str
+ fabricId:
+ description: ID of the fabric this device belongs to.
+ type: str
+ interfaceDescription:
+ description: Interface description of the port assignment.
+ type: str
+ interfaceName:
+ description: Interface name of the port assignment.
+ type: str
+ networkDeviceId:
+ description: Network device ID of the port assignment.
+ type: str
+ scalableGroupName:
+ description: Scalable group name of the port assignment.
+ type: str
+ voiceVlanName:
+ description: Voice VLAN name of the port assignment.
+ type: str
+ type: list
+ voiceVlanName:
+ description: VoiceVlanName query parameter. Voice VLAN name of the port assignment.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA AddPortAssignments
+ description: Complete reference of the AddPortAssignments API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-port-assignments
+- name: Cisco DNA Center documentation for SDA DeletePortAssignmentById
+ description: Complete reference of the DeletePortAssignmentById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-port-assignment-by-id
+- name: Cisco DNA Center documentation for SDA DeletePortAssignments
+ description: Complete reference of the DeletePortAssignments API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-port-assignments
+- name: Cisco DNA Center documentation for SDA UpdatePortAssignments
+ description: Complete reference of the UpdatePortAssignments API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-port-assignments
+notes:
+ - SDK Method used are
+ sda.Sda.add_port_assignments,
+ sda.Sda.delete_port_assignment_by_id,
+ sda.Sda.update_port_assignments,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/portAssignments,
+ delete /dna/intent/api/v1/sda/portAssignments,
+ delete /dna/intent/api/v1/sda/portAssignments/{id},
+ put /dna/intent/api/v1/sda/portAssignments,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.sda_port_assignments:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticateTemplateName: string
+ connectedDeviceType: string
+ dataVlanName: string
+ fabricId: string
+ interfaceDescription: string
+ interfaceName: string
+ networkDeviceId: string
+ scalableGroupName: string
+ voiceVlanName: string
+
+- name: Update all
+ cisco.dnac.sda_port_assignments:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - authenticateTemplateName: string
+ connectedDeviceType: string
+ dataVlanName: string
+ fabricId: string
+ id: string
+ interfaceDescription: string
+ interfaceName: string
+ networkDeviceId: string
+ scalableGroupName: string
+ voiceVlanName: string
+
+- name: Delete all
+ cisco.dnac.sda_port_assignments:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ dataVlanName: string
+ fabricId: string
+ interfaceName: string
+ networkDeviceId: string
+ voiceVlanName: string
+
+- name: Delete by id
+ cisco.dnac.sda_port_assignments:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_count_info.py
new file mode 100644
index 000000000..b06cc0ac2
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_count_info.py
@@ -0,0 +1,89 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_port_assignments_count_info
+short_description: Information module for Sda Portassignments Count
+description:
+- Get all Sda Portassignments Count.
+- Returns the count of port assignments that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the port assignment.
+ type: str
+ interfaceName:
+ description:
+ - InterfaceName query parameter. Interface name of the port assignment.
+ type: str
+ dataVlanName:
+ description:
+ - DataVlanName query parameter. Data VLAN name of the port assignment.
+ type: str
+ voiceVlanName:
+ description:
+ - VoiceVlanName query parameter. Voice VLAN name of the port assignment.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetPortAssignmentCount
+ description: Complete reference of the GetPortAssignmentCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-port-assignment-count
+notes:
+ - SDK Method used are
+ sda.Sda.get_port_assignment_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/portAssignments/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Portassignments Count
+ cisco.dnac.sda_port_assignments_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ interfaceName: string
+ dataVlanName: string
+ voiceVlanName: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_info.py
new file mode 100644
index 000000000..a69df71ae
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_port_assignments_info.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_port_assignments_info
+short_description: Information module for Sda Portassignments
+description:
+- Get all Sda Portassignments.
+- Returns a list of port assignments that match the provided query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ fabricId:
+ description:
+ - FabricId query parameter. ID of the fabric this device belongs to.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. Network device ID of the port assignment.
+ type: str
+ interfaceName:
+ description:
+ - InterfaceName query parameter. Interface Name of the port assignment.
+ type: str
+ dataVlanName:
+ description:
+ - DataVlanName query parameter. Data VLAN name of the port assignment.
+ type: str
+ voiceVlanName:
+ description:
+ - VoiceVlanName query parameter. Voice VLAN name of the port assignment.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of records to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetPortAssignments
+ description: Complete reference of the GetPortAssignments API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-port-assignments
+notes:
+ - SDK Method used are
+ sda.Sda.get_port_assignments,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/portAssignments,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Portassignments
+ cisco.dnac.sda_port_assignments_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ fabricId: string
+ networkDeviceId: string
+ interfaceName: string
+ dataVlanName: string
+ voiceVlanName: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "fabricId": "string",
+ "networkDeviceId": "string",
+ "interfaceName": "string",
+ "connectedDeviceType": "string",
+ "dataVlanName": "string",
+ "voiceVlanName": "string",
+ "authenticateTemplateName": "string",
+ "scalableGroupName": "string",
+ "interfaceDescription": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device.py b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device.py
index 5940634b8..6bf8e028c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device.py
@@ -26,8 +26,8 @@ options:
description: SiteNameHierarchy of the provisioned device.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA ProvisionWiredDevice
description: Complete reference of the ProvisionWiredDevice API.
@@ -91,7 +91,6 @@ EXAMPLES = r"""
siteNameHierarchy: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device_info.py
index ae78549ec..b12f10545 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_device_info.py
@@ -24,8 +24,8 @@ options:
- DeviceManagementIpAddress query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetProvisionedWiredDevice
description: Complete reference of the GetProvisionedWiredDevice API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices.py b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices.py
new file mode 100644
index 000000000..aafc283c8
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_provision_devices
+short_description: Resource module for Sda Provisiondevices
+description:
+- Manage operations create, update and delete of the resource Sda Provisiondevices.
+- Provisions network devices to respective Sites based on user input.
+- Delete provisioned devices based on query parameters.
+- Deletes provisioned device based on Id.
+- Re-provisions network devices to the site based on the user input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ id:
+ description: Id path parameter. ID of provisioned device.
+ type: str
+ networkDeviceId:
+ description: NetworkDeviceId query parameter. ID of the network device.
+ type: str
+ payload:
+ description: Sda Provision Devices's payload.
+ elements: dict
+ suboptions:
+ networkDeviceId:
+ description: ID of network device to be provisioned.
+ type: str
+ siteId:
+ description: ID of the site this network device needs to be provisioned.
+ type: str
+ type: list
+ siteId:
+ description: SiteId query parameter. ID of the site hierarchy.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA ProvisionDevices
+ description: Complete reference of the ProvisionDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!provision-devices
+- name: Cisco DNA Center documentation for SDA DeleteProvisionedDeviceById
+ description: Complete reference of the DeleteProvisionedDeviceById API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-provisioned-device-by-id
+- name: Cisco DNA Center documentation for SDA DeleteProvisionedDevices
+ description: Complete reference of the DeleteProvisionedDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-provisioned-devices
+- name: Cisco DNA Center documentation for SDA ReProvisionDevices
+ description: Complete reference of the ReProvisionDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!re-provision-devices
+notes:
+ - SDK Method used are
+ sda.Sda.delete_provisioned_device_by_id,
+ sda.Sda.provision_devices,
+ sda.Sda.re_provision_devices,
+
+ - Paths used are
+ post /dna/intent/api/v1/sda/provisionDevices,
+ delete /dna/intent/api/v1/sda/provisionDevices,
+ delete /dna/intent/api/v1/sda/provisionDevices/{id},
+ put /dna/intent/api/v1/sda/provisionDevices,
+
+"""
+
+EXAMPLES = r"""
+- name: Delete all
+ cisco.dnac.sda_provision_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ networkDeviceId: string
+ siteId: string
+
+- name: Create
+ cisco.dnac.sda_provision_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - networkDeviceId: string
+ siteId: string
+
+- name: Update all
+ cisco.dnac.sda_provision_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ payload:
+ - id: string
+ networkDeviceId: string
+ siteId: string
+
+- name: Delete by id
+ cisco.dnac.sda_provision_devices:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ id: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_count_info.py
new file mode 100644
index 000000000..f42d55be3
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_count_info.py
@@ -0,0 +1,69 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_provision_devices_count_info
+short_description: Information module for Sda Provisiondevices Count
+description:
+- Get all Sda Provisiondevices Count.
+- Returns the count of provisioned devices based on query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ siteId:
+ description:
+ - SiteId query parameter. ID of the site hierarchy.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetProvisionedDevicesCount
+ description: Complete reference of the GetProvisionedDevicesCount API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-provisioned-devices-count
+notes:
+ - SDK Method used are
+ sda.Sda.get_provisioned_devices_count,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/provisionDevices/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Provisiondevices Count
+ cisco.dnac.sda_provision_devices_count_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ siteId: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "count": 0
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_info.py
new file mode 100644
index 000000000..ff14527a6
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_provision_devices_info.py
@@ -0,0 +1,93 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_provision_devices_info
+short_description: Information module for Sda Provisiondevices
+description:
+- Get all Sda Provisiondevices.
+- Returns the list of provisioned devices based on query parameters.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ id:
+ description:
+ - Id query parameter. ID of the provisioned device.
+ type: str
+ networkDeviceId:
+ description:
+ - NetworkDeviceId query parameter. ID of the network device.
+ type: str
+ siteId:
+ description:
+ - SiteId query parameter. ID of the site hierarchy.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Starting record for pagination.
+ type: float
+ limit:
+ description:
+ - Limit query parameter. Maximum number of devices to return.
+ type: float
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for SDA GetProvisionedDevices
+ description: Complete reference of the GetProvisionedDevices API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-provisioned-devices
+notes:
+ - SDK Method used are
+ sda.Sda.get_provisioned_devices,
+
+ - Paths used are
+ get /dna/intent/api/v1/sda/provisionDevices,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Provisiondevices
+ cisco.dnac.sda_provision_devices_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ id: string
+ networkDeviceId: string
+ siteId: string
+ offset: 0
+ limit: 0
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": [
+ {
+ "id": "string",
+ "siteId": "string",
+ "networkDeviceId": "string"
+ }
+ ],
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_site_member_member_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_site_member_member_info.py
new file mode 100644
index 000000000..51d8afdbe
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_site_member_member_info.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: sda_site_member_member_info
+short_description: Information module for Sda Site Member Member
+description:
+- Get all Sda Site Member Member.
+- API to get devices that are assigned to a site.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ id:
+ description:
+ - Id path parameter. Site Id.
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Offset/starting index for pagination.
+ type: str
+ limit:
+ description:
+ - Limit query parameter. Number of devices to be listed. Default and max supported value is 500.
+ type: str
+ memberType:
+ description:
+ - MemberType query parameter. Member type (This API only supports the 'networkdevice' type).
+ type: str
+ level:
+ description:
+ - >
+ Level query parameter. Depth of site hierarchy to be considered to list the devices. If the provided value
+ is -1, devices for all child sites will be listed.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites GetDevicesThatAreAssignedToASite
+ description: Complete reference of the GetDevicesThatAreAssignedToASite API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-devices-that-are-assigned-to-a-site
+notes:
+ - SDK Method used are
+ sites.Sites.get_devices_that_are_assigned_to_a_site,
+
+ - Paths used are
+ get /dna/intent/api/v1/site-member/{id}/member,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Sda Site Member Member
+ cisco.dnac.sda_site_member_member_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ offset: string
+ limit: string
+ memberType: string
+ level: string
+ id: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "instanceUuid": "string",
+ "instanceId": 0,
+ "authEntityId": 0,
+ "authEntityClass": 0,
+ "instanceTenantId": "string",
+ "deployPending": "string",
+ "instanceVersion": 0,
+ "apManagerInterfaceIp": "string",
+ "associatedWlcIp": "string",
+ "bootDateTime": "string",
+ "collectionInterval": "string",
+ "collectionIntervalValue": "string",
+ "collectionStatus": "string",
+ "description": "string",
+ "deviceSupportLevel": "string",
+ "dnsResolvedManagementAddress": "string",
+ "family": "string",
+ "hostname": "string",
+ "interfaceCount": "string",
+ "inventoryStatusDetail": "string",
+ "lastUpdateTime": 0,
+ "lastUpdated": "string",
+ "lineCardCount": "string",
+ "lineCardId": "string",
+ "lastDeviceResyncStartTime": "string",
+ "macAddress": "string",
+ "managedAtleastOnce": true,
+ "managementIpAddress": "string",
+ "managementState": "string",
+ "memorySize": "string",
+ "paddedMgmtIpAddress": "string",
+ "pendingSyncRequestsCount": "string",
+ "platformId": "string",
+ "reachabilityFailureReason": "string",
+ "reachabilityStatus": "string",
+ "reasonsForDeviceResync": "string",
+ "reasonsForPendingSyncRequests": "string",
+ "role": "string",
+ "roleSource": "string",
+ "serialNumber": "string",
+ "series": "string",
+ "snmpContact": "string",
+ "snmpLocation": "string",
+ "softwareType": "string",
+ "softwareVersion": "string",
+ "tagCount": "string",
+ "type": "string",
+ "upTime": "string",
+ "uptimeSeconds": 0,
+ "vendor": "string",
+ "displayName": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network.py b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network.py
index 09d01e5ed..485378682 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network.py
@@ -24,8 +24,8 @@ options:
description: VirtualNetworkName query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddVNInFabric
description: Complete reference of the AddVNInFabric API.
@@ -72,7 +72,6 @@ EXAMPLES = r"""
virtualNetworkName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_info.py
index 560c9179f..83de24bb1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_info.py
@@ -28,8 +28,8 @@ options:
- SiteNameHierarchy query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetVNFromSDAFabric
description: Complete reference of the GetVNFromSDAFabric API.
@@ -59,7 +59,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -70,9 +69,12 @@ dnac_response:
"siteNameHierarchy": "string",
"virtualNetworkName": "string",
"fabricName": "string",
- "isInfraVN": "string",
- "isDefaultVN": "string",
+ "isInfraVN": true,
+ "isDefaultVN": true,
+ "virtualNetworkContextId": "string",
+ "virtualNetworkId": "string",
"status": "string",
- "description": "string"
+ "description": "string",
+ "executionId": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool.py b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool.py
index 71bdf744f..373983078 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool.py
@@ -83,8 +83,8 @@ options:
type: str
version_added: 4.0.0
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddIPPoolInSDAVirtualNetwork
description: Complete reference of the AddIPPoolInSDAVirtualNetwork API.
@@ -146,7 +146,6 @@ EXAMPLES = r"""
vlanName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool_info.py
index 35de542c6..48b6c8718 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_ip_pool_info.py
@@ -31,11 +31,13 @@ options:
ipPoolName:
version_added: "4.0.0"
description:
- - IpPoolName query parameter.
+ - >
+      IpPoolName query parameter. Note: use vlanName as a value for this parameter if the same IP pool is
+      assigned to multiple virtual networks (e.g. IpPoolName=vlan1021).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetIPPoolFromSDAVirtualNetwork
description: Complete reference of the GetIPPoolFromSDAVirtualNetwork API.
@@ -66,7 +68,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2.py b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2.py
index 27bffd8d9..54b6374c7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2.py
@@ -32,8 +32,8 @@ options:
description: Virtual Network Name to be assigned at global level.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA AddVirtualNetworkWithScalableGroups
description: Complete reference of the AddVirtualNetworkWithScalableGroups API.
@@ -103,7 +103,6 @@ EXAMPLES = r"""
virtualNetworkName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2_info.py
index fc73454d1..3f6adbb10 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sda_virtual_network_v2_info.py
@@ -24,8 +24,8 @@ options:
- VirtualNetworkName query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for SDA GetVirtualNetworkWithScalableGroups
description: Complete reference of the GetVirtualNetworkWithScalableGroups API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -68,7 +67,9 @@ dnac_response:
"string"
],
"vManageVpnId": "string",
+ "virtualNetworkContextId": "string",
"status": "string",
- "description": "string"
+ "description": "string",
+ "executionId": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_devices_info.py b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_devices_info.py
index 977fe48cd..71f829292 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_devices_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_devices_info.py
@@ -24,8 +24,8 @@ options:
- AdvisoryId path parameter. Advisory ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Security Advisories GetDevicesPerAdvisory
description: Complete reference of the GetDevicesPerAdvisory API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_ids_per_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_ids_per_device_info.py
index 47f2db396..596485730 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_ids_per_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_ids_per_device_info.py
@@ -10,7 +10,7 @@ module: security_advisories_ids_per_device_info
short_description: Information module for Security Advisories Ids Per Device
description:
- Get Security Advisories Ids Per Device by id.
-- Retrieves list of advisory IDs for a device.
+- Retrieves advisory device details for a device.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -24,15 +24,15 @@ options:
- DeviceId path parameter. Device instance UUID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Security Advisories GetAdvisoryIDsPerDevice
- description: Complete reference of the GetAdvisoryIDsPerDevice API.
- link: https://developer.cisco.com/docs/dna-center/#!get-advisory-i-ds-per-device
+- name: Cisco DNA Center documentation for Security Advisories GetAdvisoryDeviceDetail
+ description: Complete reference of the GetAdvisoryDeviceDetail API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-advisory-device-detail
notes:
- SDK Method used are
- security_advisories.SecurityAdvisories.get_advisory_ids_per_device,
+ security_advisories.SecurityAdvisories.get_advisory_device_detail,
- Paths used are
get /dna/intent/api/v1/security-advisory/device/{deviceId},
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,14 +61,17 @@ dnac_response:
type: dict
sample: >
{
- "response": [
- {
- "deviceId": "string",
- "advisoryIds": [
- "string"
- ]
- }
- ],
+ "response": {
+ "deviceId": "string",
+ "advisoryIds": [
+ "string"
+ ],
+ "hiddenAdvisoryCount": 0,
+ "scanMode": "string",
+ "scanStatus": "string",
+ "comments": "string",
+ "lastScanTime": 0
+ },
"version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_info.py b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_info.py
index a2d860316..153822113 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Security Advisories GetAdvisoriesList
description: Complete reference of the GetAdvisoriesList API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -57,21 +56,20 @@ dnac_response:
type: dict
sample: >
{
- "response": [
- {
- "advisoryId": "string",
- "deviceCount": 0,
- "hiddenDeviceCount": 0,
- "cves": [
- "string"
- ],
- "publicationUrl": "string",
- "sir": "string",
- "detectionType": "string",
- "defaultDetectionType": "string",
- "defaultConfigMatchPattern": "string"
- }
- ],
+ "response": {
+ "advisoryId": "string",
+ "deviceCount": 0,
+ "hiddenDeviceCount": 0,
+ "cves": [
+ "string"
+ ],
+ "publicationUrl": "string",
+ "sir": "string",
+ "detectionType": "string",
+ "defaultDetectionType": "string",
+ "defaultConfigMatchPattern": "string",
+ "fixedVersions": {}
+ },
"version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_per_device_info.py b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_per_device_info.py
index 486bb4654..72cbdacbc 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_per_device_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_per_device_info.py
@@ -24,8 +24,8 @@ options:
- DeviceId path parameter. Device instance UUID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Security Advisories GetAdvisoriesPerDevice
description: Complete reference of the GetAdvisoriesPerDevice API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,21 +61,20 @@ dnac_response:
type: dict
sample: >
{
- "response": [
- {
- "advisoryId": "string",
- "deviceCount": 0,
- "hiddenDeviceCount": 0,
- "cves": [
- "string"
- ],
- "publicationUrl": "string",
- "sir": "string",
- "detectionType": "string",
- "defaultDetectionType": "string",
- "defaultConfigMatchPattern": "string"
- }
- ],
+ "response": {
+ "advisoryId": "string",
+ "deviceCount": 0,
+ "hiddenDeviceCount": 0,
+ "cves": [
+ "string"
+ ],
+ "publicationUrl": "string",
+ "sir": "string",
+ "detectionType": "string",
+ "defaultDetectionType": "string",
+ "defaultConfigMatchPattern": "string",
+ "fixedVersions": {}
+ },
"version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_summary_info.py
index 88440aceb..5fcd6cfd6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/security_advisories_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/security_advisories_summary_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Security Advisories GetAdvisoriesSummary
description: Complete reference of the GetAdvisoriesSummary API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -58,33 +57,39 @@ dnac_response:
sample: >
{
"response": {
- "NA": {
- "CONFIG": 0,
- "VERSION": 0,
- "TOTAL": 0
- },
"INFORMATIONAL": {
"CONFIG": 0,
+ "CUSTOM_CONFIG": 0,
"VERSION": 0,
"TOTAL": 0
},
"LOW": {
"CONFIG": 0,
+ "CUSTOM_CONFIG": 0,
"VERSION": 0,
"TOTAL": 0
},
"MEDIUM": {
"CONFIG": 0,
+ "CUSTOM_CONFIG": 0,
"VERSION": 0,
"TOTAL": 0
},
"HIGH": {
"CONFIG": 0,
+ "CUSTOM_CONFIG": 0,
"VERSION": 0,
"TOTAL": 0
},
"CRITICAL": {
"CONFIG": 0,
+ "CUSTOM_CONFIG": 0,
+ "VERSION": 0,
+ "TOTAL": 0
+ },
+ "NA": {
+ "CONFIG": 0,
+ "CUSTOM_CONFIG": 0,
"VERSION": 0,
"TOTAL": 0
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sensor.py b/ansible_collections/cisco/dnac/plugins/modules/sensor.py
index c98887c45..c447c541a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sensor.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sensor.py
@@ -22,73 +22,578 @@ options:
elements: dict
suboptions:
bands:
- description: Bands.
+ description: The WIFI bands.
type: str
numberOfApsToTest:
- description: Number Of Aps To Test.
- type: str
+ description: Number of APs to test.
+ type: int
rssiThreshold:
- description: Rssi Threshold.
- type: str
+ description: RSSI threshold.
+ type: int
type: list
connection:
- description: Connection.
+ description: Connection type of test WIRED, WIRELESS, BOTH.
+ type: str
+ encryptionMode:
+ description: Encryption mode.
type: str
+ locationInfoList:
+ description: Sensor's locationInfoList.
+ elements: dict
+ suboptions:
+ allSensors:
+ description: Use all sensors in the site for test.
+ type: bool
+ customManagementVlan:
+ description: Custom Management VLAN.
+ type: bool
+ locationId:
+ description: Site UUID.
+ type: str
+ locationType:
+ description: Site type.
+ type: str
+ macAddressList:
+ description: MAC addresses.
+ elements: str
+ type: list
+ managementVlan:
+ description: Management VLAN.
+ type: str
+ siteHierarchy:
+        description: Site name hierarchy.
+ type: str
+ type: list
modelVersion:
- description: Model Version.
+ description: Test template object model version (must be 2).
type: int
name:
- description: Name.
+ description: The sensor test template name.
+ type: str
+ profiles:
+ description: Sensor's profiles.
+ elements: dict
+ suboptions:
+ authProtocol:
+ description: Auth protocol.
+ type: str
+ authType:
+        description: Authentication type OPEN, WPA2_PSK, WPA2_EAP, WEB_AUTH, MAB, DOT1X,
+ OTHER.
+ type: str
+ certdownloadurl:
+ description: Certificate download URL.
+ type: str
+ certfilename:
+ description: Auth certificate file name.
+ type: str
+ certpassphrase:
+ description: Certificate password phrase.
+ type: str
+ certstatus:
+ description: Certificate status INACTIVE or ACTIVE.
+ type: str
+ certxferprotocol:
+        description: Certificate transferring protocol HTTP or HTTPS.
+ type: str
+ deviceType:
+ description: Device Type.
+ type: str
+ eapMethod:
+ description: WPA2_EAP methods EAP-FAST, PEAP-MSCHAPv2, EAP-TLS, PEAP-TLS, EAP-TTLS-MSCHAPv2,
+ EAP-TTLS-PAP, EAP-TTLS-CHAP, EAP-FAST-GTC, EAP-PEAP-GTC.
+ type: str
+ extWebAuth:
+ description: Indication of using external WEB Auth.
+ type: bool
+ extWebAuthAccessUrl:
+ description: External WEB Auth access URL.
+ type: str
+ extWebAuthHtmlTag:
+ description: Sensor's extWebAuthHtmlTag.
+ elements: dict
+ suboptions:
+ label:
+ description: Label.
+ type: str
+ tag:
+ description: Tag.
+ type: str
+ value:
+ description: Value.
+ type: str
+ type: list
+ extWebAuthPortal:
+ description: External authentication portal.
+ type: str
+ extWebAuthVirtualIp:
+ description: External WEB Auth virtual IP.
+ type: str
+ locationVlanList:
+ description: Sensor's locationVlanList.
+ elements: dict
+ suboptions:
+ locationId:
+ description: Site UUID.
+ type: str
+ vlans:
+ description: Array of VLANs.
+ elements: str
+ type: list
+ type: list
+ password:
+ description: Password string for onboarding SSID.
+ type: str
+ passwordType:
+ description: SSID password type ASCII or HEX.
+ type: str
+ profileName:
+ description: Profile name.
+ type: str
+ psk:
+ description: Password of SSID when passwordType is ASCII.
+ type: str
+ qosPolicy:
+        description: QoS policy PLATINUM, GOLD, SILVER, BRONZE.
+ type: str
+ scep:
+ description: Secure certificate enrollment protocol true or false or null for
+ not applicable.
+ type: bool
+ tests:
+ description: Sensor's tests.
+ elements: dict
+ suboptions:
+ config:
+ description: Sensor's config.
+ elements: dict
+ suboptions:
+ direction:
+ description: IPerf direction (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ domains:
+ description: DNS domain name.
+ elements: str
+ type: list
+ downlinkTest:
+ description: Downlink test.
+ type: bool
+ endPort:
+ description: IPerf end port.
+ type: int
+ exitCommand:
+ description: Exit command.
+ type: str
+ finalPrompt:
+ description: Final prompt.
+ type: str
+ ndtServer:
+ description: NDT server.
+ type: str
+ ndtServerPath:
+ description: NDT server path.
+ type: str
+ ndtServerPort:
+ description: NDT server port.
+ type: str
+ numPackets:
+ description: Number of packets.
+ type: int
+ password:
+ description: Password.
+ type: str
+ passwordPrompt:
+ description: Password prompt.
+ type: str
+ pathToDownload:
+ description: File path for file transfer.
+ type: str
+ port:
+ description: Radius or WEB server port.
+ type: int
+ probeType:
+ description: Probe type.
+ type: str
+ protocol:
+ description: Protocol used by file transfer, IPerf, mail server, and
+ radius (TCP, UDP, FTP, POP3, IMAP, CHAP, PAP).
+ type: str
+ proxyPassword:
+ description: Proxy password.
+ type: str
+ proxyPort:
+ description: Proxy port.
+ type: str
+ proxyServer:
+ description: Proxy server.
+ type: str
+ proxyUserName:
+ description: Proxy user name.
+ type: str
+ server:
+ description: Ping, file transfer, mail, radius, ssh, or telnet server.
+ type: str
+ servers:
+ description: IPerf server list.
+ elements: str
+ type: list
+ sharedSecret:
+ description: Shared secret.
+ type: str
+ startPort:
+ description: IPerf start port.
+ type: int
+ transferType:
+ description: File transfer type (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ udpBandwidth:
+ description: IPerf UDP bandwidth.
+ type: int
+ uplinkTest:
+ description: Uplink test.
+ type: bool
+ url:
+ description: URL.
+ type: str
+ userName:
+ description: User name.
+ type: str
+ userNamePrompt:
+ description: User name prompt.
+ type: str
+ type: list
+ name:
+ description: Name of the test.
+ type: str
+ type: list
+ username:
+ description: User name string for onboarding SSID.
+ type: str
+ vlan:
+ description: VLAN.
+ type: str
+ whiteList:
+ description: Indication of being on allowed list.
+ type: bool
+ type: list
+ runNow:
+ description: Run now (YES, NO).
type: str
+ sensors:
+ description: Sensor's sensors.
+ elements: dict
+ suboptions:
+ allSensorAddition:
+ description: Is all sensor addition.
+ type: bool
+ assigned:
+ description: Is assigned.
+ type: bool
+ configUpdated:
+ description: Configuration updated YES, NO.
+ type: str
+ hostName:
+ description: Host name.
+ type: str
+ iPerfInfo:
+        description: iPerf information as a string to string-list mapping.
+ type: dict
+ id:
+ description: Sensor ID.
+ type: str
+ ipAddress:
+ description: IP address.
+ type: str
+ locationId:
+ description: Site UUID.
+ type: str
+ macAddress:
+ description: MAC address.
+ type: str
+ markedForUninstall:
+ description: Is marked for uninstall.
+ type: bool
+ name:
+ description: Sensor name.
+ type: str
+ runNow:
+ description: Run now YES, NO.
+ type: str
+ sensorType:
+ description: Sensor type.
+ type: str
+ servicePolicy:
+ description: Service policy.
+ type: str
+ status:
+ description: Sensor device status UP, DOWN, REBOOT.
+ type: str
+ switchMac:
+ description: Switch MAC address.
+ type: str
+ switchSerialNumber:
+ description: Switch serial number.
+ type: str
+ switchUuid:
+ description: Switch device UUID.
+ type: str
+ targetAPs:
+ description: Array of target APs.
+ elements: str
+ type: list
+ testMacAddresses:
+        description: Test MAC addresses as a string to string mapping.
+ type: dict
+ wiredApplicationMessage:
+ description: Wired application message.
+ type: str
+ wiredApplicationStatus:
+ description: Wired application status.
+ type: str
+ xorSensor:
+ description: Is XOR sensor.
+ type: bool
+ type: list
ssids:
description: Sensor's ssids.
elements: dict
suboptions:
+ authProtocol:
+ description: Auth protocol.
+ type: str
authType:
- description: Auth Type.
+        description: Authentication type OPEN, WPA2_PSK, WPA2_EAP, WEB_AUTH, MAB, DOT1X,
+ OTHER.
type: str
- categories:
- description: Categories.
- elements: str
+ bands:
+ description: WIFI bands 2.4GHz or 5GHz.
+ type: str
+ certdownloadurl:
+ description: Certificate download URL.
+ type: str
+ certfilename:
+ description: Auth certificate file name.
+ type: str
+ certpassphrase:
+ description: Certificate password phrase.
+ type: str
+ certstatus:
+ description: Certificate status INACTIVE or ACTIVE.
+ type: str
+ certxferprotocol:
+        description: Certificate transferring protocol HTTP or HTTPS.
+ type: str
+ eapMethod:
+ description: WPA2_EAP methods EAP-FAST, PEAP-MSCHAPv2, EAP-TLS, PEAP-TLS, EAP-TTLS-MSCHAPv2,
+ EAP-TTLS-PAP, EAP-TTLS-CHAP, EAP-FAST-GTC, EAP-PEAP-GTC.
+ type: str
+ extWebAuth:
+ description: Indication of using external WEB Auth.
+ type: bool
+ extWebAuthAccessUrl:
+ description: External WEB Auth access URL.
+ type: str
+ extWebAuthHtmlTag:
+ description: Sensor's extWebAuthHtmlTag.
+ elements: dict
+ suboptions:
+ label:
+ description: Label.
+ type: str
+ tag:
+ description: Tag.
+ type: str
+ value:
+ description: Value.
+ type: str
type: list
+ extWebAuthPortal:
+ description: External authentication portal.
+ type: str
+ extWebAuthVirtualIp:
+ description: External WEB Auth virtual IP.
+ type: str
+ layer3webAuthEmailAddress:
+ description: Layer 3 WEB Auth email address.
+ type: str
+ layer3webAuthpassword:
+ description: Layer 3 WEB Auth password.
+ type: str
+ layer3webAuthsecurity:
+ description: Layer 3 WEB Auth security.
+ type: str
+ layer3webAuthuserName:
+ description: Layer 3 WEB Auth user name.
+ type: str
+ password:
+ description: Password string for onboarding SSID.
+ type: str
+ passwordType:
+ description: SSID password type ASCII or HEX.
+ type: str
profileName:
- description: Profile Name.
+ description: The SSID profile name string.
+ type: str
+ proxyPassword:
+ description: Proxy server password.
+ type: str
+ proxyPort:
+ description: Proxy server port.
+ type: str
+ proxyServer:
+ description: Proxy server for onboarding SSID.
+ type: str
+ proxyUserName:
+ description: Proxy server user name.
type: str
psk:
- description: Psk.
+ description: Password of SSID when passwordType is ASCII.
type: str
qosPolicy:
- description: Qos Policy.
+        description: QoS policy PLATINUM, GOLD, SILVER, BRONZE.
type: str
+ scep:
+        description: Secure Certificate Enrollment Protocol (SCEP), true or false, or null
+          if not applicable.
+ type: bool
ssid:
- description: Ssid.
+ description: The SSID string.
type: str
tests:
description: Sensor's tests.
elements: dict
suboptions:
config:
- description: Config.
+ description: Sensor's config.
elements: dict
+ suboptions:
+ direction:
+ description: IPerf direction (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ domains:
+ description: DNS domain name.
+ elements: str
+ type: list
+ downlinkTest:
+ description: Downlink test.
+ type: bool
+ endPort:
+ description: IPerf end port.
+ type: int
+ exitCommand:
+ description: Exit command.
+ type: str
+ finalPrompt:
+ description: Final prompt.
+ type: str
+ ndtServer:
+ description: NDT server.
+ type: str
+ ndtServerPath:
+ description: NDT server path.
+ type: str
+ ndtServerPort:
+ description: NDT server port.
+ type: str
+ numPackets:
+ description: Number of packets.
+ type: int
+ password:
+ description: Password.
+ type: str
+ passwordPrompt:
+ description: Password prompt.
+ type: str
+ pathToDownload:
+ description: File path for file transfer.
+ type: str
+ port:
+ description: Radius or WEB server port.
+ type: int
+ probeType:
+ description: Probe type.
+ type: str
+ protocol:
+ description: Protocol used by file transfer, IPerf, mail server, and
+ radius (TCP, UDP, FTP, POP3, IMAP, CHAP, PAP).
+ type: str
+ proxyPassword:
+ description: Proxy password.
+ type: str
+ proxyPort:
+ description: Proxy port.
+ type: str
+ proxyServer:
+ description: Proxy server.
+ type: str
+ proxyUserName:
+ description: Proxy user name.
+ type: str
+ server:
+ description: Ping, file transfer, mail, radius, ssh, or telnet server.
+ type: str
+ servers:
+ description: IPerf server list.
+ elements: str
+ type: list
+ sharedSecret:
+ description: Shared secret.
+ type: str
+ startPort:
+ description: IPerf start port.
+ type: int
+ transferType:
+ description: File transfer type (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ udpBandwidth:
+ description: IPerf UDP bandwidth.
+ type: int
+ uplinkTest:
+ description: Uplink test.
+ type: bool
+ url:
+ description: URL.
+ type: str
+ userName:
+ description: User name.
+ type: str
+ userNamePrompt:
+ description: User name prompt.
+ type: str
type: list
name:
- description: Name.
+ description: Name of the test.
type: str
type: list
thirdParty:
description: Sensor's thirdParty.
suboptions:
selected:
- description: Selected.
+        description: True if the SSID is third party.
type: bool
type: dict
+ username:
+ description: User name string for onboarding SSID.
+ type: str
+ whiteList:
+ description: Indication of being on allowed list.
+ type: bool
+ wlanId:
+ description: WLAN ID.
+ type: int
+ wlc:
+        description: WLC IP address.
+ type: str
type: list
templateName:
description: TemplateName query parameter.
type: str
+ version:
+ description: The sensor test template version (must be 2).
+ type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sensors CreateSensorTestTemplate
description: Complete reference of the CreateSensorTestTemplate API.
@@ -120,25 +625,188 @@ EXAMPLES = r"""
state: present
apCoverage:
- bands: string
- numberOfApsToTest: string
- rssiThreshold: string
+ numberOfApsToTest: 0
+ rssiThreshold: 0
connection: string
+ encryptionMode: string
+ locationInfoList:
+ - allSensors: true
+ customManagementVlan: true
+ locationId: string
+ locationType: string
+ macAddressList:
+ - string
+ managementVlan: string
+ siteHierarchy: string
modelVersion: 0
name: string
- ssids:
- - authType: string
- categories:
+ profiles:
+ - authProtocol: string
+ authType: string
+ certdownloadurl: string
+ certfilename: string
+ certpassphrase: string
+ certstatus: string
+ certxferprotocol: string
+ deviceType: string
+ eapMethod: string
+ extWebAuth: true
+ extWebAuthAccessUrl: string
+ extWebAuthHtmlTag:
+ - label: string
+ tag: string
+ value: string
+ extWebAuthPortal: string
+ extWebAuthVirtualIp: string
+ locationVlanList:
+ - locationId: string
+ vlans:
+ - string
+ password: string
+ passwordType: string
+ profileName: string
+ psk: string
+ qosPolicy: string
+ scep: true
+ tests:
+ - config:
+ - direction: string
+ domains:
+ - string
+ downlinkTest: true
+ endPort: 0
+ exitCommand: string
+ finalPrompt: string
+ ndtServer: string
+ ndtServerPath: string
+ ndtServerPort: string
+ numPackets: 0
+ password: string
+ passwordPrompt: string
+ pathToDownload: string
+ port: 0
+ probeType: string
+ protocol: string
+ proxyPassword: string
+ proxyPort: string
+ proxyServer: string
+ proxyUserName: string
+ server: string
+ servers:
+ - string
+ sharedSecret: string
+ startPort: 0
+ transferType: string
+ udpBandwidth: 0
+ uplinkTest: true
+ url: string
+ userName: string
+ userNamePrompt: string
+ name: string
+ username: string
+ vlan: string
+ whiteList: true
+ runNow: string
+ sensors:
+ - allSensorAddition: true
+ assigned: true
+ configUpdated: string
+ hostName: string
+ iPerfInfo: {}
+ id: string
+ ipAddress: string
+ locationId: string
+ macAddress: string
+ markedForUninstall: true
+ name: string
+ runNow: string
+ sensorType: string
+ servicePolicy: string
+ status: string
+ switchMac: string
+ switchSerialNumber: string
+ switchUuid: string
+ targetAPs:
- string
+ testMacAddresses: {}
+ wiredApplicationMessage: string
+ wiredApplicationStatus: string
+ xorSensor: true
+ ssids:
+ - authProtocol: string
+ authType: string
+ bands: string
+ certdownloadurl: string
+ certfilename: string
+ certpassphrase: string
+ certstatus: string
+ certxferprotocol: string
+ eapMethod: string
+ extWebAuth: true
+ extWebAuthAccessUrl: string
+ extWebAuthHtmlTag:
+ - label: string
+ tag: string
+ value: string
+ extWebAuthPortal: string
+ extWebAuthVirtualIp: string
+ layer3webAuthEmailAddress: string
+ layer3webAuthpassword: string
+ layer3webAuthsecurity: string
+ layer3webAuthuserName: string
+ password: string
+ passwordType: string
profileName: string
+ proxyPassword: string
+ proxyPort: string
+ proxyServer: string
+ proxyUserName: string
psk: string
qosPolicy: string
+ scep: true
ssid: string
tests:
- config:
- - {}
+ - direction: string
+ domains:
+ - string
+ downlinkTest: true
+ endPort: 0
+ exitCommand: string
+ finalPrompt: string
+ ndtServer: string
+ ndtServerPath: string
+ ndtServerPort: string
+ numPackets: 0
+ password: string
+ passwordPrompt: string
+ pathToDownload: string
+ port: 0
+ probeType: string
+ protocol: string
+ proxyPassword: string
+ proxyPort: string
+ proxyServer: string
+ proxyUserName: string
+ server: string
+ servers:
+ - string
+ sharedSecret: string
+ startPort: 0
+ transferType: string
+ udpBandwidth: 0
+ uplinkTest: true
+ url: string
+ userName: string
+ userNamePrompt: string
name: string
thirdParty:
selected: true
+ username: string
+ whiteList: true
+ wlanId: 0
+ wlc: string
+ version: 0
- name: Delete all
cisco.dnac.sensor:
@@ -153,7 +821,6 @@ EXAMPLES = r"""
templateName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -163,71 +830,202 @@ dnac_response:
{
"version": "string",
"response": {
- "_id": "string",
"name": "string",
+ "_id": "string",
"version": 0,
"modelVersion": 0,
"startTime": 0,
"lastModifiedTime": 0,
"numAssociatedSensor": 0,
- "location": {},
- "siteHierarchy": {},
+ "location": "string",
+ "siteHierarchy": "string",
"status": "string",
"connection": "string",
- "frequency": {},
+ "actionInProgress": "string",
+ "frequency": {
+ "value": 0,
+ "unit": "string"
+ },
"rssiThreshold": 0,
"numNeighborAPThreshold": 0,
"scheduleInDays": 0,
"wlans": [
- {}
+ "string"
],
"ssids": [
{
- "bands": {},
+ "bands": "string",
"ssid": "string",
"profileName": "string",
- "authType": "string",
- "authTypeRcvd": {},
- "psk": "string",
- "username": {},
- "password": {},
- "eapMethod": {},
- "scep": true,
- "authProtocol": {},
- "certfilename": {},
- "certxferprotocol": "string",
- "certstatus": "string",
- "certpassphrase": {},
- "certdownloadurl": {},
"numAps": 0,
"numSensors": 0,
- "layer3webAuthsecurity": {},
- "layer3webAuthuserName": {},
- "layer3webAuthpassword": {},
- "extWebAuthVirtualIp": {},
- "layer3webAuthEmailAddress": {},
- "qosPolicy": "string",
- "extWebAuth": true,
- "whiteList": true,
- "extWebAuthPortal": {},
- "extWebAuthAccessUrl": {},
- "extWebAuthHtmlTag": [
- {}
- ],
+ "layer3webAuthsecurity": "string",
+ "layer3webAuthuserName": "string",
+ "layer3webAuthpassword": "string",
+ "layer3webAuthEmailAddress": "string",
"thirdParty": {
"selected": true
},
"id": 0,
"wlanId": 0,
- "wlc": {},
+ "wlc": "string",
"validFrom": 0,
"validTo": 0,
"status": "string",
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "authType": "string",
+ "psk": "string",
+ "username": "string",
+ "password": "string",
+ "passwordType": "string",
+ "eapMethod": "string",
+ "scep": true,
+ "authProtocol": "string",
+ "certfilename": "string",
+ "certxferprotocol": "string",
+ "certstatus": "string",
+ "certpassphrase": "string",
+ "certdownloadurl": "string",
+ "extWebAuthVirtualIp": "string",
+ "extWebAuth": true,
+ "whiteList": true,
+ "extWebAuthPortal": "string",
+ "extWebAuthAccessUrl": "string",
+ "extWebAuthHtmlTag": [
+ {
+ "label": "string",
+ "tag": "string",
+ "value": "string"
+ }
+ ],
+ "qosPolicy": "string",
+ "tests": [
+ {
+ "name": "string",
+ "config": [
+ {
+ "domains": [
+ "string"
+ ],
+ "server": "string",
+ "userName": "string",
+ "password": "string",
+ "url": "string",
+ "port": 0,
+ "protocol": "string",
+ "servers": [
+ "string"
+ ],
+ "direction": "string",
+ "startPort": 0,
+ "endPort": 0,
+ "udpBandwidth": 0,
+ "probeType": "string",
+ "numPackets": 0,
+ "pathToDownload": "string",
+ "transferType": "string",
+ "sharedSecret": "string",
+ "ndtServer": "string",
+ "ndtServerPort": "string",
+ "ndtServerPath": "string",
+ "uplinkTest": true,
+ "downlinkTest": true,
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "userNamePrompt": "string",
+ "passwordPrompt": "string",
+ "exitCommand": "string",
+ "finalPrompt": "string"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "profiles": [
+ {
+ "authType": "string",
+ "psk": "string",
+ "username": "string",
+ "password": "string",
+ "passwordType": "string",
+ "eapMethod": "string",
+ "scep": true,
+ "authProtocol": "string",
+ "certfilename": "string",
+ "certxferprotocol": "string",
+ "certstatus": "string",
+ "certpassphrase": "string",
+ "certdownloadurl": "string",
+ "extWebAuthVirtualIp": "string",
+ "extWebAuth": true,
+ "whiteList": true,
+ "extWebAuthPortal": "string",
+ "extWebAuthAccessUrl": "string",
+ "extWebAuthHtmlTag": [
+ {
+ "label": "string",
+ "tag": "string",
+ "value": "string"
+ }
+ ],
+ "qosPolicy": "string",
"tests": [
{
"name": "string",
"config": [
- {}
+ {
+ "domains": [
+ "string"
+ ],
+ "server": "string",
+ "userName": "string",
+ "password": "string",
+ "url": "string",
+ "port": 0,
+ "protocol": "string",
+ "servers": [
+ "string"
+ ],
+ "direction": "string",
+ "startPort": 0,
+ "endPort": 0,
+ "udpBandwidth": 0,
+ "probeType": "string",
+ "numPackets": 0,
+ "pathToDownload": "string",
+ "transferType": "string",
+ "sharedSecret": "string",
+ "ndtServer": "string",
+ "ndtServerPort": "string",
+ "ndtServerPath": "string",
+ "uplinkTest": true,
+ "downlinkTest": true,
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "userNamePrompt": "string",
+ "passwordPrompt": "string",
+ "exitCommand": "string",
+ "finalPrompt": "string"
+ }
+ ]
+ }
+ ],
+ "profileName": "string",
+ "deviceType": "string",
+ "vlan": "string",
+ "locationVlanList": [
+ {
+ "locationId": "string",
+ "vlans": [
+ "string"
]
}
]
@@ -239,12 +1037,46 @@ dnac_response:
"encryptionMode": "string",
"runNow": "string",
"locationInfoList": [
- {}
+ {
+ "locationId": "string",
+ "locationType": "string",
+ "allSensors": true,
+ "siteHierarchy": "string",
+ "macAddressList": [
+ "string"
+ ],
+ "managementVlan": "string",
+ "customManagementVlan": true
+ }
],
- "schedule": {},
- "tests": {},
"sensors": [
- {}
+ {
+ "name": "string",
+ "macAddress": "string",
+ "switchMac": "string",
+ "switchUuid": "string",
+ "switchSerialNumber": "string",
+ "markedForUninstall": true,
+ "ipAddress": "string",
+ "hostName": "string",
+ "wiredApplicationStatus": "string",
+ "wiredApplicationMessage": "string",
+ "assigned": true,
+ "status": "string",
+ "xorSensor": true,
+ "targetAPs": [
+ "string"
+ ],
+ "runNow": "string",
+ "locationId": "string",
+ "allSensorAddition": true,
+ "configUpdated": "string",
+ "sensorType": "string",
+ "testMacAddresses": {},
+ "id": "string",
+ "servicePolicy": "string",
+ "iPerfInfo": {}
+ }
],
"apCoverage": [
{
@@ -252,11 +1084,7 @@ dnac_response:
"numberOfApsToTest": 0,
"rssiThreshold": 0
}
- ],
- "testDurationEstimate": 0,
- "testTemplate": true,
- "legacyTestSuite": true,
- "tenantId": {}
+ ]
}
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sensor_info.py b/ansible_collections/cisco/dnac/plugins/modules/sensor_info.py
index 2955ceac1..d2965470a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sensor_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sensor_info.py
@@ -24,8 +24,8 @@ options:
- SiteId query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sensors Sensors
description: Complete reference of the Sensors API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -76,13 +75,13 @@ dnac_response:
"version": "string",
"lastSeen": 0,
"type": "string",
- "sshConfig": {
+ "ssh": {
"sshState": "string",
"sshUserName": "string",
"sshPassword": "string",
"enablePassword": "string"
},
- "isLEDEnabled": true
+ "led": true
}
]
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sensor_test_run.py b/ansible_collections/cisco/dnac/plugins/modules/sensor_test_run.py
index cb800c968..c7731deeb 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sensor_test_run.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sensor_test_run.py
@@ -20,8 +20,8 @@ options:
description: Template Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sensors RunNowSensorTest
description: Complete reference of the RunNowSensorTest API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
templateName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_duplicate.py b/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_duplicate.py
index 85cad96d2..c25d8432f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_duplicate.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_duplicate.py
@@ -17,14 +17,14 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
newTemplateName:
- description: New Template Name.
+ description: Destination test template name.
type: str
templateName:
- description: Template Name.
+ description: Source test template name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sensors DuplicateSensorTestTemplate
description: Complete reference of the DuplicateSensorTestTemplate API.
@@ -52,7 +52,6 @@ EXAMPLES = r"""
templateName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -62,71 +61,202 @@ dnac_response:
{
"version": "string",
"response": {
- "_id": "string",
"name": "string",
+ "_id": "string",
"version": 0,
"modelVersion": 0,
"startTime": 0,
"lastModifiedTime": 0,
"numAssociatedSensor": 0,
- "location": {},
- "siteHierarchy": {},
+ "location": "string",
+ "siteHierarchy": "string",
"status": "string",
"connection": "string",
- "frequency": {},
+ "actionInProgress": "string",
+ "frequency": {
+ "value": 0,
+ "unit": "string"
+ },
"rssiThreshold": 0,
"numNeighborAPThreshold": 0,
"scheduleInDays": 0,
"wlans": [
- {}
+ "string"
],
"ssids": [
{
- "bands": {},
+ "bands": "string",
"ssid": "string",
"profileName": "string",
- "authType": "string",
- "authTypeRcvd": {},
- "psk": "string",
- "username": {},
- "password": {},
- "eapMethod": {},
- "scep": true,
- "authProtocol": {},
- "certfilename": {},
- "certxferprotocol": "string",
- "certstatus": "string",
- "certpassphrase": {},
- "certdownloadurl": {},
"numAps": 0,
"numSensors": 0,
- "layer3webAuthsecurity": {},
- "layer3webAuthuserName": {},
- "layer3webAuthpassword": {},
- "extWebAuthVirtualIp": {},
- "layer3webAuthEmailAddress": {},
- "qosPolicy": "string",
- "extWebAuth": true,
- "whiteList": true,
- "extWebAuthPortal": {},
- "extWebAuthAccessUrl": {},
- "extWebAuthHtmlTag": [
- {}
- ],
+ "layer3webAuthsecurity": "string",
+ "layer3webAuthuserName": "string",
+ "layer3webAuthpassword": "string",
+ "layer3webAuthEmailAddress": "string",
"thirdParty": {
"selected": true
},
"id": 0,
"wlanId": 0,
- "wlc": {},
+ "wlc": "string",
"validFrom": 0,
"validTo": 0,
"status": "string",
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "authType": "string",
+ "psk": "string",
+ "username": "string",
+ "password": "string",
+ "passwordType": "string",
+ "eapMethod": "string",
+ "scep": true,
+ "authProtocol": "string",
+ "certfilename": "string",
+ "certxferprotocol": "string",
+ "certstatus": "string",
+ "certpassphrase": "string",
+ "certdownloadurl": "string",
+ "extWebAuthVirtualIp": "string",
+ "extWebAuth": true,
+ "whiteList": true,
+ "extWebAuthPortal": "string",
+ "extWebAuthAccessUrl": "string",
+ "extWebAuthHtmlTag": [
+ {
+ "label": "string",
+ "tag": "string",
+ "value": "string"
+ }
+ ],
+ "qosPolicy": "string",
"tests": [
{
"name": "string",
"config": [
- {}
+ {
+ "domains": [
+ "string"
+ ],
+ "server": "string",
+ "userName": "string",
+ "password": "string",
+ "url": "string",
+ "port": 0,
+ "protocol": "string",
+ "servers": [
+ "string"
+ ],
+ "direction": "string",
+ "startPort": 0,
+ "endPort": 0,
+ "udpBandwidth": 0,
+ "probeType": "string",
+ "numPackets": 0,
+ "pathToDownload": "string",
+ "transferType": "string",
+ "sharedSecret": "string",
+ "ndtServer": "string",
+ "ndtServerPort": "string",
+ "ndtServerPath": "string",
+ "uplinkTest": true,
+ "downlinkTest": true,
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "userNamePrompt": "string",
+ "passwordPrompt": "string",
+ "exitCommand": "string",
+ "finalPrompt": "string"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "profiles": [
+ {
+ "authType": "string",
+ "psk": "string",
+ "username": "string",
+ "password": "string",
+ "passwordType": "string",
+ "eapMethod": "string",
+ "scep": true,
+ "authProtocol": "string",
+ "certfilename": "string",
+ "certxferprotocol": "string",
+ "certstatus": "string",
+ "certpassphrase": "string",
+ "certdownloadurl": "string",
+ "extWebAuthVirtualIp": "string",
+ "extWebAuth": true,
+ "whiteList": true,
+ "extWebAuthPortal": "string",
+ "extWebAuthAccessUrl": "string",
+ "extWebAuthHtmlTag": [
+ {
+ "label": "string",
+ "tag": "string",
+ "value": "string"
+ }
+ ],
+ "qosPolicy": "string",
+ "tests": [
+ {
+ "name": "string",
+ "config": [
+ {
+ "domains": [
+ "string"
+ ],
+ "server": "string",
+ "userName": "string",
+ "password": "string",
+ "url": "string",
+ "port": 0,
+ "protocol": "string",
+ "servers": [
+ "string"
+ ],
+ "direction": "string",
+ "startPort": 0,
+ "endPort": 0,
+ "udpBandwidth": 0,
+ "probeType": "string",
+ "numPackets": 0,
+ "pathToDownload": "string",
+ "transferType": "string",
+ "sharedSecret": "string",
+ "ndtServer": "string",
+ "ndtServerPort": "string",
+ "ndtServerPath": "string",
+ "uplinkTest": true,
+ "downlinkTest": true,
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "userNamePrompt": "string",
+ "passwordPrompt": "string",
+ "exitCommand": "string",
+ "finalPrompt": "string"
+ }
+ ]
+ }
+ ],
+ "profileName": "string",
+ "deviceType": "string",
+ "vlan": "string",
+ "locationVlanList": [
+ {
+ "locationId": "string",
+ "vlans": [
+ "string"
]
}
]
@@ -144,36 +274,40 @@ dnac_response:
"allSensors": true,
"siteHierarchy": "string",
"macAddressList": [
- {}
- ]
+ "string"
+ ],
+ "managementVlan": "string",
+ "customManagementVlan": true
}
],
- "schedule": {
- "testScheduleMode": "string",
- "scheduleRange": [
- {
- "timeRange": [
- {
- "from": "string",
- "to": "string",
- "frequency": {
- "value": 0,
- "unit": "string"
- }
- }
- ],
- "day": "string"
- }
- ],
- "startTime": 0,
- "frequency": {
- "value": 0,
- "unit": "string"
- }
- },
- "tests": {},
"sensors": [
- {}
+ {
+ "name": "string",
+ "macAddress": "string",
+ "switchMac": "string",
+ "switchUuid": "string",
+ "switchSerialNumber": "string",
+ "markedForUninstall": true,
+ "ipAddress": "string",
+ "hostName": "string",
+ "wiredApplicationStatus": "string",
+ "wiredApplicationMessage": "string",
+ "assigned": true,
+ "status": "string",
+ "xorSensor": true,
+ "targetAPs": [
+ "string"
+ ],
+ "runNow": "string",
+ "locationId": "string",
+ "allSensorAddition": true,
+ "configUpdated": "string",
+ "sensorType": "string",
+ "testMacAddresses": {},
+ "id": "string",
+ "servicePolicy": "string",
+ "iPerfInfo": {}
+ }
],
"apCoverage": [
{
@@ -181,11 +315,7 @@ dnac_response:
"numberOfApsToTest": 0,
"rssiThreshold": 0
}
- ],
- "testDurationEstimate": 0,
- "testTemplate": true,
- "legacyTestSuite": true,
- "tenantId": {}
+ ]
}
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_edit.py b/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_edit.py
index b8bf3f3b4..5fc48f442 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_edit.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sensor_test_template_edit.py
@@ -16,75 +16,659 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
+ _id:
+ description: The sensor test template unique identifier, generated at test creation
+ time.
+ type: str
+ actionInProgress:
+    description: Indication of in-progress action.
+ type: str
+ apCoverage:
+ description: Sensor Test Template Edit's apCoverage.
+ elements: dict
+ suboptions:
+ bands:
+ description: The WIFI bands.
+ type: str
+ numberOfApsToTest:
+ description: Number of APs to test.
+ type: int
+ rssiThreshold:
+ description: RSSI threshold.
+ type: int
+ type: list
+ connection:
+ description: Connection type of test WIRED, WIRELESS, BOTH.
+ type: str
+ encryptionMode:
+ description: Encryption mode.
+ type: str
+ frequency:
+ description: Sensor Test Template Edit's frequency.
+ suboptions:
+ unit:
+ description: Unit of the time value (NANOSECONDS, MICROSECONDS, MILLISECONDS,
+ SECONDS, MINUTES, HOURS, DAYS).
+ type: str
+ value:
+ description: Value of the unit.
+ type: int
+ type: dict
+ lastModifiedTime:
+ description: Last modify time.
+ type: int
+ location:
+ description: Location string.
+ type: str
locationInfoList:
description: Sensor Test Template Edit's locationInfoList.
elements: dict
suboptions:
allSensors:
- description: All Sensors.
+ description: Use all sensors in the site for test.
+ type: bool
+ customManagementVlan:
+ description: Custom Management VLAN.
type: bool
locationId:
- description: Location Id.
+ description: Site UUID.
type: str
locationType:
- description: Location Type.
+ description: Site type.
+ type: str
+ macAddressList:
+ description: MAC addresses.
+ elements: str
+ type: list
+ managementVlan:
+ description: Management VLAN.
type: str
siteHierarchy:
- description: Site Hierarchy.
+      description: Site name hierarchy.
type: str
type: list
- schedule:
- description: Sensor Test Template Edit's schedule.
+ modelVersion:
+ description: Test template object model version (must be 2).
+ type: int
+ name:
+ description: The sensor test template name, which is the same as in 'templateName'.
+ type: str
+ numAssociatedSensor:
+ description: Number of associated sensor.
+ type: int
+ numNeighborAPThreshold:
+ description: Number of neighboring AP threshold.
+ type: int
+ profiles:
+ description: Sensor Test Template Edit's profiles.
+ elements: dict
suboptions:
- frequency:
- description: Sensor Test Template Edit's frequency.
+ authProtocol:
+ description: Auth protocol.
+ type: str
+ authType:
+ description: Authentication type OPEN, WPA2_PSK, WPA2_EAP, WEB_AUTH, MAB, DOT1X,
+ OTHER.
+ type: str
+ certdownloadurl:
+ description: Certificate download URL.
+ type: str
+ certfilename:
+ description: Auth certificate file name.
+ type: str
+ certpassphrase:
+ description: Certificate password phrase.
+ type: str
+ certstatus:
+ description: Certificate status INACTIVE or ACTIVE.
+ type: str
+ certxferprotocol:
+      description: Certificate transferring protocol HTTP or HTTPS.
+ type: str
+ deviceType:
+ description: Device Type.
+ type: str
+ eapMethod:
+ description: WPA2_EAP methods EAP-FAST, PEAP-MSCHAPv2, EAP-TLS, PEAP-TLS, EAP-TTLS-MSCHAPv2,
+ EAP-TTLS-PAP, EAP-TTLS-CHAP, EAP-FAST-GTC, EAP-PEAP-GTC.
+ type: str
+ extWebAuth:
+ description: Indication of using external WEB Auth.
+ type: bool
+ extWebAuthAccessUrl:
+ description: External WEB Auth access URL.
+ type: str
+ extWebAuthHtmlTag:
+ description: Sensor Test Template Edit's extWebAuthHtmlTag.
+ elements: dict
suboptions:
- unit:
- description: Unit.
+ label:
+ description: Label.
+ type: str
+ tag:
+ description: Tag.
type: str
value:
description: Value.
- type: int
+ type: str
+ type: list
+ extWebAuthPortal:
+ description: External authentication portal.
+ type: str
+ extWebAuthVirtualIp:
+ description: External WEB Auth virtual IP.
+ type: str
+ locationVlanList:
+ description: Sensor Test Template Edit's locationVlanList.
+ elements: dict
+ suboptions:
+ locationId:
+ description: Site UUID.
+ type: str
+ vlans:
+ description: Array of VLANs.
+ elements: str
+ type: list
+ type: list
+ password:
+ description: Password string for onboarding SSID.
+ type: str
+ passwordType:
+ description: SSID password type ASCII or HEX.
+ type: str
+ profileName:
+ description: Profile name.
+ type: str
+ psk:
+ description: Password of SSID when passwordType is ASCII.
+ type: str
+ qosPolicy:
+      description: QoS policy PLATINUM, GOLD, SILVER, BRONZE.
+ type: str
+ scep:
+ description: Secure certificate enrollment protocol true or false or null for
+ not applicable.
+ type: bool
+ tests:
+ description: Sensor Test Template Edit's tests.
+ elements: dict
+ suboptions:
+ config:
+ description: Sensor Test Template Edit's config.
+ elements: dict
+ suboptions:
+ direction:
+ description: IPerf direction (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ domains:
+ description: DNS domain name.
+ elements: str
+ type: list
+ downlinkTest:
+ description: Downlink test.
+ type: bool
+ endPort:
+ description: IPerf end port.
+ type: int
+ exitCommand:
+ description: Exit command.
+ type: str
+ finalPrompt:
+ description: Final prompt.
+ type: str
+ ndtServer:
+ description: NDT server.
+ type: str
+ ndtServerPath:
+ description: NDT server path.
+ type: str
+ ndtServerPort:
+ description: NDT server port.
+ type: str
+ numPackets:
+ description: Number of packets.
+ type: int
+ password:
+ description: Password.
+ type: str
+ passwordPrompt:
+ description: Password prompt.
+ type: str
+ pathToDownload:
+ description: File path for file transfer.
+ type: str
+ port:
+ description: Radius or WEB server port.
+ type: int
+ probeType:
+ description: Probe type.
+ type: str
+ protocol:
+ description: Protocol used by file transfer, IPerf, mail server, and
+ radius (TCP, UDP, FTP, POP3, IMAP, CHAP, PAP).
+ type: str
+ proxyPassword:
+ description: Proxy password.
+ type: str
+ proxyPort:
+ description: Proxy port.
+ type: str
+ proxyServer:
+ description: Proxy server.
+ type: str
+ proxyUserName:
+ description: Proxy user name.
+ type: str
+ server:
+ description: Ping, file transfer, mail, radius, ssh, or telnet server.
+ type: str
+ servers:
+ description: IPerf server list.
+ elements: str
+ type: list
+ sharedSecret:
+ description: Shared secret.
+ type: str
+ startPort:
+ description: IPerf start port.
+ type: int
+ transferType:
+ description: File transfer type (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ udpBandwidth:
+ description: IPerf UDP bandwidth.
+ type: int
+ uplinkTest:
+ description: Uplink test.
+ type: bool
+ url:
+ description: URL.
+ type: str
+ userName:
+ description: User name.
+ type: str
+ userNamePrompt:
+ description: User name prompt.
+ type: str
+ type: list
+ name:
+ description: Name of the test.
+ type: str
+ type: list
+ username:
+ description: User name string for onboarding SSID.
+ type: str
+ vlan:
+ description: VLAN.
+ type: str
+ whiteList:
+ description: Indication of being on allowed list.
+ type: bool
+ type: list
+ radioAsSensorRemoved:
+ description: Radio as sensor removed.
+ type: bool
+ rssiThreshold:
+ description: RSSI threshold.
+ type: int
+ runNow:
+ description: Run now (YES, NO).
+ type: str
+ scheduleInDays:
+ description: Bit-wise value of scheduled test days.
+ type: int
+ sensors:
+ description: Sensor Test Template Edit's sensors.
+ elements: dict
+ suboptions:
+ allSensorAddition:
+ description: Is all sensor addition.
+ type: bool
+ assigned:
+ description: Is assigned.
+ type: bool
+ configUpdated:
+ description: Configuration updated YES, NO.
+ type: str
+ hostName:
+ description: Host name.
+ type: str
+ iPerfInfo:
+      description: A string to string-list map of iPerf information.
+ type: dict
+ id:
+ description: Sensor ID.
+ type: str
+ ipAddress:
+ description: IP address.
+ type: str
+ locationId:
+ description: Site UUID.
+ type: str
+ macAddress:
+ description: MAC address.
+ type: str
+ markedForUninstall:
+ description: Is marked for uninstall.
+ type: bool
+ name:
+ description: Sensor name.
+ type: str
+ runNow:
+ description: Run now YES, NO.
+ type: str
+ sensorType:
+ description: Sensor type.
+ type: str
+ servicePolicy:
+ description: Service policy.
+ type: str
+ status:
+ description: Sensor device status UP, DOWN, REBOOT.
+ type: str
+ switchMac:
+ description: Switch MAC address.
+ type: str
+ switchSerialNumber:
+ description: Switch serial number.
+ type: str
+ switchUuid:
+ description: Switch device UUID.
+ type: str
+ targetAPs:
+ description: Array of target APs.
+ elements: str
+ type: list
+ testMacAddresses:
+      description: A string-to-string map of test MAC addresses.
type: dict
- scheduleRange:
- description: Sensor Test Template Edit's scheduleRange.
+ wiredApplicationMessage:
+ description: Wired application message.
+ type: str
+ wiredApplicationStatus:
+ description: Wired application status.
+ type: str
+ xorSensor:
+ description: Is XOR sensor.
+ type: bool
+ type: list
+ showWlcUpgradeBanner:
+ description: Show WLC upgrade banner.
+ type: bool
+ siteHierarchy:
+ description: Site hierarchy.
+ type: str
+ ssids:
+ description: Sensor Test Template Edit's ssids.
+ elements: dict
+ suboptions:
+ authProtocol:
+ description: Auth protocol.
+ type: str
+ authType:
+ description: Authentication type OPEN, WPA2_PSK, WPA2_EAP, WEB_AUTH, MAB, DOT1X,
+ OTHER.
+ type: str
+ bands:
+ description: WIFI bands 2.4GHz or 5GHz.
+ type: str
+ certdownloadurl:
+ description: Certificate download URL.
+ type: str
+ certfilename:
+ description: Auth certificate file name.
+ type: str
+ certpassphrase:
+ description: Certificate password phrase.
+ type: str
+ certstatus:
+ description: Certificate status INACTIVE or ACTIVE.
+ type: str
+ certxferprotocol:
+      description: Certificate transferring protocol HTTP or HTTPS.
+ type: str
+ eapMethod:
+ description: WPA2_EAP methods EAP-FAST, PEAP-MSCHAPv2, EAP-TLS, PEAP-TLS, EAP-TTLS-MSCHAPv2,
+ EAP-TTLS-PAP, EAP-TTLS-CHAP, EAP-FAST-GTC, EAP-PEAP-GTC.
+ type: str
+ extWebAuth:
+ description: Indication of using external WEB Auth.
+ type: bool
+ extWebAuthAccessUrl:
+ description: External WEB Auth access URL.
+ type: str
+ extWebAuthHtmlTag:
+ description: Sensor Test Template Edit's extWebAuthHtmlTag.
elements: dict
suboptions:
- day:
- description: Day.
+ label:
+ description: Label.
type: str
- timeRange:
- description: Sensor Test Template Edit's timeRange.
+ tag:
+ description: Tag.
+ type: str
+ value:
+ description: Value.
+ type: str
+ type: list
+ extWebAuthPortal:
+ description: External authentication portal.
+ type: str
+ extWebAuthVirtualIp:
+ description: External WEB Auth virtual IP.
+ type: str
+ id:
+ description: Identification number.
+ type: int
+ layer3webAuthEmailAddress:
+ description: Layer 3 WEB Auth email address.
+ type: str
+ layer3webAuthpassword:
+ description: Layer 3 WEB Auth password.
+ type: str
+ layer3webAuthsecurity:
+ description: Layer 3 WEB Auth security.
+ type: str
+ layer3webAuthuserName:
+ description: Layer 3 WEB Auth user name.
+ type: str
+ numAps:
+ description: Number of APs in the test.
+ type: int
+ numSensors:
+ description: Number of Sensors in the test.
+ type: int
+ password:
+ description: Password string for onboarding SSID.
+ type: str
+ passwordType:
+ description: SSID password type ASCII or HEX.
+ type: str
+ profileName:
+ description: The SSID profile name string.
+ type: str
+ proxyPassword:
+ description: Proxy server password.
+ type: str
+ proxyPort:
+ description: Proxy server port.
+ type: str
+ proxyServer:
+ description: Proxy server for onboarding SSID.
+ type: str
+ proxyUserName:
+ description: Proxy server user name.
+ type: str
+ psk:
+ description: Password of SSID when passwordType is ASCII.
+ type: str
+ qosPolicy:
+      description: QoS policy PLATINUM, GOLD, SILVER, BRONZE.
+ type: str
+ scep:
+ description: Secure certificate enrollment protocol true or false or null for
+ not applicable.
+ type: bool
+ ssid:
+ description: The SSID string.
+ type: str
+ status:
+ description: WLAN status ENABLED or DISABLED.
+ type: str
+ tests:
+ description: Sensor Test Template Edit's tests.
+ elements: dict
+ suboptions:
+ config:
+ description: Sensor Test Template Edit's config.
elements: dict
suboptions:
- frequency:
- description: Sensor Test Template Edit's frequency.
- suboptions:
- unit:
- description: Unit.
- type: str
- value:
- description: Value.
- type: int
- type: dict
- from:
- description: From.
- type: str
- to:
- description: To.
+ direction:
+ description: IPerf direction (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ domains:
+ description: DNS domain name.
+ elements: str
+ type: list
+ downlinkTest:
+ description: Downlink test.
+ type: bool
+ endPort:
+ description: IPerf end port.
+ type: int
+ exitCommand:
+ description: Exit command.
+ type: str
+ finalPrompt:
+ description: Final prompt.
+ type: str
+ ndtServer:
+ description: NDT server.
+ type: str
+ ndtServerPath:
+ description: NDT server path.
+ type: str
+ ndtServerPort:
+ description: NDT server port.
+ type: str
+ numPackets:
+ description: Number of packets.
+ type: int
+ password:
+ description: Password.
+ type: str
+ passwordPrompt:
+ description: Password prompt.
+ type: str
+ pathToDownload:
+ description: File path for file transfer.
+ type: str
+ port:
+ description: Radius or WEB server port.
+ type: int
+ probeType:
+ description: Probe type.
+ type: str
+ protocol:
+ description: Protocol used by file transfer, IPerf, mail server, and
+ radius (TCP, UDP, FTP, POP3, IMAP, CHAP, PAP).
+ type: str
+ proxyPassword:
+ description: Proxy password.
+ type: str
+ proxyPort:
+ description: Proxy port.
+ type: str
+ proxyServer:
+ description: Proxy server.
+ type: str
+ proxyUserName:
+ description: Proxy user name.
+ type: str
+ server:
+ description: Ping, file transfer, mail, radius, ssh, or telnet server.
+ type: str
+ servers:
+ description: IPerf server list.
+ elements: str
+ type: list
+ sharedSecret:
+ description: Shared secret.
+ type: str
+ startPort:
+ description: IPerf start port.
+ type: int
+ transferType:
+ description: File transfer type (UPLOAD, DOWNLOAD, BOTH).
+ type: str
+ udpBandwidth:
+ description: IPerf UDP bandwidth.
+ type: int
+ uplinkTest:
+ description: Uplink test.
+ type: bool
+ url:
+ description: URL.
+ type: str
+ userName:
+ description: User name.
+ type: str
+ userNamePrompt:
+ description: User name prompt.
type: str
type: list
+ name:
+ description: Name of the test.
+ type: str
type: list
- testScheduleMode:
- description: Test Schedule Mode.
+ thirdParty:
+ description: Sensor Test Template Edit's thirdParty.
+ suboptions:
+ selected:
+      description: True if the SSID is third party.
+ type: bool
+ type: dict
+ username:
+ description: User name string for onboarding SSID.
type: str
- type: dict
+ validFrom:
+ description: Valid From UTC timestamp.
+ type: int
+ validTo:
+ description: Valid To UTC timestamp.
+ type: int
+ whiteList:
+ description: Indication of being on allowed list.
+ type: bool
+ wlanId:
+ description: WLAN ID.
+ type: int
+ wlc:
+      description: WLC IP address.
+ type: str
+ type: list
+ startTime:
+ description: Start time.
+ type: int
+ status:
+ description: Status of the test (RUNNING, NOTRUNNING).
+ type: str
templateName:
- description: Template Name.
+ description: The test template name that is to be edited.
+ type: str
+ testScheduleMode:
+ description: Test schedule mode (ONDEMAND, DEDICATED, SCHEDULED, CONTINUOUS, RUNNOW).
type: str
+ version:
+ description: The sensor test template version (must be 2).
+ type: int
+ wlans:
+ description: WLANs list.
+ elements: str
+ type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sensors EditSensorTestTemplate
description: Complete reference of the EditSensorTestTemplate API.
@@ -108,28 +692,218 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
+ _id: string
+ actionInProgress: string
+ apCoverage:
+ - bands: string
+ numberOfApsToTest: 0
+ rssiThreshold: 0
+ connection: string
+ encryptionMode: string
+ frequency:
+ unit: string
+ value: 0
+ lastModifiedTime: 0
+ location: string
locationInfoList:
- allSensors: true
+ customManagementVlan: true
locationId: string
locationType: string
+ macAddressList:
+ - string
+ managementVlan: string
siteHierarchy: string
- schedule:
- frequency:
- unit: string
- value: 0
- scheduleRange:
- - day: string
- timeRange:
- - frequency:
- unit: string
- value: 0
- from: string
- to: string
- testScheduleMode: string
+ modelVersion: 0
+ name: string
+ numAssociatedSensor: 0
+ numNeighborAPThreshold: 0
+ profiles:
+ - authProtocol: string
+ authType: string
+ certdownloadurl: string
+ certfilename: string
+ certpassphrase: string
+ certstatus: string
+ certxferprotocol: string
+ deviceType: string
+ eapMethod: string
+ extWebAuth: true
+ extWebAuthAccessUrl: string
+ extWebAuthHtmlTag:
+ - label: string
+ tag: string
+ value: string
+ extWebAuthPortal: string
+ extWebAuthVirtualIp: string
+ locationVlanList:
+ - locationId: string
+ vlans:
+ - string
+ password: string
+ passwordType: string
+ profileName: string
+ psk: string
+ qosPolicy: string
+ scep: true
+ tests:
+ - config:
+ - direction: string
+ domains:
+ - string
+ downlinkTest: true
+ endPort: 0
+ exitCommand: string
+ finalPrompt: string
+ ndtServer: string
+ ndtServerPath: string
+ ndtServerPort: string
+ numPackets: 0
+ password: string
+ passwordPrompt: string
+ pathToDownload: string
+ port: 0
+ probeType: string
+ protocol: string
+ proxyPassword: string
+ proxyPort: string
+ proxyServer: string
+ proxyUserName: string
+ server: string
+ servers:
+ - string
+ sharedSecret: string
+ startPort: 0
+ transferType: string
+ udpBandwidth: 0
+ uplinkTest: true
+ url: string
+ userName: string
+ userNamePrompt: string
+ name: string
+ username: string
+ vlan: string
+ whiteList: true
+ radioAsSensorRemoved: true
+ rssiThreshold: 0
+ runNow: string
+ scheduleInDays: 0
+ sensors:
+ - allSensorAddition: true
+ assigned: true
+ configUpdated: string
+ hostName: string
+ iPerfInfo: {}
+ id: string
+ ipAddress: string
+ locationId: string
+ macAddress: string
+ markedForUninstall: true
+ name: string
+ runNow: string
+ sensorType: string
+ servicePolicy: string
+ status: string
+ switchMac: string
+ switchSerialNumber: string
+ switchUuid: string
+ targetAPs:
+ - string
+ testMacAddresses: {}
+ wiredApplicationMessage: string
+ wiredApplicationStatus: string
+ xorSensor: true
+ showWlcUpgradeBanner: true
+ siteHierarchy: string
+ ssids:
+ - authProtocol: string
+ authType: string
+ bands: string
+ certdownloadurl: string
+ certfilename: string
+ certpassphrase: string
+ certstatus: string
+ certxferprotocol: string
+ eapMethod: string
+ extWebAuth: true
+ extWebAuthAccessUrl: string
+ extWebAuthHtmlTag:
+ - label: string
+ tag: string
+ value: string
+ extWebAuthPortal: string
+ extWebAuthVirtualIp: string
+ id: 0
+ layer3webAuthEmailAddress: string
+ layer3webAuthpassword: string
+ layer3webAuthsecurity: string
+ layer3webAuthuserName: string
+ numAps: 0
+ numSensors: 0
+ password: string
+ passwordType: string
+ profileName: string
+ proxyPassword: string
+ proxyPort: string
+ proxyServer: string
+ proxyUserName: string
+ psk: string
+ qosPolicy: string
+ scep: true
+ ssid: string
+ status: string
+ tests:
+ - config:
+ - direction: string
+ domains:
+ - string
+ downlinkTest: true
+ endPort: 0
+ exitCommand: string
+ finalPrompt: string
+ ndtServer: string
+ ndtServerPath: string
+ ndtServerPort: string
+ numPackets: 0
+ password: string
+ passwordPrompt: string
+ pathToDownload: string
+ port: 0
+ probeType: string
+ protocol: string
+ proxyPassword: string
+ proxyPort: string
+ proxyServer: string
+ proxyUserName: string
+ server: string
+ servers:
+ - string
+ sharedSecret: string
+ startPort: 0
+ transferType: string
+ udpBandwidth: 0
+ uplinkTest: true
+ url: string
+ userName: string
+ userNamePrompt: string
+ name: string
+ thirdParty:
+ selected: true
+ username: string
+ validFrom: 0
+ validTo: 0
+ whiteList: true
+ wlanId: 0
+ wlc: string
+ startTime: 0
+ status: string
templateName: string
+ testScheduleMode: string
+ version: 0
+ wlans:
+ - string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -139,71 +913,202 @@ dnac_response:
{
"version": "string",
"response": {
- "_id": "string",
"name": "string",
+ "_id": "string",
"version": 0,
"modelVersion": 0,
"startTime": 0,
"lastModifiedTime": 0,
"numAssociatedSensor": 0,
- "location": {},
- "siteHierarchy": {},
+ "location": "string",
+ "siteHierarchy": "string",
"status": "string",
"connection": "string",
- "frequency": {},
+ "actionInProgress": "string",
+ "frequency": {
+ "value": 0,
+ "unit": "string"
+ },
"rssiThreshold": 0,
"numNeighborAPThreshold": 0,
"scheduleInDays": 0,
"wlans": [
- {}
+ "string"
],
"ssids": [
{
- "bands": {},
+ "bands": "string",
"ssid": "string",
"profileName": "string",
- "authType": "string",
- "authTypeRcvd": {},
- "psk": "string",
- "username": {},
- "password": {},
- "eapMethod": {},
- "scep": true,
- "authProtocol": {},
- "certfilename": {},
- "certxferprotocol": "string",
- "certstatus": "string",
- "certpassphrase": {},
- "certdownloadurl": {},
"numAps": 0,
"numSensors": 0,
- "layer3webAuthsecurity": {},
- "layer3webAuthuserName": {},
- "layer3webAuthpassword": {},
- "extWebAuthVirtualIp": {},
- "layer3webAuthEmailAddress": {},
- "qosPolicy": "string",
- "extWebAuth": true,
- "whiteList": true,
- "extWebAuthPortal": {},
- "extWebAuthAccessUrl": {},
- "extWebAuthHtmlTag": [
- {}
- ],
+ "layer3webAuthsecurity": "string",
+ "layer3webAuthuserName": "string",
+ "layer3webAuthpassword": "string",
+ "layer3webAuthEmailAddress": "string",
"thirdParty": {
"selected": true
},
"id": 0,
"wlanId": 0,
- "wlc": {},
+ "wlc": "string",
"validFrom": 0,
"validTo": 0,
"status": "string",
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "authType": "string",
+ "psk": "string",
+ "username": "string",
+ "password": "string",
+ "passwordType": "string",
+ "eapMethod": "string",
+ "scep": true,
+ "authProtocol": "string",
+ "certfilename": "string",
+ "certxferprotocol": "string",
+ "certstatus": "string",
+ "certpassphrase": "string",
+ "certdownloadurl": "string",
+ "extWebAuthVirtualIp": "string",
+ "extWebAuth": true,
+ "whiteList": true,
+ "extWebAuthPortal": "string",
+ "extWebAuthAccessUrl": "string",
+ "extWebAuthHtmlTag": [
+ {
+ "label": "string",
+ "tag": "string",
+ "value": "string"
+ }
+ ],
+ "qosPolicy": "string",
+ "tests": [
+ {
+ "name": "string",
+ "config": [
+ {
+ "domains": [
+ "string"
+ ],
+ "server": "string",
+ "userName": "string",
+ "password": "string",
+ "url": "string",
+ "port": 0,
+ "protocol": "string",
+ "servers": [
+ "string"
+ ],
+ "direction": "string",
+ "startPort": 0,
+ "endPort": 0,
+ "udpBandwidth": 0,
+ "probeType": "string",
+ "numPackets": 0,
+ "pathToDownload": "string",
+ "transferType": "string",
+ "sharedSecret": "string",
+ "ndtServer": "string",
+ "ndtServerPort": "string",
+ "ndtServerPath": "string",
+ "uplinkTest": true,
+ "downlinkTest": true,
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "userNamePrompt": "string",
+ "passwordPrompt": "string",
+ "exitCommand": "string",
+ "finalPrompt": "string"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "profiles": [
+ {
+ "authType": "string",
+ "psk": "string",
+ "username": "string",
+ "password": "string",
+ "passwordType": "string",
+ "eapMethod": "string",
+ "scep": true,
+ "authProtocol": "string",
+ "certfilename": "string",
+ "certxferprotocol": "string",
+ "certstatus": "string",
+ "certpassphrase": "string",
+ "certdownloadurl": "string",
+ "extWebAuthVirtualIp": "string",
+ "extWebAuth": true,
+ "whiteList": true,
+ "extWebAuthPortal": "string",
+ "extWebAuthAccessUrl": "string",
+ "extWebAuthHtmlTag": [
+ {
+ "label": "string",
+ "tag": "string",
+ "value": "string"
+ }
+ ],
+ "qosPolicy": "string",
"tests": [
{
"name": "string",
"config": [
- {}
+ {
+ "domains": [
+ "string"
+ ],
+ "server": "string",
+ "userName": "string",
+ "password": "string",
+ "url": "string",
+ "port": 0,
+ "protocol": "string",
+ "servers": [
+ "string"
+ ],
+ "direction": "string",
+ "startPort": 0,
+ "endPort": 0,
+ "udpBandwidth": 0,
+ "probeType": "string",
+ "numPackets": 0,
+ "pathToDownload": "string",
+ "transferType": "string",
+ "sharedSecret": "string",
+ "ndtServer": "string",
+ "ndtServerPort": "string",
+ "ndtServerPath": "string",
+ "uplinkTest": true,
+ "downlinkTest": true,
+ "proxyServer": "string",
+ "proxyPort": "string",
+ "proxyUserName": "string",
+ "proxyPassword": "string",
+ "userNamePrompt": "string",
+ "passwordPrompt": "string",
+ "exitCommand": "string",
+ "finalPrompt": "string"
+ }
+ ]
+ }
+ ],
+ "profileName": "string",
+ "deviceType": "string",
+ "vlan": "string",
+ "locationVlanList": [
+ {
+ "locationId": "string",
+ "vlans": [
+ "string"
]
}
]
@@ -221,36 +1126,40 @@ dnac_response:
"allSensors": true,
"siteHierarchy": "string",
"macAddressList": [
- {}
- ]
+ "string"
+ ],
+ "managementVlan": "string",
+ "customManagementVlan": true
}
],
- "schedule": {
- "testScheduleMode": "string",
- "scheduleRange": [
- {
- "timeRange": [
- {
- "from": "string",
- "to": "string",
- "frequency": {
- "value": 0,
- "unit": "string"
- }
- }
- ],
- "day": "string"
- }
- ],
- "startTime": 0,
- "frequency": {
- "value": 0,
- "unit": "string"
- }
- },
- "tests": {},
"sensors": [
- {}
+ {
+ "name": "string",
+ "macAddress": "string",
+ "switchMac": "string",
+ "switchUuid": "string",
+ "switchSerialNumber": "string",
+ "markedForUninstall": true,
+ "ipAddress": "string",
+ "hostName": "string",
+ "wiredApplicationStatus": "string",
+ "wiredApplicationMessage": "string",
+ "assigned": true,
+ "status": "string",
+ "xorSensor": true,
+ "targetAPs": [
+ "string"
+ ],
+ "runNow": "string",
+ "locationId": "string",
+ "allSensorAddition": true,
+ "configUpdated": "string",
+ "sensorType": "string",
+ "testMacAddresses": {},
+ "id": "string",
+ "servicePolicy": "string",
+ "iPerfInfo": {}
+ }
],
"apCoverage": [
{
@@ -258,11 +1167,7 @@ dnac_response:
"numberOfApsToTest": 0,
"rssiThreshold": 0
}
- ],
- "testDurationEstimate": 0,
- "testTemplate": true,
- "legacyTestSuite": true,
- "tenantId": "string"
+ ]
}
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/service_provider_create.py b/ansible_collections/cisco/dnac/plugins/modules/service_provider_create.py
index 2ce62b664..fc5501abc 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/service_provider_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/service_provider_create.py
@@ -35,8 +35,8 @@ options:
type: list
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings CreateSPProfile
description: Complete reference of the CreateSPProfile API.
@@ -67,7 +67,6 @@ EXAMPLES = r"""
wanProvider: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/service_provider_info.py b/ansible_collections/cisco/dnac/plugins/modules/service_provider_info.py
index d2bc282f3..1352a68ae 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/service_provider_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/service_provider_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings GetServiceProviderDetails
description: Complete reference of the GetServiceProviderDetails API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -64,7 +63,7 @@ dnac_response:
"namespace": "string",
"type": "string",
"key": "string",
- "version": "string",
+ "version": 0,
"value": [
{
"wanProvider": "string",
@@ -77,6 +76,6 @@ dnac_response:
"inheritedGroupName": "string"
}
],
- "version": 0
+ "version": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/service_provider_profile_delete.py b/ansible_collections/cisco/dnac/plugins/modules/service_provider_profile_delete.py
index 0d076885e..0d454a9fd 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/service_provider_profile_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/service_provider_profile_delete.py
@@ -20,8 +20,8 @@ options:
description: SpProfileName path parameter. Sp profile name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings DeleteSPProfile
description: Complete reference of the DeleteSPProfile API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
spProfileName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/service_provider_update.py b/ansible_collections/cisco/dnac/plugins/modules/service_provider_update.py
index 3ec3fcde9..580926721 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/service_provider_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/service_provider_update.py
@@ -38,8 +38,8 @@ options:
type: list
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings UpdateSPProfile
description: Complete reference of the UpdateSPProfile API.
@@ -71,7 +71,6 @@ EXAMPLES = r"""
wanProvider: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2.py b/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2.py
index 5dcf4f367..0c4500d22 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2.py
@@ -12,7 +12,7 @@ description:
- Manage operations create and update of the resource Service Provider V2.
- API to create Service Provider Profile QoS.
- API to update Service Provider Profile QoS.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -36,8 +36,8 @@ options:
type: list
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings CreateSPProfileV2
description: Complete reference of the CreateSPProfileV2 API.
@@ -91,7 +91,6 @@ EXAMPLES = r"""
wanProvider: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2_info.py
index 2641b9bdb..b7e6a7e44 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/service_provider_v2_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Service Provider V2
description:
- Get all Service Provider V2.
- API to get Service Provider details QoS.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings GetServiceProviderDetailsV2
description: Complete reference of the GetServiceProviderDetailsV2 API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_assign_credential.py b/ansible_collections/cisco/dnac/plugins/modules/site_assign_credential.py
index b21a5ca77..2817fe8dd 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_assign_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_assign_credential.py
@@ -41,8 +41,8 @@ options:
description: Snmp V3 Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings AssignDeviceCredentialToSite
description: Complete reference of the AssignDeviceCredentialToSite API.
@@ -76,7 +76,6 @@ EXAMPLES = r"""
snmpV3Id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_assign_device.py b/ansible_collections/cisco/dnac/plugins/modules/site_assign_device.py
index 1c080cba4..3b7bb354e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_assign_device.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_assign_device.py
@@ -28,7 +28,7 @@ options:
description: SiteId path parameter. Site id of the site to which the device is to be assigned.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_count_info.py
index c4a5dbd96..d4fc275d3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_count_info.py
@@ -10,7 +10,7 @@ module: site_count_info
short_description: Information module for Site Count
description:
- Get all Site Count.
-- API to get site count.
+- Get the site count of the specified site's sub-hierarchy inclusive of the provided site.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -21,11 +21,11 @@ options:
type: dict
siteId:
description:
- - SiteId query parameter. Site id to retrieve site count.
+ - SiteId query parameter. Site instance UUID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites GetSiteCount
description: Complete reference of the GetSiteCount API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_count_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_count_v2_info.py
new file mode 100644
index 000000000..1ea0a9687
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_count_v2_info.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: site_count_v2_info
+short_description: Information module for Site Count V2
+description:
+- Get all Site Count V2.
+- Get the site count of the specified site's sub-hierarchy inclusive of the provided site.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ id:
+ description:
+ - Id query parameter. Site instance UUID.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites GetSiteCountV2
+ description: Complete reference of the GetSiteCountV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-site-count-v-2
+notes:
+ - SDK Method used are
+ sites.Sites.get_site_count_v2,
+
+ - Paths used are
+ get /dna/intent/api/v2/site/count,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Site Count V2
+ cisco.dnac.site_count_v2_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ id: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": 0,
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_create.py b/ansible_collections/cisco/dnac/plugins/modules/site_create.py
index 6b690ac25..d1c9627fa 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_create.py
@@ -43,10 +43,10 @@ options:
type: str
latitude:
description: Latitude coordinate of the building (eg 37.338).
- type: int
+ type: float
longitude:
description: Longitude coordinate of the building (eg -121.832).
- type: int
+ type: float
name:
description: Name of the building (eg building1).
type: str
@@ -59,13 +59,13 @@ options:
suboptions:
floorNumber:
description: Floor number. (eg 5).
- type: int
+ type: float
height:
description: Height of the floor. Unit of measure is ft. (eg 15).
- type: int
+ type: float
length:
description: Length of the floor. Unit of measure is ft. (eg 100).
- type: int
+ type: float
name:
description: Name of the floor (eg floor-1).
type: str
@@ -77,15 +77,15 @@ options:
type: str
width:
description: Width of the floor. Unit of measure is ft. (eg 100).
- type: int
+ type: float
type: dict
type: dict
type:
description: Type of site to create (eg area, building, floor).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites CreateSite
description: Complete reference of the CreateSite API.
@@ -132,7 +132,6 @@ EXAMPLES = r"""
type: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
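A hedged fragment illustrating the coordinate fields that this hunk retypes from int to float; the site/building nesting is assumed from the module's documented option structure, and the values are the illustrative examples given in the option descriptions:

- name: Create a building with float coordinates
  cisco.dnac.site_create:
    dnac_host: "{{dnac_host}}"
    dnac_username: "{{dnac_username}}"
    dnac_password: "{{dnac_password}}"
    dnac_verify: "{{dnac_verify}}"
    type: building
    site:
      building:
        name: building1
        latitude: 37.338     # float, per the updated option type
        longitude: -121.832  # float, per the updated option type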
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_delete.py b/ansible_collections/cisco/dnac/plugins/modules/site_delete.py
index 6c52cc343..b9bdb242a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_delete.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_delete.py
@@ -20,8 +20,8 @@ options:
description: SiteId path parameter. Site id for which the site details are to be deleted.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites DeleteSite
description: Complete reference of the DeleteSite API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
siteId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -57,7 +56,7 @@ dnac_response:
sample: >
{
"executionId": "string",
- "executionStatusURL": "string",
+ "executionStatusUrl": "string",
"message": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap.py b/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap.py
index 0948930f3..3405dfe6e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap.py
@@ -24,7 +24,7 @@ options:
description: Site Design Floormap's payload
type: dict
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap_info.py
index c427e42bb..560603902 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_design_floormap_info.py
@@ -26,7 +26,7 @@ options:
- FloorId path parameter. Group Id of the specified floormap.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_health_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_health_info.py
index cbb91dcd6..8851204b3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_health_info.py
@@ -19,25 +19,25 @@ options:
headers:
description: Additional headers.
type: dict
- timestamp:
- description:
- - Timestamp query parameter. Epoch time(in milliseconds) when the Site Hierarchy data is required.
- type: str
siteType:
description:
- - SiteType query parameter. Type of the site to return. AREA or BUILDING. Default to AREA.
+ - SiteType query parameter. Site type AREA or BUILDING (case insensitive).
type: str
offset:
description:
- - Offset query parameter. The offset value, starting from 1, of the first returned site entry. Default is 1.
- type: int
+ - Offset query parameter. Offset of the first returned data set entry (Multiple of 'limit' + 1).
+ type: float
limit:
description:
- - Limit query parameter. The max number of sites in the returned data set. Default is 25, and max at 50.
- type: int
+ - Limit query parameter. Max number of data entries in the returned data set (1 to 50). Default is 25.
+ type: float
+ timestamp:
+ description:
+ - Timestamp query parameter. Epoch time(in milliseconds) when the Site Hierarchy data is required.
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites GetSiteHealth
description: Complete reference of the GetSiteHealth API.
@@ -62,14 +62,13 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
- timestamp: string
siteType: string
offset: 0
limit: 0
+ timestamp: 0
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -86,41 +85,62 @@ dnac_response:
"siteType": "string",
"latitude": 0,
"longitude": 0,
- "healthyNetworkDevicePercentage": {},
- "healthyClientsPercentage": {},
- "clientHealthWired": {},
- "clientHealthWireless": {},
- "numberOfClients": {},
- "numberOfNetworkDevice": {},
- "networkHealthAverage": {},
- "networkHealthAccess": {},
- "networkHealthCore": {},
- "networkHealthDistribution": {},
- "networkHealthRouter": {},
- "networkHealthWireless": {},
- "networkHealthOthers": {},
- "numberOfWiredClients": {},
- "numberOfWirelessClients": {},
- "totalNumberOfConnectedWiredClients": {},
- "totalNumberOfActiveWirelessClients": {},
- "wiredGoodClients": {},
- "wirelessGoodClients": {},
- "overallGoodDevices": {},
- "accessGoodCount": {},
- "accessTotalCount": {},
- "coreGoodCount": {},
- "coreTotalCount": {},
- "distributionGoodCount": {},
- "distributionTotalCount": {},
- "routerGoodCount": {},
- "routerTotalCount": {},
- "wirelessDeviceGoodCount": {},
- "wirelessDeviceTotalCount": {},
- "applicationHealth": {},
- "applicationGoodCount": {},
- "applicationTotalCount": {},
- "applicationBytesTotalCount": {},
- "dnacInfo": {},
+ "healthyNetworkDevicePercentage": 0,
+ "healthyClientsPercentage": 0,
+ "clientHealthWired": 0,
+ "clientHealthWireless": 0,
+ "numberOfClients": 0,
+ "numberOfNetworkDevice": 0,
+ "networkHealthAverage": 0,
+ "networkHealthAccess": 0,
+ "networkHealthCore": 0,
+ "networkHealthDistribution": 0,
+ "networkHealthRouter": 0,
+ "networkHealthWireless": 0,
+ "networkHealthAP": 0,
+ "networkHealthWLC": 0,
+ "networkHealthSwitch": 0,
+ "networkHealthOthers": 0,
+ "numberOfWiredClients": 0,
+ "numberOfWirelessClients": 0,
+ "totalNumberOfConnectedWiredClients": 0,
+ "totalNumberOfActiveWirelessClients": 0,
+ "wiredGoodClients": 0,
+ "wirelessGoodClients": 0,
+ "overallGoodDevices": 0,
+ "accessGoodCount": 0,
+ "accessTotalCount": 0,
+ "coreGoodCount": 0,
+ "coreTotalCount": 0,
+ "distributionGoodCount": 0,
+ "distributionTotalCount": 0,
+ "routerGoodCount": 0,
+ "routerTotalCount": 0,
+ "wirelessDeviceGoodCount": 0,
+ "wirelessDeviceTotalCount": 0,
+ "apDeviceGoodCount": 0,
+ "apDeviceTotalCount": 0,
+ "wlcDeviceGoodCount": 0,
+ "wlcDeviceTotalCount": 0,
+ "switchDeviceGoodCount": 0,
+ "switchDeviceTotalCount": 0,
+ "applicationHealth": 0,
+ "applicationHealthInfo": [
+ {
+ "trafficClass": "string",
+ "bytesCount": 0,
+ "healthScore": 0
+ }
+ ],
+ "applicationGoodCount": 0,
+ "applicationTotalCount": 0,
+ "applicationBytesTotalCount": 0,
+ "dnacInfo": {
+ "uuid": "string",
+ "ip": "string",
+ "status": "string"
+ },
+ "usage": 0,
"applicationHealthStats": {
"appTotalCount": 0,
"businessRelevantAppCount": {
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_info.py
index be914d2e2..cfbfddf06 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_info.py
@@ -10,7 +10,7 @@ module: site_info
short_description: Information module for Site
description:
- Get all Site.
-- Get site using siteNameHierarchy/siteId/type ,return all sites if these parameters are not given as input.
+- Get sites by site-name-hierarchy or siteId or type. List all sites if these parameters are not given as an input.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -21,27 +21,27 @@ options:
type: dict
name:
description:
- - Name query parameter. SiteNameHierarchy (ex global/groupName).
+ - Name query parameter. Site name hierarchy (E.g Global/USA/CA).
type: str
siteId:
description:
- - SiteId query parameter. Site id to which site details to retrieve.
+ - SiteId query parameter. Site Id.
type: str
type:
description:
- - Type query parameter. Type (ex area, building, floor).
+ - Type query parameter. Site type (Ex area, building, floor).
type: str
offset:
description:
- - Offset query parameter. Offset/starting row. The default value is 1.
+ - Offset query parameter. Offset/starting index for pagination. Indexed from 1.
type: int
limit:
description:
- - Limit query parameter. Number of sites to be retrieved. The default value is 500.
+ - Limit query parameter. Number of sites to be listed.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites GetSite
description: Complete reference of the GetSite API.
@@ -74,7 +74,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_intent.py b/ansible_collections/cisco/dnac/plugins/modules/site_intent.py
index 39d708730..63af3d192 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_intent.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_intent.py
@@ -342,6 +342,8 @@ class DnacSite(DnacBase):
def __init__(self, module):
super().__init__(module)
self.supported_states = ["merged", "deleted"]
+ self.created_site_list, self.updated_site_list, self.update_not_neeeded_sites = [], [], []
+ self.deleted_site_list, self.site_absent_list = [], []
def validate_input(self):
"""
@@ -449,7 +451,7 @@ class DnacSite(DnacBase):
width=map_geometry.get("attributes").get("width"),
length=map_geometry.get("attributes").get("length"),
height=map_geometry.get("attributes").get("height"),
- floorNumber=map_geometry.get("attributes").get("floor_number", "")
+ floorNumber=map_summary.get('attributes').get('floorIndex')
)
)
@@ -666,7 +668,7 @@ class DnacSite(DnacBase):
"""
keys_to_compare = ['length', 'width', 'height']
- if updated_site['name'] != requested_site['name'] or updated_site['rf_model'] != requested_site['rfModel']:
+ if updated_site['name'] != requested_site['name'] or updated_site.get('rf_model') != requested_site.get('rfModel'):
return False
for key in keys_to_compare:
@@ -781,6 +783,7 @@ class DnacSite(DnacBase):
site_updated = False
site_created = False
+ site_name = self.want.get("site_name")
# check if the given site exists and/or needs to be updated/created.
if self.have.get("site_exists"):
@@ -795,14 +798,13 @@ class DnacSite(DnacBase):
op_modifies=True,
params=site_params,
)
+ self.log("Received API response from 'update_site': {0}".format(str(response)), "DEBUG")
site_updated = True
else:
# Site does not neet update
- self.result['response'] = self.have.get("current_site")
- self.msg = "Site - {0} does not need any update".format(self.have.get("current_site"))
- self.log(self.msg, "INFO")
- self.result['msg'] = self.msg
+ self.update_not_neeeded_sites.append(site_name)
+ self.log("Site - {0} does not need any update".format(site_name), "INFO")
return self
else:
@@ -817,9 +819,9 @@ class DnacSite(DnacBase):
site_params['site']['building'] = building_details
except Exception as e:
site_type = site_params['type']
- site_name = site_params['site'][site_type]['name']
+ name = site_params['site'][site_type]['name']
self.log("""The site '{0}' is not categorized as a building; hence, there is no need to filter out 'None'
- values from the 'site_params' dictionary.""".format(site_name), "INFO")
+ values from the 'site_params' dictionary.""".format(name), "INFO")
response = self.dnac._exec(
family="sites",
@@ -837,7 +839,6 @@ class DnacSite(DnacBase):
execution_details = self.get_execution_details(executionid)
if execution_details.get("status") == "SUCCESS":
self.result['changed'] = True
- self.result['response'] = execution_details
break
elif execution_details.get("bapiError"):
@@ -846,21 +847,15 @@ class DnacSite(DnacBase):
break
if site_updated:
- self.msg = "Site - {0} Updated Successfully".format(self.want.get("site_name"))
- self.log(self.msg, "INFO")
- self.result['msg'] = self.msg
- self.result['response'].update({"siteId": self.have.get("site_id")})
-
+ self.updated_site_list.append(site_name)
+ self.log("Site - {0} Updated Successfully".format(site_name), "INFO")
else:
# Get the site id of the newly created site.
(site_exists, current_site) = self.site_exists()
if site_exists:
- self.msg = "Site '{0}' created successfully".format(self.want.get("site_name"))
- self.log(self.msg, "INFO")
- self.log("Current site (have): {0}".format(str(current_site)), "DEBUG")
- self.result['msg'] = self.msg
- self.result['response'].update({"siteId": current_site.get('site_id')})
+ self.created_site_list.append(site_name)
+ self.log("Site '{0}' created successfully".format(site_name), "INFO")
return self
@@ -895,11 +890,9 @@ class DnacSite(DnacBase):
while True:
execution_details = self.get_execution_details(executionid)
if execution_details.get("status") == "SUCCESS":
- self.msg = "Site '{0}' deleted successfully".format(site_name)
- self.result['changed'] = True
- self.result['response'] = self.msg
self.status = "success"
- self.log(self.msg, "INFO")
+ self.deleted_site_list.append(site_name)
+ self.log("Site '{0}' deleted successfully".format(site_name), "INFO")
break
elif execution_details.get("bapiError"):
self.log("Error response for 'delete_site' execution: {0}".format(execution_details.get("bapiError")), "ERROR")
@@ -935,12 +928,8 @@ class DnacSite(DnacBase):
site_name = self.want.get("site_name")
if not site_exists:
self.status = "success"
- self.msg = "Unable to delete site '{0}' as it's not found in Cisco Catalyst Center".format(site_name)
- self.result.update({'changed': False,
- 'response': self.msg,
- 'msg': self.msg})
- self.log(self.msg, "INFO")
-
+ self.site_absent_list.append(site_name)
+ self.log("Unable to delete site '{0}' as it's not found in Cisco Catalyst Center".format(site_name), "INFO")
return self
# Check here if the site have the childs then fetch it using get membership API and then sort it
@@ -952,6 +941,7 @@ class DnacSite(DnacBase):
op_modifies=True,
params={"site_id": site_id},
)
+ self.log("Received API response from 'get_membership': {0}".format(str(mem_response)), "DEBUG")
site_response = mem_response.get("site").get("response")
self.log("Site {0} response along with it's child sites: {1}".format(site_name, str(site_response)), "DEBUG")
@@ -968,9 +958,7 @@ class DnacSite(DnacBase):
# Delete the final parent site
self.delete_single_site(site_id, site_name)
- self.msg = "The site '{0}' and its child sites have been deleted successfully".format(site_name)
- self.result['response'] = self.msg
- self.log(self.msg, "INFO")
+ self.log("The site '{0}' and its child sites have been deleted successfully".format(site_name), "INFO")
return self
@@ -1044,6 +1032,63 @@ class DnacSite(DnacBase):
return self
+ def update_site_messages(self):
+ """
+ Update site messages based on the status of created, updated, and deleted sites.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This method updates the messages related to site creation, updating, and deletion in the Cisco Catalyst Center.
+ It evaluates the status of created sites, updated sites, and sites that are no longer needed for update to
+ determine the appropriate message to be set. The messages are then stored in the 'msg' attribute of the object.
+ """
+
+ if self.created_site_list and self.updated_site_list:
+ self.result['changed'] = True
+ if self.update_not_neeeded_sites:
+ msg = """Site(s) '{0}' created successfully as well as Site(s) '{1}' updated successully and the some site(s)
+ '{2}' needs no update in Cisco Catalyst Center"""
+ self.msg = msg.format(str(self.created_site_list), str(self.updated_site_list), str(self.update_not_neeeded_sites))
+ else:
+ self.msg = """Site(s) '{0}' created successfully in Cisco Catalyst Center as well as Site(s) '{1}' updated successully in
+ Cisco Catalyst Center""".format(str(self.created_site_list), str(self.updated_site_list))
+ elif self.created_site_list:
+ self.result['changed'] = True
+ if self.update_not_neeeded_sites:
+ self.msg = """Site(s) '{0}' created successfully and some site(s) '{1}' not needs any update in Cisco Catalyst
+ Center.""".format(str(self.created_site_list), str(self.update_not_neeeded_sites))
+ else:
+ self.msg = "Site(s) '{0}' created successfully in Cisco Catalyst Center.".format(str(self.created_site_list))
+ elif self.updated_site_list:
+ self.result['changed'] = True
+ if self.update_not_neeeded_sites:
+ self.msg = """Site(s) '{0}' updated successfully and some site(s) '{1}' not needs any update in Cisco Catalyst
+ Center.""".format(str(self.updated_site_list), str(self.update_not_neeeded_sites))
+ else:
+ self.msg = "Site(s) '{0}' updated successfully in Cisco Catalyst Center.".format(str(self.updated_site_list))
+ elif self.update_not_neeeded_sites:
+ self.result['changed'] = False
+ self.msg = "Site(s) '{0}' not needs any update in Cisco Catalyst Center.".format(str(self.update_not_neeeded_sites))
+ elif self.deleted_site_list and self.site_absent_list:
+ self.result['changed'] = True
+ self.msg = """Given site(s) '{0}' deleted successfully from Cisco Catalyst Center and unable to deleted some site(s) '{1}' as they
+ are not found in Cisco Catalyst Center.""".format(str(self.deleted_site_list), str(self.site_absent_list))
+ elif self.deleted_site_list:
+ self.result['changed'] = True
+ self.msg = "Given site(s) '{0}' deleted successfully from Cisco Catalyst Center".format(str(self.deleted_site_list))
+ else:
+ self.result['changed'] = False
+ self.msg = "Unable to delete site(s) '{0}' as it's not found in Cisco Catalyst Center.".format(str(self.site_absent_list))
+
+ self.status = "success"
+ self.result['response'] = self.msg
+ self.result['msg'] = self.msg
+
+ return self
+
def main():
""" main entry point for module execution
@@ -1090,6 +1135,9 @@ def main():
if config_verify:
dnac_site.verify_diff_state_apply[state](config).check_return_status()
+ # Build the aggregated result message for the processed sites and verify the return status
+ dnac_site.update_site_messages().check_return_status()
+
module.exit_json(**dnac_site.result)
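
The change above stops writing a per-site message into result['msg'] during each create, update or delete and instead collects the site names into lists that update_site_messages() rolls up into a single summary once every site has been processed. A simplified, self-contained sketch of that aggregation pattern (illustrative only, not the collection's actual class) is:

def summarize_site_results(created, updated, unchanged):
    """Roll per-site results up into a single 'changed' flag and summary message."""
    parts = []
    if created:
        parts.append("Site(s) {0} created successfully".format(created))
    if updated:
        parts.append("Site(s) {0} updated successfully".format(updated))
    if unchanged:
        parts.append("Site(s) {0} need no update".format(unchanged))
    if not parts:
        return False, "No sites were processed."
    return bool(created or updated), " and ".join(parts) + " in Cisco Catalyst Center."

# summarize_site_results(["Global/USA"], [], ["Global/USA/CA"]) returns
# (True, "Site(s) ['Global/USA'] created successfully and Site(s) ['Global/USA/CA'] need no update in Cisco Catalyst Center.")
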
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_membership_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_membership_info.py
index bc28ba3f3..10319f53f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_membership_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_membership_info.py
@@ -26,11 +26,11 @@ options:
offset:
description:
- Offset query parameter. Offset/starting row.
- type: int
+ type: float
limit:
description:
- Limit query parameter. Number of sites to be retrieved.
- type: int
+ type: float
deviceFamily:
description:
- DeviceFamily query parameter. Device family name.
@@ -40,8 +40,8 @@ options:
- SerialNumber query parameter. Device serial number.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites GetMembership
description: Complete reference of the GetMembership API.
@@ -74,7 +74,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_update.py b/ansible_collections/cisco/dnac/plugins/modules/site_update.py
index 86c54a60b..3dd31d2b6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_update.py
@@ -26,60 +26,68 @@ options:
description: Site Update's area.
suboptions:
name:
- description: Name.
+ description: Area name.
type: str
parentName:
- description: Parent Name.
+ description: Parent hierarchical name (Example Global/USA/CA).
type: str
type: dict
building:
description: Site Update's building.
suboptions:
address:
- description: Address.
+ description: Building address (Example 4900 Marie P. Debartolo Way, Santa
+ Clara, California 95054, United States).
+ type: str
+ country:
+ description: Country name. This field is mandatory for air-gapped networks
+ (Example United States).
type: str
latitude:
- description: Latitude.
- type: int
+ description: Building latitude (Example 37.403712).
+ type: float
longitude:
- description: Longitude.
- type: int
+ description: Building longitude (Example -121.971063).
+ type: float
name:
- description: Name.
+ description: Building name.
type: str
parentName:
- description: Parent Name.
+ description: Parent hierarchical name (Example Global/USA/CA/SantaClara).
type: str
type: dict
floor:
description: Site Update's floor.
suboptions:
+ floorNumber:
+ description: Floor Number (Example 3).
+ type: float
height:
- description: Height.
- type: int
+ description: Floor height in feet (Example 10).
+ type: float
length:
- description: Length.
- type: int
+ description: Floor length in feet (Example 100).
+ type: float
name:
- description: Name.
+ description: Floor name.
type: str
rfModel:
- description: Rf Model.
+ description: RF model (Example Cubes And Walled Offices).
type: str
width:
- description: Width.
- type: int
+ description: Floor width in feet (Example 200).
+ type: float
type: dict
type: dict
siteId:
description: SiteId path parameter. Site id to which site details to be updated.
type: str
type:
- description: Type.
+ description: Site type.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Sites UpdateSite
description: Complete reference of the UpdateSite API.
@@ -110,11 +118,13 @@ EXAMPLES = r"""
parentName: string
building:
address: string
+ country: string
latitude: 0
longitude: 0
name: string
parentName: string
floor:
+ floorNumber: 0
height: 0
length: 0
name: string
@@ -124,7 +134,6 @@ EXAMPLES = r"""
type: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_v2_info.py b/ansible_collections/cisco/dnac/plugins/modules/site_v2_info.py
new file mode 100644
index 000000000..4f539387b
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_v2_info.py
@@ -0,0 +1,112 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: site_v2_info
+short_description: Information module for Site V2
+description:
+- Get all Site V2.
+- >
+ API to get sites by site-name-hierarchy or siteId or type. List all sites if these parameters are not given as an
+ input.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+ groupNameHierarchy:
+ description:
+ - GroupNameHierarchy query parameter. Site name hierarchy (E.g. Global/USA/CA).
+ type: str
+ id:
+ description:
+ - Id query parameter. Site Id.
+ type: str
+ type:
+ description:
+ - Type query parameter. Site type (Acceptable values area, building, floor).
+ type: str
+ offset:
+ description:
+ - Offset query parameter. Offset/starting index for pagination.
+ type: str
+ limit:
+ description:
+ - Limit query parameter. Number of sites to be listed. Default and max supported value is 500.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Sites GetSiteV2
+ description: Complete reference of the GetSiteV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-site-v-2
+notes:
+ - SDK Method used are
+ sites.Sites.get_site_v2,
+
+ - Paths used are
+ get /dna/intent/api/v2/site,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Site V2
+ cisco.dnac.site_v2_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ groupNameHierarchy: string
+ id: string
+ type: string
+ offset: string
+ limit: string
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: list
+ elements: dict
+ sample: >
+ [
+ {
+ "parentId": "string",
+ "groupTypeList": [
+ "string"
+ ],
+ "groupHierarchy": "string",
+ "additionalInfo": [
+ {
+ "nameSpace": "string",
+ "attributes": {
+ "addressInheritedFrom": "string",
+ "type": "string",
+ "country": "string",
+ "address": "string",
+ "latitude": "string",
+ "longitude": "string"
+ }
+ }
+ ],
+ "groupNameHierarchy": "string",
+ "name": "string",
+ "instanceTenantId": "string",
+ "id": "string"
+ }
+ ]
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py
index 81c6a974e..1f17115b7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/site_workflow_manager.py
@@ -342,6 +342,8 @@ class Site(DnacBase):
def __init__(self, module):
super().__init__(module)
self.supported_states = ["merged", "deleted"]
+ self.created_site_list, self.updated_site_list, self.update_not_neeeded_sites = [], [], []
+ self.deleted_site_list, self.site_absent_list = [], []
def validate_input(self):
"""
@@ -448,7 +450,7 @@ class Site(DnacBase):
width=map_geometry.get("attributes").get("width"),
length=map_geometry.get("attributes").get("length"),
height=map_geometry.get("attributes").get("height"),
- floorNumber=map_geometry.get("attributes").get("floor_number", "")
+ floorNumber=map_summary.get('attributes').get('floorIndex')
)
)
@@ -665,7 +667,9 @@ class Site(DnacBase):
"""
keys_to_compare = ['length', 'width', 'height']
- if updated_site['name'] != requested_site['name'] or updated_site['rf_model'] != requested_site['rfModel']:
+ if updated_site['name'] != requested_site['name'] or updated_site.get('rf_model') != requested_site.get('rfModel'):
+ return False
+ if requested_site.get('floorNumber') and int(requested_site.get('floorNumber')) != int(updated_site.get('floorNumber')):
return False
for key in keys_to_compare:
@@ -780,6 +784,7 @@ class Site(DnacBase):
site_updated = False
site_created = False
+ site_name = self.want.get("site_name")
# check if the given site exists and/or needs to be updated/created.
if self.have.get("site_exists"):
@@ -794,14 +799,12 @@ class Site(DnacBase):
op_modifies=True,
params=site_params,
)
+ self.log("Received API response from 'update_site': {0}".format(str(response)), "DEBUG")
site_updated = True
-
else:
# Site does not neet update
- self.result['response'] = self.have.get("current_site")
- self.msg = "Site - {0} does not need any update".format(self.have.get("current_site"))
- self.log(self.msg, "INFO")
- self.result['msg'] = self.msg
+ self.update_not_neeeded_sites.append(site_name)
+ self.log("Site - {0} does not need any update".format(site_name), "INFO")
return self
else:
@@ -816,9 +819,9 @@ class Site(DnacBase):
site_params['site']['building'] = building_details
except Exception as e:
site_type = site_params['type']
- site_name = site_params['site'][site_type]['name']
+ name = site_params['site'][site_type]['name']
self.log("""The site '{0}' is not categorized as a building; hence, there is no need to filter out 'None'
- values from the 'site_params' dictionary.""".format(site_name), "INFO")
+ values from the 'site_params' dictionary.""".format(name), "INFO")
response = self.dnac._exec(
family="sites",
@@ -836,7 +839,6 @@ class Site(DnacBase):
execution_details = self.get_execution_details(executionid)
if execution_details.get("status") == "SUCCESS":
self.result['changed'] = True
- self.result['response'] = execution_details
break
elif execution_details.get("bapiError"):
@@ -845,21 +847,15 @@ class Site(DnacBase):
break
if site_updated:
- self.msg = "Site - {0} Updated Successfully".format(self.want.get("site_name"))
- self.log(self.msg, "INFO")
- self.result['msg'] = self.msg
- self.result['response'].update({"siteId": self.have.get("site_id")})
-
+ self.updated_site_list.append(site_name)
+ self.log("Site - {0} Updated Successfully".format(site_name), "INFO")
else:
# Get the site id of the newly created site.
(site_exists, current_site) = self.site_exists()
if site_exists:
- self.msg = "Site '{0}' created successfully".format(self.want.get("site_name"))
- self.log(self.msg, "INFO")
- self.log("Current site (have): {0}".format(str(current_site)), "DEBUG")
- self.result['msg'] = self.msg
- self.result['response'].update({"siteId": current_site.get('site_id')})
+ self.created_site_list.append(site_name)
+ self.log("Site '{0}' created successfully".format(site_name), "INFO")
return self
@@ -894,11 +890,9 @@ class Site(DnacBase):
while True:
execution_details = self.get_execution_details(executionid)
if execution_details.get("status") == "SUCCESS":
- self.msg = "Site '{0}' deleted successfully".format(site_name)
- self.result['changed'] = True
- self.result['response'] = self.msg
self.status = "success"
- self.log(self.msg, "INFO")
+ self.deleted_site_list.append(site_name)
+ self.log("Site '{0}' deleted successfully".format(site_name), "INFO")
break
elif execution_details.get("bapiError"):
self.log("Error response for 'delete_site' execution: {0}".format(execution_details.get("bapiError")), "ERROR")
@@ -934,12 +928,8 @@ class Site(DnacBase):
site_name = self.want.get("site_name")
if not site_exists:
self.status = "success"
- self.msg = "Unable to delete site '{0}' as it's not found in Cisco Catalyst Center".format(site_name)
- self.result.update({'changed': False,
- 'response': self.msg,
- 'msg': self.msg})
- self.log(self.msg, "INFO")
-
+ self.site_absent_list.append(site_name)
+ self.log("Unable to delete site '{0}' as it's not found in Cisco Catalyst Center".format(site_name), "INFO")
return self
# Check here if the site have the childs then fetch it using get membership API and then sort it
@@ -951,6 +941,7 @@ class Site(DnacBase):
op_modifies=True,
params={"site_id": site_id},
)
+ self.log("Received API response from 'get_membership': {0}".format(str(mem_response)), "DEBUG")
site_response = mem_response.get("site").get("response")
self.log("Site {0} response along with it's child sites: {1}".format(site_name, str(site_response)), "DEBUG")
@@ -967,9 +958,7 @@ class Site(DnacBase):
# Delete the final parent site
self.delete_single_site(site_id, site_name)
- self.msg = "The site '{0}' and its child sites have been deleted successfully".format(site_name)
- self.result['response'] = self.msg
- self.log(self.msg, "INFO")
+ self.log("The site '{0}' and its child sites have been deleted successfully".format(site_name), "INFO")
return self
@@ -1043,6 +1032,63 @@ class Site(DnacBase):
return self
+ def update_site_messages(self):
+ """
+ Update site messages based on the status of created, updated, and deleted sites.
+ Args:
+ self (object): An instance of a class used for interacting with Cisco Catalyst Center.
+ Returns:
+ self (object): An instance of a class representing the status of the operation, including whether it was
+ successful or failed, any error messages encountered during operation.
+ Description:
+ This method updates the messages related to site creation, updating, and deletion in the Cisco Catalyst Center.
+ It evaluates the status of created sites, updated sites, and sites that are no longer needed for update to
+ determine the appropriate message to be set. The messages are then stored in the 'msg' attribute of the object.
+ """
+
+ if self.created_site_list and self.updated_site_list:
+ self.result['changed'] = True
+ if self.update_not_neeeded_sites:
+ msg = """Site(s) '{0}' created successfully as well as Site(s) '{1}' updated successully and the some site(s)
+ '{2}' needs no update in Cisco Catalyst Center"""
+ self.msg = msg.format(str(self.created_site_list), str(self.updated_site_list), str(self.update_not_neeeded_sites))
+ else:
+ self.msg = """Site(s) '{0}' created successfully in Cisco Catalyst Center as well as Site(s) '{1}' updated successully in
+ Cisco Catalyst Center""".format(str(self.created_site_list), str(self.updated_site_list))
+ elif self.created_site_list:
+ self.result['changed'] = True
+ if self.update_not_neeeded_sites:
+ self.msg = """Site(s) '{0}' created successfully and some site(s) '{1}' not needs any update in Cisco Catalyst
+ Center.""".format(str(self.created_site_list), str(self.update_not_neeeded_sites))
+ else:
+ self.msg = "Site(s) '{0}' created successfully in Cisco Catalyst Center.".format(str(self.created_site_list))
+ elif self.updated_site_list:
+ self.result['changed'] = True
+ if self.update_not_neeeded_sites:
+ self.msg = """Site(s) '{0}' updated successfully and some site(s) '{1}' not needs any update in Cisco Catalyst
+ Center.""".format(str(self.updated_site_list), str(self.update_not_neeeded_sites))
+ else:
+ self.msg = "Site(s) '{0}' updated successfully in Cisco Catalyst Center.".format(str(self.updated_site_list))
+ elif self.update_not_neeeded_sites:
+ self.result['changed'] = False
+ self.msg = "Site(s) '{0}' not needs any update in Cisco Catalyst Center.".format(str(self.update_not_neeeded_sites))
+ elif self.deleted_site_list and self.site_absent_list:
+ self.result['changed'] = True
+ self.msg = """Given site(s) '{0}' deleted successfully from Cisco Catalyst Center and unable to deleted some site(s) '{1}' as they
+ are not found in Cisco Catalyst Center.""".format(str(self.deleted_site_list), str(self.site_absent_list))
+ elif self.deleted_site_list:
+ self.result['changed'] = True
+ self.msg = "Given site(s) '{0}' deleted successfully from Cisco Catalyst Center".format(str(self.deleted_site_list))
+ else:
+ self.result['changed'] = False
+ self.msg = "Unable to delete site(s) '{0}' as it's not found in Cisco Catalyst Center.".format(str(self.site_absent_list))
+
+ self.status = "success"
+ self.result['response'] = self.msg
+ self.result['msg'] = self.msg
+
+ return self
+
def main():
""" main entry point for module execution
@@ -1089,6 +1135,9 @@ def main():
if config_verify:
ccc_site.verify_diff_state_apply[state](config).check_return_status()
+ # Build the aggregated result message for the processed sites and verify the return status
+ ccc_site.update_site_messages().check_return_status()
+
module.exit_json(**ccc_site.result)
diff --git a/ansible_collections/cisco/dnac/plugins/modules/snmp_properties.py b/ansible_collections/cisco/dnac/plugins/modules/snmp_properties.py
index 9cbcae16c..3b80d06db 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/snmp_properties.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/snmp_properties.py
@@ -37,8 +37,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateUpdateSNMPProperties
description: Complete reference of the CreateUpdateSNMPProperties API.
@@ -71,7 +71,6 @@ EXAMPLES = r"""
systemPropertyName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/snmp_properties_info.py b/ansible_collections/cisco/dnac/plugins/modules/snmp_properties_info.py
index 4f32fd28d..40e2f5562 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/snmp_properties_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/snmp_properties_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery GetSNMPProperties
description: Complete reference of the GetSNMPProperties API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/snmpv2_read_community_credential.py b/ansible_collections/cisco/dnac/plugins/modules/snmpv2_read_community_credential.py
index 0fd5ce8cb..21e56e739 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/snmpv2_read_community_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/snmpv2_read_community_credential.py
@@ -27,14 +27,14 @@ options:
description: Name/Description of the credential.
type: str
instanceUuid:
- description: Snmpv2 Read Community Credential's instanceUuid.
+ description: Credential UUID.
type: str
readCommunity:
description: SNMP read community. NO!$DATA!$ for no value change.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateSNMPReadCommunity
description: Complete reference of the CreateSNMPReadCommunity API.
@@ -86,7 +86,6 @@ EXAMPLES = r"""
readCommunity: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/snmpv2_write_community_credential.py b/ansible_collections/cisco/dnac/plugins/modules/snmpv2_write_community_credential.py
index 00caf3e5b..5d10233ab 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/snmpv2_write_community_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/snmpv2_write_community_credential.py
@@ -27,14 +27,14 @@ options:
description: Name/Description of the credential.
type: str
instanceUuid:
- description: Snmpv2 Write Community Credential's instanceUuid.
+ description: Credential UUID.
type: str
writeCommunity:
description: SNMP write community. NO!$DATA!$ for no value change.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateSNMPWriteCommunity
description: Complete reference of the CreateSNMPWriteCommunity API.
@@ -86,7 +86,6 @@ EXAMPLES = r"""
writeCommunity: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/snmpv3_credential.py b/ansible_collections/cisco/dnac/plugins/modules/snmpv3_credential.py
index 705d9e507..729505039 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/snmpv3_credential.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/snmpv3_credential.py
@@ -18,44 +18,44 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
authPassword:
- description: Snmpv3 Credential's authPassword.
+ description: Auth Password for SNMP.
type: str
authType:
- description: Snmpv3 Credential's authType.
+ description: SNMP auth protocol. 'SHA' or 'MD5'.
type: str
comments:
- description: Snmpv3 Credential's comments.
+ description: Comments to identify the SNMPv3 credential.
type: str
credentialType:
- description: Snmpv3 Credential's credentialType.
+ description: Credential type to identify the application that uses the SNMPv3 credential.
type: str
description:
- description: Snmpv3 Credential's description.
+ description: Description for Snmp V3 Credential.
type: str
id:
- description: Snmpv3 Credential's id.
+ description: Id of the SNMP V3 Credential in UUID format.
type: str
instanceTenantId:
- description: Snmpv3 Credential's instanceTenantId.
+ description: Deprecated.
type: str
instanceUuid:
- description: Snmpv3 Credential's instanceUuid.
+ description: Deprecated.
type: str
privacyPassword:
- description: Snmpv3 Credential's privacyPassword.
+ description: Privacy Password for SNMP privacy.
type: str
privacyType:
- description: Snmpv3 Credential's privacyType.
+ description: SNMP privacy protocol.
type: str
snmpMode:
- description: Snmpv3 Credential's snmpMode.
+ description: Mode of SNMP. 'AUTHPRIV' or 'AUTHNOPRIV' or 'NOAUTHNOPRIV'.
type: str
username:
- description: Snmpv3 Credential's username.
+ description: SNMP V3 Username.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Discovery CreateSNMPv3Credentials
description: Complete reference of the CreateSNMPv3Credentials API.
@@ -122,7 +122,6 @@ EXAMPLES = r"""
username: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/sp_profile_delete_v2.py b/ansible_collections/cisco/dnac/plugins/modules/sp_profile_delete_v2.py
index 236a0ba49..d2b9fbc53 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/sp_profile_delete_v2.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/sp_profile_delete_v2.py
@@ -11,17 +11,17 @@ short_description: Resource module for Sp Profile Delete V2
description:
- Manage operation delete of the resource Sp Profile Delete V2.
- API to delete Service Provider Profile QoS .
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
spProfileName:
- description: SpProfileName path parameter. Sp profile name.
+ description: SpProfileName path parameter. SP profile name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Network Settings DeleteSPProfileV2
description: Complete reference of the DeleteSPProfileV2 API.
@@ -48,7 +48,6 @@ EXAMPLES = r"""
spProfileName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_image_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/swim_image_details_info.py
index d41519382..5d55353dd 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/swim_image_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_image_details_info.py
@@ -92,8 +92,8 @@ options:
- Offset query parameter.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) GetSoftwareImageDetails
description: Complete reference of the GetSoftwareImageDetails API.
@@ -139,7 +139,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_import_local.py b/ansible_collections/cisco/dnac/plugins/modules/swim_import_local.py
index 27966b4cd..d2bcd0374 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/swim_import_local.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_import_local.py
@@ -35,8 +35,8 @@ options:
description: ThirdPartyVendor query parameter. Third Party Vendor.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) ImportLocalSoftwareImage
description: Complete reference of the ImportLocalSoftwareImage API.
@@ -67,7 +67,6 @@ EXAMPLES = r"""
thirdPartyVendor: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_import_via_url.py b/ansible_collections/cisco/dnac/plugins/modules/swim_import_via_url.py
index df217a30c..206cde725 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/swim_import_via_url.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_import_via_url.py
@@ -49,8 +49,8 @@ options:
description: ScheduleOrigin query parameter. Originator of this call (Optional).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) ImportSoftwareImageViaURL
description: Complete reference of the ImportSoftwareImageViaURL API.
@@ -85,7 +85,6 @@ EXAMPLES = r"""
scheduleOrigin: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_activation.py b/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_activation.py
index 5d7cae2b5..a2544542f 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_activation.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_activation.py
@@ -49,8 +49,8 @@ options:
before schedule (Optional).
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) TriggerSoftwareImageActivation
description: Complete reference of the TriggerSoftwareImageActivation API.
@@ -87,7 +87,6 @@ EXAMPLES = r"""
scheduleValidate: true
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_distribution.py b/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_distribution.py
index ce1b5e1fc..21eab5890 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_distribution.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/swim_trigger_distribution.py
@@ -30,8 +30,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Software Image Management (SWIM) TriggerSoftwareImageDistribution
description: Complete reference of the TriggerSoftwareImageDistribution API.
@@ -60,7 +60,6 @@ EXAMPLES = r"""
imageUuid: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/syslog_config_create.py b/ansible_collections/cisco/dnac/plugins/modules/syslog_config_create.py
index c4804a3b3..57c525fd6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/syslog_config_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/syslog_config_create.py
@@ -35,7 +35,7 @@ options:
description: Protocol.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Event Management CreateSyslogDestination
diff --git a/ansible_collections/cisco/dnac/plugins/modules/syslog_config_update.py b/ansible_collections/cisco/dnac/plugins/modules/syslog_config_update.py
index 653555ac9..6eeae91a0 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/syslog_config_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/syslog_config_update.py
@@ -35,7 +35,7 @@ options:
description: Protocol.
type: str
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
seealso:
- name: Cisco DNA Center documentation for Event Management UpdateSyslogDestination
diff --git a/ansible_collections/cisco/dnac/plugins/modules/system_health_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/system_health_count_info.py
index bf9b72361..104e4bb79 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/system_health_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/system_health_count_info.py
@@ -32,8 +32,8 @@ options:
here /dna/platform/app/consumer-portal/developer-toolkit/events.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Health and Performance SystemHealthCountAPI
description: Complete reference of the SystemHealthCountAPI API.
@@ -63,7 +63,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/system_health_info.py b/ansible_collections/cisco/dnac/plugins/modules/system_health_info.py
index 01a23ee64..58f2ee93d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/system_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/system_health_info.py
@@ -38,14 +38,14 @@ options:
limit:
description:
- Limit query parameter.
- type: int
+ type: float
offset:
description:
- Offset query parameter.
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Health and Performance SystemHealthAPI
description: Complete reference of the SystemHealthAPI API.
@@ -78,7 +78,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/system_performance_historical_info.py b/ansible_collections/cisco/dnac/plugins/modules/system_performance_historical_info.py
index 11476ed6f..211fd381c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/system_performance_historical_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/system_performance_historical_info.py
@@ -10,7 +10,10 @@ module: system_performance_historical_info
short_description: Information module for System Performance Historical
description:
- Get all System Performance Historical.
-- This API retrieves the historical performance indicators. The data can be retrieved for the last 3 months.
+- >
+ Retrieves the average values of cluster key performance indicators (KPIs), like CPU utilization, memory utilization
+ or network rates grouped by time intervals within a specified time range. The data will be available from the past
+ 24 hours.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -28,16 +31,16 @@ options:
- >
StartTime query parameter. This is the epoch start time in milliseconds from which performance indicator
need to be fetched.
- type: int
+ type: float
endTime:
description:
- >
EndTime query parameter. This is the epoch end time in milliseconds upto which performance indicator need to
be fetched.
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Health and Performance SystemPerformanceHistoricalAPI
description: Complete reference of the SystemPerformanceHistoricalAPI API.
@@ -68,7 +71,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -97,7 +99,9 @@ dnac_response:
"t1": [
"string"
]
- }
+ },
+ "cpuAvg": "string",
+ "memoryAvg": "string"
}
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/system_performance_info.py b/ansible_collections/cisco/dnac/plugins/modules/system_performance_info.py
index 41b14ccdd..b1a8ad24e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/system_performance_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/system_performance_info.py
@@ -10,7 +10,10 @@ module: system_performance_info
short_description: Information module for System Performance
description:
- Get all System Performance.
-- This API gives the aggregated performance indicators. The data can be retrieved for the last 3 months.
+- >
+ Retrieves the aggregated metrics (total, average or maximum) of cluster key performance indicators (KPIs), such as
+ CPU utilization, memory utilization or network rates recorded within a specified time period. The data will be
+ available from the past 24 hours.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -32,16 +35,16 @@ options:
- >
StartTime query parameter. This is the epoch start time in milliseconds from which performance indicator
need to be fetched.
- type: int
+ type: float
endTime:
description:
- >
EndTime query parameter. This is the epoch end time in milliseconds upto which performance indicator need to
be fetched.
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Health and Performance SystemPerformanceAPI
description: Complete reference of the SystemPerformanceAPI API.
@@ -73,7 +76,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag.py b/ansible_collections/cisco/dnac/plugins/modules/tag.py
index 0b26448c0..f7125017e 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag.py
@@ -19,52 +19,59 @@ extends_documentation_fragment:
author: Rafael Campos (@racampos)
options:
description:
- description: Tag's description.
+ description: Description of the tag.
type: str
dynamicRules:
description: Tag's dynamicRules.
elements: dict
suboptions:
memberType:
- description: Tag's memberType.
+ description: MemberType of the tag (e.g. Networkdevice, interface).
type: str
rules:
description: Tag's rules.
suboptions:
items:
- description: Tag's items.
- elements: str
+ description: Items details, multiple rules can be defined by items (e.g. "items"
+ {"operation" "ILIKE", "name" "managementIpAddress", "value" "%10%"}, {"operation"
+ "ILIKE", "name" "hostname", "value" "%NA%"} ).
+ elements: dict
type: list
name:
- description: Tag's name.
+ description: Name of the parameter (e.g. For interface portName,adminStatus,speed,status,description.
+ For networkdevice family,series,hostname,managementIpAddress,groupNameHierarchy,softwareVersion).
type: str
operation:
- description: Tag's operation.
+ description: Operation used in the rules (e.g. OR, IN, EQ, LIKE, ILIKE, AND).
type: str
value:
- description: Tag's value.
+ description: Value of the parameter (e.g. For portName 1/0/1,for adminStatus,status
+ up/down, for speed any integer value, for description any valid string,
+ for family switches, for series C3650, for managementIpAddress 10.197.124.90,
+ groupNameHierarchy Global, softwareVersion 16.9.1).
type: str
values:
- description: Tag's values.
+ description: Values of the parameter. Only one of 'value' or 'values' can
+ be used for the given parameter (for managementIpAddress e.g. "10.197.124.90", "10.197.124.91").
elements: str
type: list
type: dict
type: list
id:
- description: Tag's id.
+ description: Mandatory instanceUuid of the tag that needs to be updated.
type: str
instanceTenantId:
- description: Tag's instanceTenantId.
+ description: InstanceTenantId generated for the tag.
type: str
name:
- description: Tag's name.
+ description: Name of the tag.
type: str
systemTag:
- description: SystemTag flag.
+ description: True for system created tags, false for user defined tags.
type: bool
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag CreateTag
description: Complete reference of the CreateTag API.
@@ -104,7 +111,7 @@ EXAMPLES = r"""
- memberType: string
rules:
items:
- - string
+ - {}
name: string
operation: string
value: string
@@ -129,7 +136,8 @@ EXAMPLES = r"""
dynamicRules:
- memberType: string
rules:
- items: string
+ items:
+ - {}
name: string
operation: string
value: string
@@ -153,7 +161,6 @@ EXAMPLES = r"""
id: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
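
The tag module's dynamicRules 'items' option changes above from a list of strings to a list of dicts, each describing one rule with 'name', 'operation' and 'value'. Based on the examples embedded in the option descriptions, a hypothetical rule payload might look like the following Python literal (field names taken from the documentation; values are placeholders):

dynamic_rule = {
    "memberType": "networkdevice",
    "rules": {
        "operation": "OR",
        # Each item is now a dict describing one rule instead of a plain string.
        "items": [
            {"operation": "ILIKE", "name": "managementIpAddress", "value": "%10%"},
            {"operation": "ILIKE", "name": "hostname", "value": "%NA%"},
        ],
    },
}
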
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/tag_count_info.py
index 7562f71fc..4d9de1eb5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_count_info.py
@@ -31,10 +31,6 @@ options:
description:
- AttributeName query parameter.
type: str
- level:
- description:
- - Level query parameter.
- type: str
size:
description:
- Size query parameter. Size in kilobytes(KB).
@@ -44,8 +40,8 @@ options:
- SystemTag query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag GetTagCount
description: Complete reference of the GetTagCount API.
@@ -73,13 +69,11 @@ EXAMPLES = r"""
name: string
nameSpace: string
attributeName: string
- level: string
size: string
systemTag: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_info.py b/ansible_collections/cisco/dnac/plugins/modules/tag_info.py
index c7c32e7d5..359d1c290 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_info.py
@@ -40,11 +40,11 @@ options:
offset:
description:
- Offset query parameter.
- type: int
+ type: float
limit:
description:
- Limit query parameter.
- type: int
+ type: float
size:
description:
- Size query parameter. Size in kilobytes(KB).
@@ -72,8 +72,8 @@ options:
- Id path parameter. Tag ID.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag GetTag
description: Complete reference of the GetTag API.
@@ -130,7 +130,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -149,7 +148,9 @@ dnac_response:
"values": [
"string"
],
- "items": "string",
+ "items": [
+ {}
+ ],
"operation": "string",
"name": "string",
"value": "string"
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_member.py b/ansible_collections/cisco/dnac/plugins/modules/tag_member.py
index 2ffe6ce10..3174bb579 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_member.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_member.py
@@ -23,15 +23,16 @@ options:
memberId:
description: MemberId path parameter. TagMember id to be removed from tag.
type: str
- object:
- description: Object.
- type: str
+ memberType:
+ description: Tag Member's memberType.
+ elements: str
+ type: list
payload:
description: Map of member type and member ids.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag AddMembersToTheTag
description: Complete reference of the AddMembersToTheTag API.
@@ -62,7 +63,8 @@ EXAMPLES = r"""
dnac_debug: "{{dnac_debug}}"
state: present
id: string
- object: string
+ memberType:
+ - string
payload:
networkinterface:
- string
@@ -79,10 +81,8 @@ EXAMPLES = r"""
state: absent
id: string
memberId: string
- memberType: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
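
For readers following the option rework above, a rough sketch of the argument shapes the reworked tag_member module expects, written as the Python dicts Ansible would hand to it. The ids and the "networkdevice" member type are placeholders rather than values from this changeset; valid member types come from the /tag/member/type API.

# Hypothetical arguments for cisco.dnac.tag_member after this change.
add_members_args = {
    "state": "present",
    "id": "tag-uuid-1234",                    # tag receiving the members
    "memberType": ["networkdevice"],          # now a list instead of the old 'object' string
    "payload": {                              # map of member type -> member ids
        "networkdevice": ["device-uuid-1", "device-uuid-2"],
    },
}

remove_member_args = {
    "state": "absent",
    "id": "tag-uuid-1234",
    "memberId": "device-uuid-1",              # single member detached from the tag
}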
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_member_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/tag_member_count_info.py
index 82dbf33d0..9095be170 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_member_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_member_count_info.py
@@ -31,13 +31,9 @@ options:
description:
- MemberAssociationType query parameter.
type: str
- level:
- description:
- - Level query parameter.
- type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag GetTagMemberCount
description: Complete reference of the GetTagMemberCount API.
@@ -64,12 +60,10 @@ EXAMPLES = r"""
headers: "{{my_headers | from_json}}"
memberType: string
memberAssociationType: string
- level: string
id: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_member_info.py b/ansible_collections/cisco/dnac/plugins/modules/tag_member_info.py
index 57401f152..b347efa65 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_member_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_member_info.py
@@ -32,11 +32,11 @@ options:
offset:
description:
- Offset query parameter. Used for pagination. It indicates the starting row number out of available member records.
- type: str
+ type: float
limit:
description:
    - Limit query parameter. The maximum number of members to return in the result.
- type: str
+ type: float
memberAssociationType:
description:
- >
@@ -50,8 +50,8 @@ options:
- Level query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag GetTagMembersById
description: Complete reference of the GetTagMembersById API.
@@ -77,15 +77,14 @@ EXAMPLES = r"""
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
memberType: string
- offset: string
- limit: string
+ offset: 0
+ limit: 0
memberAssociationType: string
level: string
id: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_member_type_info.py b/ansible_collections/cisco/dnac/plugins/modules/tag_member_type_info.py
index a7e4cd1c9..f8acba59b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_member_type_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_member_type_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Tag GetTagResourceTypes
description: Complete reference of the GetTagResourceTypes API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/tag_membership.py b/ansible_collections/cisco/dnac/plugins/modules/tag_membership.py
index 9366e9ed2..71f503686 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/tag_membership.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/tag_membership.py
@@ -11,7 +11,7 @@ short_description: Resource module for Tag Membership
description:
- Manage operation update of the resource Tag Membership.
- >
- Updates tag membership. As part of the request payload through this API, only the specified members are added /
+ Update tag membership. As part of the request payload through this API, only the specified members are added /
retained to the given input tags. Possible values of memberType attribute in the request payload can be queried by
using the /tag/member/type API.
version_added: '3.1.0'
@@ -21,26 +21,25 @@ author: Rafael Campos (@racampos)
options:
memberToTags:
description: Tag Membership's memberToTags.
- elements: dict
suboptions:
key:
description: Tag Membership's key.
elements: str
type: list
- type: list
+ type: dict
memberType:
description: Tag Membership's memberType.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Tag UpdatesTagMembership
- description: Complete reference of the UpdatesTagMembership API.
- link: https://developer.cisco.com/docs/dna-center/#!updates-tag-membership
+- name: Cisco DNA Center documentation for Tag UpdateTagMembership
+ description: Complete reference of the UpdateTagMembership API.
+ link: https://developer.cisco.com/docs/dna-center/#!update-tag-membership
notes:
- SDK Method used are
- tag.Tag.updates_tag_membership,
+ tag.Tag.update_tag_membership,
- Paths used are
put /dna/intent/api/v1/tag/member,
@@ -58,12 +57,11 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
memberToTags:
- - key:
+ key:
- string
memberType: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
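
Since memberToTags is now a plain dict rather than a list, the generated example above only hints at its shape. A sketch of what the module arguments might look like, assuming the dict maps a member id to the tag ids it should be added to or retained in (that reading of the "key" suboption is an assumption, and all ids are placeholders):

# Hypothetical arguments for cisco.dnac.tag_membership after this change.
update_membership_args = {
    "memberType": "networkdevice",            # placeholder; query /tag/member/type for valid values
    "memberToTags": {
        "device-uuid-1": ["tag-uuid-a", "tag-uuid-b"],  # member id -> tags to add or retain
    },
}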
diff --git a/ansible_collections/cisco/dnac/plugins/modules/task_count_info.py b/ansible_collections/cisco/dnac/plugins/modules/task_count_info.py
index a1eedd445..d4c50b14a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/task_count_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/task_count_info.py
@@ -60,8 +60,8 @@ options:
- ParentId query parameter. Fetch tasks that have this parent Id.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Task GetTaskCount
description: Complete reference of the GetTaskCount API.
@@ -99,7 +99,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/task_info.py b/ansible_collections/cisco/dnac/plugins/modules/task_info.py
index 9adaf195a..059f59fd5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/task_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/task_info.py
@@ -82,8 +82,8 @@ options:
- TaskId path parameter. UUID of the Task.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Task GetTaskById
description: Complete reference of the GetTaskById API.
@@ -143,7 +143,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/task_operation_info.py b/ansible_collections/cisco/dnac/plugins/modules/task_operation_info.py
index ea08f3306..c50edb86c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/task_operation_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/task_operation_info.py
@@ -34,8 +34,8 @@ options:
value is 1.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Task GetTaskByOperationId
description: Complete reference of the GetTaskByOperationId API.
@@ -66,7 +66,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -78,20 +77,20 @@ dnac_response:
{
"additionalStatusURL": "string",
"data": "string",
- "endTime": "string",
+ "endTime": 0,
"errorCode": "string",
"errorKey": "string",
"failureReason": "string",
"id": "string",
"instanceTenantId": "string",
"isError": true,
- "lastUpdate": "string",
+ "lastUpdate": 0,
"operationIdList": {},
"parentId": "string",
"progress": "string",
"rootId": "string",
"serviceType": "string",
- "startTime": "string",
+ "startTime": 0,
"username": "string",
"version": 0
}
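
With endTime, lastUpdate and startTime now documented as numbers, consumers of the registered result receive epoch timestamps in milliseconds rather than strings. A small conversion sketch, assuming the fields are present and non-zero:

from datetime import datetime, timezone

def task_times(task):
    # task: one entry of dnac_response.response, per the schema above
    to_dt = lambda ms: datetime.fromtimestamp(ms / 1000, tz=timezone.utc)
    return {key: to_dt(task[key]) for key in ("startTime", "endTime", "lastUpdate")}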
diff --git a/ansible_collections/cisco/dnac/plugins/modules/task_tree_info.py b/ansible_collections/cisco/dnac/plugins/modules/task_tree_info.py
index 9e80665b3..e3660ed86 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/task_tree_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/task_tree_info.py
@@ -24,8 +24,8 @@ options:
- TaskId path parameter. UUID of the Task.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Task GetTaskTree
description: Complete reference of the GetTaskTree API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -66,20 +65,20 @@ dnac_response:
{
"additionalStatusURL": "string",
"data": "string",
- "endTime": "string",
+ "endTime": 0,
"errorCode": "string",
"errorKey": "string",
"failureReason": "string",
"id": "string",
"instanceTenantId": "string",
"isError": true,
- "lastUpdate": "string",
+ "lastUpdate": 0,
"operationIdList": {},
"parentId": "string",
"progress": "string",
"rootId": "string",
"serviceType": "string",
- "startTime": "string",
+ "startTime": 0,
"username": "string",
"version": 0
}
diff --git a/ansible_collections/cisco/dnac/plugins/modules/template_preview.py b/ansible_collections/cisco/dnac/plugins/modules/template_preview.py
index cb1657ccc..9399ac034 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/template_preview.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/template_preview.py
@@ -29,8 +29,8 @@ options:
description: UUID of template to get template preview.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates PreviewTemplate
description: Complete reference of the PreviewTemplate API.
@@ -60,7 +60,6 @@ EXAMPLES = r"""
templateId: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py b/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py
index fbd91685e..976cbd6e5 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/template_workflow_manager.py
@@ -75,6 +75,19 @@ options:
suboptions:
product_family:
description: Denotes the family to which the device belongs.
+ choices:
+ - Cisco Cloud Services Platform
+ - Cisco Interfaces and Modules
+ - Content Networking
+ - Network Management
+ - NFV-ThirdParty Devices
+ - NFVIS
+ - Routers
+ - Security and VPN
+ - Storage Networking
+ - Switches and Hubs
+ - Voice and Telephony
+ - Wireless Controller
type: str
product_series:
description: Specifies the series classification of the device.
@@ -100,90 +113,6 @@ options:
project_description:
description: Narrative that elaborates on the purpose and scope of the project.
type: str
- rollback_template_params:
- description: A list of dictionaries defining parameters necessary for the rollback functionality of a template.
- type: list
- elements: dict
- suboptions:
- binding:
- description: Associates the parameter with its source.
- type: str
- custom_order:
- description: Specifies a user-defined ordering for the parameter.
- type: int
- data_type:
- description: Identifies the data type of the parameter (e.g., string, integer, boolean).
- type: str
- default_value:
- description: Establishes a default value for the parameter, used if no other value is provided.
- type: str
- description:
- description: Provides a descriptive explanation of the parameter's purpose.
- type: str
- display_name:
- description: The name of the parameter as displayed to users.
- type: str
- group:
- description: Categorizes the parameter into a named group for organizational purposes.
- type: str
- id:
- description: A unique identifier for the parameter, formatted as a UUID.
- type: str
- instruction_text:
- description: Gives guidance or instructions regarding the parameter's use.
- type: str
- key:
- description: A unique key that identifies the parameter within the template.
- type: str
- not_param:
- description: Indicates whether the entry is not to be treated as a parameter.
- type: bool
- order:
- description: Determines the sequence in which the parameter appears relative to others.
- type: int
- param_array:
- description: Specifies if the parameter should be treated as an array.
- type: bool
- parameter_name:
- description: The name of the parameter.
- type: str
- provider:
- description: Denotes the provider associated with the parameter.
- type: str
- range:
- description: Defines the permissible range for the parameter's value.
- type: list
- elements: dict
- suboptions:
- id:
- description: Unique identifier for the range, represented as a UUID.
- type: str
- max_value:
- description: Specifies the maximum allowable value for the parameter.
- type: int
- min_value:
- description: Specifies the minimum allowable value for the parameter.
- type: int
- required:
- description: Dictates whether the parameter is mandatory for template operations.
- type: bool
- selection:
- description: Contains options for parameter selection when a choice is available.
- suboptions:
- default_selected_values:
- description: Lists the default values that are preselected.
- elements: str
- type: list
- id:
- description: A unique identifier for the selection entity, represented as a UUID.
- type: str
- selection_type:
- description: Specifies the type of selection, such as 'SINGLE_SELECT' or 'MULTI_SELECT'.
- type: str
- selection_values:
- description: A dictionary of available values for selection.
- type: dict
- type: dict
tags:
description: A list of dictionaries representing tags associated with the Configuration Template during creation.
suboptions:
@@ -287,9 +216,6 @@ options:
type: str
type: list
elements: dict
- create_time:
- description: The creation time of the template refers to the initial development.
- type: int
custom_params_order:
description: Specifies the sequence in which custom parameters or variables should be arranged within the template.
type: bool
@@ -303,6 +229,19 @@ options:
suboptions:
product_family:
description: Denotes the family to which the device belongs.
+ choices:
+ - Cisco Cloud Services Platform
+ - Cisco Interfaces and Modules
+ - Content Networking
+ - Network Management
+ - NFV-ThirdParty Devices
+ - NFVIS
+ - Routers
+ - Security and VPN
+ - Storage Networking
+ - Switches and Hubs
+ - Voice and Telephony
+ - Wireless Controller
type: str
product_series:
description: Specifies the series classification of the device.
@@ -311,7 +250,11 @@ options:
description: Describes the exact type of the device.
type: str
failure_policy:
- description: Define failure policy if template provisioning fails.
+ description:
+ - Define failure policy if template provisioning fails.
+    - failure_policy is applicable only when composite is set to True.
+ choices:
+ - ABORT_TARGET_ON_ERROR
type: str
id:
description: A unique identifier, represented as a UUID.
@@ -322,119 +265,27 @@ options:
- JINJA
- VELOCITY
type: str
- last_update_time:
- description: Indicates the most recent timestamp when the template was modified or revised.
- type: int
- latest_version_time:
- description: Indicates when the most recent version of a template was released or updated.
- type: int
template_name:
description: Name of template. This field is mandatory to create a new template.
type: str
- parent_template_id:
- description: Refers to the unique identifier of a template from which another template derives.
- type: str
- project_id:
- description: A unique identifier for the project, formatted as a UUID.
- type: str
project_name:
description: Title of the project within which the template is categorized and managed.
type: str
project_description:
description: Narrative that elaborates on the purpose and scope of the project.
type: str
- rollback_template_content:
- description: Refers to the process of reverting the content of a template back to a previous version or state.
- type: str
- rollback_template_params:
- description: A list of dictionaries defining parameters necessary for the rollback functionality of a template.
- suboptions:
- binding:
- description: Associates the parameter with its source.
- type: str
- custom_order:
- description: Specifies a user-defined ordering for the parameter.
- type: int
- data_type:
- description: Identifies the data type of the parameter (e.g., string, integer, boolean).
- type: str
- default_value:
- description: Establishes a default value for the parameter, used if no other value is provided.
- type: str
- description:
- description: Provides a descriptive explanation of the parameter's purpose.
- type: str
- display_name:
- description: The name of the parameter as displayed to users.
- type: str
- group:
- description: Categorizes the parameter into a named group for organizational purposes.
- type: str
- id:
- description: A unique identifier for the parameter, formatted as a UUID.
- type: str
- instruction_text:
- description: Gives guidance or instructions regarding the parameter's use.
- type: str
- key:
- description: A unique key that identifies the parameter within the template.
- type: str
- not_param:
- description: Indicates whether the entry is not to be treated as a parameter.
- type: bool
- order:
- description: Determines the sequence in which the parameter appears relative to others.
- type: int
- param_array:
- description: Specifies if the parameter should be treated as an array.
- type: bool
- parameter_name:
- description: The name of the parameter.
- type: str
- provider:
- description: Denotes the provider associated with the parameter.
- type: str
- range:
- description: Defines the permissible range for the parameter's value.
- suboptions:
- id:
- description: Unique identifier for the range, represented as a UUID.
- type: str
- max_value:
- description: Specifies the maximum allowable value for the parameter.
- type: int
- min_value:
- description: Specifies the minimum allowable value for the parameter.
- type: int
- type: list
- elements: dict
- required:
- description: Dictates whether the parameter is mandatory for template operations.
- type: bool
- selection:
- description: Contains options for parameter selection when a choice is available.
- suboptions:
- default_selected_values:
- description: Lists the default values that are preselected.
- elements: str
- type: list
- id:
- description: A unique identifier for the selection entity, represented as a UUID.
- type: str
- selection_type:
- description: Specifies the type of selection, such as 'SINGLE_SELECT' or 'MULTI_SELECT'.
- type: str
- selection_values:
- description: A dictionary of available values for selection.
- type: dict
- type: dict
- type: list
- elements: dict
software_type:
description: Applicable device software type. This field is mandatory to create a new template.
- type: str
- software_variant:
- description: Refers to a version or edition of a software application that differs from the main or standard release.
+ choices:
+ - IOS
+ - IOS-XE
+ - IOS-XR
+ - NX-OS
+ - Cisco Controller
+ - Wide Area Application Services
+ - Adaptive Security Appliance
+ - NFV-OS
+ - Others
type: str
software_version:
description: Applicable device software version.
@@ -537,24 +388,6 @@ options:
type: dict
type: list
elements: dict
- validation_errors:
- description: Refer to issues or discrepancies identified during the validation process.
- suboptions:
- rollback_template_errors:
- description: Validation or design conflicts errors of rollback template.
- elements: dict
- type: list
- template_errors:
- description: Refer to issues or discrepancies encountered during the processing of a template within a software application.
- elements: dict
- type: list
- template_id:
- description: A unique identifier for the template, represented as a UUID.
- type: str
- template_version:
- description: The current version of validation process in the template.
- type: str
- type: dict
version:
description: The current version of template.
type: str
@@ -642,6 +475,19 @@ options:
suboptions:
product_family:
description: Denotes the family to which the device belongs.
+ choices:
+ - Cisco Cloud Services Platform
+ - Cisco Interfaces and Modules
+ - Content Networking
+ - Network Management
+ - NFV-ThirdParty Devices
+ - NFVIS
+ - Routers
+ - Security and VPN
+ - Storage Networking
+ - Switches and Hubs
+ - Voice and Telephony
+ - Wireless Controller
type: str
product_series:
description: Specifies the series classification of the device.
@@ -664,90 +510,6 @@ options:
project_name:
description: Title of the project within which the template is categorized and managed.
type: str
- rollback_template_params:
- description: A list of dictionaries defining parameters necessary for the rollback functionality of a template.
- type: list
- elements: dict
- suboptions:
- binding:
- description: Associates the parameter with its source.
- type: str
- custom_order:
- description: Specifies a user-defined ordering for the parameter.
- type: int
- data_type:
- description: Identifies the data type of the parameter (e.g., string, integer, boolean).
- type: str
- default_value:
- description: Establishes a default value for the parameter, used if no other value is provided.
- type: str
- description:
- description: Provides a descriptive explanation of the parameter's purpose.
- type: str
- display_name:
- description: The name of the parameter as displayed to users.
- type: str
- group:
- description: Categorizes the parameter into a named group for organizational purposes.
- type: str
- id:
- description: A unique identifier for the parameter, formatted as a UUID.
- type: str
- instruction_text:
- description: Gives guidance or instructions regarding the parameter's use.
- type: str
- key:
- description: A unique key that identifies the parameter within the template.
- type: str
- not_param:
- description: Indicates whether the entry is not to be treated as a parameter.
- type: bool
- order:
- description: Determines the sequence in which the parameter appears relative to others.
- type: int
- param_array:
- description: Specifies if the parameter should be treated as an array.
- type: bool
- parameter_name:
- description: The name of the parameter.
- type: str
- provider:
- description: Denotes the provider associated with the parameter.
- type: str
- range:
- description: Defines the permissible range for the parameter's value.
- type: list
- elements: dict
- suboptions:
- id:
- description: Unique identifier for the range, represented as a UUID.
- type: str
- max_value:
- description: Specifies the maximum allowable value for the parameter.
- type: int
- min_value:
- description: Specifies the minimum allowable value for the parameter.
- type: int
- required:
- description: Dictates whether the parameter is mandatory for template operations.
- type: bool
- selection:
- description: Contains options for parameter selection when a choice is available.
- suboptions:
- default_selected_values:
- description: Lists the default values that are preselected.
- elements: str
- type: list
- id:
- description: A unique identifier for the selection entity, represented as a UUID.
- type: str
- selection_type:
- description: Specifies the type of selection, such as 'SINGLE_SELECT' or 'MULTI_SELECT'.
- type: str
- selection_values:
- description: A dictionary of available values for selection.
- type: dict
- type: dict
tags:
description: A list of dictionaries representing tags associated with the Configuration Template during creation.
suboptions:
@@ -851,9 +613,6 @@ options:
type: str
type: list
elements: dict
- create_time:
- description: The creation time of the template refers to the initial development.
- type: int
custom_params_order:
description: Specifies the sequence in which custom parameters or variables should be arranged within the template.
type: bool
@@ -867,6 +626,19 @@ options:
suboptions:
product_family:
description: Denotes the family to which the device belongs.
+ choices:
+ - Cisco Cloud Services Platform
+ - Cisco Interfaces and Modules
+ - Content Networking
+ - Network Management
+ - NFV-ThirdParty Devices
+ - NFVIS
+ - Routers
+ - Security and VPN
+ - Storage Networking
+ - Switches and Hubs
+ - Voice and Telephony
+ - Wireless Controller
type: str
product_series:
description: Specifies the series classification of the device.
@@ -875,7 +647,11 @@ options:
description: Describes the exact type of the device.
type: str
failure_policy:
- description: Define failure policy if template provisioning fails.
+ description:
+ - Define failure policy if template provisioning fails.
+    - failure_policy is applicable only when composite is set to True.
+ choices:
+ - ABORT_TARGET_ON_ERROR
type: str
id:
description: A unique identifier, represented as a UUID.
@@ -886,119 +662,27 @@ options:
- JINJA
- VELOCITY
type: str
- last_update_time:
- description: Indicates the most recent timestamp when the template was modified or revised.
- type: int
- latest_version_time:
- description: Indicates when the most recent version of a template was released or updated.
- type: int
template_name:
description: Name of template. This field is mandatory to create a new template.
type: str
- parent_template_id:
- description: Refers to the unique identifier of a template from which another template derives.
- type: str
- project_id:
- description: A unique identifier for the project, formatted as a UUID.
- type: str
project_name:
description: Title of the project within which the template is categorized and managed.
type: str
project_description:
description: Narrative that elaborates on the purpose and scope of the project.
type: str
- rollback_template_content:
- description: Refers to the process of reverting the content of a template back to a previous version or state.
- type: str
- rollback_template_params:
- description: A list of dictionaries defining parameters necessary for the rollback functionality of a template.
- suboptions:
- binding:
- description: Associates the parameter with its source.
- type: str
- custom_order:
- description: Specifies a user-defined ordering for the parameter.
- type: int
- data_type:
- description: Identifies the data type of the parameter (e.g., string, integer, boolean).
- type: str
- default_value:
- description: Establishes a default value for the parameter, used if no other value is provided.
- type: str
- description:
- description: Provides a descriptive explanation of the parameter's purpose.
- type: str
- display_name:
- description: The name of the parameter as displayed to users.
- type: str
- group:
- description: Categorizes the parameter into a named group for organizational purposes.
- type: str
- id:
- description: A unique identifier for the parameter, formatted as a UUID.
- type: str
- instruction_text:
- description: Gives guidance or instructions regarding the parameter's use.
- type: str
- key:
- description: A unique key that identifies the parameter within the template.
- type: str
- not_param:
- description: Indicates whether the entry is not to be treated as a parameter.
- type: bool
- order:
- description: Determines the sequence in which the parameter appears relative to others.
- type: int
- param_array:
- description: Specifies if the parameter should be treated as an array.
- type: bool
- parameter_name:
- description: The name of the parameter.
- type: str
- provider:
- description: Denotes the provider associated with the parameter.
- type: str
- range:
- description: Defines the permissible range for the parameter's value.
- suboptions:
- id:
- description: Unique identifier for the range, represented as a UUID.
- type: str
- max_value:
- description: Specifies the maximum allowable value for the parameter.
- type: int
- min_value:
- description: Specifies the minimum allowable value for the parameter.
- type: int
- type: list
- elements: dict
- required:
- description: Dictates whether the parameter is mandatory for template operations.
- type: bool
- selection:
- description: Contains options for parameter selection when a choice is available.
- suboptions:
- default_selected_values:
- description: Lists the default values that are preselected.
- elements: str
- type: list
- id:
- description: A unique identifier for the selection entity, represented as a UUID.
- type: str
- selection_type:
- description: Specifies the type of selection, such as 'SINGLE_SELECT' or 'MULTI_SELECT'.
- type: str
- selection_values:
- description: A dictionary of available values for selection.
- type: dict
- type: dict
- type: list
- elements: dict
software_type:
description: Applicable device software type. This field is mandatory to create a new template.
- type: str
- software_variant:
- description: Refers to a version or edition of a software application that differs from the main or standard release.
+ choices:
+ - IOS
+ - IOS-XE
+ - IOS-XR
+ - NX-OS
+ - Cisco Controller
+ - Wide Area Application Services
+ - Adaptive Security Appliance
+ - NFV-OS
+ - Others
type: str
software_version:
description: Applicable device software version.
@@ -1101,24 +785,6 @@ options:
type: dict
type: list
elements: dict
- validation_errors:
- description: Refer to issues or discrepancies identified during the validation process.
- suboptions:
- rollback_template_errors:
- description: Refer to errors or issues encountered during the process of reverting a template to a previous version or state.
- elements: dict
- type: list
- template_errors:
- description: Refer to issues or discrepancies encountered during the processing of a template within a software application.
- elements: dict
- type: list
- template_id:
- description: A unique identifier for the template, represented as a UUID.
- type: str
- template_version:
- description: The current version of validation process in the template.
- type: str
- type: dict
version:
description: The current version of template.
type: str
@@ -1128,7 +794,7 @@ options:
type: str
requirements:
-- dnacentersdk == 2.4.5
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
@@ -1169,7 +835,6 @@ EXAMPLES = r"""
- configuration_templates:
author: string
composite: true
- create_time: 0
custom_params_order: true
description: string
device_types:
@@ -1179,28 +844,15 @@ EXAMPLES = r"""
failure_policy: string
id: string
language: string
- last_update_time: 0
- latest_version_time: 0
name: string
- parent_template_id: string
- project_id: string
project_name: string
project_description: string
- rollback_template_content: string
software_type: string
- software_variant: string
software_version: string
tags:
- id: string
name: string
template_content: string
- validation_errors:
- rollback_template_errors:
- - {}
- template_errors:
- - {}
- template_id: string
- template_version: string
version: string
- name: Export the projects.
@@ -1415,7 +1067,6 @@ class Template(DnacBase):
'author': {'type': 'str'},
'composite': {'type': 'bool'},
'containing_templates': {'type': 'list'},
- 'create_time': {'type': 'int'},
'custom_params_order': {'type': 'bool'},
'template_description': {'type': 'str'},
'device_types': {
@@ -1428,22 +1079,14 @@ class Template(DnacBase):
'failure_policy': {'type': 'str'},
'id': {'type': 'str'},
'language': {'type': 'str'},
- 'last_update_time': {'type': 'int'},
- 'latest_version_time': {'type': 'int'},
'name': {'type': 'str'},
- 'parent_template_id': {'type': 'str'},
- 'project_id': {'type': 'str'},
'project_name': {'type': 'str'},
'project_description': {'type': 'str'},
- 'rollback_template_content': {'type': 'str'},
- 'rollback_template_params': {'type': 'list'},
'software_type': {'type': 'str'},
- 'software_variant': {'type': 'str'},
'software_version': {'type': 'str'},
'template_content': {'type': 'str'},
'template_params': {'type': 'list'},
'template_name': {'type': 'str'},
- 'validation_errors': {'type': 'dict'},
'version': {'type': 'str'}
},
'export': {
@@ -1472,7 +1115,6 @@ class Template(DnacBase):
'author': {'type': 'str'},
'composite': {'type': 'bool'},
'containing_templates': {'type': 'list'},
- 'create_time': {'type': 'int'},
'custom_params_order': {'type': 'bool'},
'template_description': {'type': 'str'},
'device_types': {
@@ -1485,22 +1127,14 @@ class Template(DnacBase):
'failure_policy': {'type': 'str'},
'id': {'type': 'str'},
'language': {'type': 'str'},
- 'last_update_time': {'type': 'int'},
- 'latest_version_time': {'type': 'int'},
'name': {'type': 'str'},
- 'parent_template_id': {'type': 'str'},
- 'project_id': {'type': 'str'},
'project_name': {'type': 'str'},
'project_description': {'type': 'str'},
- 'rollback_template_content': {'type': 'str'},
- 'rollback_template_params': {'type': 'list'},
'software_type': {'type': 'str'},
- 'software_variant': {'type': 'str'},
'software_version': {'type': 'str'},
'template_content': {'type': 'str'},
'template_params': {'type': 'list'},
'template_name': {'type': 'str'},
- 'validation_errors': {'type': 'dict'},
'version': {'type': 'str'}
}
}
@@ -1601,6 +1235,15 @@ class Template(DnacBase):
self.status = "failed"
return self.check_return_status()
+ product_families_list = ["Cisco Cloud Services Platform", "Cisco Interfaces and Modules",
+ "Content Networking", "Network Management", "NFV-ThirdParty Devices",
+ "NFVIS", "Routers", "Security and VPN", "Storage Networking",
+ "Switches and Hubs", "Voice and Telephony", "Wireless Controller"]
+ if product_family not in product_families_list:
+                self.msg = "The 'product_family' should be in the following list {0}.".format(product_families_list)
+ self.status = "failed"
+ return self.check_return_status()
+
product_series = item.get("product_series")
if product_series is not None:
deviceTypes[i].update({"productSeries": product_series})
@@ -1611,47 +1254,6 @@ class Template(DnacBase):
return deviceTypes
- def get_validation_errors(self, validation_errors):
- """
- Store template parameters from the playbook for template processing in Cisco Catalyst Center.
-
- Parameters:
- validation_errors (dict) - Playbook details containing validation errors information.
-
- Returns:
- validationErrors (dict) - Organized validation errors parameters.
- """
-
- if validation_errors is None:
- return None
-
- validationErrors = {}
- rollback_template_errors = validation_errors.get("rollback_template_errors")
- if rollback_template_errors is not None:
- validationErrors.update({
- "rollbackTemplateErrors": rollback_template_errors
- })
-
- template_errors = validation_errors.get("template_errors")
- if template_errors is not None:
- validationErrors.update({
- "templateErrors": template_errors
- })
-
- template_id = validation_errors.get("template_id")
- if template_id is not None:
- validationErrors.update({
- "templateId": template_id
- })
-
- template_version = validation_errors.get("template_version")
- if template_version is not None:
- validationErrors.update({
- "templateVersion": template_version
- })
-
- return validationErrors
-
def get_template_info(self, template_params):
"""
Store template params from the playbook for template processing in Cisco Catalyst Center.
@@ -1771,7 +1373,7 @@ class Template(DnacBase):
return self.check_return_status()
min_value = value.get("min_value")
if min_value is not None:
- _range[j].update({"maxValue": min_value})
+ _range[j].update({"minValue": min_value})
else:
self.msg = "min_value is mandatory for range under template_params"
self.status = "failed"
@@ -1800,13 +1402,37 @@ class Template(DnacBase):
return templateParams
+ def get_templates_details(self, name):
+ """
+ Get the template details from the template name provided in the playbook.
+
+ Parameters:
+ name (str) - Name of the template provided in the playbook.
+
+ Returns:
+ result (dict) - Template details for the given template name.
+ """
+
+ result = None
+ items = self.dnac_apply['exec'](
+ family="configuration_templates",
+ function="get_templates_details",
+ op_modifies=True,
+ params={"name": name}
+ )
+ if items:
+ result = items
+
+ self.log("Received API response from 'get_templates_details': {0}".format(items), "DEBUG")
+ return result
+
def get_containing_templates(self, containing_templates):
"""
Store tags from the playbook for template processing in Cisco Catalyst Center.
Check using check_return_status()
Parameters:
- containing_templates (dict) - Containing tempaltes details
+ containing_templates (dict) - Containing templates details
containing Template information.
Returns:
@@ -1838,10 +1464,6 @@ class Template(DnacBase):
"deviceTypes": self.get_device_types(device_types)
})
- id = item.get("id")
- if id is not None:
- containingTemplates[i].update({"id": id})
-
name = item.get("name")
if name is None:
self.msg = "name is mandatory under containing templates"
@@ -1850,6 +1472,16 @@ class Template(DnacBase):
containingTemplates[i].update({"name": name})
+ template_details = self.get_templates_details(name).get("response")
+ if not template_details:
+ self.msg = "No template with the template name '{0}' or it is not versioned".format(name)
+ self.status = "failed"
+ return self.check_return_status()
+
+ id = template_details[0].get("id")
+ if id is not None:
+ containingTemplates[i].update({"id": id})
+
language = item.get("language")
if language is None:
self.msg = "language is mandatory under containing templates"
@@ -1865,19 +1497,12 @@ class Template(DnacBase):
containingTemplates[i].update({"language": language})
project_name = item.get("project_name")
- if project_name is not None:
- containingTemplates[i].update({"projectName": project_name})
- else:
+ if project_name is None:
self.msg = "project_name is mandatory under containing templates"
self.status = "failed"
return self.check_return_status()
- rollback_template_params = item.get("rollback_template_params")
- if rollback_template_params is not None:
- containingTemplates[i].update({
- "rollbackTemplateParams": self.get_template_info(rollback_template_params)
- })
-
+ containingTemplates[i].update({"projectName": project_name})
template_content = item.get("template_content")
if template_content is not None:
containingTemplates[i].update({"templateContent": template_content})
@@ -1892,6 +1517,8 @@ class Template(DnacBase):
if version is not None:
containingTemplates[i].update({"version": version})
+ i += 1
+
return containingTemplates
def get_template_params(self, params):
@@ -1912,29 +1539,16 @@ class Template(DnacBase):
"composite": params.get("composite"),
"containingTemplates":
self.get_containing_templates(params.get("containing_templates")),
- "createTime": params.get("create_time"),
"customParamsOrder": params.get("custom_params_order"),
"description": params.get("template_description"),
"deviceTypes":
self.get_device_types(params.get("device_types")),
- "failurePolicy": params.get("failure_policy"),
"id": params.get("id"),
- "lastUpdateTime": params.get("last_update_time"),
- "latestVersionTime": params.get("latest_version_time"),
- "parentTemplateId": params.get("parent_template_id"),
- "projectId": params.get("project_id"),
- "rollbackTemplateContent": params.get("rollback_template_content"),
- "rollbackTemplateParams":
- self.get_template_info(params.get("rollback_template_params")),
- "softwareVariant": params.get("software_variant"),
"softwareVersion": params.get("software_version"),
"templateContent": params.get("template_content"),
"templateParams":
self.get_template_info(params.get("template_params")),
- "validationErrors":
- self.get_validation_errors(params.get("validation_errors")),
"version": params.get("version"),
- "project_id": params.get("project_id")
}
language = params.get("language")
if not language:
@@ -1973,8 +1587,26 @@ class Template(DnacBase):
self.status = "failed"
return self.check_return_status()
+ software_types_list = ["IOS", "IOS-XE", "IOS-XR", "NX-OS",
+ "Cisco Controller", "Wide Area Application Services",
+ "Adaptive Security Appliance", "NFV-OS", "Others"]
+ if softwareType not in software_types_list:
+ self.msg = "The 'software_type' should be in the following list {0}.".format(software_types_list)
+ self.status = "failed"
+ return self.check_return_status()
+
temp_params.update({"softwareType": softwareType})
+ if temp_params.get("composite") is True:
+ failure_policy = params.get("failure_policy")
+ failure_policy_list = ["ABORT_TARGET_ON_ERROR", None]
+ if failure_policy not in failure_policy_list:
+                self.msg = "The 'failure_policy' should be in the following list {0}.".format(failure_policy_list)
+ self.status = "failed"
+ return self
+
+ temp_params.update({"failurePolicy": failure_policy})
+
self.log("Formatted template params details: {0}".format(temp_params), "DEBUG")
copy_temp_params = copy.deepcopy(temp_params)
for item in copy_temp_params:
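
Combined with the documentation changes earlier in this patch, the effect of this hunk is that software_type must be one of the listed values and failure_policy is only consulted when the template is composite. A configuration that passes both checks could look like this sketch (all values illustrative, not taken from this changeset):

template_config = {
    "name": "CompositeAccessTemplate",          # placeholder template name
    "composite": True,
    "software_type": "IOS-XE",                  # must be one of the documented choices
    "failure_policy": "ABORT_TARGET_ON_ERROR",  # only honoured because composite is True
    "device_types": [
        {"product_family": "Switches and Hubs"},
    ],
}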
@@ -2243,7 +1875,7 @@ class Template(DnacBase):
if task_details.get("isError"):
self.log("Error occurred for '{0}' with taskid: {1}"
.format(creation_value, task_id), "ERROR")
- return creation_id, created
+ return task_id, created
if validation_string not in task_details.get("progress"):
self.log("'{0}' progress set to {1} for taskid: {2}"
@@ -2298,27 +1930,18 @@ class Template(DnacBase):
("author", "author", ""),
("composite", "composite", False),
("containingTemplates", "containingTemplates", []),
- ("createTime", "createTime", ""),
("customParamsOrder", "customParamsOrder", False),
("description", "description", ""),
("deviceTypes", "deviceTypes", []),
("failurePolicy", "failurePolicy", ""),
("id", "id", ""),
("language", "language", "VELOCITY"),
- ("lastUpdateTime", "lastUpdateTime", ""),
- ("latestVersionTime", "latestVersionTime", ""),
("name", "name", ""),
- ("parentTemplateId", "parentTemplateId", ""),
- ("projectId", "projectId", ""),
("projectName", "projectName", ""),
- ("rollbackTemplateContent", "rollbackTemplateContent", ""),
- ("rollbackTemplateParams", "rollbackTemplateParams", []),
("softwareType", "softwareType", ""),
- ("softwareVariant", "softwareVariant", ""),
("softwareVersion", "softwareVersion", ""),
("templateContent", "templateContent", ""),
("templateParams", "templateParams", []),
- ("validationErrors", "validationErrors", {}),
("version", "version", ""),
]
@@ -2522,6 +2145,15 @@ class Template(DnacBase):
self.msg = "Error while versioning the template"
self.status = "failed"
return self
+ else:
+ task_details = self.get_task_details(template_id)
+ self.log('Getting task details from task ID {0}: {1}'.format(template_id, task_details), "DEBUG")
+ if task_details.get("failureReason"):
+ self.msg = str(task_details.get("failureReason"))
+ else:
+ self.msg = str(task_details.get("progress"))
+ self.status = "failed"
+ return self
def handle_export(self, export):
"""
@@ -2549,6 +2181,7 @@ class Template(DnacBase):
validation_string = "successfully exported project"
self.check_task_response_status(response,
validation_string,
+ "export_projects",
True).check_return_status()
self.result['response'][1].get("export").get("response").update({"exportProject": self.msg})
@@ -2568,6 +2201,7 @@ class Template(DnacBase):
validation_string = "successfully exported template"
self.check_task_response_status(response,
validation_string,
+ "export_templates",
True).check_return_status()
self.result['response'][1].get("export").get("response").update({"exportTemplate": self.msg})
@@ -2616,7 +2250,7 @@ class Template(DnacBase):
params=_import_project,
)
validation_string = "successfully imported project"
- self.check_task_response_status(response, validation_string).check_return_status()
+ self.check_task_response_status(response, validation_string, "imports_the_projects_provided").check_return_status()
self.result['response'][2].get("import").get("response").update({"importProject": validation_string})
else:
self.msg = "Projects '{0}' already available.".format(payload)
@@ -2683,6 +2317,17 @@ class Template(DnacBase):
}
self.log("Import template details from the playbook: {0}"
.format(import_template), "DEBUG")
+ global_project_name = import_template.get("project_name")
+ for item in import_template.get("payload"):
+ template_project_name = item.get("projectName")
+ if template_project_name is not None and \
+ global_project_name != template_project_name:
+                    self.msg = "Template '{0}' under the 'Import Template' should have project_name as {1}" \
+ .format(item.get("name"), global_project_name)
+ self.log(str(self.msg), "ERROR")
+ self.status = "failed"
+ return self
+
if _import_template:
response = self.dnac._exec(
family="configuration_templates",
@@ -2691,7 +2336,7 @@ class Template(DnacBase):
params=import_template
)
validation_string = "successfully imported template"
- self.check_task_response_status(response, validation_string).check_return_status()
+ self.check_task_response_status(response, validation_string, "imports_the_templates_provided").check_return_status()
self.result['response'][2].get("import").get("response") \
.update({"importTemplate": "Successfully imported the templates"})
@@ -2807,8 +2452,11 @@ class Template(DnacBase):
if is_template_found:
self.delete_project_or_template(config)
else:
+ self.result['response'][0].get("configurationTemplate").update({
+ "msg": "Template with template_name '{0}' already deleted".format(templateName)
+ })
self.msg = "Invalid template {0} under project".format(templateName)
- self.status = "failed"
+ self.status = "success"
return self
else:
self.log("Template name is empty, deleting the project '{0}' and "
@@ -2848,15 +2496,41 @@ class Template(DnacBase):
return self
self.get_have_template(config, is_template_available)
- self.log("Current State (have): {0}".format(self.want.get("template_params")), "INFO")
- self.log("Desired State (want): {0}".format(self.have_template.get("template")), "INFO")
- template_params = ["language", "name", "projectName", "softwareType",
- "softwareVariant", "templateContent"]
+        self.log("Desired State (want): {0}".format(self.want.get("template_params")), "INFO")
+        self.log("Current State (have): {0}".format(self.have_template.get("template")), "INFO")
+ if not self.have_template.get("template"):
+ self.msg = "No template created with the name '{0}'".format(self.want.get("template_params").get("name"))
+ self.status = "failed"
+ return self
+
+ template_params = ["language", "name", "projectName",
+ "softwareType", "templateContent"]
+ have_template = self.have_template.get("template")
+ want_template = self.want.get("template_params")
for item in template_params:
- if self.have_template.get("template").get(item) != self.want.get("template_params").get(item):
- self.msg = "Configuration Template config is not applied to the Cisco Catalyst Center."
+ if have_template.get(item) != want_template.get(item):
+ self.msg = "Configuration Template config with template_name {0}'s '{1}' is not applied to the Cisco Catalyst Center." \
+ .format(want_template.get("name"), item)
self.status = "failed"
return self
+
+ want_template_containing_template = want_template.get("containingTemplates")
+ if want_template_containing_template:
+ for item in want_template_containing_template:
+ name = item.get("name")
+ response = get_dict_result(have_template.get("containingTemplates"), "name", name)
+ if response is None:
+ self.msg = "Configuration Template config with template_name '{0}' under ".format(name) + \
+ "'containing_templates' is not available in the Cisco Catalyst Center."
+ self.status = "failed"
+ return self
+ for value in item:
+ if item.get(value) != response.get(value):
+ self.msg = "Configuration Template config with template_name " + \
+ "{0}'s '{1}' is not applied to the Cisco Catalyst Center.".format(name, value)
+ self.status = "failed"
+ return self
+
self.log("Successfully validated the Template in the Catalyst Center.", "INFO")
self.result['response'][0].get("configurationTemplate").get("response").update({"Validation": "Success"})
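
The verification logic above relies on get_dict_result to pick the matching containing template out of the deployed state. As a self-contained sketch of that comparison, with a minimal stand-in for the collection's helper (assumed to return the first dict whose given key equals the given value):

def get_dict_result(items, key, value):
    # Minimal stand-in for the module_utils helper used above.
    return next((item for item in items or [] if item.get(key) == value), None)

def containing_templates_applied(want_templates, have_templates):
    for want in want_templates or []:
        have = get_dict_result(have_templates, "name", want.get("name"))
        if have is None:
            return False          # containing template missing on Catalyst Center
        if any(want[field] != have.get(field) for field in want):
            return False          # a field from the playbook was not applied
    return True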
diff --git a/ansible_collections/cisco/dnac/plugins/modules/templates_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/templates_details_info.py
index f2f45271c..13f2cc904 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/templates_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/templates_details_info.py
@@ -89,8 +89,8 @@ options:
- Limit query parameter. Limits number of results.
type: int
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Configuration Templates GetTemplatesDetails
description: Complete reference of the GetTemplatesDetails API.
@@ -135,7 +135,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/threat_detail.py b/ansible_collections/cisco/dnac/plugins/modules/threat_detail.py
index 0bcfac62c..a7cd309ce 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/threat_detail.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/threat_detail.py
@@ -44,7 +44,7 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/threat_detail_count.py b/ansible_collections/cisco/dnac/plugins/modules/threat_detail_count.py
index 7a21cd1ad..4b63e29b7 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/threat_detail_count.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/threat_detail_count.py
@@ -44,7 +44,7 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/threat_summary.py b/ansible_collections/cisco/dnac/plugins/modules/threat_summary.py
index 635b0c43c..a8c1d7a4b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/threat_summary.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/threat_summary.py
@@ -35,7 +35,7 @@ options:
elements: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
+- dnacentersdk >= 2.7.1
- python >= 3.9
notes:
- SDK Method used are
diff --git a/ansible_collections/cisco/dnac/plugins/modules/topology_layer_2_info.py b/ansible_collections/cisco/dnac/plugins/modules/topology_layer_2_info.py
index 2ba3667e2..d2cac631b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/topology_layer_2_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/topology_layer_2_info.py
@@ -24,8 +24,8 @@ options:
- VlanID path parameter. Vlan Name for e.g Vlan1, Vlan23 etc.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Topology GetTopologyDetails
description: Complete reference of the GetTopologyDetails API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/topology_layer_3_info.py b/ansible_collections/cisco/dnac/plugins/modules/topology_layer_3_info.py
index 008b5d1ac..ea058cc8b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/topology_layer_3_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/topology_layer_3_info.py
@@ -24,8 +24,8 @@ options:
- TopologyType path parameter. Type of topology(OSPF,ISIS,etc).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Topology GetL3TopologyDetails
description: Complete reference of the GetL3TopologyDetails API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/topology_network_health_info.py b/ansible_collections/cisco/dnac/plugins/modules/topology_network_health_info.py
index b9c856316..ca38c16af 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/topology_network_health_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/topology_network_health_info.py
@@ -23,11 +23,11 @@ options:
type: dict
timestamp:
description:
- - Timestamp query parameter. Epoch time(in milliseconds) when the Network health data is required.
- type: str
+ - Timestamp query parameter. UTC timestamp of network health data in milliseconds.
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Topology GetOverallNetworkHealth
description: Complete reference of the GetOverallNetworkHealth API.
@@ -52,11 +52,10 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
- timestamp: string
+ timestamp: 0
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -71,20 +70,27 @@ dnac_response:
"healthScore": 0,
"totalCount": 0,
"goodCount": 0,
+ "noHealthCount": 0,
"unmonCount": 0,
"fairCount": 0,
"badCount": 0,
- "entity": {},
+ "maintenanceModeCount": 0,
+ "entity": "string",
"timeinMillis": 0
}
],
"measuredBy": "string",
- "latestMeasuredByEntity": {},
+ "latestMeasuredByEntity": "string",
"latestHealthScore": 0,
"monitoredDevices": 0,
"monitoredHealthyDevices": 0,
"monitoredUnHealthyDevices": 0,
"unMonitoredDevices": 0,
+ "noHealthDevices": 0,
+ "totalDevices": 0,
+ "monitoredPoorHealthDevices": 0,
+ "monitoredFairHealthDevices": 0,
+ "healthContributingDevices": 0,
"healthDistirubution": [
{
"category": "string",
@@ -93,13 +99,19 @@ dnac_response:
"goodPercentage": 0,
"badPercentage": 0,
"fairPercentage": 0,
+ "noHealthPercentage": 0,
"unmonPercentage": 0,
"goodCount": 0,
"badCount": 0,
"fairCount": 0,
+ "noHealthCount": 0,
"unmonCount": 0,
+ "thirdPartyDeviceCount": 0,
"kpiMetrics": [
- {}
+ {
+ "key": "string",
+ "value": "string"
+ }
]
}
]
diff --git a/ansible_collections/cisco/dnac/plugins/modules/topology_physical_info.py b/ansible_collections/cisco/dnac/plugins/modules/topology_physical_info.py
index 6a9573d57..81bff393d 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/topology_physical_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/topology_physical_info.py
@@ -24,8 +24,8 @@ options:
- NodeType query parameter.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Topology GetPhysicalTopology
description: Complete reference of the GetPhysicalTopology API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/topology_site_info.py b/ansible_collections/cisco/dnac/plugins/modules/topology_site_info.py
index d63661b43..63f268372 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/topology_site_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/topology_site_info.py
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Topology GetSiteTopology
description: Complete reference of the GetSiteTopology API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/topology_vlan_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/topology_vlan_details_info.py
index 555b5b9d8..707d30ccc 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/topology_vlan_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/topology_vlan_details_info.py
@@ -10,7 +10,7 @@ module: topology_vlan_details_info
short_description: Information module for Topology Vlan Details
description:
- Get all Topology Vlan Details.
-- Returns the list of VLAN names.
+- Returns the list of VLAN names that are involved in a loop as identified by the Spanning Tree Protocol.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
@@ -20,8 +20,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Topology GetVLANDetails
description: Complete reference of the GetVLANDetails API.
@@ -49,7 +49,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network.py b/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network.py
index 38125e4d3..64c366978 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network.py
@@ -12,7 +12,7 @@ description:
- Manage operations create and delete of the resource Transit Peer Network.
- Add Transit Peer Network in SD-Access.
- Delete Transit Peer Network from SD-Access.
-version_added: '6.5.0'
+version_added: '6.0.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -21,7 +21,7 @@ options:
description: Transit Peer Network's ipTransitSettings.
suboptions:
autonomousSystemNumber:
- description: Autonomous System Number (e.g.,1-65535).
+ description: Autonomous System Number.
type: str
routingProtocolName:
description: Routing Protocol Name.
@@ -49,19 +49,19 @@ options:
description: Transit Peer Network Type.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for AddTransitPeerNetwork
+- name: Cisco DNA Center documentation for SDA AddTransitPeerNetwork
description: Complete reference of the AddTransitPeerNetwork API.
link: https://developer.cisco.com/docs/dna-center/#!add-transit-peer-network
-- name: Cisco DNA Center documentation for DeleteTransitPeerNetwork
+- name: Cisco DNA Center documentation for SDA DeleteTransitPeerNetwork
description: Complete reference of the DeleteTransitPeerNetwork API.
link: https://developer.cisco.com/docs/dna-center/#!delete-transit-peer-network
notes:
- SDK Method used are
- ..add_transit_peer_network,
- ..delete_transit_peer_network,
+ sda.Sda.add_transit_peer_network,
+ sda.Sda.delete_transit_peer_network,
- Paths used are
post /dna/intent/api/v1/business/sda/transit-peer-network,
@@ -103,7 +103,6 @@ EXAMPLES = r"""
transitPeerNetworkType: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network_info.py b/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network_info.py
index 75ca07d7a..4b54f937a 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/transit_peer_network_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Transit Peer Network
description:
- Get all Transit Peer Network.
- Get Transit Peer Network Info from SD-Access.
-version_added: '6.5.0'
+version_added: '6.0.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -24,15 +24,15 @@ options:
- TransitPeerNetworkName query parameter. Transit or Peer Network Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for GetTransitPeerNetworkInfo
+- name: Cisco DNA Center documentation for SDA GetTransitPeerNetworkInfo
description: Complete reference of the GetTransitPeerNetworkInfo API.
link: https://developer.cisco.com/docs/dna-center/#!get-transit-peer-network-info
notes:
- SDK Method used are
- ..get_transit_peer_network_info,
+ sda.Sda.get_transit_peer_network_info,
- Paths used are
get /dna/intent/api/v1/business/sda/transit-peer-network,
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/user.py b/ansible_collections/cisco/dnac/plugins/modules/user.py
index 4567de33d..230211eec 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/user.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/user.py
@@ -9,10 +9,11 @@ DOCUMENTATION = r"""
module: user
short_description: Resource module for User
description:
-- Manage operations create and update of the resource User.
-- Add a new user for Cisco DNA Center system.
-- Update a user for Cisco DNA Center system.
-version_added: '6.7.0'
+- Manage operations create, update and delete of the resource User.
+- Add a new user for Cisco DNA Center System.
+- Delete a user from Cisco DNA Center System.
+- Update a user for Cisco DNA Center System.
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -40,22 +41,27 @@ options:
description: Username.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for User and Roles AddUserAPI
description: Complete reference of the AddUserAPI API.
link: https://developer.cisco.com/docs/dna-center/#!add-user-api
+- name: Cisco DNA Center documentation for User and Roles DeleteUserAPI
+ description: Complete reference of the DeleteUserAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-user-api
- name: Cisco DNA Center documentation for User and Roles UpdateUserAPI
description: Complete reference of the UpdateUserAPI API.
link: https://developer.cisco.com/docs/dna-center/#!update-user-api
notes:
- SDK Method used are
- user_and_roles.UserandRoles.add_user_ap_i,
- user_and_roles.UserandRoles.update_user_ap_i,
+ userand_roles.UserandRoles.add_user_api,
+ userand_roles.UserandRoles.delete_user_api,
+ userand_roles.UserandRoles.update_user_api,
- Paths used are
post /dna/system/api/v1/user,
+ delete /dna/system/api/v1/user/{userId},
put /dna/system/api/v1/user,
"""
@@ -97,8 +103,19 @@ EXAMPLES = r"""
userId: string
username: string
-"""
+- name: Delete by id
+ cisco.dnac.user:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+ userId: string
+"""
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
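As a concrete illustration of the new delete operation documented in this module, a minimal playbook sketch follows; the connection variables and the userId value are hypothetical placeholders, and only parameters that appear in the module documentation above are used.

- hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Delete a Cisco DNA Center user by id (sketch)
      cisco.dnac.user:
        dnac_host: "{{ dnac_host }}"          # assumed inventory variable
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: false
        state: absent
        userId: "3d8f0c2e-example-user-id"    # hypothetical user id
      register: delete_result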
diff --git a/ansible_collections/cisco/dnac/plugins/modules/user_enrichment_details_info.py b/ansible_collections/cisco/dnac/plugins/modules/user_enrichment_details_info.py
index bcffca746..1a2bbffc3 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/user_enrichment_details_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/user_enrichment_details_info.py
@@ -22,8 +22,8 @@ options:
description: Additional headers.
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Users GetUserEnrichmentDetails
description: Complete reference of the GetUserEnrichmentDetails API.
@@ -51,7 +51,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/user_info.py b/ansible_collections/cisco/dnac/plugins/modules/user_info.py
index d4c20af69..839c3ba53 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/user_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/user_info.py
@@ -10,8 +10,8 @@ module: user_info
short_description: Information module for User
description:
- Get all User.
-- Get all users for the Cisco DNA Center system.
-version_added: '6.7.0'
+- Get all users for the Cisco DNA Center System.
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -21,18 +21,26 @@ options:
type: dict
invokeSource:
description:
- - InvokeSource query parameter. The source that invokes this API.
+ - >
+ InvokeSource query parameter. The source that invokes this API. The value of this query parameter must be
+ set to "external".
+ type: str
+ authSource:
+ description:
+ - >
+ AuthSource query parameter. The source that authenticates the user. The value of this query parameter can be
+ set to "internal" or "external". If not provided, then all users will be returned in the response.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for User and Roles GetUsersAPI
description: Complete reference of the GetUsersAPI API.
link: https://developer.cisco.com/docs/dna-center/#!get-users-api
notes:
- SDK Method used are
- user_and_roles.UserandRoles.get_users_api,
+ userand_roles.UserandRoles.get_users_api,
- Paths used are
get /dna/system/api/v1/user,
@@ -51,10 +59,10 @@ EXAMPLES = r"""
dnac_debug: "{{dnac_debug}}"
headers: "{{my_headers | from_json}}"
invokeSource: string
+ authSource: string
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
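To show the new authSource query parameter alongside the tightened invokeSource description, here is a hypothetical task that lists only internally authenticated users; the connection variables are placeholders.

- hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: List internally authenticated users (sketch)
      cisco.dnac.user_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: false
        invokeSource: external   # must be "external" per the documentation
        authSource: internal     # "internal" or "external"; omit to return all users
      register: user_list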
diff --git a/ansible_collections/cisco/dnac/plugins/modules/users_external_authentication.py b/ansible_collections/cisco/dnac/plugins/modules/users_external_authentication.py
new file mode 100644
index 000000000..85b86c16f
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/users_external_authentication.py
@@ -0,0 +1,61 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: users_external_authentication
+short_description: Resource module for Users External Authentication
+description:
+- Manage operation create of the resource Users External Authentication.
+- Enable or disable external authentication on Cisco DNA Center System.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ enable:
+ description: Enable/disable External Authentication.
+ type: bool
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for User and Roles ManageExternalAuthenticationSettingAPI
+ description: Complete reference of the ManageExternalAuthenticationSettingAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!manage-external-authentication-setting-api
+notes:
+ - SDK Method used are
+ userand_roles.UserandRoles.manage_external_authentication_setting_api,
+
+ - Paths used are
+ post /dna/system/api/v1/users/external-authentication,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.users_external_authentication:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ enable: true
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "message": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/users_external_authentication_info.py b/ansible_collections/cisco/dnac/plugins/modules/users_external_authentication_info.py
new file mode 100644
index 000000000..c7ef52cab
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/users_external_authentication_info.py
@@ -0,0 +1,65 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: users_external_authentication_info
+short_description: Information module for Users External Authentication
+description:
+- Get all Users External Authentication.
+- Get the External Authentication setting.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for User and Roles GetExternalAuthenticationSettingAPI
+ description: Complete reference of the GetExternalAuthenticationSettingAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-external-authentication-setting-api
+notes:
+ - SDK Method used are
+ userand_roles.UserandRoles.get_external_authentication_setting_api,
+
+ - Paths used are
+ get /dna/system/api/v1/users/external-authentication,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Users External Authentication
+ cisco.dnac.users_external_authentication_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "external-authentication-flag": [
+ {
+ "enabled": true
+ }
+ ]
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute.py b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute.py
new file mode 100644
index 000000000..1faf388eb
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute.py
@@ -0,0 +1,84 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: users_external_servers_aaa_attribute
+short_description: Resource module for Users External Servers Aaa Attribute
+description:
+- Manage operations create and delete of the resource Users External Servers Aaa Attribute.
+- >
+ Add or update the custom AAA attribute for external authentication. Note that if you decide not to set the custom
+ AAA attribute, a default AAA attribute will be used for authentication based on the protocol supported by your
+ server. For TACACS servers it will be "cisco-av-pair" and for RADIUS servers it will be "Cisco-AVPair".
+- >
+ Delete the custom AAA attribute that was added. Note that by deleting the AAA attribute, a default AAA attribute
+ will be used for authentication based on the protocol supported by your server. For TACACS servers it will be
+ "cisco-av-pair" and for RADIUS servers it will be "Cisco-AVPair".
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ attributeName:
+ description: Name of the custom AAA attribute.
+ type: str
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for User and Roles AddAndUpdateAAAAttributeAPI
+ description: Complete reference of the AddAndUpdateAAAAttributeAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!add-and-update-aaa-attribute-api
+- name: Cisco DNA Center documentation for User and Roles DeleteAAAAttributeAPI
+ description: Complete reference of the DeleteAAAAttributeAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!delete-aaa-attribute-api
+notes:
+ - SDK Method used are
+ userand_roles.UserandRoles.add_and_update_a_a_a_attribute_api,
+ userand_roles.UserandRoles.delete_a_a_a_attribute_api,
+
+ - Paths used are
+ post /dna/system/api/v1/users/external-servers/aaa-attribute,
+ delete /dna/system/api/v1/users/external-servers/aaa-attribute,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.users_external_servers_aaa_attribute:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: present
+ attributeName: string
+
+- name: Delete all
+ cisco.dnac.users_external_servers_aaa_attribute:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ state: absent
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "message": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute_info.py b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute_info.py
new file mode 100644
index 000000000..a73caa07f
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_aaa_attribute_info.py
@@ -0,0 +1,65 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: users_external_servers_aaa_attribute_info
+short_description: Information module for Users External Servers Aaa Attribute
+description:
+- Get all Users External Servers Aaa Attribute.
+- Get the current value of the custom AAA attribute.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module_info
+author: Rafael Campos (@racampos)
+options:
+ headers:
+ description: Additional headers.
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for User and Roles GetAAAAttributeAPI
+ description: Complete reference of the GetAAAAttributeAPI API.
+ link: https://developer.cisco.com/docs/dna-center/#!get-aaa-attribute-api
+notes:
+ - SDK Method used are
+ userand_roles.UserandRoles.get_a_a_a_attribute_api,
+
+ - Paths used are
+ get /dna/system/api/v1/users/external-servers/aaa-attribute,
+
+"""
+
+EXAMPLES = r"""
+- name: Get all Users External Servers Aaa Attribute
+ cisco.dnac.users_external_servers_aaa_attribute_info:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ headers: "{{my_headers | from_json}}"
+ register: result
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "aaa-attributes": [
+ {
+ "attributeName": "string"
+ }
+ ]
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py
index 0373d2c85..7fc86ce6b 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/users_external_servers_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Users External Servers
description:
- Get all Users External Servers.
- Get external users authentication servers.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -21,18 +21,20 @@ options:
type: dict
invokeSource:
description:
- - InvokeSource query parameter. The source that invokes this API.
+ - >
+ InvokeSource query parameter. The source that invokes this API. The value of this query parameter must be
+ set to "external".
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for User and Roles GetExternalAuthenticationServersAPI
description: Complete reference of the GetExternalAuthenticationServersAPI API.
link: https://developer.cisco.com/docs/dna-center/#!get-external-authentication-servers-api
notes:
- SDK Method used are
- user_and_roles.UserandRoles.get_external_authentication_servers_ap_i,
+ userand_roles.UserandRoles.get_external_authentication_servers_api,
- Paths used are
get /dna/system/api/v1/users/external-servers,
@@ -54,7 +56,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
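Matching the clarified invokeSource description above, a hypothetical query for the configured external authentication servers looks like this; connection variables are placeholders.

- hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: List external authentication servers (sketch)
      cisco.dnac.users_external_servers_info:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: false
        invokeSource: external   # must be "external" per the documentation
      register: external_servers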
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_accespoint_configuration.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_accespoint_configuration.py
index 6a1cf2946..33f6797c1 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_accespoint_configuration.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_accespoint_configuration.py
@@ -10,8 +10,10 @@ module: wireless_accespoint_configuration
short_description: Resource module for Wireless Accespoint Configuration
description:
- Manage operation create of the resource Wireless Accespoint Configuration.
-- User can configure multiple access points with required options using this intent API.
-version_added: '6.7.0'
+- >
+  Users can configure multiple access points with required options using this intent API. This API does not support
+ configuration of CleanAir or SI for IOS-XE devices with version greater than or equal to 17.9.
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
@@ -20,10 +22,6 @@ options:
description: Configure the access point's admin status. Set this parameter's value
to "true" to enable it and "false" to disable it.
type: bool
- apHeight:
- description: Configure the height of the access point by setting a value between
- 3 and height of the floor.
- type: int
apList:
description: Wireless Accespoint Configuration's apList.
elements: dict
@@ -47,10 +45,6 @@ options:
description: To change the access point's admin status, set this parameter's value
to "true".
type: bool
- configureApHeight:
- description: To change the access point's height, set this parameter's value to
- "true".
- type: bool
configureApMode:
description: To change the access point's mode, set this parameter's value to "true".
type: bool
@@ -78,6 +72,10 @@ options:
description: Configure the acess point's failover priority for low, set "1"; for
medium, set "2"; for high, set "3"; and for critical, set "4".
type: int
+ isAssignedSiteAsLocation:
+ description: If AP is assigned to a site, then to assign AP location as the site
+ name, set this parameter's value to "true".
+ type: bool
ledBrightnessLevel:
description: Configure the access point's LED brightness level by setting a value
between 1 and 8.
@@ -113,31 +111,19 @@ options:
access point. If cable loss needs to be configured, set this parameter's value
to "other".
type: str
- antennaDegree:
- description: Configure the antenna degree on the specified radio for an access
- point.
- type: int
- antennaElevAngleDegree:
- description: Configure the antenna elevation angle on the specified radio for
- an access point.
- type: int
- antennaElevAngleSign:
- description: Configure the antenna elevation angle direction on the specified
- radio for an access point for up, set "1"; for down, set "-1".
- type: int
antennaGain:
description: Configure the antenna gain on the specified radio for an access
- point by setting a decimal value (in dBi).
+ point by setting a decimal value (in dBi). To configure "antennaGain", set
+ "antennaPatternName" value to "other".
type: int
antennaPatternName:
- description: Configure the antenna pattern name on the specified radio for an
- access point. If antenna gain needs to be configured, set this parameter's
- value to "other".
+ description: Specify the antenna name on the specified radio for an access point.
+ The antenna name is used to calculate the gain on the radio slot.
type: str
cableLoss:
description: Configure the cable loss on the specified radio for an access point
by setting a decimal value (in dBi).
- type: int
+ type: float
channelAssignmentMode:
description: Configure the channel assignment mode on the specified radio for
an access point for global mode, set "1"; and for custom mode, set "2".
@@ -164,13 +150,9 @@ options:
description: To change the antenna cable name on the specified radio for an
access point, set this parameter's value to "true".
type: bool
- configureAntennaDegree:
- description: To change the antenna degree on the specified radio for an access
- point, set this parameter's value to "true".
- type: bool
configureAntennaPatternName:
- description: To change the antenna pattern name on the specified radio for an
- access point, set the value for this parameter to "true".
+ description: To change the antenna gain on the specified radio for an access
+ point, set the value for this parameter to "true".
type: bool
configureChannel:
description: To change the channel on the specified radio for an access point,
@@ -184,10 +166,6 @@ options:
description: To enable or disable either CleanAir or Spectrum Intelligence on
the specified radio for an access point, set this parameter's value to "true".
type: bool
- configureElevAngleDegree:
- description: To change the elevation angle degree on the specified radio for
- an access point, set this parameter's value to "true".
- type: bool
configurePower:
description: To change the power assignment mode on the specified radio for
an access point, set this parameter's value to "true".
@@ -206,11 +184,13 @@ options:
type: int
radioBand:
description: Configure the band on the specified radio for an access point for
- 2.4 GHz, set "RADIO24"; for 5 GHz, set "RADIO5".
+ 2.4 GHz, set "RADIO24"; for 5 GHz, set "RADIO5". Any other string is invalid,
+ including empty string.
type: str
radioRoleAssignment:
- description: Configure one of the following roles on the specified radio for
- an access point "auto", "serving", or "monitor".
+ description: Configure only one of the following roles on the specified radio
+ for an access point as "AUTO", "SERVING", or "MONITOR". Any other string is
+ invalid, including empty string.
type: str
radioType:
description: Configure an access point's radio band for 2.4 GHz, set "1"; for
@@ -238,15 +218,15 @@ options:
type: str
type: dict
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
-- name: Cisco DNA Center documentation for Wireless ConfigureAccessPoints
- description: Complete reference of the ConfigureAccessPoints API.
- link: https://developer.cisco.com/docs/dna-center/#!configure-access-points
+- name: Cisco DNA Center documentation for Wireless ConfigureAccessPointsV1
+ description: Complete reference of the ConfigureAccessPointsV1 API.
+ link: https://developer.cisco.com/docs/dna-center/#!configure-access-points-v-1
notes:
- SDK Method used are
- wireless.Wireless.configure_access_points,
+ wireless.Wireless.configure_access_points_v1,
- Paths used are
post /dna/intent/api/v1/wireless/accesspoint-configuration,
@@ -264,14 +244,12 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
adminStatus: true
- apHeight: 0
apList:
- apName: string
apNameNew: string
macAddress: string
apMode: 0
configureAdminStatus: true
- configureApHeight: true
configureApMode: true
configureFailoverPriority: true
configureHAController: true
@@ -279,6 +257,7 @@ EXAMPLES = r"""
configureLedStatus: true
configureLocation: true
failoverPriority: 0
+ isAssignedSiteAsLocation: true
ledBrightnessLevel: 0
ledStatus: true
location: string
@@ -288,9 +267,6 @@ EXAMPLES = r"""
radioConfigurations:
- adminStatus: true
antennaCableName: string
- antennaDegree: 0
- antennaElevAngleDegree: 0
- antennaElevAngleSign: 0
antennaGain: 0
antennaPatternName: string
cableLoss: 0
@@ -300,12 +276,10 @@ EXAMPLES = r"""
cleanAirSI: 0
configureAdminStatus: true
configureAntennaCable: true
- configureAntennaDegree: true
configureAntennaPatternName: true
configureChannel: true
configureChannelWidth: true
configureCleanAirSI: true
- configureElevAngleDegree: true
configurePower: true
configureRadioRoleAssignment: true
powerAssignmentMode: 0
@@ -321,7 +295,6 @@ EXAMPLES = r"""
address: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
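Since the updated radioRoleAssignment and radioBand descriptions above require exact uppercase strings, a hypothetical task with concrete values may help; the AP name and MAC address are placeholders and only parameters shown in this module's documentation are used.

- hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Put one radio into monitor role on the 2.4 GHz band (sketch)
      cisco.dnac.wireless_accespoint_configuration:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: false
        apList:
          - apName: ap-floor1-01              # hypothetical AP name
            macAddress: "00:11:22:33:44:55"   # hypothetical ethernet MAC
        radioConfigurations:
          - configureRadioRoleAssignment: true
            radioRoleAssignment: MONITOR      # only "AUTO", "SERVING", or "MONITOR" are valid
            radioBand: RADIO24                # only "RADIO24" or "RADIO5" are valid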
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_create.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_create.py
new file mode 100644
index 000000000..6f450eede
--- /dev/null
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_create.py
@@ -0,0 +1,328 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021, Cisco Systems
+# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION = r"""
+---
+module: wireless_accesspoint_configuration_create
+short_description: Resource module for Wireless Accesspoint Configuration Create
+description:
+- Manage operation create of the resource Wireless Accesspoint Configuration Create.
+- Users can configure multiple access points with required options using this intent API.
+version_added: '6.14.0'
+extends_documentation_fragment:
+ - cisco.dnac.module
+author: Rafael Campos (@racampos)
+options:
+ adminStatus:
+ description: Configure the access point's admin status. Set this parameter's value
+ to "true" to enable it and "false" to disable it.
+ type: bool
+ apList:
+ description: Wireless Accesspoint Configuration Create's apList.
+ elements: dict
+ suboptions:
+ apName:
+ description: The current host name of the access point.
+ type: str
+ apNameNew:
+ description: The modified hostname of the access point.
+ type: str
+ macAddress:
+ description: The ethernet MAC address of the access point.
+ type: str
+ type: list
+ apMode:
+ description: Configure the access point's mode for local/flexconnect mode, set "0";
+ for monitor mode, set "1"; for sniffer mode, set "4"; and for bridge/flex+bridge
+ mode, set "5".
+ type: int
+ cleanAirSI24:
+ description: Configure clean air status for radios that are in 2.4 Ghz band. Set
+ this parameter's value to "true" to enable it and "false" to disable it.
+ type: bool
+ cleanAirSI5:
+ description: Configure clean air status for radios that are in 5 Ghz band. Set this
+ parameter's value to "true" to enable it and "false" to disable it.
+ type: bool
+ cleanAirSI6:
+ description: Configure clean air status for radios that are in 6 Ghz band. Set this
+ parameter's value to "true" to enable it and "false" to disable it.
+ type: bool
+ configureAdminStatus:
+ description: To change the access point's admin status, set this parameter's value
+ to "true".
+ type: bool
+ configureApMode:
+ description: To change the access point's mode, set this parameter's value to "true".
+ type: bool
+ configureCleanAirSI24Ghz:
+ description: To change the clean air status for radios that are in 2.4 Ghz band,
+ set this parameter's value to "true".
+ type: bool
+ configureCleanAirSI5Ghz:
+ description: To change the clean air status for radios that are in 5 Ghz band, set
+ this parameter's value to "true".
+ type: bool
+ configureCleanAirSI6Ghz:
+ description: To change the clean air status for radios that are in 6 Ghz band, set
+ this parameter's value to "true".
+ type: bool
+ configureFailoverPriority:
+ description: To change the access point's failover priority, set this parameter's
+ value to "true".
+ type: bool
+ configureHAController:
+ description: To change the access point's HA controller, set this parameter's value
+ to "true".
+ type: bool
+ configureLedBrightnessLevel:
+ description: To change the access point's LED brightness level, set this parameter's
+ value to "true".
+ type: bool
+ configureLedStatus:
+ description: To change the access point's LED status, set this parameter's value
+ to "true".
+ type: bool
+ configureLocation:
+ description: To change the access point's location, set this parameter's value to
+ "true".
+ type: bool
+ failoverPriority:
+    description: Configure the access point's failover priority for low, set "1"; for
+ medium, set "2"; for high, set "3"; and for critical, set "4".
+ type: int
+ isAssignedSiteAsLocation:
+ description: To configure the access point's location as the site assigned to the
+ access point, set this parameter's value to "true".
+ type: bool
+ ledBrightnessLevel:
+ description: Configure the access point's LED brightness level by setting a value
+ between 1 and 8.
+ type: int
+ ledStatus:
+ description: Configure the access point's LED status. Set "true" to enable its status
+ and "false" to disable it.
+ type: bool
+ location:
+ description: Configure the access point's location.
+ type: str
+ primaryControllerName:
+ description: Configure the hostname for an access point's primary controller.
+ type: str
+ primaryIpAddress:
+ description: Wireless Accesspoint Configuration Create's primaryIpAddress.
+ suboptions:
+ address:
+ description: Configure the IP address for an access point's primary controller.
+ type: str
+ type: dict
+ radioConfigurations:
+ description: Wireless Accesspoint Configuration Create's radioConfigurations.
+ elements: dict
+ suboptions:
+ adminStatus:
+ description: Configure the admin status on the specified radio for an access
+ point. Set this parameter's value to "true" to enable it and "false" to disable
+ it.
+ type: bool
+ antennaCableName:
+ description: Configure the antenna cable name on the specified radio for an
+ access point. If cable loss needs to be configured, set this parameter's value
+ to "other".
+ type: str
+ antennaGain:
+ description: Configure the antenna gain on the specified radio for an access
+ point by setting a decimal value (in dBi). To configure "antennaGain", set
+ "antennaPatternName" value to "other".
+ type: int
+ antennaPatternName:
+ description: Specify the antenna name on the specified radio for an access point.
+ The antenna name is used to calculate the gain on the radio slot.
+ type: str
+ cableLoss:
+ description: Configure the cable loss on the specified radio for an access point
+ by setting a decimal value (in dBi).
+ type: float
+ channelAssignmentMode:
+ description: Configure the channel assignment mode on the specified radio for
+ an access point for global mode, set "1"; and for custom mode, set "2".
+ type: int
+ channelNumber:
+ description: Configure the channel number on the specified radio for an access
+ point.
+ type: int
+ channelWidth:
+ description: Configure the channel width on the specified radio for an access
+ point for 20 MHz, set "3"; for 40 MHz, set "4"; for 80 MHz, set "5"; and for
+ 160 MHz, set "6".
+ type: int
+ configureAdminStatus:
+ description: To change the admin status on the specified radio for an access
+ point, set this parameter's value to "true".
+ type: bool
+ configureAntennaCable:
+ description: To change the antenna cable name on the specified radio for an
+ access point, set this parameter's value to "true".
+ type: bool
+ configureAntennaPatternName:
+ description: To change the antenna gain on the specified radio for an access
+ point, set the value for this parameter to "true".
+ type: bool
+ configureChannel:
+ description: To change the channel on the specified radio for an access point,
+ set this parameter's value to "true".
+ type: bool
+ configureChannelWidth:
+ description: To change the channel width on the specified radio for an access
+ point, set this parameter's value to "true".
+ type: bool
+ configurePower:
+ description: To change the power assignment mode on the specified radio for
+ an access point, set this parameter's value to "true".
+ type: bool
+ configureRadioRoleAssignment:
+ description: To change the radio role on the specified radio for an access point,
+ set this parameter's value to "true".
+ type: bool
+ powerAssignmentMode:
+ description: Configure the power assignment mode on the specified radio for
+ an access point for global mode, set "1"; and for custom mode, set "2".
+ type: int
+ powerlevel:
+ description: Configure the power level on the specified radio for an access
+ point by setting a value between 1 and 8.
+ type: int
+ radioBand:
+ description: Configure the band on the specified radio for an access point for
+ 2.4 GHz, set "RADIO24"; for 5 GHz, set "RADIO5". Any other string is invalid,
+ including empty string.
+ type: str
+ radioRoleAssignment:
+ description: Configure only one of the following roles on the specified radio
+ for an access point as "AUTO", "SERVING", or "MONITOR". Any other string is
+ invalid, including empty string.
+ type: str
+ radioType:
+ description: Configure an access point's radio band for 2.4 GHz, set "1"; for
+ 5 GHz, set "2"; for XOR, set "3"; and for 6 GHz, set "6".
+ type: int
+ type: list
+ secondaryControllerName:
+ description: Configure the hostname for an access point's secondary controller.
+ type: str
+ secondaryIpAddress:
+ description: Wireless Accesspoint Configuration Create's secondaryIpAddress.
+ suboptions:
+ address:
+ description: Configure the IP address for an access point's secondary controller.
+ type: str
+ type: dict
+ tertiaryControllerName:
+ description: Configure the hostname for an access point's tertiary controller.
+ type: str
+ tertiaryIpAddress:
+ description: Wireless Accesspoint Configuration Create's tertiaryIpAddress.
+ suboptions:
+ address:
+ description: Configure the IP address for an access point's tertiary controller.
+ type: str
+ type: dict
+requirements:
+- dnacentersdk >= 2.7.1
+- python >= 3.5
+seealso:
+- name: Cisco DNA Center documentation for Wireless ConfigureAccessPointsV2
+ description: Complete reference of the ConfigureAccessPointsV2 API.
+ link: https://developer.cisco.com/docs/dna-center/#!configure-access-points-v-2
+notes:
+ - SDK Method used are
+ wireless.Wireless.configure_access_points_v2,
+
+ - Paths used are
+ post /dna/intent/api/v2/wireless/accesspoint-configuration,
+
+"""
+
+EXAMPLES = r"""
+- name: Create
+ cisco.dnac.wireless_accesspoint_configuration_create:
+ dnac_host: "{{dnac_host}}"
+ dnac_username: "{{dnac_username}}"
+ dnac_password: "{{dnac_password}}"
+ dnac_verify: "{{dnac_verify}}"
+ dnac_port: "{{dnac_port}}"
+ dnac_version: "{{dnac_version}}"
+ dnac_debug: "{{dnac_debug}}"
+ adminStatus: true
+ apList:
+ - apName: string
+ apNameNew: string
+ macAddress: string
+ apMode: 0
+ cleanAirSI24: true
+ cleanAirSI5: true
+ cleanAirSI6: true
+ configureAdminStatus: true
+ configureApMode: true
+ configureCleanAirSI24Ghz: true
+ configureCleanAirSI5Ghz: true
+ configureCleanAirSI6Ghz: true
+ configureFailoverPriority: true
+ configureHAController: true
+ configureLedBrightnessLevel: true
+ configureLedStatus: true
+ configureLocation: true
+ failoverPriority: 0
+ isAssignedSiteAsLocation: true
+ ledBrightnessLevel: 0
+ ledStatus: true
+ location: string
+ primaryControllerName: string
+ primaryIpAddress:
+ address: string
+ radioConfigurations:
+ - adminStatus: true
+ antennaCableName: string
+ antennaGain: 0
+ antennaPatternName: string
+ cableLoss: 0
+ channelAssignmentMode: 0
+ channelNumber: 0
+ channelWidth: 0
+ configureAdminStatus: true
+ configureAntennaCable: true
+ configureAntennaPatternName: true
+ configureChannel: true
+ configureChannelWidth: true
+ configurePower: true
+ configureRadioRoleAssignment: true
+ powerAssignmentMode: 0
+ powerlevel: 0
+ radioBand: string
+ radioRoleAssignment: string
+ radioType: 0
+ secondaryControllerName: string
+ secondaryIpAddress:
+ address: string
+ tertiaryControllerName: string
+ tertiaryIpAddress:
+ address: string
+
+"""
+RETURN = r"""
+dnac_response:
+ description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
+ returned: always
+ type: dict
+ sample: >
+ {
+ "response": {
+ "taskId": "string",
+ "url": "string"
+ },
+ "version": "string"
+ }
+"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_summary_info.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_summary_info.py
index ba43fe052..178836568 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_summary_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_accesspoint_configuration_summary_info.py
@@ -11,7 +11,7 @@ short_description: Information module for Wireless Accesspoint Configuration Sum
description:
- Get all Wireless Accesspoint Configuration Summary.
- Users can query the access point configuration information per device using the ethernet MAC address.
-version_added: '6.7.0'
+version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module_info
author: Rafael Campos (@racampos)
@@ -24,8 +24,8 @@ options:
- Key query parameter. The ethernet MAC address of Access point.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless GetAccessPointConfiguration
description: Complete reference of the GetAccessPointConfiguration API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface.py
index ed933a268..d2a621383 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface.py
@@ -21,14 +21,14 @@ options:
description: Additional headers.
type: dict
interfaceName:
- description: Dynamic-interface name.
+ description: InterfaceName query parameter. Valid interface-name to be deleted.
type: str
vlanId:
description: Vlan Id.
- type: int
+ type: float
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless CreateUpdateDynamicInterface
description: Complete reference of the CreateUpdateDynamicInterface API.
@@ -43,12 +43,12 @@ notes:
- Paths used are
post /dna/intent/api/v1/wireless/dynamic-interface,
- delete /dna/intent/api/v1/wireless/dynamic-interface/{interfaceName},
+ delete /dna/intent/api/v1/wireless/dynamic-interface,
"""
EXAMPLES = r"""
-- name: Create
+- name: Delete all
cisco.dnac.wireless_dynamic_interface:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
@@ -57,12 +57,11 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
- state: present
+ state: absent
headers: '{{my_headers | from_json}}'
interfaceName: string
- vlanId: 0
-- name: Delete by name
+- name: Create
cisco.dnac.wireless_dynamic_interface:
dnac_host: "{{dnac_host}}"
dnac_username: "{{dnac_username}}"
@@ -71,23 +70,20 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
- state: absent
- headers: '{{my_headers | from_json}}'
+ state: present
interfaceName: string
+ vlanId: 0
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
+ type: dict
sample: >
- [
- {
- "executionId": "string",
- "executionUrl": "string",
- "message": "string"
- }
- ]
+ {
+ "executionId": "string",
+ "executionStatusUrl": "string",
+ "message": "string"
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface_info.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface_info.py
index 152b1c339..1b34ae233 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_dynamic_interface_info.py
@@ -26,8 +26,8 @@ options:
will be retrieved.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless GetDynamicInterface
description: Complete reference of the GetDynamicInterface API.
@@ -56,7 +56,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid.py
index f288aaada..a734b7040 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid.py
@@ -18,20 +18,36 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
+ aaaOverride:
+ description: Aaa Override.
+ type: bool
+ authKeyMgmt:
+ description: Takes string inputs for the AKMs that should be set true. Possible
+ AKM values dot1x,dot1x_ft, dot1x_sha, psk, psk_ft, psk_sha, owe, sae, sae_ft.
+ elements: str
+ type: list
basicServiceSetClientIdleTimeout:
- description: Basic Service Set Client Idle Timeout.
+ description: Basic Service Set Client Idle Timeout (Default 300 if enableBasicServiceSetMaxIdle
+ is true, 0 otherwise).
type: int
clientExclusionTimeout:
- description: Client Exclusion Timeout.
+    description: Client Exclusion Timeout (Default 180 if enableClientExclusion is true,
+ 0 otherwise).
type: int
+ clientRateLimit:
+ description: Client Rate Limit (in bits per second).
+ type: float
+ coverageHoleDetectionEnable:
+ description: Coverage Hole Detection Enable.
+ type: bool
enableBasicServiceSetMaxIdle:
- description: Enable Basic Service Set Max Idle.
+ description: Enable Basic Service Set Max Idle (Default true).
type: bool
enableBroadcastSSID:
description: Enable Broadcase SSID.
type: bool
enableClientExclusion:
- description: Enable Client Exclusion.
+    description: Enable Client Exclusion (Default true).
type: bool
enableDirectedMulticastService:
description: Enable Directed Multicast Service.
@@ -46,14 +62,34 @@ options:
description: Enable Neighbor List.
type: bool
enableSessionTimeOut:
- description: Enable Session Timeout.
+    description: Enable Session Timeout (Default true).
type: bool
fastTransition:
description: Fast Transition.
type: str
+ ghz24Policy:
+ description: Ghz24 Policy.
+ type: str
+ ghz6PolicyClientSteering:
+ description: Ghz6 Policy Client Steering.
+ type: bool
mfpClientProtection:
description: Management Frame Protection Client.
type: str
+ multiPSKSettings:
+ description: Wireless Enterprise Ssid's multiPSKSettings.
+ elements: dict
+ suboptions:
+ passphrase:
+ description: Passphrase.
+ type: str
+ passphraseType:
+ description: Passphrase Type.
+ type: str
+ priority:
+ description: Priority.
+ type: int
+ type: list
name:
description: SSID NAME.
type: str
@@ -64,15 +100,34 @@ options:
passphrase:
description: Passphrase.
type: str
+ policyProfileName:
+ description: Policy Profile Name.
+ type: str
+ profileName:
+ description: Profile Name.
+ type: str
+ protectedManagementFrame:
+ description: (Required applicable for Security Type WPA3_PERSONAL, WPA3_ENTERPRISE,
+ OPEN_SECURED) and (Optional, Required Applicable for Security Type WPA2_WPA3_PERSONAL
+ and WPA2_WPA3_ENTERPRISE).
+ type: str
radioPolicy:
- description: Radio Policy Enum (enum Triple band operation (2.4GHz, 5GHz and 6GHz),
- Triple band operation with band select, 5GHz only, 2.4GHz only, 6GHz only).
+ description: Radio Policy Enum.
type: str
+ rsnCipherSuiteCcmp256:
+ description: Rsn Cipher Suite Ccmp256.
+ type: bool
+ rsnCipherSuiteGcmp128:
+ description: Rsn Cipher Suite Gcmp 128.
+ type: bool
+ rsnCipherSuiteGcmp256:
+ description: Rsn Cipher Suite Gcmp256.
+ type: bool
securityLevel:
description: Security Level.
type: str
sessionTimeOut:
- description: Session Time Out.
+ description: Session Time Out (Default 1800 if enableSessionTimeOut is true, 0 otherwise).
type: int
ssidName:
description: SsidName path parameter. Enter the SSID name to be deleted.
@@ -81,8 +136,8 @@ options:
description: Traffic Type Enum (voicedata or data ).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless CreateEnterpriseSSID
description: Complete reference of the CreateEnterpriseSSID API.
@@ -117,8 +172,13 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
+ aaaOverride: true
+ authKeyMgmt:
+ - string
basicServiceSetClientIdleTimeout: 0
clientExclusionTimeout: 0
+ clientRateLimit: 0
+ coverageHoleDetectionEnable: true
enableBasicServiceSetMaxIdle: true
enableBroadcastSSID: true
enableClientExclusion: true
@@ -128,12 +188,24 @@ EXAMPLES = r"""
enableNeighborList: true
enableSessionTimeOut: true
fastTransition: string
+ ghz24Policy: string
+ ghz6PolicyClientSteering: true
mfpClientProtection: string
+ multiPSKSettings:
+ - passphrase: string
+ passphraseType: string
+ priority: 0
name: string
nasOptions:
- string
passphrase: string
+ policyProfileName: string
+ profileName: string
+ protectedManagementFrame: string
radioPolicy: string
+ rsnCipherSuiteCcmp256: true
+ rsnCipherSuiteGcmp128: true
+ rsnCipherSuiteGcmp256: true
securityLevel: string
sessionTimeOut: 0
trafficType: string
@@ -148,8 +220,13 @@ EXAMPLES = r"""
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
state: present
+ aaaOverride: true
+ authKeyMgmt:
+ - string
basicServiceSetClientIdleTimeout: 0
clientExclusionTimeout: 0
+ clientRateLimit: 0
+ coverageHoleDetectionEnable: true
enableBasicServiceSetMaxIdle: true
enableBroadcastSSID: true
enableClientExclusion: true
@@ -159,12 +236,24 @@ EXAMPLES = r"""
enableNeighborList: true
enableSessionTimeOut: true
fastTransition: string
+ ghz24Policy: string
+ ghz6PolicyClientSteering: true
mfpClientProtection: string
+ multiPSKSettings:
+ - passphrase: string
+ passphraseType: string
+ priority: 0
name: string
nasOptions:
- string
passphrase: string
+ policyProfileName: string
+ profileName: string
+ protectedManagementFrame: string
radioPolicy: string
+ rsnCipherSuiteCcmp256: true
+ rsnCipherSuiteGcmp128: true
+ rsnCipherSuiteGcmp256: true
securityLevel: string
sessionTimeOut: 0
trafficType: string
@@ -182,7 +271,6 @@ EXAMPLES = r"""
ssidName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
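Given the new SSID options introduced above (AKM selection, protected management frames, multi-PSK and rate limiting), a compact hypothetical create task is sketched below; the SSID name, passphrase, and the securityLevel/protectedManagementFrame values are illustrative assumptions rather than values taken from this changeset.

- hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Create an enterprise SSID with SAE key management (sketch)
      cisco.dnac.wireless_enterprise_ssid:
        dnac_host: "{{ dnac_host }}"
        dnac_username: "{{ dnac_username }}"
        dnac_password: "{{ dnac_password }}"
        dnac_verify: false
        state: present
        name: corp-iot-ssid                  # hypothetical SSID name
        securityLevel: WPA3_PERSONAL         # security type named in the protectedManagementFrame description
        protectedManagementFrame: REQUIRED   # hypothetical value; described as required for WPA3_PERSONAL
        authKeyMgmt:
          - sae                              # one of the AKM strings listed in the option description
        passphrase: "example-passphrase"     # hypothetical passphrase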
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid_info.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid_info.py
index b277d03bd..5c6336618 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_enterprise_ssid_info.py
@@ -26,8 +26,8 @@ options:
enterprise SSIDs will be retrieved.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless GetEnterpriseSSID
description: Complete reference of the GetEnterpriseSSID API.
@@ -56,7 +56,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -96,7 +95,16 @@ dnac_response:
"passphrase": "string"
}
],
- "clientRateLimit": 0
+ "clientRateLimit": 0,
+ "enableSessionTimeOut": true,
+ "sessionTimeOut": 0,
+ "enableClientExclusion": true,
+ "clientExclusionTimeout": 0,
+ "enableBasicServiceSetMaxIdle": true,
+ "basicServiceSetClientIdleTimeout": 0,
+ "enableDirectedMulticastService": true,
+ "enableNeighborList": true,
+ "mfpClientProtection": "string"
}
],
"groupUuid": "string",
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_profile.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_profile.py
index 7c837cca7..43c5216da 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_profile.py
@@ -14,7 +14,8 @@ description:
- Delete the Wireless Profile from Cisco DNA Center whose name is provided.
- >
Updates the wireless Network Profile with updated details provided. All sites to be present in the network profile
- should be provided.
+ should be provided. This API has been deprecated. Please use the new endpoint URL
+ /dna/intent/api/v2/wireless/profile.
version_added: '3.1.0'
extends_documentation_fragment:
- cisco.dnac.module
@@ -51,14 +52,12 @@ options:
description: Interface Name.
type: str
name:
- description: Ssid Name.
+ description: Ssid Name is required if ssidDetails is passed in PayLoad for
+ mapping to the Network Profile.
type: str
policyProfileName:
description: Policy Profile Name.
type: str
- type:
- description: Ssid Type(enum Enterprise/Guest).
- type: str
wlanProfileName:
description: WLAN Profile Name.
type: str
@@ -68,8 +67,8 @@ options:
description: WirelessProfileName path parameter. Wireless Profile Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless CreateWirelessProfile
description: Complete reference of the CreateWirelessProfile API.
@@ -128,7 +127,6 @@ EXAMPLES = r"""
interfaceName: string
name: string
policyProfileName: string
- type: string
wlanProfileName: string
- name: Create
@@ -153,11 +151,9 @@ EXAMPLES = r"""
interfaceName: string
name: string
policyProfileName: string
- type: string
wlanProfileName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_profile_info.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_profile_info.py
index fb175dedf..b2c5e6878 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_profile_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_profile_info.py
@@ -24,8 +24,8 @@ options:
- ProfileName query parameter. Wireless Network Profile Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless GetWirelessProfile
description: Complete reference of the GetWirelessProfile API.
@@ -54,7 +54,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_access_point.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_access_point.py
index d4651745f..a2a20db39 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_access_point.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_access_point.py
@@ -36,9 +36,6 @@ options:
rfProfile:
description: Radio frequency profile name.
type: str
- siteId:
- description: Site name hierarchy(ex Global/...).
- type: str
siteNameHierarchy:
description: Site name hierarchy(ex Global/...).
type: str
@@ -47,8 +44,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless APProvision
description: Complete reference of the APProvision API.
@@ -79,7 +76,6 @@ EXAMPLES = r"""
- string
deviceName: string
rfProfile: string
- siteId: string
siteNameHierarchy: string
type: string
@@ -92,7 +88,7 @@ dnac_response:
sample: >
{
"executionId": "string",
- "executionUrl": "string",
+ "executionStatusUrl": "string",
"message": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_create.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_create.py
index 84622d990..538b3e592 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_create.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_create.py
@@ -28,22 +28,22 @@ options:
elements: dict
suboptions:
interfaceGateway:
- description: Interface Gateway.
+ description: Interface Gateway. Required for AireOS.
type: str
interfaceIPAddress:
- description: Interface IP Address.
+ description: Interface IP Address. Required for AireOS.
type: str
interfaceName:
- description: Interface Name.
+ description: Interface Name. Required for both AireOS and EWLC.
type: str
interfaceNetmaskInCIDR:
- description: Interface Netmask In CIDR.
+ description: Interface Netmask In CIDR. Required for AireOS.
type: int
lagOrPortNumber:
- description: Lag Or Port Number.
+ description: Lag Or Port Number. Required for AireOS.
type: int
vlanId:
- description: VLAN ID.
+ description: VLAN ID. Required for both AireOS and EWLC.
type: int
type: list
managedAPLocations:
@@ -55,8 +55,8 @@ options:
type: str
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless Provision
description: Complete reference of the Provision API.
@@ -94,7 +94,6 @@ EXAMPLES = r"""
site: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -103,14 +102,7 @@ dnac_response:
sample: >
{
"executionId": "string",
- "executionUrl": "string",
- "provisioningTasks": {
- "success": [
- "string"
- ],
- "failed": [
- "string"
- ]
- }
+ "executionStatusUrl": "string",
+ "message": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_update.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_update.py
index c76487739..ab3923666 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_update.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_device_update.py
@@ -24,39 +24,39 @@ options:
elements: dict
suboptions:
deviceName:
- description: Device Name.
+ description: Controller Name.
type: str
dynamicInterfaces:
description: Wireless Provision Device Update's dynamicInterfaces.
elements: dict
suboptions:
interfaceGateway:
- description: Interface Gateway.
+ description: Interface Gateway. Required for AireOS.
type: str
interfaceIPAddress:
- description: Interface IPAddress.
+ description: Interface IP Address. Required for AireOS.
type: str
interfaceName:
- description: Interface Name.
+ description: Interface Name. Required for AireOS and EWLC.
type: str
interfaceNetmaskInCIDR:
- description: Interface Netmask In CIDR.
+ description: Interface Netmask In CIDR. Required for AireOS.
type: int
lagOrPortNumber:
- description: Lag Or Port Number.
+ description: Lag Or Port Number. Required for AireOS.
type: int
vlanId:
- description: Vlan Id.
+ description: VLAN ID. Required for AireOS and EWLC.
type: int
type: list
managedAPLocations:
- description: Managed APLocations.
+ description: List of managed AP locations (Site Hierarchies).
elements: str
type: list
type: list
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless ProvisionUpdate
description: Complete reference of the ProvisionUpdate API.
@@ -94,7 +94,6 @@ EXAMPLES = r"""
- string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -103,14 +102,7 @@ dnac_response:
sample: >
{
"executionId": "string",
- "executionUrl": "string",
- "provisioningTasks": {
- "success": [
- "string"
- ],
- "failed": [
- "string"
- ]
- }
+ "executionStatusUrl": "string",
+ "message": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_create_provision.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_create_provision.py
index a0627c125..3ccb867db 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_create_provision.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_create_provision.py
@@ -41,6 +41,11 @@ options:
ssidDetails:
description: Wireless Provision Ssid Create Provision's ssidDetails.
suboptions:
+ authKeyMgmt:
+ description: Takes string inputs for the AKMs that should be set true. Possible
+ AKM values dot1x,dot1x_ft, dot1x_sha, psk, psk_ft, psk_sha, owe, sae, sae_ft.
+ elements: str
+ type: list
enableBroadcastSSID:
description: Enable Broadcast SSID.
type: bool
@@ -53,6 +58,12 @@ options:
fastTransition:
description: Fast Transition.
type: str
+ ghz24Policy:
+ description: 2.4 GHz Policy.
+ type: str
+ ghz6PolicyClientSteering:
+ description: 6 Ghz Client Steering.
+ type: bool
name:
description: SSID Name.
type: str
@@ -63,6 +74,15 @@ options:
radioPolicy:
description: Radio Policy.
type: str
+ rsnCipherSuiteCcmp256:
+ description: Rsn Cipher Suite Ccmp256.
+ type: bool
+ rsnCipherSuiteGcmp128:
+ description: Rsn Cipher Suite Gcmp128.
+ type: bool
+ rsnCipherSuiteGcmp256:
+ description: Rsn Cipher Suite Gcmp256.
+ type: bool
securityLevel:
description: Security Level(For guest SSID OPEN/WEB_AUTH, For Enterprise SSID
ENTERPRISE/PERSONAL/OPEN).
@@ -78,8 +98,8 @@ options:
description: SSID Type.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless CreateAndProvisionSSID
description: Complete reference of the CreateAndProvisionSSID API.
@@ -111,20 +131,26 @@ EXAMPLES = r"""
managedAPLocations:
- string
ssidDetails:
+ authKeyMgmt:
+ - string
enableBroadcastSSID: true
enableFastLane: true
enableMACFiltering: true
fastTransition: string
+ ghz24Policy: string
+ ghz6PolicyClientSteering: true
name: string
passphrase: string
radioPolicy: string
+ rsnCipherSuiteCcmp256: true
+ rsnCipherSuiteGcmp128: true
+ rsnCipherSuiteGcmp256: true
securityLevel: string
trafficType: string
webAuthURL: string
ssidType: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
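The new ssidDetails options above can be combined to request WPA3-style key management together with the stronger RSN cipher suites. A hedged fragment, assuming PERSONAL security with SAE AKMs (the SSID name, passphrase variable and flag values are illustrative, not a validated combination):

ssidDetails:
  name: corp-wpa3
  securityLevel: PERSONAL
  passphrase: "{{ vault_ssid_passphrase }}"
  authKeyMgmt:
    - sae
    - sae_ft
  rsnCipherSuiteGcmp256: true
  ghz6PolicyClientSteering: true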
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_delete_reprovision.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_delete_reprovision.py
index 8d713ce13..51b1cd7ba 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_delete_reprovision.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_provision_ssid_delete_reprovision.py
@@ -20,14 +20,16 @@ options:
description: Additional headers.
type: dict
managedAPLocations:
- description: ManagedAPLocations path parameter.
+ description: ManagedAPLocations path parameter. List of managed AP locations (Site
+ Hierarchies). This parameter needs to be encoded as per UTF-8 encoding.
type: str
ssidName:
- description: SsidName path parameter.
+ description: SsidName path parameter. SSID Name. This parameter needs to be encoded
+ as per UTF-8 encoding.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless DeleteSSIDAndProvisionItToDevices
description: Complete reference of the DeleteSSIDAndProvisionItToDevices API.
@@ -56,7 +58,6 @@ EXAMPLES = r"""
ssidName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_psk_override.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_psk_override.py
index 163ea45b5..97befd7d6 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_psk_override.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_psk_override.py
@@ -16,26 +16,21 @@ extends_documentation_fragment:
- cisco.dnac.module
author: Rafael Campos (@racampos)
options:
- payload:
- description: Wireless Psk Override's payload.
- elements: dict
- suboptions:
- passPhrase:
- description: Pass phrase (create/update).
- type: str
- site:
- description: Site name hierarchy (ex Global/aaa/zzz/...).
- type: str
- ssid:
- description: Enterprise ssid name(already created/present).
- type: str
- wlanProfileName:
- description: WLAN Profile Name.
- type: str
- type: list
+ passPhrase:
+ description: Pass phrase (create/update).
+ type: str
+ site:
+ description: Site name hierarchy (ex Global/aaa/zzz/...).
+ type: str
+ ssidName:
+ description: Enterprise SSID Name(already created/present).
+ type: str
+ wlanProfileName:
+ description: WLAN Profile Name.
+ type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless PSKOverride
description: Complete reference of the PSKOverride API.
@@ -59,14 +54,12 @@ EXAMPLES = r"""
dnac_port: "{{dnac_port}}"
dnac_version: "{{dnac_version}}"
dnac_debug: "{{dnac_debug}}"
- payload:
- - passPhrase: string
- site: string
- ssid: string
- wlanProfileName: string
+ passPhrase: string
+ site: string
+ ssidName: string
+ wlanProfileName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
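Because the list-style payload wrapper has been removed, a play that previously overrode the PSK for several sites in one call now needs one invocation per site. One possible migration is a plain task loop over the new flat options (sites, SSID and vault variables are placeholders):

- name: Override the pre-shared key per site
  cisco.dnac.wireless_psk_override:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    passPhrase: "{{ item.passPhrase }}"
    site: "{{ item.site }}"
    ssidName: corp-psk
    wlanProfileName: corp-psk_profile
  loop:
    - { site: Global/HQ/Floor1, passPhrase: "{{ vault_psk_hq }}" }
    - { site: Global/Branch1/Floor1, passPhrase: "{{ vault_psk_branch1 }}" }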
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile.py
index 86c7ad953..2e1697ff8 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile.py
@@ -21,7 +21,7 @@ options:
description: Channel Width.
type: str
defaultRfProfile:
- description: Is Default Rf Profile.
+ description: Default Rf Profile.
type: bool
enableBrownField:
description: Enable Brown Field.
@@ -52,16 +52,16 @@ options:
type: str
maxPowerLevel:
description: Max Power Level.
- type: int
+ type: float
minPowerLevel:
description: Rx Sop Threshold.
- type: int
+ type: float
parentProfile:
description: Parent Profile.
type: str
powerThresholdV1:
description: Power Threshold V1.
- type: int
+ type: float
radioChannels:
description: Radio Channels.
type: str
@@ -80,16 +80,16 @@ options:
type: str
maxPowerLevel:
description: Max Power Level.
- type: int
+ type: float
minPowerLevel:
description: Min Power Level.
- type: int
+ type: float
parentProfile:
description: Parent Profile.
type: str
powerThresholdV1:
description: Power Threshold V1.
- type: int
+ type: float
radioChannels:
description: Radio Channels.
type: str
@@ -108,16 +108,16 @@ options:
type: str
maxPowerLevel:
description: Max Power Level.
- type: int
+ type: float
minPowerLevel:
description: Min Power Level.
- type: int
+ type: float
parentProfile:
description: Parent Profile.
type: str
powerThresholdV1:
description: Power Threshold V1.
- type: int
+ type: float
radioChannels:
description: Radio Channels.
type: str
@@ -130,8 +130,8 @@ options:
*non-custom RF profile cannot be deleted.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless CreateOrUpdateRFProfile
description: Complete reference of the CreateOrUpdateRFProfile API.
@@ -210,7 +210,6 @@ EXAMPLES = r"""
rfProfileName: string
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -219,7 +218,7 @@ dnac_response:
sample: >
{
"executionId": "string",
- "executionUrl": "string",
+ "executionStatusUrl": "string",
"message": "string"
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile_info.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile_info.py
index 281f87b6d..5d507795c 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_rf_profile_info.py
@@ -24,8 +24,8 @@ options:
- Rf-profile-name query parameter. RF Profile Name.
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless RetrieveRFProfiles
description: Complete reference of the RetrieveRFProfiles API.
@@ -54,46 +54,50 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
returned: always
- type: list
- elements: dict
+ type: dict
sample: >
- [
- {
- "name": "string",
- "parentProfileA": "string",
- "parentProfileB": "string",
- "enableARadioType": true,
- "enableBRadioType": true,
- "enableCRadioType": true,
- "channelWidth": "string",
- "aRadioChannels": "string",
- "bRadioChannels": "string",
- "cRadioChannels": "string",
- "dataRatesA": "string",
- "dataRatesB": "string",
- "dataRatesC": "string",
- "mandatoryDataRatesA": "string",
- "mandatoryDataRatesB": "string",
- "mandatoryDataRatesC": "string",
- "enableCustom": true,
- "minPowerLevelA": "string",
- "minPowerLevelB": "string",
- "minPowerLevelC": "string",
- "maxPowerLevelA": "string",
- "maxPowerLevelB": "string",
- "powerThresholdV1A": 0,
- "powerThresholdV1B": 0,
- "powerThresholdV1C": 0,
- "rxSopThresholdA": "string",
- "rxSopThresholdB": "string",
- "rxSopThresholdC": "string",
- "defaultRfProfile": true,
- "enableBrownField": true
- }
- ]
+ {
+ "name": "string",
+ "defaultRfProfile": true,
+ "enableRadioTypeA": true,
+ "enableRadioTypeB": true,
+ "channelWidth": "string",
+ "enableCustom": true,
+ "enableBrownField": true,
+ "radioTypeAProperties": {
+ "parentProfile": "string",
+ "radioChannels": "string",
+ "dataRates": "string",
+ "mandatoryDataRates": "string",
+ "powerThresholdV1": 0,
+ "rxSopThreshold": "string",
+ "minPowerLevel": 0,
+ "maxPowerLevel": 0
+ },
+ "radioTypeBProperties": {
+ "parentProfile": "string",
+ "radioChannels": "string",
+ "dataRates": "string",
+ "mandatoryDataRates": "string",
+ "powerThresholdV1": 0,
+ "rxSopThreshold": "string",
+ "minPowerLevel": 0,
+ "maxPowerLevel": 0
+ },
+ "radioTypeCProperties": {
+ "parentProfile": "string",
+ "radioChannels": "string",
+ "dataRates": "string",
+ "mandatoryDataRates": "string",
+ "rxSopThreshold": "string",
+ "minPowerLevel": 0,
+ "maxPowerLevel": 0,
+ "powerThresholdV1": 0
+ },
+ "enableRadioTypeC": true
+ }
"""
diff --git a/ansible_collections/cisco/dnac/plugins/modules/wireless_sensor_test_results_info.py b/ansible_collections/cisco/dnac/plugins/modules/wireless_sensor_test_results_info.py
index f316a4602..dbd9fbf32 100644
--- a/ansible_collections/cisco/dnac/plugins/modules/wireless_sensor_test_results_info.py
+++ b/ansible_collections/cisco/dnac/plugins/modules/wireless_sensor_test_results_info.py
@@ -26,18 +26,20 @@ options:
startTime:
description:
- StartTime query parameter. The epoch time in milliseconds.
- type: int
+ type: float
endTime:
description:
- EndTime query parameter. The epoch time in milliseconds.
- type: int
+ type: float
testFailureBy:
description:
- - TestFailureBy query parameter. Obtain failure statistics group by "area", "building", or "floor".
+ - >
+ TestFailureBy query parameter. Obtain failure statistics group by "area", "building", or "floor" (case
+ insensitive).
type: str
requirements:
-- dnacentersdk >= 2.6.0
-- python >= 3.9
+- dnacentersdk >= 2.7.1
+- python >= 3.5
seealso:
- name: Cisco DNA Center documentation for Wireless SensorTestResults
description: Complete reference of the SensorTestResults API.
@@ -69,7 +71,6 @@ EXAMPLES = r"""
register: result
"""
-
RETURN = r"""
dnac_response:
description: A dictionary or list with the response returned by the Cisco DNAC Python SDK
@@ -77,72 +78,75 @@ dnac_response:
type: dict
sample: >
{
- "summary": {
- "totalTestCount": 0,
- "ONBOARDING": {
- "AUTH": {
- "passCount": 0,
- "failCount": 0
+ "version": "string",
+ "response": {
+ "summary": {
+ "totalTestCount": 0,
+ "ONBOARDING": {
+ "AUTH": {
+ "passCount": 0,
+ "failCount": 0
+ },
+ "DHCP": {
+ "passCount": 0,
+ "failCount": 0
+ },
+ "ASSOC": {
+ "passCount": 0,
+ "failCount": 0
+ }
},
- "DHCP": {
- "passCount": 0,
- "failCount": 0
+ "PERFORMANCE": {
+ "IPSLASENDER": {
+ "passCount": 0,
+ "failCount": 0
+ }
},
- "ASSOC": {
- "passCount": 0,
- "failCount": 0
- }
- },
- "PERFORMANCE": {
- "IPSLASENDER": {
- "passCount": 0,
- "failCount": 0
- }
- },
- "NETWORK_SERVICES": {
- "DNS": {
- "passCount": 0,
- "failCount": 0
- }
- },
- "APP_CONNECTIVITY": {
- "HOST_REACHABILITY": {
- "passCount": 0,
- "failCount": 0
+ "NETWORK_SERVICES": {
+ "DNS": {
+ "passCount": 0,
+ "failCount": 0
+ }
},
- "WEBSERVER": {
- "passCount": 0,
- "failCount": 0
+ "APP_CONNECTIVITY": {
+ "HOST_REACHABILITY": {
+ "passCount": 0,
+ "failCount": 0
+ },
+ "WEBSERVER": {
+ "passCount": 0,
+ "failCount": 0
+ },
+ "FILETRANSFER": {
+ "passCount": 0,
+ "failCount": 0
+ }
},
- "FILETRANSFER": {
- "passCount": 0,
- "failCount": 0
- }
- },
- "RF_ASSESSMENT": {
- "DATA_RATE": {
- "passCount": 0,
- "failCount": 0
+ "RF_ASSESSMENT": {
+ "DATA_RATE": {
+ "passCount": 0,
+ "failCount": 0
+ },
+ "SNR": {
+ "passCount": 0,
+ "failCount": 0
+ }
},
- "SNR": {
- "passCount": 0,
- "failCount": 0
+ "EMAIL": {
+ "MAILSERVER": {
+ "passCount": 0,
+ "failCount": 0
+ }
}
},
- "EMAIL": {
- "MAILSERVER": {
- "passCount": 0,
- "failCount": 0
+ "failureStats": [
+ {
+ "errorCode": 0,
+ "errorTitle": "string",
+ "testType": "string",
+ "testCategory": "string"
}
- }
- },
- "failureStats": [
- {
- "errorCode": 0,
- "errorTitle": "string",
- "testType": "string",
- "testCategory": "string"
- }
- ]
+ ]
+ }
}
"""
diff --git a/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py b/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py
index 7448c4c50..514106e2f 100644
--- a/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py
+++ b/ansible_collections/cisco/dnac/plugins/plugin_utils/dnac.py
@@ -140,7 +140,7 @@ def dnac_argument_spec():
dnac_username=dict(type="str", fallback=(env_fallback, ['DNAC_USERNAME']), default="admin", aliases=["user"]),
dnac_password=dict(type="str", fallback=(env_fallback, ['DNAC_PASSWORD']), no_log=True),
dnac_verify=dict(type="bool", fallback=(env_fallback, ['DNAC_VERIFY']), default=True),
- dnac_version=dict(type="str", fallback=(env_fallback, ['DNAC_VERSION']), default="2.3.5.3"),
+ dnac_version=dict(type="str", fallback=(env_fallback, ['DNAC_VERSION']), default="2.3.7.6"),
dnac_debug=dict(type="bool", fallback=(env_fallback, ['DNAC_DEBUG']), default=False),
validate_response_schema=dict(type="bool", fallback=(env_fallback, ['VALIDATE_RESPONSE_SCHEMA']), default=True),
)
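Every module in the collection builds its base options from dnac_argument_spec(), so the default Cisco DNA Center version the SDK is told to target moves from 2.3.5.3 to 2.3.7.6 collection-wide; the DNAC_VERSION environment variable and the per-task dnac_version option still take precedence over the default. A minimal sketch of pinning an older controller version for a single task:

- name: Query wireless profiles against a 2.3.5.3 cluster
  cisco.dnac.wireless_profile_info:
    dnac_host: "{{ dnac_host }}"
    dnac_username: "{{ dnac_username }}"
    dnac_password: "{{ dnac_password }}"
    dnac_version: "2.3.5.3"   # overrides the new 2.3.7.6 default from dnac_argument_spec()
  register: profiles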
diff --git a/ansible_collections/cisco/dnac/requirements.txt b/ansible_collections/cisco/dnac/requirements.txt
index fe7a7694c..cf40b8f59 100644
--- a/ansible_collections/cisco/dnac/requirements.txt
+++ b/ansible_collections/cisco/dnac/requirements.txt
@@ -1 +1 @@
-dnacentersdk >= 2.6.0 \ No newline at end of file
+dnacentersdk >= 2.7.1 \ No newline at end of file
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt
index 199ea4ac4..81bbb929c 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.10.txt
@@ -740,3 +740,7 @@ plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is no
plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt
index 91834bb50..0b839978b 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.11.txt
@@ -1091,3 +1091,7 @@ plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is no
plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt
index 41aebb287..36d5eb734 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.12.txt
@@ -38,3 +38,7 @@ plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is no
plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt
index 29449f7cb..3c31c2c4b 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.13.txt
@@ -18,3 +18,5 @@ plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python
plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt
index 29449f7cb..3c31c2c4b 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.14.txt
@@ -18,3 +18,5 @@ plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python
plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt
index 29449f7cb..3c31c2c4b 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.15.txt
@@ -18,3 +18,5 @@ plugins/modules/device_credential_workflow_manager.py compile-2.7!skip # Python
plugins/modules/device_credential_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt
index 199ea4ac4..81bbb929c 100644
--- a/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt
+++ b/ansible_collections/cisco/dnac/tests/sanity/ignore-2.9.txt
@@ -740,3 +740,7 @@ plugins/modules/template_workflow_manager.py compile-2.7!skip # Python 2.7 is no
plugins/modules/template_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
plugins/modules/template_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py compile-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.7!skip # Python 2.7 is not supported by the DNA Center SDK
+plugins/modules/ise_radius_integration_workflow_manager.py import-2.6!skip # Python 2.6 is not supported by the DNA Center SDK
diff --git a/ansible_collections/cisco/ise/FILES.json b/ansible_collections/cisco/ise/FILES.json
index 72c9d6a53..bd62e3c2c 100644
--- a/ansible_collections/cisco/ise/FILES.json
+++ b/ansible_collections/cisco/ise/FILES.json
@@ -6423,7 +6423,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e182a22d58fcfbca71fbef1992147e1cd87fc67648750e62b5955ec72521520",
+ "chksum_sha256": "720f48707e16c8707f33f2bc22bbd53207b492502669c2649547d652b5f99dcd",
"format": 1
},
{
@@ -6521,7 +6521,7 @@
"name": "roles/personas_deployment/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e82c60136cb1895d61432dd52c83d4c6e8ad4c654a697ac2d5b5e4a288866d87",
+ "chksum_sha256": "a136beb59cd8f3f2a34a1e339bdf0ef4c62b1432b55e58a73ed8ff1ed90b42ba",
"format": 1
},
{
@@ -6630,6 +6630,20 @@
"format": 1
},
{
+ "name": "roles/aws_deployment/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/aws_deployment/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3e31758bf96ce68e4aaca28e8546ea21192023445d424f2e93500e2af13c15e",
+ "format": 1
+ },
+ {
"name": "roles/aws_deployment/README.md",
"ftype": "file",
"chksum_type": "sha256",
@@ -7165,14 +7179,14 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "140605f83668879d8c78db738188d906df045dfa6d6f30938b06ff48a02fca68",
+ "chksum_sha256": "b7c18879f0cac476ed8b2704c4500a8adfb36a978ab014ad45d09271991f709e",
"format": 1
},
{
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1a02a0b361b7e8e37fa4423da5b9c7ac69c9a02c9621a48ee195cc1f5c4f173",
+ "chksum_sha256": "97075b2c1e03122186853442fc4f5c9b2c6bb537a1987409d7cc655a0ae11719",
"format": 1
},
{
diff --git a/ansible_collections/cisco/ise/MANIFEST.json b/ansible_collections/cisco/ise/MANIFEST.json
index 0c78fda00..c61dce517 100644
--- a/ansible_collections/cisco/ise/MANIFEST.json
+++ b/ansible_collections/cisco/ise/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "cisco",
"name": "ise",
- "version": "2.9.1",
+ "version": "2.9.2",
"authors": [
"Rafael Campos <rcampos@altus.cr>",
"William Astorga <wastorga@altus.cr>",
@@ -32,7 +32,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "15f432db63325689f8870dba08698116fcf594e05eaa83c4029245933f588784",
+ "chksum_sha256": "03e8aa8c0a512c59183f007d803261884cf58f7ba74e3bb252336faa214d71bb",
"format": 1
},
"format": 1
diff --git a/ansible_collections/cisco/ise/README.md b/ansible_collections/cisco/ise/README.md
index 2961ef5f0..2b502f4ca 100644
--- a/ansible_collections/cisco/ise/README.md
+++ b/ansible_collections/cisco/ise/README.md
@@ -52,7 +52,7 @@ The following table shows the supported versions.
| 3.1.0 | 2.0.0 | 1.2.0 |
| 3.1_Patch_1 | 2.5.16 | 2.0.10 |
| 3.2_beta | 2.8.0 | 2.1.1 |
-| 3.3_patch_1 | 2.9.1 | 2.2.1 |
+| 3.3_patch_1 | 2.9.2 | 2.2.1 |
If your Ansible collection is older please consider updating it first.
*Notes*:
diff --git a/ansible_collections/cisco/ise/changelogs/changelog.yaml b/ansible_collections/cisco/ise/changelogs/changelog.yaml
index d5edc8259..42199b071 100644
--- a/ansible_collections/cisco/ise/changelogs/changelog.yaml
+++ b/ansible_collections/cisco/ise/changelogs/changelog.yaml
@@ -1008,4 +1008,11 @@ releases:
release_summary: Update network_device_group module.
bugfixes:
- network_device_group - change parameter name from ndgtype to othername.
- - network_device_group_info - change parameter name from ndgtype to othername. \ No newline at end of file
+ - network_device_group_info - change parameter name from ndgtype to othername.
+ 2.9.2:
+ release_date: "2024-06-06"
+ changes:
+ release_summary: Update min ansible version.
+ bugfixes:
+ - Update min_ansible_version to 2.15.0 in runtime.yml and roles
+ - Added main.yml to aws_deployment role \ No newline at end of file
diff --git a/ansible_collections/cisco/ise/meta/runtime.yml b/ansible_collections/cisco/ise/meta/runtime.yml
index bcb6dcedc..9912f4a7d 100644
--- a/ansible_collections/cisco/ise/meta/runtime.yml
+++ b/ansible_collections/cisco/ise/meta/runtime.yml
@@ -1,2 +1,2 @@
---
-requires_ansible: '>=2.14.0' \ No newline at end of file
+requires_ansible: '>=2.15.0' \ No newline at end of file
diff --git a/ansible_collections/cisco/ise/roles/aws_deployment/meta/main.yml b/ansible_collections/cisco/ise/roles/aws_deployment/meta/main.yml
new file mode 100644
index 000000000..08993e178
--- /dev/null
+++ b/ansible_collections/cisco/ise/roles/aws_deployment/meta/main.yml
@@ -0,0 +1,52 @@
+galaxy_info:
+ author: Jose Bogarín
+ description: Cisco ISE deployment in AWS
+ company: your company (optional)
+
+ # If the issue tracker for your role is not on github, uncomment the
+ # next line and provide a value
+ # issue_tracker_url: http://example.com/issue/tracker
+
+ # Choose a valid license ID from https://spdx.org - some suggested licenses:
+ # - BSD-3-Clause (default)
+ # - MIT
+ # - GPL-2.0-or-later
+ # - GPL-3.0-only
+ # - Apache-2.0
+ # - CC-BY-4.0
+ license: license (GPL-2.0-or-later, MIT, etc)
+
+ min_ansible_version: 2.15.0
+
+ # If this a Container Enabled role, provide the minimum Ansible Container version.
+ # min_ansible_container_version:
+
+ #
+ # Provide a list of supported platforms, and for each platform a list of versions.
+ # If you don't wish to enumerate all versions for a particular platform, use 'all'.
+ # To view available platforms and versions (or releases), visit:
+ # https://galaxy.ansible.com/api/v1/platforms/
+ #
+ # platforms:
+ # - name: Fedora
+ # versions:
+ # - all
+ # - 25
+ # - name: SomePlatform
+ # versions:
+ # - all
+ # - 1.0
+ # - 7
+ # - 99.99
+
+ galaxy_tags: []
+ # List tags for your role here, one per line. A tag is a keyword that describes
+ # and categorizes the role. Users find roles by searching for tags. Be sure to
+ # remove the '[]' above, if you add tags to this list.
+ #
+ # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
+ # Maximum 20 tags per role.
+
+dependencies: []
+ # List your role dependencies here, one per line. Be sure to remove the '[]' above,
+ # if you add dependencies to this list.
diff --git a/ansible_collections/cisco/ise/roles/personas_deployment/meta/main.yml b/ansible_collections/cisco/ise/roles/personas_deployment/meta/main.yml
index c572acc9f..daa938e07 100644
--- a/ansible_collections/cisco/ise/roles/personas_deployment/meta/main.yml
+++ b/ansible_collections/cisco/ise/roles/personas_deployment/meta/main.yml
@@ -16,7 +16,7 @@ galaxy_info:
# - CC-BY-4.0
license: license (GPL-2.0-or-later, MIT, etc)
- min_ansible_version: 2.1
+ min_ansible_version: 2.15.0
# If this a Container Enabled role, provide the minimum Ansible Container version.
# min_ansible_container_version:
diff --git a/ansible_collections/cisco/nxos/.pre-commit-config.yaml b/ansible_collections/cisco/nxos/.pre-commit-config.yaml
index 6db6f7ece..0f2d95f7e 100644
--- a/ansible_collections/cisco/nxos/.pre-commit-config.yaml
+++ b/ansible_collections/cisco/nxos/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
- id: update-docs
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.5.0
+ rev: v4.6.0
hooks:
- id: check-merge-conflict
- id: check-symlinks
@@ -40,7 +40,7 @@ repos:
args: ["--filter-files"]
- repo: https://github.com/psf/black
- rev: 23.12.1
+ rev: 24.4.2
hooks:
- id: black
diff --git a/ansible_collections/cisco/nxos/CHANGELOG.rst b/ansible_collections/cisco/nxos/CHANGELOG.rst
index 7f8bda1e5..ab33a3a86 100644
--- a/ansible_collections/cisco/nxos/CHANGELOG.rst
+++ b/ansible_collections/cisco/nxos/CHANGELOG.rst
@@ -4,6 +4,20 @@ Cisco Nxos Collection Release Notes
.. contents:: Topics
+v8.1.0
+======
+
+Minor Changes
+-------------
+
+- route_maps - support simple route-maps that do not contain set or match statements. it allows for the creation and management of purely basic route-map entries like 'route-map test-1 permit 10'.
+
+Bugfixes
+--------
+
+- nxos_l3_interfaces - fail if encapsulation exists on a different sub-interface.
+- nxos_static_routes - correctly generate command when track parameter is specified.
+
v8.0.0
======
@@ -16,8 +30,7 @@ Minor Changes
-------------
- Add support for cli_restore functionality.
-- Please refer the PR to know more about core changes (https://github.com/ansible-collections/ansible.netcommon/pull/618).
- The cli_restore module is a part of ansible.netcommon.
+- Please refer the PR to know more about core changes (https://github.com/ansible-collections/ansible.netcommon/pull/618). The cli_restore module is a part of ansible.netcommon.
Bugfixes
--------
diff --git a/ansible_collections/cisco/nxos/FILES.json b/ansible_collections/cisco/nxos/FILES.json
index b7d4211ca..57600c69d 100644
--- a/ansible_collections/cisco/nxos/FILES.json
+++ b/ansible_collections/cisco/nxos/FILES.json
@@ -8,6 +8,27 @@
"format": 1
},
{
+ "name": ".flake8",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f357bc604a44fe8a5912ec2b9067b734a88d1026da98d5993905d7b34ab44357",
+ "format": 1
+ },
+ {
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9b596e8b658671e2b935bc348772769903763ae1c3bba961ec613049ab403e1d",
+ "format": 1
+ },
+ {
+ "name": "CHANGELOG.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "757f8042c702e9fdf9ade01a689c4cce10615484e31de3e54f99861114880f0e",
+ "format": 1
+ },
+ {
"name": "docs",
"ftype": "dir",
"chksum_type": null,
@@ -15,171 +36,178 @@
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_location_module.rst",
+ "name": "docs/cisco.nxos.nxos_httpapi.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "321530eb77aa27eecab0eb6090a189eee7b9a7431cc94750c1f0f38718058d4d",
+ "chksum_sha256": "eb7f6fd0b46ad4b478746f0518b8d443e541f44ba8d77f999a42a760671ea015",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_user_module.rst",
+ "name": "docs/cisco.nxos.nxos_feature_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5987398411ee888f2b8e148be96c318e831a7b712b0de344d456fba46825197f",
+ "chksum_sha256": "e31d99494f87afad1ecdce42d483a32e523e8e978f42b1955de1f3b0033bcf80",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_ping_module.rst",
+ "name": "docs/cisco.nxos.nxos_nxapi_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "74122faa018ba84c3628abadef990f3888d58a72897d4b56ec41882348a12141",
+ "chksum_sha256": "7e1d3a6d1f0ab8c170068321f77ab36c1161892773c9efa90dfff711664bfdb3",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_igmp_module.rst",
+ "name": "docs/cisco.nxos.nxos_vpc_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9f12f1c5cb360aa1634964c6343e14141cfce7fc49e37e0062c2957b20459399",
+ "chksum_sha256": "941e607951f2fe1347329a6f68d9303ff04fcebedfd0792fe5ab181625e120d3",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_prefix_lists_module.rst",
+ "name": "docs/cisco.nxos.nxos_bgp_address_family_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e411c76a91d77aac63c31c51864f7247f91fb48226b134bbb1b2cbc29029bfa3",
+ "chksum_sha256": "9706519c8d06db723b98f3c2ed78ffda4ca604b08215be80461fad2defd9507c",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vrf_module.rst",
+ "name": "docs/cisco.nxos.nxos_user_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33c9e1a06961e0785265c7b132c9caf7fa33c4cff00f15ef479787531d56f122",
+ "chksum_sha256": "5987398411ee888f2b8e148be96c318e831a7b712b0de344d456fba46825197f",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_banner_module.rst",
+ "name": "docs/cisco.nxos.nxos_gir_profile_management_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f33242bd0d3b632cb5e97dc9ce3e6a9029060e204c87a3554aff1ed36a7d19f0",
+ "chksum_sha256": "63332c56149477772d1571d4884f3dd7d99c11f8119942426c1b22898d63dcc8",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_aaa_server_module.rst",
+ "name": "docs/cisco.nxos.nxos_system_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ae7d06f89fb79b0ff5b7dbb41b289db70e8c602a08c1baac5b059adc72775665",
+ "chksum_sha256": "7701830b6698de0515fb009aa9b2c01256f3144d62909b760ccf23e749ec7422",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_overlay_global_module.rst",
+ "name": "docs/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8362ed52554894dd979a332e946ede7fa0834c17d8980a08a282320a2a18391",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_install_os_module.rst",
+ "name": "docs/cisco.nxos.nxos_logging_global_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2ad6d9178006558a46f6ef8038621cebd25eb937493f76503fbde8f65f9a6c6",
+ "chksum_sha256": "7794e28520082d477a17b1e52ac8feeacf092bd98b7e4c11e832cfb67aa49b3b",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_feature_module.rst",
+ "name": "docs/cisco.nxos.nxos_bgp_templates_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e31d99494f87afad1ecdce42d483a32e523e8e978f42b1955de1f3b0033bcf80",
+ "chksum_sha256": "00bb83305a1e55e1fa99e7008ff4131123f6702514a66409c524e6ca1374c00e",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_hsrp_module.rst",
+ "name": "docs/cisco.nxos.nxos_hsrp_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41fd5b3b6c57a33c8ce81d03798b35d598496b89d13327c18dfe27dedaf99ae4",
+ "chksum_sha256": "6cbf7b6611599b267ed05be736c19303ecc74c696750ea3b7b27bf97f942644f",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_command_module.rst",
+ "name": "docs/cisco.nxos.nxos_igmp_snooping_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8414812954c669abeb7370516fd50ea893bccd1a5e804d81d3174cafc441aad6",
+ "chksum_sha256": "a31b9e7cac6c66069bc1b9a46fbf4f3e778fec3cdbdb55f4b6d672abeaf7d795",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_ospfv2_module.rst",
+ "name": "docs/cisco.nxos.nxos_overlay_global_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2704aec625f078e9b8c03b949dec0b6ce33ee85efc5e495b37f0c27339b20c8",
+ "chksum_sha256": "c8362ed52554894dd979a332e946ede7fa0834c17d8980a08a282320a2a18391",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vlans_module.rst",
+ "name": "docs/cisco.nxos.nxos_snmp_user_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e60a339f8dbb48eaed5188761be6c6b375f5f6641ff322d3904e45f199f5ee0f",
+ "chksum_sha256": "0ebfbf6a1bdda73a9f3e5943dc33bbb38d636865ab213c2e79501baca180a6ef",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_config_module.rst",
+ "name": "docs/cisco.nxos.nxos_snmp_traps_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4ad6ef9b747b454824d16ca0cd7c56fdde289a6cc8c93c9ecf3bf1d23db5f84",
+ "chksum_sha256": "25326502a9e485b1bfe99093b316af67686f797fb8ce8d972c9ae914d8e9da27",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_logging_global_module.rst",
+ "name": "docs/cisco.nxos.nxos_zone_zoneset_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7794e28520082d477a17b1e52ac8feeacf092bd98b7e4c11e832cfb67aa49b3b",
+ "chksum_sha256": "2b25696dcecfa3f12cf9a899132d4b220dcc774df51e1aea5fdecd422ca3a36a",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vtp_version_module.rst",
+ "name": "docs/cisco.nxos.nxos_vtp_password_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c27cc4c99dbad288216a917b46c9949f1cc2ed72a4268e79ef61e708d6a1edff",
+ "chksum_sha256": "aba8c49ea83acf4e1ad8de779ebc06e21940537ce459b53bcfbdabdf72dacd06",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_user_module.rst",
+ "name": "docs/cisco.nxos.nxos_snapshot_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ebfbf6a1bdda73a9f3e5943dc33bbb38d636865ab213c2e79501baca180a6ef",
+ "chksum_sha256": "a0e3fad000d677b7f24bf64090f02742004a9cf100306a97f51aff9594fe8d9f",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_lldp_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_l2_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b14e490bedd8b115c8beebe5ec81fda4a7db93457f546fbca91fe2bce3887e76",
+ "chksum_sha256": "8b94e0fc5aeaa2d80e755b774c097970fead1ddb156eb018bc852516de3ca877",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_devicealias_module.rst",
+ "name": "docs/cisco.nxos.nxos_bgp_neighbor_address_family_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff0660c7a4fa99aabb1c0c1d59d33fc923d3cdc50d2be391fc79bdeef13ae66b",
+ "chksum_sha256": "1fe6fe7be307fe262e49165038677e55a79be75ce8a1eaa0edf1617aa0b8c4e9",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_host_module.rst",
+ "name": "docs/cisco.nxos.nxos_vxlan_vtep_vni_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "738d6be19ec9aea9158035e6f1a2302d3484500ee9b3148e2d01628bf2f9fcdc",
+ "chksum_sha256": "7aebad3ad5c1c53036df468c782c366a17fea865b15a189f99aff17c171e464b",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_evpn_vni_module.rst",
+ "name": "docs/cisco.nxos.nxos_fc_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d69d440ff1844c0ffcf234f50c4cbaccafd2a5dd56481e14aeecac6c80e94a1",
+ "chksum_sha256": "865bcbc040c3370de4f47a6b8e68c1339f8ba54dbbaa362b0ceeae8cecd86cc8",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_traps_module.rst",
+ "name": "docs/cisco.nxos.nxos_acl_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25326502a9e485b1bfe99093b316af67686f797fb8ce8d972c9ae914d8e9da27",
+ "chksum_sha256": "03a8274fb4992504f056b15b84a7c7f98b08a9d3160a12ea038a1a700039e04a",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_cliconf.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "546fc754b81109404a33285c90c3bba565b1a2529e00b7284609b62bb25b4d8b",
"format": 1
},
{
@@ -190,10 +218,31 @@
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_ntp_global_module.rst",
+ "name": "docs/cisco.nxos.nxos_lacp_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "07810bf5a6fb659d294c956436bb3527b4506c7a07642997af02f1cf717d97d9",
+ "chksum_sha256": "bf3920fe2dbbd42f322d4a0f4dd23c875d01f6a5545dbc648ae5ae3aa62320de",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_hsrp_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "41fd5b3b6c57a33c8ce81d03798b35d598496b89d13327c18dfe27dedaf99ae4",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_bfd_interfaces_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bcc5883c369f37ac683991364855f247f77299f2decc72a5e5c794e681a92fbc",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_snmp_location_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "321530eb77aa27eecab0eb6090a189eee7b9a7431cc94750c1f0f38718058d4d",
"format": 1
},
{
@@ -204,38 +253,52 @@
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_aaa_server_host_module.rst",
+ "name": "docs/cisco.nxos.nxos_vrf_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8d1dc8303a82ca59e70c37d6852f8dd1ba6b002e896a9f4f2182148a6944ef7",
+ "chksum_sha256": "33c9e1a06961e0785265c7b132c9caf7fa33c4cff00f15ef479787531d56f122",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_community_module.rst",
+ "name": "docs/cisco.nxos.nxos_ping_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "273905bc36e919c6f348d146018b77d4b028457714b0f3b4b2fad6a078680fe7",
+ "chksum_sha256": "74122faa018ba84c3628abadef990f3888d58a72897d4b56ec41882348a12141",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_bgp_templates_module.rst",
+ "name": "docs/cisco.nxos.nxos_command_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00bb83305a1e55e1fa99e7008ff4131123f6702514a66409c524e6ca1374c00e",
+ "chksum_sha256": "8414812954c669abeb7370516fd50ea893bccd1a5e804d81d3174cafc441aad6",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_udld_interface_module.rst",
+ "name": "docs/cisco.nxos.nxos_acls_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7786920d50729a8a8a73a20bcd0d4296af8095f52590076177511dc0c696dda5",
+ "chksum_sha256": "2869249548f0b1aaf9f1d7c8b3d24a5301a7ffa88fadb82dda595aca2374ccb2",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_bfd_global_module.rst",
+ "name": "docs/cisco.nxos.nxos_install_os_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ff1d376f71be1de3dc5e97bfae1ea331e92430adb9e31b61e6fdde8da710538",
+ "chksum_sha256": "a2ad6d9178006558a46f6ef8038621cebd25eb937493f76503fbde8f65f9a6c6",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_ntp_global_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "07810bf5a6fb659d294c956436bb3527b4506c7a07642997af02f1cf717d97d9",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_devicealias_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ff0660c7a4fa99aabb1c0c1d59d33fc923d3cdc50d2be391fc79bdeef13ae66b",
"format": 1
},
{
@@ -246,52 +309,59 @@
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vxlan_vtep_vni_module.rst",
+ "name": "docs/cisco.nxos.nxos_udld_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7aebad3ad5c1c53036df468c782c366a17fea865b15a189f99aff17c171e464b",
+ "chksum_sha256": "26d0773b5cbd658f39b2c08b9949e45ae5aa8a94405c4f1ef9ed0708c682c4a3",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vrf_interface_module.rst",
+ "name": "docs/cisco.nxos.nxos_vxlan_vtep_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64296c4de9145ceec2a49e4f09b2573fd163bbe33247b395769cb6833cdd16ba",
+ "chksum_sha256": "920cbc7cf6b2d9736bbeacbf6751674c6b8985af13183432f027070235ed745e",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_gir_profile_management_module.rst",
+ "name": "docs/cisco.nxos.nxos_banner_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "63332c56149477772d1571d4884f3dd7d99c11f8119942426c1b22898d63dcc8",
+ "chksum_sha256": "f33242bd0d3b632cb5e97dc9ce3e6a9029060e204c87a3554aff1ed36a7d19f0",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snapshot_module.rst",
+ "name": "docs/cisco.nxos.nxos_igmp_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0e3fad000d677b7f24bf64090f02742004a9cf100306a97f51aff9594fe8d9f",
+ "chksum_sha256": "9f12f1c5cb360aa1634964c6343e14141cfce7fc49e37e0062c2957b20459399",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_rollback_module.rst",
+ "name": "docs/cisco.nxos.nxos_l3_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04cf041066c5d09e1a5317b7cb7cc1eb2504f0d1faeb9a28d3d59f3ae98c783f",
+ "chksum_sha256": "01adb1dc48dfbe1a73cf269060e1f83310d5e05b813dd109770e5e335abcb436",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_rpm_module.rst",
+ "name": "docs/cisco.nxos.nxos_config_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e928284ecee7ed27ef75a660db2e02900d8de4cf0d255bfd31a819f4e2abe036",
+ "chksum_sha256": "d4ad6ef9b747b454824d16ca0cd7c56fdde289a6cc8c93c9ecf3bf1d23db5f84",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vtp_password_module.rst",
+ "name": "docs/cisco.nxos.nxos_hostname_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aba8c49ea83acf4e1ad8de779ebc06e21940537ce459b53bcfbdabdf72dacd06",
+ "chksum_sha256": "2cc61e3ea5053c4ee1bb14f41a85e67e4790010b7142e21bb72fcc88a5e715aa",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_vtp_domain_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "18a52103976ba1d930a98e0e7fa45de34d95a86ca222e59b0ffc9629585cdf6a",
"format": 1
},
{
@@ -302,66 +372,87 @@
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_igmp_interface_module.rst",
+ "name": "docs/cisco.nxos.nxos_snmp_community_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e5c432e7e272efb56ee80dda73b66a13b1a74e16f14bd48d38681023f710b7f",
+ "chksum_sha256": "273905bc36e919c6f348d146018b77d4b028457714b0f3b4b2fad6a078680fe7",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_facts_module.rst",
+ "name": "docs/cisco.nxos.nxos_ospf_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6ef51706f5729ece09bdcce4c7e1ab04ec7d58d4ae6c3a7e12ebf275b5d792ad",
+ "chksum_sha256": "808523316e7aa23425653e4c2caed8ea2732ebb51fb3aa0f15dddd416d049725",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_lldp_global_module.rst",
+ "name": "docs/cisco.nxos.nxos_vpc_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6994ab2f365796541f268d6d3051e4a192e2b911495e12342d6d3cad12016f2b",
+ "chksum_sha256": "0aeb5a090773bcf219cb3ab181424bde284e9669ccc3ace93175b739fec4f81a",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_cliconf.rst",
+ "name": "docs/cisco.nxos.nxos_bgp_global_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "546fc754b81109404a33285c90c3bba565b1a2529e00b7284609b62bb25b4d8b",
+ "chksum_sha256": "83bf0acc036ccd3a7508746b650720220d208e5c8ff4894fff982b151af3689f",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_ospf_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "808523316e7aa23425653e4c2caed8ea2732ebb51fb3aa0f15dddd416d049725",
+ "chksum_sha256": "ab3f38f88a8fbc5884c3694784ba4991c29c7690f21725706603dfe83c35e6e4",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_zone_zoneset_module.rst",
+ "name": "docs/cisco.nxos.nxos_facts_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b25696dcecfa3f12cf9a899132d4b220dcc774df51e1aea5fdecd422ca3a36a",
+ "chksum_sha256": "6ef51706f5729ece09bdcce4c7e1ab04ec7d58d4ae6c3a7e12ebf275b5d792ad",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vpc_module.rst",
+ "name": "docs/cisco.nxos.nxos_netconf.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0aeb5a090773bcf219cb3ab181424bde284e9669ccc3ace93175b739fec4f81a",
+ "chksum_sha256": "da11bd9904bba75e1ca0484cf6ddc0ebcd0a2ffc12b1e9132d60f8a0bc54705c",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_gir_module.rst",
+ "name": "docs/cisco.nxos.nxos_vlans_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2969b2a880bc98f5272720086059bc55d861fc1ce00c8854c75aa061ba3aa8bf",
+ "chksum_sha256": "e60a339f8dbb48eaed5188761be6c6b375f5f6641ff322d3904e45f199f5ee0f",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_bfd_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_ospfv2_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bcc5883c369f37ac683991364855f247f77299f2decc72a5e5c794e681a92fbc",
+ "chksum_sha256": "d2704aec625f078e9b8c03b949dec0b6ce33ee85efc5e495b37f0c27339b20c8",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_route_maps_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2041055670405961112eb26ef06db6afda07055ef02a81026f1f0293144c7331",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_pim_interface_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b7fa1c8e28a128af94059b3d66e3e40c97e7d748a858ff74e77a0e313641c3ff",
+ "format": 1
+ },
+ {
+ "name": "docs/cisco.nxos.nxos_evpn_global_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "993ea85ff1bf02311f55646f0c5dcadc2d216a2819b7902f43470a645d1b90d7",
"format": 1
},
{
@@ -372,248 +463,269 @@
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_lacp_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_igmp_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "81829e369c84802f5cc2fdbf88d380007da8d326106cafaacb981d86f38be6e1",
+ "chksum_sha256": "1e5c432e7e272efb56ee80dda73b66a13b1a74e16f14bd48d38681023f710b7f",
"format": 1
},
{
- "name": "docs/__init__.py",
+ "name": "docs/cisco.nxos.nxos_aaa_server_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ae7d06f89fb79b0ff5b7dbb41b289db70e8c602a08c1baac5b059adc72775665",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vxlan_vtep_module.rst",
+ "name": "docs/cisco.nxos.nxos_evpn_vni_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "920cbc7cf6b2d9736bbeacbf6751674c6b8985af13183432f027070235ed745e",
+ "chksum_sha256": "1d69d440ff1844c0ffcf234f50c4cbaccafd2a5dd56481e14aeecac6c80e94a1",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_acl_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_snmp_host_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "03a8274fb4992504f056b15b84a7c7f98b08a9d3160a12ea038a1a700039e04a",
+ "chksum_sha256": "738d6be19ec9aea9158035e6f1a2302d3484500ee9b3148e2d01628bf2f9fcdc",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_system_module.rst",
+ "name": "docs/cisco.nxos.nxos_vtp_version_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7701830b6698de0515fb009aa9b2c01256f3144d62909b760ccf23e749ec7422",
+ "chksum_sha256": "c27cc4c99dbad288216a917b46c9949f1cc2ed72a4268e79ef61e708d6a1edff",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_server_module.rst",
+ "name": "docs/cisco.nxos.nxos_ospfv3_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c41210552496035bd6ba8eed6ec9d37ad4445b724438207a50bb4ce9e8fb6bb",
+ "chksum_sha256": "041dd0262265b58cbe0bf6e505a7f676eb6b2d4a5fc59681f1e4b5e6c463de56",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_bgp_neighbor_address_family_module.rst",
+ "name": "docs/cisco.nxos.nxos_telemetry_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1fe6fe7be307fe262e49165038677e55a79be75ce8a1eaa0edf1617aa0b8c4e9",
+ "chksum_sha256": "70e0841739f0a1ab237624a5f5f6fbbebbeb61244a9fe00e9ae6a6af936d94a9",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_httpapi.rst",
+ "name": "docs/cisco.nxos.nxos_snmp_contact_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb7f6fd0b46ad4b478746f0518b8d443e541f44ba8d77f999a42a760671ea015",
+ "chksum_sha256": "cbb715723ead144ae09062bf77f7f4a73e4a5035a81a7ecec925b00410e8579b",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_reboot_module.rst",
+ "name": "docs/cisco.nxos.nxos_aaa_server_host_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "194778e502ec450e673e2562baf84d459a9d844c3c3f281cd6ed2f2bc2618087",
+ "chksum_sha256": "e8d1dc8303a82ca59e70c37d6852f8dd1ba6b002e896a9f4f2182148a6944ef7",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vpc_interface_module.rst",
+ "name": "docs/cisco.nxos.nxos_rollback_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "941e607951f2fe1347329a6f68d9303ff04fcebedfd0792fe5ab181625e120d3",
+ "chksum_sha256": "04cf041066c5d09e1a5317b7cb7cc1eb2504f0d1faeb9a28d3d59f3ae98c783f",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_hsrp_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_vrf_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6cbf7b6611599b267ed05be736c19303ecc74c696750ea3b7b27bf97f942644f",
+ "chksum_sha256": "64296c4de9145ceec2a49e4f09b2573fd163bbe33247b395769cb6833cdd16ba",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_igmp_snooping_module.rst",
+ "name": "docs/cisco.nxos.nxos_prefix_lists_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a31b9e7cac6c66069bc1b9a46fbf4f3e778fec3cdbdb55f4b6d672abeaf7d795",
+ "chksum_sha256": "e411c76a91d77aac63c31c51864f7247f91fb48226b134bbb1b2cbc29029bfa3",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_udld_module.rst",
+ "name": "docs/cisco.nxos.nxos_lacp_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26d0773b5cbd658f39b2c08b9949e45ae5aa8a94405c4f1ef9ed0708c682c4a3",
+ "chksum_sha256": "81829e369c84802f5cc2fdbf88d380007da8d326106cafaacb981d86f38be6e1",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_l3_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_vrf_af_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "01adb1dc48dfbe1a73cf269060e1f83310d5e05b813dd109770e5e335abcb436",
+ "chksum_sha256": "3719fa178c08c4d172a9a0307f402b45434931b395cc57f2451cc810bd45a786",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_route_maps_module.rst",
+ "name": "docs/cisco.nxos.nxos_lldp_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2041055670405961112eb26ef06db6afda07055ef02a81026f1f0293144c7331",
+ "chksum_sha256": "b14e490bedd8b115c8beebe5ec81fda4a7db93457f546fbca91fe2bce3887e76",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_acls_module.rst",
+ "name": "docs/cisco.nxos.nxos_rpm_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2869249548f0b1aaf9f1d7c8b3d24a5301a7ffa88fadb82dda595aca2374ccb2",
+ "chksum_sha256": "e928284ecee7ed27ef75a660db2e02900d8de4cf0d255bfd31a819f4e2abe036",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_ospfv3_module.rst",
+ "name": "docs/cisco.nxos.nxos_snmp_server_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "041dd0262265b58cbe0bf6e505a7f676eb6b2d4a5fc59681f1e4b5e6c463de56",
+ "chksum_sha256": "2c41210552496035bd6ba8eed6ec9d37ad4445b724438207a50bb4ce9e8fb6bb",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_netconf.rst",
+ "name": "docs/cisco.nxos.nxos_bfd_global_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da11bd9904bba75e1ca0484cf6ddc0ebcd0a2ffc12b1e9132d60f8a0bc54705c",
+ "chksum_sha256": "1ff1d376f71be1de3dc5e97bfae1ea331e92430adb9e31b61e6fdde8da710538",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_fc_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_gir_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "865bcbc040c3370de4f47a6b8e68c1339f8ba54dbbaa362b0ceeae8cecd86cc8",
+ "chksum_sha256": "2969b2a880bc98f5272720086059bc55d861fc1ce00c8854c75aa061ba3aa8bf",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_lag_interfaces_module.rst",
+ "name": "docs/cisco.nxos.nxos_reboot_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4980199790b302fc46982df8e0c2fa5f5d81ef50ebae4d9961ebbfe6cb62eb0c",
+ "chksum_sha256": "194778e502ec450e673e2562baf84d459a9d844c3c3f281cd6ed2f2bc2618087",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_bgp_address_family_module.rst",
+ "name": "docs/cisco.nxos.nxos_file_copy_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9706519c8d06db723b98f3c2ed78ffda4ca604b08215be80461fad2defd9507c",
+ "chksum_sha256": "c38e717848ddfc093b17245d08c45e6beda5d7b1c263453ef0a43171d5008f47",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_hostname_module.rst",
+ "name": "docs/cisco.nxos.nxos_lldp_global_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2cc61e3ea5053c4ee1bb14f41a85e67e4790010b7142e21bb72fcc88a5e715aa",
+ "chksum_sha256": "6994ab2f365796541f268d6d3051e4a192e2b911495e12342d6d3cad12016f2b",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_telemetry_module.rst",
+ "name": "docs/cisco.nxos.nxos_lag_interfaces_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70e0841739f0a1ab237624a5f5f6fbbebbeb61244a9fe00e9ae6a6af936d94a9",
+ "chksum_sha256": "4980199790b302fc46982df8e0c2fa5f5d81ef50ebae4d9961ebbfe6cb62eb0c",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_evpn_global_module.rst",
+ "name": "docs/cisco.nxos.nxos_udld_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "993ea85ff1bf02311f55646f0c5dcadc2d216a2819b7902f43470a645d1b90d7",
+ "chksum_sha256": "7786920d50729a8a8a73a20bcd0d4296af8095f52590076177511dc0c696dda5",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_lacp_module.rst",
+ "name": ".isort.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf3920fe2dbbd42f322d4a0f4dd23c875d01f6a5545dbc648ae5ae3aa62320de",
+ "chksum_sha256": "3fc5fcc23727e7c5b9f1a7035303990229f796a171a84e21051dd1d242b90c52",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vtp_domain_module.rst",
+ "name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18a52103976ba1d930a98e0e7fa45de34d95a86ca222e59b0ffc9629585cdf6a",
+ "chksum_sha256": "179a6d0780422c32653a71b9395a3ecb8b3859346ffa3e38ca3c8d90f8063fd2",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_l2_interfaces_module.rst",
+ "name": "changelogs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b94e0fc5aeaa2d80e755b774c097970fead1ddb156eb018bc852516de3ca877",
+ "chksum_sha256": "54c802c2ade537e8b71ba45d10c73a422778a62aab0093bc6805c75ab8c7f448",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_interfaces_module.rst",
+ "name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab3f38f88a8fbc5884c3694784ba4991c29c7690f21725706603dfe83c35e6e4",
+ "chksum_sha256": "941ab630b9f9ed4677b42e26267af4f32cb234c6778572d487f47e2e3123f51c",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_snmp_contact_module.rst",
+ "name": "changelogs/fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments/.keep",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cbb715723ead144ae09062bf77f7f4a73e4a5035a81a7ecec925b00410e8579b",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_nxapi_module.rst",
+ "name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e1d3a6d1f0ab8c170068321f77ab36c1161892773c9efa90dfff711664bfdb3",
+ "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_file_copy_module.rst",
+ "name": "codecov.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c38e717848ddfc093b17245d08c45e6beda5d7b1c263453ef0a43171d5008f47",
+ "chksum_sha256": "4aa7e485dd4db6f8a55b046088c745def2b3145d9499ccda4e9a3336467dcea2",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_bgp_global_module.rst",
+ "name": "pyproject.toml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83bf0acc036ccd3a7508746b650720220d208e5c8ff4894fff982b151af3689f",
+ "chksum_sha256": "82a737e4c91699b90905fc2d347acaf5c3eb4567cc0750cf122c859d0eed78ed",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_vrf_af_module.rst",
+ "name": "tox-ansible.ini",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3719fa178c08c4d172a9a0307f402b45434931b395cc57f2451cc810bd45a786",
+ "chksum_sha256": "da802bfc2dc2671ca7caba028e8a635b459b131874041d119b698717333a7a49",
"format": 1
},
{
- "name": "docs/cisco.nxos.nxos_pim_interface_module.rst",
+ "name": ".prettierignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7fa1c8e28a128af94059b3d66e3e40c97e7d748a858ff74e77a0e313641c3ff",
+ "chksum_sha256": "fe21cf5ca4606d16ccedb1bccc85ef06771ee697924ca50ecd8ca4c98acce5cc",
"format": 1
},
{
- "name": "LICENSE",
+ "name": ".pre-commit-config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "chksum_sha256": "5abb3cf06d4b85c40867e9267de00b184cb5d3abed36754ff1607c27ff767ee0",
+ "format": 1
+ },
+ {
+ "name": "bindep.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f0e2709e0a523d84f824ccfb6883f62e9d27f70bd7a38be0d18506223ccfe60e",
"format": 1
},
{
@@ -624,13 +736,6 @@
"format": 1
},
{
- "name": ".github/release-drafter.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7cbc785c8661033cd9dd7a60897484fce731cbe3dc124a689fc3e934b1d974fb",
- "format": 1
- },
- {
"name": ".github/workflows",
"ftype": "dir",
"chksum_type": null,
@@ -645,6 +750,20 @@
"format": 1
},
{
+ "name": ".github/workflows/tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e5d8797ac2c34af1b5cf6e9dd3c760655623bb9d7c40cff83958202066f69f6",
+ "format": 1
+ },
+ {
+ "name": ".github/workflows/release.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2e3f90a3147d93b97347d8418352f23e201417e05539a466399dd258a65d699c",
+ "format": 1
+ },
+ {
"name": ".github/workflows/check_label.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -659,45 +778,73 @@
"format": 1
},
{
- "name": ".github/workflows/release.yml",
+ "name": ".github/workflows/draft_release.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e3f90a3147d93b97347d8418352f23e201417e05539a466399dd258a65d699c",
+ "chksum_sha256": "b582d81b8bbcfb8d0f077f40ff65463c0803c2a2713664aa9f28ff167c48e483",
"format": 1
},
{
- "name": ".github/workflows/draft_release.yml",
+ "name": ".github/dependabot.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b582d81b8bbcfb8d0f077f40ff65463c0803c2a2713664aa9f28ff167c48e483",
+ "chksum_sha256": "c4d7b4c429faafe0eb46a070eafcd3d8d84d9083b5fe72943d627b5f4ccf0dd8",
"format": 1
},
{
- "name": ".github/workflows/tests.yml",
+ "name": ".github/release-drafter.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e5d8797ac2c34af1b5cf6e9dd3c760655623bb9d7c40cff83958202066f69f6",
+ "chksum_sha256": "7cbc785c8661033cd9dd7a60897484fce731cbe3dc124a689fc3e934b1d974fb",
"format": 1
},
{
- "name": ".github/dependabot.yml",
+ "name": "meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "meta/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4d7b4c429faafe0eb46a070eafcd3d8d84d9083b5fe72943d627b5f4ccf0dd8",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "README.md",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95f172f0a54c78871ccb1230fb692ef79368765c2cc63d7d86c3a3f781e0f5ff",
+ "chksum_sha256": "40c6ab1595d2252393db7e834d0de46f776beddd1bd701a49a29a9d7b1a7c876",
"format": 1
},
{
- "name": "bindep.txt",
+ "name": ".ansible-lint",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0e2709e0a523d84f824ccfb6883f62e9d27f70bd7a38be0d18506223ccfe60e",
+ "chksum_sha256": "b47f72e159f93a5ff07ea2534752e0fa977b214e9ac05c667fa83ac13be4e50c",
+ "format": 1
+ },
+ {
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "70053744bf8f5b698f82ad59a6daec106188d9b63c6d1246f860f21ab8cb6ac5",
+ "format": 1
+ },
+ {
+ "name": "platform_guide.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "48467b2534cf719f3925b1b62e10a587eb68c1afbc19bf69d801dbec119a9277",
+ "format": 1
+ },
+ {
+ "name": "test-requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cf0a6736e79a79f5ea107f0b74c38957ece4a6e4b5c00b01d2c1ea679d876089",
"format": 1
},
{
@@ -708,759 +855,808 @@
"format": 1
},
{
- "name": "plugins/modules",
+ "name": "plugins/terminal",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/nxos_bgp_neighbor_address_family.py",
+ "name": "plugins/terminal/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2861a072c7f54bdf90fa0fca06b8ce0af964f3073e27a4ac00940a7fdfeb26a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/nxos_vrrp.py",
+ "name": "plugins/terminal/nxos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a261501d6200ac9685d544c2dd29f284377da8592864f7ad26e74ef4e55d5567",
+ "chksum_sha256": "e49e9c495e05b71317fa3d2c462cb085ce5827874235484d735f470e7afa5059",
"format": 1
},
{
- "name": "plugins/modules/nxos_zone_zoneset.py",
+ "name": "plugins/action",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/action/snmp_location.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ead6bd11cb8112b667ecc6f7d1fd0751941737d6fc1154c73501e4abdd42a0",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_l2_interfaces.py",
+ "name": "plugins/action/static_route.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06d0406435fadc33d78bdc5dc2b541d3cdb08b972497a7fd86357354638056a7",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_gir.py",
+ "name": "plugins/action/vxlan_vtep_vni.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05675c570b78691538eaa20c62f548b41885b8db20018e5805a6ced813eaff40",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_igmp_interface.py",
+ "name": "plugins/action/static_routes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8cbb9aec6383daa0e4a82c2c4c3d29ef313c078404d70413301abdd98513c17",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_lacp.py",
+ "name": "plugins/action/telemetry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf38b4169e3605546f6f4d7138529c1ba3effb60f74ea844f205ef2160e800c1",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vrf_af.py",
+ "name": "plugins/action/smu.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bcddf538b58de6ee3355af3f5af10af8b95268b4a2bdd7166d2c451a7484228d",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_overlay_global.py",
+ "name": "plugins/action/bgp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2e7bc1eff3259825c6eae134d87e92ee922cc3fd423b63fac821c659ea891d8",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_bfd_interfaces.py",
+ "name": "plugins/action/bfd_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd7b49293300983b1bfe085206f30ad341bf3af6ee17aad238186ad5d1bec86a",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_ping.py",
+ "name": "plugins/action/ospfv3.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d2648ff85a7fee59b9b0b099afde09e1f7f11c7e5d9f033d27a2af0953e854e",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vlans.py",
+ "name": "plugins/action/ntp_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d603737717f0b6a697fac057acf07a763f002aa641026cd6436b2bc3e4611d3b",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_gir_profile_management.py",
+ "name": "plugins/action/interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5c49276a499ef2988a7047ec75a5464f13e5b458f0a7d6af46c8fd8e3f71c89",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_udld_interface.py",
+ "name": "plugins/action/igmp_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2346b5a0aec6682e11c7ee21cf5f77fd1b53bd4839d11a7da4e23e8378049aaa",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vxlan_vtep.py",
+ "name": "plugins/action/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9aefbaf9557a93bafc8de522a59e3246967964e658c9ee7631fa202e0b5215fa",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/nxos_config.py",
+ "name": "plugins/action/devicealias.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "160aa4af3ddc22a3a35d277eded9a0bf54df4a0b7b44a11fa2c077b5cca71db5",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_lacp_interfaces.py",
+ "name": "plugins/action/l3_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc1a726578647a2b3f08a76ca542daa557960de383812b9b4067af37a49359d1",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_route_maps.py",
+ "name": "plugins/action/rpm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3daed656349f74ca544cca0df90fe653384af61e5109235c2a0643e67593ddbf",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_devicealias.py",
+ "name": "plugins/action/config.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8fcfa22b50abdddab25a9235c03d8ef41055a7f98dfa6ab1af4a4e86fa6c1e10",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_pim_rp_address.py",
+ "name": "plugins/action/bgp_neighbor_af.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "048f2a6e71622db905600dbc6f0862564275c438ac0a401aca0b2c792605d10f",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_bgp_templates.py",
+ "name": "plugins/action/bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "955e3860781eef5083a0fa88e80d95803645ca485edd357b5867d3706bbc45bb",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_file_copy.py",
+ "name": "plugins/action/file_copy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8418e53de9a83561d6c4b60b2d5d8115863d210376acbebf4f4c41a6489cd2a5",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_community.py",
+ "name": "plugins/action/vrf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4225f8c4053ec681cb9fadb841f3b0660771879e740d13858c459fedbfc5aa3c",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_acls.py",
+ "name": "plugins/action/vrf_af.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "283955d63ab51126d854605578e07115bb252047205fe90749563bf343bdb00a",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_location.py",
+ "name": "plugins/action/acl_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a11d2a363173b645bdf9c8cd1cdb80e77b94d4c169069614deae90a441a1de22",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vrf_interface.py",
+ "name": "plugins/action/ospf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2876512450aa2920f88a2d408eb72cd7976ec4124a95b058d57427d0f939fb1",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vpc.py",
+ "name": "plugins/action/facts.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "edd715fc9e823dbf37a31a16de8f680132e0cc9b2feea46ab073d34f8098acf5",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_hostname.py",
+ "name": "plugins/action/snapshot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c6e80d807e983caef125de3683283634a0fb850861b4c240a7446c36660b8a22",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_logging_global.py",
+ "name": "plugins/action/banner.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1493bf17b72bd52b2f045a3d3c8767b2e414600762fadf79c065dda1b99e47d2",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_aaa_server.py",
+ "name": "plugins/action/hsrp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7d0ba90c00bef7d014ca922ce5e66ea10fe116bb9af88bfdf671402527b404b",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_igmp_snooping.py",
+ "name": "plugins/action/bgp_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee94035d9f4a71393a16670f8e269064476b6da5d99071a4f23cfc253da13703",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_interfaces.py",
+ "name": "plugins/action/l2_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5535b8856eb027aabcd9885c29602a14a92c40554ca777b553879ae1bd8b18b6",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_rollback.py",
+ "name": "plugins/action/vlan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7fec43544c7cb26eb84d9cdd22e74a4f042de04943e5896fcd45a1020f003e82",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vtp_password.py",
+ "name": "plugins/action/vrrp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6cb79ae92bdf1ab82291e4b331dfeb53b42f004a416908268380813905b8215",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_lldp_interfaces.py",
+ "name": "plugins/action/acl.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b444c878e0d7289fb705e56985724155c15dd9f5b8de4ad2fa581ddc77b29e0e",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_prefix_lists.py",
+ "name": "plugins/action/gir.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbc6db3871917b6239c2e37caede67c3a6f69e2c578357f5831bf74173c09bd5",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_bfd_global.py",
+ "name": "plugins/action/install_os.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e2121178e29e5e34ca77f7ed2dc78e7fdcf6ddbe7a9ab16545c39c11dc00f7ce",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_rpm.py",
+ "name": "plugins/action/vtp_domain.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e2ba7d2ec024a075a96be051996d4eba6f6e370058c6df454b89ac7c36759f58",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vtp_version.py",
+ "name": "plugins/action/lag_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c822abea7b871b5781dcf46856c07f0214793bc3768743d45966ae5a0b6e779",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_nxapi.py",
+ "name": "plugins/action/ping.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf44a3e80959a805238202c49a5a639d6a1ecf5f6d8191b93b3f6db314a80c15",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_command.py",
+ "name": "plugins/action/igmp_snooping.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19c20fdbf3d421a6a487cb76e85904fcfc62dfe26569000e370e6d829c0fb838",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_contact.py",
+ "name": "plugins/action/logging_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "936a5e9b00d8c46ed39a8818f5806971f16fc8cebd3f5106e6909b7ed8395ac1",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_aaa_server_host.py",
+ "name": "plugins/action/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7560a2b37d9b93af17d9d0241db8cc701778ff974fcc4f16204fcaa991c1f2a",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_ntp_global.py",
+ "name": "plugins/action/ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48f03d894a18905e2934f7f22c43b8b78e66e8f3b2a39770e94b45907977a0a0",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_feature.py",
+ "name": "plugins/action/nxapi.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b8a6deea08ebeae462e2fefbcc4762dd67b71bc5a5d0f51e3ea3595375888da",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vxlan_vtep_vni.py",
+ "name": "plugins/action/bgp_af.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00cc8a5f8826d10b27cbea4c1523a5dc48a57d174ae7ecfd918e02d506902b80",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/__init__.py",
+ "name": "plugins/action/pim_rp_address.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_pim_interface.py",
+ "name": "plugins/action/snmp_community.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62a33fbb98411f4b017a3d884645f48f8e79049f27e0f3d87698b3bcb7f0c48c",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_traps.py",
+ "name": "plugins/action/ntp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73ef124bdd93a1b8d6d7d55870e04c99bb29cde93c0c400884f06c4972057f75",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_reboot.py",
+ "name": "plugins/action/command.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "826e076559d7143fc03a3de9f0e5ec125241fd3196037f2a1a524e033e75b8de",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vsan.py",
+ "name": "plugins/action/interface_ospf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a775ad1dba4b6997070b43beb0eff5a71f94cf0d7462c64d7761710663cb865",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_pim.py",
+ "name": "plugins/action/zone_zoneset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a06919bdfbaab64ef58295c408def23dc36336b630e12d80a2a80e3a31a53a85",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_banner.py",
+ "name": "plugins/action/igmp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a98cf0828178ff3233b836f7a1d1478ad4f5020143b0a6435630737a82b4110e",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vpc_interface.py",
+ "name": "plugins/action/pim.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7361c2f7afeaedada1c42037a5f54d16b934ef5993f155819e0edb731c21de1f",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_system.py",
+ "name": "plugins/action/udld.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "34a51f70bb99431fadc63a66270533d88d97d3a6eb422578a78c1f59da7c89e8",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_hsrp_interfaces.py",
+ "name": "plugins/action/interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b9cea1b4a4a770fdf3b6dad829247384c19aefeaf5d300f8730ebbeb8c7bc0d",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vrf.py",
+ "name": "plugins/action/snmp_traps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6145ff3c78f13d4bcd3d338da62805a3a1e594b80c8a72de7ff83059d3d5eb04",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_vtp_domain.py",
+ "name": "plugins/action/gir_profile_management.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e76836b4b68b3c32d5a146bcb6738009872e3e69d3afffe269e50846d4a84058",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_host.py",
+ "name": "plugins/action/vsan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ae6e69d2c2af158c70e81aed0f2a6dbccca2c69033861bd697af4dba0c5183c7",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_fc_interfaces.py",
+ "name": "plugins/action/pim_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b7cb927469f1aab619f06daa2442fd6c4267898dfd7f9f471d58b9970eb5b27",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_user.py",
+ "name": "plugins/action/logging.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3421c84ad96b685dce41814131c5e17d336d58fdaac332ea01b72606ffdae07c",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_evpn_vni.py",
+ "name": "plugins/action/ntp_auth.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cccbafbd724df1855a60cebf93e8429be7edc59582827b0ed6c5af84a877565a",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_install_os.py",
+ "name": "plugins/action/vxlan_vtep.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a8d563421835390e3d46ff4cf4feadd2ff33a43d58a6f8d828a8c9f1f79d951",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_evpn_global.py",
+ "name": "plugins/action/hsrp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3cb0c7e66e32783f346744b3b08edd63f2d3ac82b67addb55597a19b48db7e9c",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_static_routes.py",
+ "name": "plugins/action/lldp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ef2625e9e8c20f8012c57596cf06ac0ac8b28662ee78e98e2911354dfea6f32",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_facts.py",
+ "name": "plugins/action/udld_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d47e0e98adf0b507c6e725efcb94b573d9100679336c3beaf8193ee0247f24b",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_acl_interfaces.py",
+ "name": "plugins/action/overlay_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d3aa83fa072e36c33233e8244b6d7f4b6b28eb355d5045dbffbb7ebd2cfbf8b",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_telemetry.py",
+ "name": "plugins/action/vrf_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5bf9c2cfa138af73b7d1e47a1d5345049dd93a00933eeb94e39a66d94ec759d0",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_ospfv2.py",
+ "name": "plugins/action/vtp_version.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ceba0e4676fb124fe23e8c0e3e9358e247f6cf34f4af0c2e09db648692b03262",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_lldp_global.py",
+ "name": "plugins/action/system.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "485b35aeac40cd9d3dfaf18a7f5481fc14796ce054387af3dca72160677f6dd7",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_hsrp.py",
+ "name": "plugins/action/snmp_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e0bccf6fb0aa7ce0cbacacb8acf9821015472213a2319df618e7eef4d03a52f",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_l3_interfaces.py",
+ "name": "plugins/action/route_maps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b95b46e32c56259e36e705a3249140c51028bc43376257ef0913e93149918c1e",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_ospfv3.py",
+ "name": "plugins/action/acls.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f7553546e438131fb6a81fb5c994270b900e30349f675a9f9ac77cb20b4a218",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_igmp.py",
+ "name": "plugins/action/lacp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec8870d4fb6ecf6089717936138d43fc5fe54381994a0a7d5e8299a94f9ade67",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_bgp_global.py",
+ "name": "plugins/action/bfd_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "66c33d7e478c4d39b8899962fc8f152cab6651220069cc2a89cef2e299d1547e",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_user.py",
+ "name": "plugins/action/lldp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d16699e2fd607da804183e1fe29c6981095303d22b836f865f2046dc2c50ecf0",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_bgp_address_family.py",
+ "name": "plugins/action/rollback.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "98ba4c208cb7259d49b76f7b48cd1baa8b6f049cac2e9f64142c5011e12c2eee",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_ospf_interfaces.py",
+ "name": "plugins/action/acl_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ccbbc24a2038b70b2486f92b33ceaecf1932dbc2316167526855312dc6c599d",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snmp_server.py",
+ "name": "plugins/action/bgp_neighbor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ad2b68c98c78ad1082727d1997b52f29f03c4d8c9ee734ee40f7151bec58fd5",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_udld.py",
+ "name": "plugins/action/evpn_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9d531c20f551d644be87cf218c7bf8175f531a3190c60c7a20ae281203fbe8d",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_lag_interfaces.py",
+ "name": "plugins/action/snmp_host.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8aa8104d1be201ba069a75576473610d9a3d5e6e9778687b8f83121557d20a73",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/modules/nxos_snapshot.py",
+ "name": "plugins/action/ospf_vrf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5cf9eab5ad1ae6953579860b7365ce6e6d604f69fe00389f656c63f99cb3b24f",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/terminal",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/vlans.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/terminal/__init__.py",
+ "name": "plugins/action/hostname.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/terminal/nxos.py",
+ "name": "plugins/action/l2_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e49e9c495e05b71317fa3d2c462cb085ce5827874235484d735f470e7afa5059",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/vpc_interface.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/aaa_server.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/ospf_interfaces.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/ntp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/l3_interfaces",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/lldp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/l3_interfaces/l3_interfaces.py",
+ "name": "plugins/action/l3_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7082768644e0745bf790852b29aa7c0a05a1aa73e1038bbd65b115085ed981f6",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/l3_interfaces/__init__.py",
+ "name": "plugins/action/feature.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/telemetry",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/vpc.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/telemetry/telemetry.py",
+ "name": "plugins/action/vtp_password.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "735fed6cdb3cf9f532186ddcbd9caeceae6c66e803344bacfba16aae44312018",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/telemetry/__init__.py",
+ "name": "plugins/action/aaa_server_host.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospfv2",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/reboot.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospfv2/ospfv2.py",
+ "name": "plugins/action/bgp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a7bd0a029361af8e3f6355af52f8bbffdd6f6f31cb376a09574cbc248c63e4f",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospfv2/__init__.py",
+ "name": "plugins/action/snmp_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lldp_interfaces",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/action/prefix_lists.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lldp_interfaces/__init__.py",
+ "name": "plugins/action/lacp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lldp_interfaces/lldp_interfaces.py",
+ "name": "plugins/action/linkagg.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6dca4e69682508b76a0992a7ea9be015da7ecd322898e1a7f303ed45478586bc",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospfv3",
+ "name": "plugins/action/evpn_vni.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/nxos.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/snmp_contact.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "format": 1
+ },
+ {
+ "name": "plugins/netconf",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospfv3/__init__.py",
+ "name": "plugins/netconf/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospfv3/ospfv3.py",
+ "name": "plugins/netconf/nxos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6e093df4df247e1c4aa45fc6bfcdd4a5abbc8c2af26bfe0e3b4b7298be76081",
+ "chksum_sha256": "3dceb9254be3c61b493715b4d993f42ab9c088dd103eb6b59563832aec26c22e",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/acls",
+ "name": "plugins/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/acls/__init__.py",
+ "name": "plugins/module_utils/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/acls/acls.py",
+ "name": "plugins/module_utils/network",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62a096743b254ad70713007700d5c44ea26a00d69ff7fb8106e640ba4e25fced",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
@@ -1471,98 +1667,105 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/hostname/hostname.py",
+ "name": "plugins/module_utils/network/nxos/argspec/hostname/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05b2c2b1670e7b5c0685f81d73c6a4f6176e9b659869554b51144e8311c62e56",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/hostname/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/hostname/hostname.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "05b2c2b1670e7b5c0685f81d73c6a4f6176e9b659869554b51144e8311c62e56",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/vlans",
+ "name": "plugins/module_utils/network/nxos/argspec/ospfv3",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/vlans/vlans.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ospfv3/ospfv3.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b23ecaded6b2b8fbcf578fd9bc5997cc3184037a70957a1dd17a772ab212f15",
+ "chksum_sha256": "e6e093df4df247e1c4aa45fc6bfcdd4a5abbc8c2af26bfe0e3b4b7298be76081",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/vlans/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ospfv3/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_address_family",
+ "name": "plugins/module_utils/network/nxos/argspec/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/ospf_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_address_family/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ospf_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_address_family/bgp_address_family.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ospf_interfaces/ospf_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "37b88e6b28e3fae84e9403565021cf2bdcb54fb8c282b38a4ec095b1c25fcb4a",
+ "chksum_sha256": "bf40cbc988c8bfd85f8f96409bc3a4246540e933527340e72cf19dfee0bca77b",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ntp_global",
+ "name": "plugins/module_utils/network/nxos/argspec/fc_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ntp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/fc_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ntp_global/ntp_global.py",
+ "name": "plugins/module_utils/network/nxos/argspec/fc_interfaces/fc_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "783602fd75a5c0ae5e6e5d966c292366b63a1b8c5806e5fe76163067f1442777",
+ "chksum_sha256": "cde20eb7e720bd76c623eb69a11dccee251365a5c8c7b8b934d83f61338c2784",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/telemetry",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/interfaces/interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/telemetry/telemetry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "571184731dbbe433772733ec8f703954ad6b17777b20251a7ed12a6f7ab441a7",
+ "chksum_sha256": "735fed6cdb3cf9f532186ddcbd9caeceae6c66e803344bacfba16aae44312018",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/telemetry/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
@@ -1590,136 +1793,150 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_neighbor_address_family",
+ "name": "plugins/module_utils/network/nxos/argspec/acl_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_neighbor_address_family/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/acl_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_neighbor_address_family/bgp_neighbor_address_family.py",
+ "name": "plugins/module_utils/network/nxos/argspec/acl_interfaces/acl_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ca222317640163b92905c69d00da5f3cba1788125facfb63e3166e4a6ff4036",
+ "chksum_sha256": "bb0d056e85549d5ce2c3bbd3bc5330a3bd8ffb7562123dc8452d07a7999f1701",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lldp_global",
+ "name": "plugins/module_utils/network/nxos/argspec/acls",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lldp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/acls/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lldp_global/lldp_global.py",
+ "name": "plugins/module_utils/network/nxos/argspec/acls/acls.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9b7fb9ecb6060e51efd1eb61649c055657aabac60fb4b28e20d8d8b12ea363c",
+ "chksum_sha256": "62a096743b254ad70713007700d5c44ea26a00d69ff7fb8106e640ba4e25fced",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/acl_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/l3_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/acl_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/l3_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/acl_interfaces/acl_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/l3_interfaces/l3_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb0d056e85549d5ce2c3bbd3bc5330a3bd8ffb7562123dc8452d07a7999f1701",
+ "chksum_sha256": "7082768644e0745bf790852b29aa7c0a05a1aa73e1038bbd65b115085ed981f6",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_global",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_global/bgp_global.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4fbafd074800875e6855205cdc9797c6e9ede1d525aab97014a6aa60e59b3900",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospf_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/lacp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospf_interfaces/ospf_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lacp/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf40cbc988c8bfd85f8f96409bc3a4246540e933527340e72cf19dfee0bca77b",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/ospf_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lacp/lacp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "55ab8a5ad42c207cbf65c711bc75c4ecfa391219e2106b78dc8b1d4137d743f2",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/facts",
+ "name": "plugins/module_utils/network/nxos/argspec/lldp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/facts/facts.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lldp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3dbe3771e03c68a3144d8b70df7b0d5895c69dbec69a7f47e67f0396f18ae879",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/facts/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lldp_global/lldp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "c9b7fb9ecb6060e51efd1eb61649c055657aabac60fb4b28e20d8d8b12ea363c",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_global",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_global/bgp_global.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_templates/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fbafd074800875e6855205cdc9797c6e9ede1d525aab97014a6aa60e59b3900",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_templates/bgp_templates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "186b8ce90e4f21c9b9ec38647e9d34bb5665c667b6b88fe042b7eede91d27bf3",
"format": 1
},
{
@@ -1744,153 +1961,174 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/fc_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/vlans",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/fc_interfaces/fc_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/vlans/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cde20eb7e720bd76c623eb69a11dccee251365a5c8c7b8b934d83f61338c2784",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/fc_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/vlans/vlans.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "1b23ecaded6b2b8fbcf578fd9bc5997cc3184037a70957a1dd17a772ab212f15",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/route_maps",
+ "name": "plugins/module_utils/network/nxos/argspec/hsrp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/route_maps/route_maps.py",
+ "name": "plugins/module_utils/network/nxos/argspec/hsrp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc5a7ea360233ab48021b6513bca2b38467e94960b29c63945028e82e624702a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/route_maps/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/hsrp_interfaces/hsrp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "78bff7e6897df616065eb94dab6e2aedf4cdef6106783c80c68e3a44b3641c30",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lacp",
+ "name": "plugins/module_utils/network/nxos/argspec/snmp_server",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lacp/lacp.py",
+ "name": "plugins/module_utils/network/nxos/argspec/snmp_server/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "55ab8a5ad42c207cbf65c711bc75c4ecfa391219e2106b78dc8b1d4137d743f2",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lacp/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/snmp_server/snmp_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "9ae7bf54bcbbd71a2eeec9fd2db8c3fdefa34af8162df5d5941c2e322c1c040d",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/hsrp_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/bfd_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/hsrp_interfaces/hsrp_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bfd_interfaces/bfd_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78bff7e6897df616065eb94dab6e2aedf4cdef6106783c80c68e3a44b3641c30",
+ "chksum_sha256": "bf82cfc30230fa91de7fbfc2e5dd98a765f17ce3b123cfb8dc78668d3934523a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/hsrp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bfd_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_templates",
+ "name": "plugins/module_utils/network/nxos/argspec/ntp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_templates/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ntp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bgp_templates/bgp_templates.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ntp_global/ntp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "186b8ce90e4f21c9b9ec38647e9d34bb5665c667b6b88fe042b7eede91d27bf3",
+ "chksum_sha256": "783602fd75a5c0ae5e6e5d966c292366b63a1b8c5806e5fe76163067f1442777",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/prefix_lists",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_neighbor_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/prefix_lists/prefix_lists.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_neighbor_address_family/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "979b5710687e9fd4d0001ee92d070b9a24e5b22b4c622bb47ab6aa3da657c613",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/prefix_lists/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_neighbor_address_family/bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "8ca222317640163b92905c69d00da5f3cba1788125facfb63e3166e4a6ff4036",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/snmp_server",
+ "name": "plugins/module_utils/network/nxos/argspec/l2_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/snmp_server/snmp_server.py",
+ "name": "plugins/module_utils/network/nxos/argspec/l2_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ae7bf54bcbbd71a2eeec9fd2db8c3fdefa34af8162df5d5941c2e322c1c040d",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/snmp_server/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/l2_interfaces/l2_interfaces.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ceeb10c6125b3efda9d04ac4d324e8025b291277400266b651c8b9e3b4f3f960",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_address_family",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_address_family/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
+ "name": "plugins/module_utils/network/nxos/argspec/bgp_address_family/bgp_address_family.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "37b88e6b28e3fae84e9403565021cf2bdcb54fb8c282b38a4ec095b1c25fcb4a",
+ "format": 1
+ },
+ {
"name": "plugins/module_utils/network/nxos/argspec/lag_interfaces",
"ftype": "dir",
"chksum_type": null,
@@ -1898,6 +2136,13 @@
"format": 1
},
{
+ "name": "plugins/module_utils/network/nxos/argspec/lag_interfaces/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
"name": "plugins/module_utils/network/nxos/argspec/lag_interfaces/lag_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1905,157 +2150,185 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lag_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/facts",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/facts/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/l2_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/facts/facts.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3dbe3771e03c68a3144d8b70df7b0d5895c69dbec69a7f47e67f0396f18ae879",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/l2_interfaces/l2_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/interfaces/interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ceeb10c6125b3efda9d04ac4d324e8025b291277400266b651c8b9e3b4f3f960",
+ "chksum_sha256": "571184731dbbe433772733ec8f703954ad6b17777b20251a7ed12a6f7ab441a7",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/l2_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bfd_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/lldp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bfd_interfaces/bfd_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lldp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf82cfc30230fa91de7fbfc2e5dd98a765f17ce3b123cfb8dc78668d3934523a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/bfd_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lldp_interfaces/lldp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "6dca4e69682508b76a0992a7ea9be015da7ecd322898e1a7f303ed45478586bc",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lacp_interfaces",
+ "name": "plugins/module_utils/network/nxos/argspec/route_maps",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lacp_interfaces/lacp_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/argspec/route_maps/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73c1cb0cdb2fe7ddee5372b2ab1d2b9936248389c55e59ba3641a3cd7fd3a790",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/argspec/lacp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/route_maps/route_maps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fc5a7ea360233ab48021b6513bca2b38467e94960b29c63945028e82e624702a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/__init__.py",
+ "name": "plugins/module_utils/network/nxos/argspec/prefix_lists",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/argspec/prefix_lists/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/nxos.py",
+ "name": "plugins/module_utils/network/nxos/argspec/prefix_lists/prefix_lists.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "571d3839f5fdab7ee6046ccb1b7e3753877b862f7efd51d43973513910d60201",
+ "chksum_sha256": "979b5710687e9fd4d0001ee92d070b9a24e5b22b4c622bb47ab6aa3da657c613",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates",
+ "name": "plugins/module_utils/network/nxos/argspec/ospfv2",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/bgp_global.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ospfv2/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed671f6023709bf21f4a311d5b4d0d691d80ff73705eb81c6d20e7770da771f1",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/static_routes.py",
+ "name": "plugins/module_utils/network/nxos/argspec/ospfv2/ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ccf265de0d70717436fd47a40647609558cba7a1d6407849f1b3e2577bd115b3",
+ "chksum_sha256": "0a7bd0a029361af8e3f6355af52f8bbffdd6f6f31cb376a09574cbc248c63e4f",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/ospfv2.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f049139f75676305c98743a0efc940ea8a5fe481a0a9d37e99d5b7da9f5f6cb1",
+ "name": "plugins/module_utils/network/nxos/argspec/lacp_interfaces",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/route_maps.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lacp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e4d7751215f3d2c175f957ec2f8f667f547b3f59c4c4eb6b0e52559740a060a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/hostname.py",
+ "name": "plugins/module_utils/network/nxos/argspec/lacp_interfaces/lacp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd7353433ed688982b69d5f12378ff56bb0b399f0734e27b1571a86a14384253",
+ "chksum_sha256": "73c1cb0cdb2fe7ddee5372b2ab1d2b9936248389c55e59ba3641a3cd7fd3a790",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/prefix_lists.py",
+ "name": "plugins/module_utils/network/nxos/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c61c5caf3fd680763a367ee1d24dd318c773a6e390ab26a28eb28aff0176a1b2",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/fc_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/rm_templates/static_routes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f64b8a82076d44e2d84bb8e519e867de1be52eea91daa677a5e542f0a0d3429b",
+ "chksum_sha256": "4c7ca097072e599d45e9de4cffeebfdaa165831c1f99d5f0a3db2542121b413f",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/snmp_server.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/bgp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32cba948b16c3a3f53d2340958523c4b62de76300df58506c094fdf244d9cda6",
+ "chksum_sha256": "ed671f6023709bf21f4a311d5b4d0d691d80ff73705eb81c6d20e7770da771f1",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/ospf_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/ospfv3.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "619993ad1e730c0263a72547114d501d22cad1cc30a1ea614ad10f463ec3292c",
+ "chksum_sha256": "f1d51e9d1880df627d5d8c3f2cb483c1d6acb9e71919fd88ee7d0d2dc79e0682",
"format": 1
},
{
@@ -2066,31 +2339,31 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/logging_global.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f67d7522c05494210bc06984c76e11b9053b9c3e6c13aa10bafcafa527de814",
+ "chksum_sha256": "4c67e25a48908cf9c5567c09db909e2fb0223acd6cca1a737bb18101f0a6d2de",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/bgp_templates.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/bgp_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "539774203fdf45d1b73fc4e4acfdfe38e86c43f285c18b873c03238c2bba01fc",
+ "chksum_sha256": "13f0286247895225a98082e385e0024eb4b177acc5d8fcadb26e3ae4df75bd25",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/bgp_address_family.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/logging_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13f0286247895225a98082e385e0024eb4b177acc5d8fcadb26e3ae4df75bd25",
+ "chksum_sha256": "2f67d7522c05494210bc06984c76e11b9053b9c3e6c13aa10bafcafa527de814",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/bgp_neighbor_address_family.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c67e25a48908cf9c5567c09db909e2fb0223acd6cca1a737bb18101f0a6d2de",
+ "chksum_sha256": "f049139f75676305c98743a0efc940ea8a5fe481a0a9d37e99d5b7da9f5f6cb1",
"format": 1
},
{
@@ -2101,661 +2374,675 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/rm_templates/ospfv3.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/fc_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1d51e9d1880df627d5d8c3f2cb483c1d6acb9e71919fd88ee7d0d2dc79e0682",
+ "chksum_sha256": "f64b8a82076d44e2d84bb8e519e867de1be52eea91daa677a5e542f0a0d3429b",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/network/nxos/rm_templates/route_maps.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1e4d7751215f3d2c175f957ec2f8f667f547b3f59c4c4eb6b0e52559740a060a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/l3_interfaces",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/network/nxos/rm_templates/bgp_templates.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "539774203fdf45d1b73fc4e4acfdfe38e86c43f285c18b873c03238c2bba01fc",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/l3_interfaces/l3_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/hostname.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd91ce2d24a35f39f37f8275196584b199f99d885a0f915fd32a02e70a1bac02",
+ "chksum_sha256": "dd7353433ed688982b69d5f12378ff56bb0b399f0734e27b1571a86a14384253",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/l3_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/ospf_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "619993ad1e730c0263a72547114d501d22cad1cc30a1ea614ad10f463ec3292c",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/telemetry",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/network/nxos/rm_templates/snmp_server.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "32cba948b16c3a3f53d2340958523c4b62de76300df58506c094fdf244d9cda6",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/telemetry/telemetry.py",
+ "name": "plugins/module_utils/network/nxos/rm_templates/prefix_lists.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b34d0128aee2e7a33355d4132f4a5ba31a0d5ac90ccc69d786c7bc2abec568c",
+ "chksum_sha256": "c61c5caf3fd680763a367ee1d24dd318c773a6e390ab26a28eb28aff0176a1b2",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/telemetry/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "plugins/module_utils/network/nxos/config",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospfv2",
+ "name": "plugins/module_utils/network/nxos/config/hostname",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospfv2/ospfv2.py",
+ "name": "plugins/module_utils/network/nxos/config/hostname/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52b55a9bc2c837c33f85f0d3a20c5a392fc26a589b7c8d61e433d173fee934cc",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospfv2/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/hostname/hostname.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ab919426e1bd33f667ae875af49ed837d5f3f1e3598b878a290c3bc35004765d",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lldp_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/ospfv3",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lldp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/ospfv3/ospfv3.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0ee78999cd1450377200ffd92fa3e925ecf413b3c1cb1a4f11e084facf5844d9",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/config/ospfv3/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lldp_interfaces/lldp_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a9af0b059926122015f40536c839c8dc3fbb14d14cfc6a6ae02d85d0de14a7a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospfv3",
+ "name": "plugins/module_utils/network/nxos/config/ospf_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospfv3/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/ospf_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospfv3/ospfv3.py",
+ "name": "plugins/module_utils/network/nxos/config/ospf_interfaces/ospf_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39f79531c9741bae023e8c9db926a5463c311093057a49422bd7e25536e6cab9",
+ "chksum_sha256": "6a12a9c127ce960b84820ac79286672f458ab56934903abbfb52e74e2e9c98b6",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/acls",
+ "name": "plugins/module_utils/network/nxos/config/fc_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/acls/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/fc_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/acls/acls.py",
+ "name": "plugins/module_utils/network/nxos/config/fc_interfaces/fc_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3bbdfc94cda205574023cea71125acc6b9511f3dcac5eb89aa9b6001c503c85",
+ "chksum_sha256": "0438d7d1f23bc654fa0488e35002d8aff313dcf8afbef7412b0c68ee4e128f73",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/hostname",
+ "name": "plugins/module_utils/network/nxos/config/telemetry",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/hostname/hostname.py",
+ "name": "plugins/module_utils/network/nxos/config/telemetry/telemetry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c3330ec64341ba82a3d65827a9f33c088b95f64fd79f859db5b16334be443d9",
+ "chksum_sha256": "727e58c53e65e0803777cc288930ceb07f415e2202e6cf5a8b58ebbdffbd46d8",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/hostname/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/telemetry/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/facts.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0ca5196b5ac6835d5840ea9cbd58e2ed4b4554cd0b3f5a6625f30cde5ce03033",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/facts/vlans",
+ "name": "plugins/module_utils/network/nxos/config/logging_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/vlans/vlans.py",
+ "name": "plugins/module_utils/network/nxos/config/logging_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "02438c1af2fd68b76f7f368fcf8e1199bb61922c892a467ba78eb73dd081631a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/vlans/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/logging_global/logging_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "e7d1de04722e657bd71a3131e7bba17bc0178031c1c91cfa87df16e0eccc6878",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/legacy",
+ "name": "plugins/module_utils/network/nxos/config/acl_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/legacy/base.py",
+ "name": "plugins/module_utils/network/nxos/config/acl_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31d68732e6e3dccbb6ce2dacba8a4497cfe827638587bcd5aa6a4f6418a34a93",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/legacy/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/acl_interfaces/acl_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0fe0d6c19ce427a02332486d5c2b3d3905634d7eadc1f76b37ca21799e018bde",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_address_family",
+ "name": "plugins/module_utils/network/nxos/config/acls",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_address_family/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/acls/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_address_family/bgp_address_family.py",
+ "name": "plugins/module_utils/network/nxos/config/acls/acls.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9dceb9b1618124325de42f3625419e16b34dfa1b118f41d24742d16d188bd952",
+ "chksum_sha256": "350416a8c3dc470636d4d2633b6624a881274588ffefe26c3bc78f22475f5b11",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ntp_global",
+ "name": "plugins/module_utils/network/nxos/config/l3_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ntp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/l3_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ntp_global/ntp_global.py",
+ "name": "plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a0e41e513e3e657d058c758de0f4f107cb10c35c806968552c6b0f42fea7377",
+ "chksum_sha256": "783e5e40f848ca348b658ea629f6fc4a350d890ea44c49c303b601db44e0a92c",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/interfaces",
+ "name": "plugins/module_utils/network/nxos/config/bgp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/interfaces/interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_global/bgp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c4a7320158d21677a5bfa159a9c2138ffec2ce98996ed5573c061b163b8daad",
+ "chksum_sha256": "e0d590ee1bf37a813910101a74b0d52709cfb6e174828990d34125a06a2ae570",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/logging_global",
+ "name": "plugins/module_utils/network/nxos/config/lacp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/logging_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/lacp/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/logging_global/logging_global.py",
+ "name": "plugins/module_utils/network/nxos/config/lacp/lacp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5b8bd957449018ead97c56ab6c4c7b3945f6a176a9a6be0dee1bfaa02878939",
+ "chksum_sha256": "9612ac4a62de771ca9cef65d733373105e180a060227fff8330bc814d5cdde96",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_neighbor_address_family",
+ "name": "plugins/module_utils/network/nxos/config/lldp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_neighbor_address_family/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/lldp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_neighbor_address_family/bgp_neighbor_address_family.py",
+ "name": "plugins/module_utils/network/nxos/config/lldp_global/lldp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a8658e63caa17970c687a6b260d8067ec1a83acc322b1efa0608ca9711b6a598",
+ "chksum_sha256": "1804e5abcd0b77a8b5c675c3bb4b90731e8932bf882f8445f01e839013660845",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lldp_global",
+ "name": "plugins/module_utils/network/nxos/config/bgp_templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lldp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_templates/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lldp_global/lldp_global.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_templates/bgp_templates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a4c38941c0836cfcbe5870de76c13b5e991772d2514d919b52a0eb8861b1b8b",
+ "chksum_sha256": "dd3eb3ca52063455c3cbbb0fba6bdace21254000f3bd662466950098f82f72c1",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/acl_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/static_routes",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/acl_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/static_routes/static_routes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "329e8b628dae287612efa80e2ca7b5aaeefd530c19ddd8bd37a5b7774eb26aef",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/acl_interfaces/acl_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/static_routes/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dcde32d987bef1d67842179d8e8dad0077899c8009893fbef7ef89ab1b09ed05",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/vlans",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/config/vlans/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospf_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/vlans/vlans.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dd731a6f5a475a54f70cfc7f872737ebdc7b06e5a6b5f612ce2e92c0cc6bbfdc",
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/network/nxos/config/hsrp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospf_interfaces/ospf_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/hsrp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6df58dfe3eba436f0b43a2f4f089d58de078ca53907811f49c6ff9295a36e7b5",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/ospf_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/hsrp_interfaces/hsrp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5d5eb7ab8b3ff9d4c90321a6372a73f5087712b5d269eb1351380b93e5f1e207",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_global",
+ "name": "plugins/module_utils/network/nxos/config/snmp_server",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_global/bgp_global.py",
+ "name": "plugins/module_utils/network/nxos/config/snmp_server/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef399135879accd85c0495e76be7c6fbf1a1141c57fe10ed98666d1f2cd174e4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/snmp_server/snmp_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "213fd101832e7b149113afc178eac76dbff00448cec80e65c6c3e6e0d434e914",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/static_routes",
+ "name": "plugins/module_utils/network/nxos/config/bfd_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/static_routes/static_routes.py",
+ "name": "plugins/module_utils/network/nxos/config/bfd_interfaces/bfd_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d158d7985a673c37a6a92d24c32f7fac372f8082b7aebc0f95215760c2891c8f",
+ "chksum_sha256": "149dfe5bc61c80053e6fc498e2b98b70d1482ef8dd51ebb8ae7f84776e3b69b6",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/static_routes/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/bfd_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/fc_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/ntp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/fc_interfaces/fc_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/ntp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cfe4284974d9e939b38db93c36988c773a5c75743b2754e734ad1a814617f232",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/fc_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/ntp_global/ntp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "6936bf5deb7b2708e4a6373b325c3155a14bbc454419ed7f516999e92946415c",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/route_maps",
+ "name": "plugins/module_utils/network/nxos/config/bgp_neighbor_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/route_maps/route_maps.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_neighbor_address_family/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f15891603e1c71bcfee05df6fa749fc807f756b6a7413ac545b673ba76f57543",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/route_maps/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_neighbor_address_family/bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "d8a658ac17e3b61cc0105d4567c8793adf8b2bd0cc0bcd68cf86353ac3116db3",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lacp",
+ "name": "plugins/module_utils/network/nxos/config/l2_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lacp/lacp.py",
+ "name": "plugins/module_utils/network/nxos/config/l2_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b0f05127c1e721158123ed8ef3ddd6259eebdb4226a2b289ee70a8237d167c8a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lacp/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/l2_interfaces/l2_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "30afeab2a5eff47a83411b870b078040d11cb84a5b0c094b5b7814bb69d2e831",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/hsrp_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/bgp_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/hsrp_interfaces/hsrp_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_address_family/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0024b419e2bca58180d26c864588f59ce5941f5f5420c9df5ed7063e672f391a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/hsrp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/bgp_address_family/bgp_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ab2d098a5e4d9f5dfccc95ec69210831e191ce526eb08fb55029517f27bca8f2",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_templates",
+ "name": "plugins/module_utils/network/nxos/config/lag_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_templates/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/lag_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bgp_templates/bgp_templates.py",
+ "name": "plugins/module_utils/network/nxos/config/lag_interfaces/lag_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "019a61c240a5cc8c2177c4402ffecbfd035248efa3426db05f91d8c94da57246",
+ "chksum_sha256": "cb71721dc87b58474b79a72e3ef13d436c3cc5c65ddf79383e6a2a849c94bad9",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/prefix_lists",
+ "name": "plugins/module_utils/network/nxos/config/interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/prefix_lists/prefix_lists.py",
+ "name": "plugins/module_utils/network/nxos/config/interfaces/interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bdd9700bbce8280fc77f1fe6c2cb02ae15fd4680ab4076c50bb53c7ea0b35a69",
+ "chksum_sha256": "32368a2909b02f1b25206c7ee67e5e8da1ecfdc80800905a260decb71b2b873f",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/prefix_lists/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/snmp_server",
+ "name": "plugins/module_utils/network/nxos/config/lldp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/snmp_server/snmp_server.py",
+ "name": "plugins/module_utils/network/nxos/config/lldp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40b58f658b1b04583ed22216cb79b87040e113511d55c523c6dc17c8b1b9b103",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/snmp_server/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/lldp_interfaces/lldp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "6c15204ad386b15117fcb44a6e62be71db4eb03b7f9aac53f7bbdd29020a4690",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lag_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/route_maps",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lag_interfaces/lag_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/route_maps/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "59a72ebb6ded18c885dc7716f707873e9b2e4819c1f1af38d43d2f60d3ebb436",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lag_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/route_maps/route_maps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "8e85bd83013fe73285b78d1dfb5bdbcff5715535b9433199dcdde1beebb4c1da",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/l2_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/prefix_lists",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/l2_interfaces/l2_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/prefix_lists/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "16e09269f14c79c8b4ec91aa5eccc89af3d385dd779dd37dc1434933d9e9bc72",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/l2_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/prefix_lists/prefix_lists.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "445f9a55c8fd372fdbad1834ed7598d9281a376fd5de5e749218995310ccea81",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bfd_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/ospfv2",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bfd_interfaces/bfd_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/ospfv2/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85a308e34d42cc8d6a01e307d61f80aad2d3ebd4144654514b9855f96a1608f4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/bfd_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/ospfv2/ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "9294a53417222ea7bd4cf1b003780c5774924a288309b70bb353b0c64e74bd7e",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lacp_interfaces",
+ "name": "plugins/module_utils/network/nxos/config/lacp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lacp_interfaces/lacp_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/config/lacp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "023062f4e902d13603541dfa4ed7556aaf55c9f7e09624e82404ad99b375e1d3",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/facts/lacp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/config/lacp_interfaces/lacp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "4a3ca5d6369f95058d8133277cfda2b0f04b6465bebce78756e6a4b45adf1d20",
"format": 1
},
{
@@ -2766,6 +3053,13 @@
"format": 1
},
{
+ "name": "plugins/module_utils/network/nxos/utils/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
"name": "plugins/module_utils/network/nxos/utils/telemetry",
"ftype": "dir",
"chksum_type": null,
@@ -2787,13 +3081,6 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/utils/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
"name": "plugins/module_utils/network/nxos/utils/utils.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -2808,6 +3095,13 @@
"format": 1
},
{
+ "name": "plugins/module_utils/network/nxos/cmdref/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
"name": "plugins/module_utils/network/nxos/cmdref/telemetry",
"ftype": "dir",
"chksum_type": null,
@@ -2829,5400 +3123,5421 @@
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/cmdref/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config",
+ "name": "plugins/module_utils/network/nxos/facts",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/l3_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/hostname",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e63f600eb2827bdd4a6fd5d16a728fe5a179a1bb8e29baa6da806e21bcf9cac9",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config/l3_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/hostname/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/telemetry",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config/telemetry/telemetry.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "727e58c53e65e0803777cc288930ceb07f415e2202e6cf5a8b58ebbdffbd46d8",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config/telemetry/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/hostname/hostname.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "3c3330ec64341ba82a3d65827a9f33c088b95f64fd79f859db5b16334be443d9",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospfv2",
+ "name": "plugins/module_utils/network/nxos/facts/ospfv3",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospfv2/ospfv2.py",
+ "name": "plugins/module_utils/network/nxos/facts/ospfv3/ospfv3.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9294a53417222ea7bd4cf1b003780c5774924a288309b70bb353b0c64e74bd7e",
+ "chksum_sha256": "39f79531c9741bae023e8c9db926a5463c311093057a49422bd7e25536e6cab9",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospfv2/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/ospfv3/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lldp_interfaces",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config/lldp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lldp_interfaces/lldp_interfaces.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6c15204ad386b15117fcb44a6e62be71db4eb03b7f9aac53f7bbdd29020a4690",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config/ospfv3",
+ "name": "plugins/module_utils/network/nxos/facts/ospf_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospfv3/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/ospf_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospfv3/ospfv3.py",
+ "name": "plugins/module_utils/network/nxos/facts/ospf_interfaces/ospf_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ee78999cd1450377200ffd92fa3e925ecf413b3c1cb1a4f11e084facf5844d9",
+ "chksum_sha256": "6df58dfe3eba436f0b43a2f4f089d58de078ca53907811f49c6ff9295a36e7b5",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/acls",
+ "name": "plugins/module_utils/network/nxos/facts/fc_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/acls/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/fc_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/acls/acls.py",
+ "name": "plugins/module_utils/network/nxos/facts/fc_interfaces/fc_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b04b56fe6b5f62801329741e2707a9c0d2030ba1963590ad4608fb28eba36706",
+ "chksum_sha256": "cfe4284974d9e939b38db93c36988c773a5c75743b2754e734ad1a814617f232",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/hostname",
+ "name": "plugins/module_utils/network/nxos/facts/telemetry",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/hostname/hostname.py",
+ "name": "plugins/module_utils/network/nxos/facts/telemetry/telemetry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab919426e1bd33f667ae875af49ed837d5f3f1e3598b878a290c3bc35004765d",
+ "chksum_sha256": "9b34d0128aee2e7a33355d4132f4a5ba31a0d5ac90ccc69d786c7bc2abec568c",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/hostname/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/telemetry/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/vlans",
+ "name": "plugins/module_utils/network/nxos/facts/logging_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/vlans/vlans.py",
+ "name": "plugins/module_utils/network/nxos/facts/logging_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd731a6f5a475a54f70cfc7f872737ebdc7b06e5a6b5f612ce2e92c0cc6bbfdc",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/vlans/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/logging_global/logging_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "a5b8bd957449018ead97c56ab6c4c7b3945f6a176a9a6be0dee1bfaa02878939",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_address_family",
+ "name": "plugins/module_utils/network/nxos/facts/acl_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_address_family/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/acl_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_address_family/bgp_address_family.py",
+ "name": "plugins/module_utils/network/nxos/facts/acl_interfaces/acl_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab2d098a5e4d9f5dfccc95ec69210831e191ce526eb08fb55029517f27bca8f2",
+ "chksum_sha256": "dcde32d987bef1d67842179d8e8dad0077899c8009893fbef7ef89ab1b09ed05",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ntp_global",
+ "name": "plugins/module_utils/network/nxos/facts/acls",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ntp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/acls/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ntp_global/ntp_global.py",
+ "name": "plugins/module_utils/network/nxos/facts/acls/acls.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6936bf5deb7b2708e4a6373b325c3155a14bbc454419ed7f516999e92946415c",
+ "chksum_sha256": "d3bbdfc94cda205574023cea71125acc6b9511f3dcac5eb89aa9b6001c503c85",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/l3_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/interfaces/interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/l3_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32368a2909b02f1b25206c7ee67e5e8da1ecfdc80800905a260decb71b2b873f",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/l3_interfaces/l3_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "cd91ce2d24a35f39f37f8275196584b199f99d885a0f915fd32a02e70a1bac02",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/logging_global",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/logging_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_global/bgp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ef399135879accd85c0495e76be7c6fbf1a1141c57fe10ed98666d1f2cd174e4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/logging_global/logging_global.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7d1de04722e657bd71a3131e7bba17bc0178031c1c91cfa87df16e0eccc6878",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_neighbor_address_family",
+ "name": "plugins/module_utils/network/nxos/facts/lacp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_neighbor_address_family/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/lacp/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_neighbor_address_family/bgp_neighbor_address_family.py",
+ "name": "plugins/module_utils/network/nxos/facts/lacp/lacp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8a658ac17e3b61cc0105d4567c8793adf8b2bd0cc0bcd68cf86353ac3116db3",
+ "chksum_sha256": "b0f05127c1e721158123ed8ef3ddd6259eebdb4226a2b289ee70a8237d167c8a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lldp_global",
+ "name": "plugins/module_utils/network/nxos/facts/lldp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lldp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/lldp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lldp_global/lldp_global.py",
+ "name": "plugins/module_utils/network/nxos/facts/lldp_global/lldp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1804e5abcd0b77a8b5c675c3bb4b90731e8932bf882f8445f01e839013660845",
+ "chksum_sha256": "5a4c38941c0836cfcbe5870de76c13b5e991772d2514d919b52a0eb8861b1b8b",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/acl_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/acl_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_templates/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/acl_interfaces/acl_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_templates/bgp_templates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0fe0d6c19ce427a02332486d5c2b3d3905634d7eadc1f76b37ca21799e018bde",
+ "chksum_sha256": "019a61c240a5cc8c2177c4402ffecbfd035248efa3426db05f91d8c94da57246",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/facts.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0ca5196b5ac6835d5840ea9cbd58e2ed4b4554cd0b3f5a6625f30cde5ce03033",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospf_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/static_routes",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospf_interfaces/ospf_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/static_routes/static_routes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a12a9c127ce960b84820ac79286672f458ab56934903abbfb52e74e2e9c98b6",
+ "chksum_sha256": "a925d3f94dabc2e3ec8d6a1b514e977e4aa7406b720526d5e98f4133f224258c",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/ospf_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/static_routes/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_global",
+ "name": "plugins/module_utils/network/nxos/facts/vlans",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_global/bgp_global.py",
+ "name": "plugins/module_utils/network/nxos/facts/vlans/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e0d590ee1bf37a813910101a74b0d52709cfb6e174828990d34125a06a2ae570",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_global/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/vlans/vlans.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "02438c1af2fd68b76f7f368fcf8e1199bb61922c892a467ba78eb73dd081631a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/static_routes",
+ "name": "plugins/module_utils/network/nxos/facts/hsrp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/static_routes/static_routes.py",
+ "name": "plugins/module_utils/network/nxos/facts/hsrp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "329e8b628dae287612efa80e2ca7b5aaeefd530c19ddd8bd37a5b7774eb26aef",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/static_routes/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/hsrp_interfaces/hsrp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0024b419e2bca58180d26c864588f59ce5941f5f5420c9df5ed7063e672f391a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/fc_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/snmp_server",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/fc_interfaces/fc_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/snmp_server/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0438d7d1f23bc654fa0488e35002d8aff313dcf8afbef7412b0c68ee4e128f73",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/fc_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/snmp_server/snmp_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "40b58f658b1b04583ed22216cb79b87040e113511d55c523c6dc17c8b1b9b103",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/route_maps",
+ "name": "plugins/module_utils/network/nxos/facts/bfd_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/route_maps/route_maps.py",
+ "name": "plugins/module_utils/network/nxos/facts/bfd_interfaces/bfd_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f4b8b99ca2c2ad045886e77c5f0e412dd9d906cd801daf0e462b02e719e6d47",
+ "chksum_sha256": "85a308e34d42cc8d6a01e307d61f80aad2d3ebd4144654514b9855f96a1608f4",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/route_maps/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/bfd_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lacp",
+ "name": "plugins/module_utils/network/nxos/facts/ntp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lacp/lacp.py",
+ "name": "plugins/module_utils/network/nxos/facts/ntp_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9612ac4a62de771ca9cef65d733373105e180a060227fff8330bc814d5cdde96",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lacp/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/ntp_global/ntp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "7a0e41e513e3e657d058c758de0f4f107cb10c35c806968552c6b0f42fea7377",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/hsrp_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/legacy",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/hsrp_interfaces/hsrp_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/legacy/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d5eb7ab8b3ff9d4c90321a6372a73f5087712b5d269eb1351380b93e5f1e207",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/hsrp_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/legacy/base.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "31d68732e6e3dccbb6ce2dacba8a4497cfe827638587bcd5aa6a4f6418a34a93",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_templates",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_neighbor_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_templates/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_neighbor_address_family/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bgp_templates/bgp_templates.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_neighbor_address_family/bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd3eb3ca52063455c3cbbb0fba6bdace21254000f3bd662466950098f82f72c1",
+ "chksum_sha256": "a8658e63caa17970c687a6b260d8067ec1a83acc322b1efa0608ca9711b6a598",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/prefix_lists",
+ "name": "plugins/module_utils/network/nxos/facts/l2_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/prefix_lists/prefix_lists.py",
+ "name": "plugins/module_utils/network/nxos/facts/l2_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "445f9a55c8fd372fdbad1834ed7598d9281a376fd5de5e749218995310ccea81",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/prefix_lists/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/l2_interfaces/l2_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "16e09269f14c79c8b4ec91aa5eccc89af3d385dd779dd37dc1434933d9e9bc72",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/snmp_server",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/snmp_server/snmp_server.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_address_family/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "213fd101832e7b149113afc178eac76dbff00448cec80e65c6c3e6e0d434e914",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/snmp_server/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/bgp_address_family/bgp_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "9dceb9b1618124325de42f3625419e16b34dfa1b118f41d24742d16d188bd952",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lag_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/lag_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lag_interfaces/lag_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/lag_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb71721dc87b58474b79a72e3ef13d436c3cc5c65ddf79383e6a2a849c94bad9",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lag_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/lag_interfaces/lag_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "59a72ebb6ded18c885dc7716f707873e9b2e4819c1f1af38d43d2f60d3ebb436",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/l2_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/l2_interfaces/l2_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/interfaces/interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30afeab2a5eff47a83411b870b078040d11cb84a5b0c094b5b7814bb69d2e831",
+ "chksum_sha256": "3c4a7320158d21677a5bfa159a9c2138ffec2ce98996ed5573c061b163b8daad",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/l2_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bfd_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/lldp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bfd_interfaces/bfd_interfaces.py",
+ "name": "plugins/module_utils/network/nxos/facts/lldp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "149dfe5bc61c80053e6fc498e2b98b70d1482ef8dd51ebb8ae7f84776e3b69b6",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/bfd_interfaces/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/lldp_interfaces/lldp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "6a9af0b059926122015f40536c839c8dc3fbb14d14cfc6a6ae02d85d0de14a7a",
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lacp_interfaces",
+ "name": "plugins/module_utils/network/nxos/facts/route_maps",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/network/nxos/config/lacp_interfaces/lacp_interfaces.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4a3ca5d6369f95058d8133277cfda2b0f04b6465bebce78756e6a4b45adf1d20",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/nxos/config/lacp_interfaces/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "plugins/module_utils/network/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/route_maps/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/module_utils/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/route_maps/route_maps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "f15891603e1c71bcfee05df6fa749fc807f756b6a7413ac545b673ba76f57543",
"format": 1
},
{
- "name": "plugins/cliconf",
+ "name": "plugins/module_utils/network/nxos/facts/prefix_lists",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/cliconf/__init__.py",
+ "name": "plugins/module_utils/network/nxos/facts/prefix_lists/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/cliconf/nxos.py",
+ "name": "plugins/module_utils/network/nxos/facts/prefix_lists/prefix_lists.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13c8aee982db809a18709e985a54757e2390122a5a27461c2fbfbcde1286c926",
+ "chksum_sha256": "bdd9700bbce8280fc77f1fe6c2cb02ae15fd4680ab4076c50bb53c7ea0b35a69",
"format": 1
},
{
- "name": "plugins/action",
+ "name": "plugins/module_utils/network/nxos/facts/ospfv2",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/bgp_global.py",
+ "name": "plugins/module_utils/network/nxos/facts/ospfv2/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/static_routes.py",
+ "name": "plugins/module_utils/network/nxos/facts/ospfv2/ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "52b55a9bc2c837c33f85f0d3a20c5a392fc26a589b7c8d61e433d173fee934cc",
"format": 1
},
{
- "name": "plugins/action/feature.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/module_utils/network/nxos/facts/lacp_interfaces",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/vxlan_vtep_vni.py",
+ "name": "plugins/module_utils/network/nxos/facts/lacp_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/smu.py",
+ "name": "plugins/module_utils/network/nxos/facts/lacp_interfaces/lacp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "023062f4e902d13603541dfa4ed7556aaf55c9f7e09624e82404ad99b375e1d3",
"format": 1
},
{
- "name": "plugins/action/ospfv2.py",
+ "name": "plugins/module_utils/network/nxos/nxos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "571d3839f5fdab7ee6046ccb1b7e3753877b862f7efd51d43973513910d60201",
"format": 1
},
{
- "name": "plugins/action/evpn_global.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/route_maps.py",
+ "name": "plugins/modules/nxos_lacp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "bf38b4169e3605546f6f4d7138529c1ba3effb60f74ea844f205ef2160e800c1",
"format": 1
},
{
- "name": "plugins/action/hostname.py",
+ "name": "plugins/modules/nxos_bfd_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "dd7b49293300983b1bfe085206f30ad341bf3af6ee17aad238186ad5d1bec86a",
"format": 1
},
{
- "name": "plugins/action/l2_interfaces.py",
+ "name": "plugins/modules/nxos_gir.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "05675c570b78691538eaa20c62f548b41885b8db20018e5805a6ced813eaff40",
"format": 1
},
{
- "name": "plugins/action/snmp_contact.py",
+ "name": "plugins/modules/nxos_ntp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "48f03d894a18905e2934f7f22c43b8b78e66e8f3b2a39770e94b45907977a0a0",
"format": 1
},
{
- "name": "plugins/action/devicealias.py",
+ "name": "plugins/modules/nxos_vsan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "7a775ad1dba4b6997070b43beb0eff5a71f94cf0d7462c64d7761710663cb865",
"format": 1
},
{
- "name": "plugins/action/ntp.py",
+ "name": "plugins/modules/nxos_udld.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "f9d531c20f551d644be87cf218c7bf8175f531a3190c60c7a20ae281203fbe8d",
"format": 1
},
{
- "name": "plugins/action/pim.py",
+ "name": "plugins/modules/nxos_ospf_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "8ccbbc24a2038b70b2486f92b33ceaecf1932dbc2316167526855312dc6c599d",
"format": 1
},
{
- "name": "plugins/action/acl.py",
+ "name": "plugins/modules/nxos_gir_profile_management.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e5c49276a499ef2988a7047ec75a5464f13e5b458f0a7d6af46c8fd8e3f71c89",
"format": 1
},
{
- "name": "plugins/action/interfaces.py",
+ "name": "plugins/modules/nxos_evpn_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "3cb0c7e66e32783f346744b3b08edd63f2d3ac82b67addb55597a19b48db7e9c",
"format": 1
},
{
- "name": "plugins/action/vrf_af.py",
+ "name": "plugins/modules/nxos_vpc_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "7361c2f7afeaedada1c42037a5f54d16b934ef5993f155819e0edb731c21de1f",
"format": 1
},
{
- "name": "plugins/action/bfd_interfaces.py",
+ "name": "plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/bgp.py",
+ "name": "plugins/modules/nxos_acls.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "283955d63ab51126d854605578e07115bb252047205fe90749563bf343bdb00a",
"format": 1
},
{
- "name": "plugins/action/logging.py",
+ "name": "plugins/modules/nxos_zone_zoneset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "44ead6bd11cb8112b667ecc6f7d1fd0751941737d6fc1154c73501e4abdd42a0",
"format": 1
},
{
- "name": "plugins/action/igmp_snooping.py",
+ "name": "plugins/modules/nxos_bgp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "66c33d7e478c4d39b8899962fc8f152cab6651220069cc2a89cef2e299d1547e",
"format": 1
},
{
- "name": "plugins/action/prefix_lists.py",
+ "name": "plugins/modules/nxos_snmp_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "9ad2b68c98c78ad1082727d1997b52f29f03c4d8c9ee734ee40f7151bec58fd5",
"format": 1
},
{
- "name": "plugins/action/vrf.py",
+ "name": "plugins/modules/nxos_lag_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "8aa8104d1be201ba069a75576473610d9a3d5e6e9778687b8f83121557d20a73",
"format": 1
},
{
- "name": "plugins/action/lacp_interfaces.py",
+ "name": "plugins/modules/nxos_route_maps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "3daed656349f74ca544cca0df90fe653384af61e5109235c2a0643e67593ddbf",
"format": 1
},
{
- "name": "plugins/action/snapshot.py",
+ "name": "plugins/modules/nxos_banner.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "a98cf0828178ff3233b836f7a1d1478ad4f5020143b0a6435630737a82b4110e",
"format": 1
},
{
- "name": "plugins/action/vxlan_vtep.py",
+ "name": "plugins/modules/nxos_igmp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "ec8870d4fb6ecf6089717936138d43fc5fe54381994a0a7d5e8299a94f9ade67",
"format": 1
},
{
- "name": "plugins/action/ping.py",
+ "name": "plugins/modules/nxos_static_routes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "2ef2625e9e8c20f8012c57596cf06ac0ac8b28662ee78e98e2911354dfea6f32",
"format": 1
},
{
- "name": "plugins/action/zone_zoneset.py",
+ "name": "plugins/modules/nxos_ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "ceba0e4676fb124fe23e8c0e3e9358e247f6cf34f4af0c2e09db648692b03262",
"format": 1
},
{
- "name": "plugins/action/facts.py",
+ "name": "plugins/modules/nxos_snmp_contact.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "936a5e9b00d8c46ed39a8818f5806971f16fc8cebd3f5106e6909b7ed8395ac1",
"format": 1
},
{
- "name": "plugins/action/gir.py",
+ "name": "plugins/modules/nxos_telemetry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "5bf9c2cfa138af73b7d1e47a1d5345049dd93a00933eeb94e39a66d94ec759d0",
"format": 1
},
{
- "name": "plugins/action/snmp_server.py",
+ "name": "plugins/modules/nxos_snmp_traps.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "73ef124bdd93a1b8d6d7d55870e04c99bb29cde93c0c400884f06c4972057f75",
"format": 1
},
{
- "name": "plugins/action/system.py",
+ "name": "plugins/modules/nxos_command.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "19c20fdbf3d421a6a487cb76e85904fcfc62dfe26569000e370e6d829c0fb838",
"format": 1
},
{
- "name": "plugins/action/vpc.py",
+ "name": "plugins/modules/nxos_l2_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "06d0406435fadc33d78bdc5dc2b541d3cdb08b972497a7fd86357354638056a7",
"format": 1
},
{
- "name": "plugins/action/telemetry.py",
+ "name": "plugins/modules/nxos_pim.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "a06919bdfbaab64ef58295c408def23dc36336b630e12d80a2a80e3a31a53a85",
"format": 1
},
{
- "name": "plugins/action/vlans.py",
+ "name": "plugins/modules/nxos_vpc.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "edd715fc9e823dbf37a31a16de8f680132e0cc9b2feea46ab073d34f8098acf5",
"format": 1
},
{
- "name": "plugins/action/bgp_af.py",
+ "name": "plugins/modules/nxos_igmp_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "c8cbb9aec6383daa0e4a82c2c4c3d29ef313c078404d70413301abdd98513c17",
"format": 1
},
{
- "name": "plugins/action/ospf_vrf.py",
+ "name": "plugins/modules/nxos_bgp_templates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "955e3860781eef5083a0fa88e80d95803645ca485edd357b5867d3706bbc45bb",
"format": 1
},
{
- "name": "plugins/action/aaa_server.py",
+ "name": "plugins/modules/nxos_lldp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "b444c878e0d7289fb705e56985724155c15dd9f5b8de4ad2fa581ddc77b29e0e",
"format": 1
},
{
- "name": "plugins/action/linkagg.py",
+ "name": "plugins/modules/nxos_overlay_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "f2e7bc1eff3259825c6eae134d87e92ee922cc3fd423b63fac821c659ea891d8",
"format": 1
},
{
- "name": "plugins/action/snmp_community.py",
+ "name": "plugins/modules/nxos_reboot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "826e076559d7143fc03a3de9f0e5ec125241fd3196037f2a1a524e033e75b8de",
"format": 1
},
{
- "name": "plugins/action/ospf_interfaces.py",
+ "name": "plugins/modules/nxos_bfd_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e2121178e29e5e34ca77f7ed2dc78e7fdcf6ddbe7a9ab16545c39c11dc00f7ce",
"format": 1
},
{
- "name": "plugins/action/lldp.py",
+ "name": "plugins/modules/nxos_config.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "160aa4af3ddc22a3a35d277eded9a0bf54df4a0b7b44a11fa2c077b5cca71db5",
"format": 1
},
{
- "name": "plugins/action/snmp_location.py",
+ "name": "plugins/modules/nxos_vxlan_vtep.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "9aefbaf9557a93bafc8de522a59e3246967964e658c9ee7631fa202e0b5215fa",
"format": 1
},
{
- "name": "plugins/action/snmp_host.py",
+ "name": "plugins/modules/nxos_aaa_server_host.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "b7560a2b37d9b93af17d9d0241db8cc701778ff974fcc4f16204fcaa991c1f2a",
"format": 1
},
{
- "name": "plugins/action/overlay_global.py",
+ "name": "plugins/modules/nxos_vrf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "6145ff3c78f13d4bcd3d338da62805a3a1e594b80c8a72de7ff83059d3d5eb04",
"format": 1
},
{
- "name": "plugins/action/pim_rp_address.py",
+ "name": "plugins/modules/nxos_acl_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "7d3aa83fa072e36c33233e8244b6d7f4b6b28eb355d5045dbffbb7ebd2cfbf8b",
"format": 1
},
{
- "name": "plugins/action/snmp_user.py",
+ "name": "plugins/modules/nxos_igmp_snooping.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "ee94035d9f4a71393a16670f8e269064476b6da5d99071a4f23cfc253da13703",
"format": 1
},
{
- "name": "plugins/action/bgp_neighbor_af.py",
+ "name": "plugins/modules/nxos_snapshot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "5cf9eab5ad1ae6953579860b7365ce6e6d604f69fe00389f656c63f99cb3b24f",
"format": 1
},
{
- "name": "plugins/action/l3_interfaces.py",
+ "name": "plugins/modules/nxos_hsrp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "8e0bccf6fb0aa7ce0cbacacb8acf9821015472213a2319df618e7eef4d03a52f",
"format": 1
},
{
- "name": "plugins/action/vtp_version.py",
+ "name": "plugins/modules/nxos_hsrp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "5b9cea1b4a4a770fdf3b6dad829247384c19aefeaf5d300f8730ebbeb8c7bc0d",
"format": 1
},
{
- "name": "plugins/action/hsrp.py",
+ "name": "plugins/modules/nxos_vxlan_vtep_vni.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "00cc8a5f8826d10b27cbea4c1523a5dc48a57d174ae7ecfd918e02d506902b80",
"format": 1
},
{
- "name": "plugins/action/bgp_neighbor.py",
+ "name": "plugins/modules/nxos_facts.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "3d47e0e98adf0b507c6e725efcb94b573d9100679336c3beaf8193ee0247f24b",
"format": 1
},
{
- "name": "plugins/action/evpn_vni.py",
+ "name": "plugins/modules/nxos_lldp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "485b35aeac40cd9d3dfaf18a7f5481fc14796ce054387af3dca72160677f6dd7",
"format": 1
},
{
- "name": "plugins/action/igmp_interface.py",
+ "name": "plugins/modules/nxos_vlans.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "d603737717f0b6a697fac057acf07a763f002aa641026cd6436b2bc3e4611d3b",
"format": 1
},
{
- "name": "plugins/action/vtp_domain.py",
+ "name": "plugins/modules/nxos_vrrp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "a261501d6200ac9685d544c2dd29f284377da8592864f7ad26e74ef4e55d5567",
"format": 1
},
{
- "name": "plugins/action/file_copy.py",
+ "name": "plugins/modules/nxos_rollback.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "7fec43544c7cb26eb84d9cdd22e74a4f042de04943e5896fcd45a1020f003e82",
"format": 1
},
{
- "name": "plugins/action/vrrp.py",
+ "name": "plugins/modules/nxos_aaa_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "c7d0ba90c00bef7d014ca922ce5e66ea10fe116bb9af88bfdf671402527b404b",
"format": 1
},
{
- "name": "plugins/action/install_os.py",
+ "name": "plugins/modules/nxos_vtp_password.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e6cb79ae92bdf1ab82291e4b331dfeb53b42f004a416908268380813905b8215",
"format": 1
},
{
- "name": "plugins/action/lag_interfaces.py",
+ "name": "plugins/modules/nxos_rpm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e2ba7d2ec024a075a96be051996d4eba6f6e370058c6df454b89ac7c36759f58",
"format": 1
},
{
- "name": "plugins/action/rollback.py",
+ "name": "plugins/modules/nxos_evpn_vni.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "cccbafbd724df1855a60cebf93e8429be7edc59582827b0ed6c5af84a877565a",
"format": 1
},
{
- "name": "plugins/action/rpm.py",
+ "name": "plugins/modules/nxos_vtp_domain.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e76836b4b68b3c32d5a146bcb6738009872e3e69d3afffe269e50846d4a84058",
"format": 1
},
{
- "name": "plugins/action/lacp.py",
+ "name": "plugins/modules/nxos_nxapi.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "cf44a3e80959a805238202c49a5a639d6a1ecf5f6d8191b93b3f6db314a80c15",
"format": 1
},
{
- "name": "plugins/action/vsan.py",
+ "name": "plugins/modules/nxos_vtp_version.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "2c822abea7b871b5781dcf46856c07f0214793bc3768743d45966ae5a0b6e779",
"format": 1
},
{
- "name": "plugins/action/ospf.py",
+ "name": "plugins/modules/nxos_snmp_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "d16699e2fd607da804183e1fe29c6981095303d22b836f865f2046dc2c50ecf0",
"format": 1
},
{
- "name": "plugins/action/config.py",
+ "name": "plugins/modules/nxos_vrf_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "a2876512450aa2920f88a2d408eb72cd7976ec4124a95b058d57427d0f939fb1",
"format": 1
},
{
- "name": "plugins/action/hsrp_interfaces.py",
+ "name": "plugins/modules/nxos_ospfv3.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "5f7553546e438131fb6a81fb5c994270b900e30349f675a9f9ac77cb20b4a218",
"format": 1
},
{
- "name": "plugins/action/__init__.py",
+ "name": "plugins/modules/nxos_file_copy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "03612d9f597a665113fa3bdb784e620fa73f4ad9f2a237a85aa531147696b492",
"format": 1
},
{
- "name": "plugins/action/nxapi.py",
+ "name": "plugins/modules/nxos_bgp_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "98ba4c208cb7259d49b76f7b48cd1baa8b6f049cac2e9f64142c5011e12c2eee",
"format": 1
},
{
- "name": "plugins/action/nxos.py",
+ "name": "plugins/modules/nxos_l3_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "b95b46e32c56259e36e705a3249140c51028bc43376257ef0913e93149918c1e",
"format": 1
},
{
- "name": "plugins/action/l3_interface.py",
+ "name": "plugins/modules/nxos_lacp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "dc1a726578647a2b3f08a76ca542daa557960de383812b9b4067af37a49359d1",
"format": 1
},
{
- "name": "plugins/action/pim_interface.py",
+ "name": "plugins/modules/nxos_pim_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "62a33fbb98411f4b017a3d884645f48f8e79049f27e0f3d87698b3bcb7f0c48c",
"format": 1
},
{
- "name": "plugins/action/vtp_password.py",
+ "name": "plugins/modules/nxos_system.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "34a51f70bb99431fadc63a66270533d88d97d3a6eb422578a78c1f59da7c89e8",
"format": 1
},
{
- "name": "plugins/action/bfd_global.py",
+ "name": "plugins/modules/nxos_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "5535b8856eb027aabcd9885c29602a14a92c40554ca777b553879ae1bd8b18b6",
"format": 1
},
{
- "name": "plugins/action/vpc_interface.py",
+ "name": "plugins/modules/nxos_ping.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "9d2648ff85a7fee59b9b0b099afde09e1f7f11c7e5d9f033d27a2af0953e854e",
"format": 1
},
{
- "name": "plugins/action/command.py",
+ "name": "plugins/modules/nxos_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "3421c84ad96b685dce41814131c5e17d336d58fdaac332ea01b72606ffdae07c",
"format": 1
},
{
- "name": "plugins/action/banner.py",
+ "name": "plugins/modules/nxos_devicealias.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "8fcfa22b50abdddab25a9235c03d8ef41055a7f98dfa6ab1af4a4e86fa6c1e10",
"format": 1
},
{
- "name": "plugins/action/logging_global.py",
+ "name": "plugins/modules/nxos_fc_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "2b7cb927469f1aab619f06daa2442fd6c4267898dfd7f9f471d58b9970eb5b27",
"format": 1
},
{
- "name": "plugins/action/user.py",
+ "name": "plugins/modules/nxos_vrf_af.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "bcddf538b58de6ee3355af3f5af10af8b95268b4a2bdd7166d2c451a7484228d",
"format": 1
},
{
- "name": "plugins/action/acl_interfaces.py",
+ "name": "plugins/modules/nxos_udld_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "2346b5a0aec6682e11c7ee21cf5f77fd1b53bd4839d11a7da4e23e8378049aaa",
"format": 1
},
{
- "name": "plugins/action/bgp_address_family.py",
+ "name": "plugins/modules/nxos_install_os.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "5a8d563421835390e3d46ff4cf4feadd2ff33a43d58a6f8d828a8c9f1f79d951",
"format": 1
},
{
- "name": "plugins/action/lldp_interfaces.py",
+ "name": "plugins/modules/nxos_snmp_host.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "ae6e69d2c2af158c70e81aed0f2a6dbccca2c69033861bd697af4dba0c5183c7",
"format": 1
},
{
- "name": "plugins/action/interface.py",
+ "name": "plugins/modules/nxos_snmp_location.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "a11d2a363173b645bdf9c8cd1cdb80e77b94d4c169069614deae90a441a1de22",
"format": 1
},
{
- "name": "plugins/action/ntp_options.py",
+ "name": "plugins/modules/nxos_prefix_lists.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "fbc6db3871917b6239c2e37caede67c3a6f69e2c578357f5831bf74173c09bd5",
"format": 1
},
{
- "name": "plugins/action/aaa_server_host.py",
+ "name": "plugins/modules/nxos_snmp_community.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "4225f8c4053ec681cb9fadb841f3b0660771879e740d13858c459fedbfc5aa3c",
"format": 1
},
{
- "name": "plugins/action/bgp_neighbor_address_family.py",
+ "name": "plugins/modules/nxos_logging_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "1493bf17b72bd52b2f045a3d3c8767b2e414600762fadf79c065dda1b99e47d2",
"format": 1
},
{
- "name": "plugins/action/static_route.py",
+ "name": "plugins/modules/nxos_hostname.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "c6e80d807e983caef125de3683283634a0fb850861b4c240a7446c36660b8a22",
"format": 1
},
{
- "name": "plugins/action/vlan.py",
+ "name": "plugins/modules/nxos_bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "f2861a072c7f54bdf90fa0fca06b8ce0af964f3073e27a4ac00940a7fdfeb26a",
"format": 1
},
{
- "name": "plugins/action/lldp_global.py",
+ "name": "plugins/modules/nxos_pim_rp_address.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "048f2a6e71622db905600dbc6f0862564275c438ac0a401aca0b2c792605d10f",
"format": 1
},
{
- "name": "plugins/action/vrf_interface.py",
+ "name": "plugins/modules/nxos_feature.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "8b8a6deea08ebeae462e2fefbcc4762dd67b71bc5a5d0f51e3ea3595375888da",
"format": 1
},
{
- "name": "plugins/action/ntp_auth.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/filter",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/udld.py",
+ "name": "plugins/filter/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/acl_interface.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/cliconf",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/udld_interface.py",
+ "name": "plugins/cliconf/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/interface_ospf.py",
+ "name": "plugins/cliconf/nxos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "257404c8fb3e42b3277eba151008f583f3cfd5a26f262b48c2b5b6f1e220e305",
"format": 1
},
{
- "name": "plugins/action/igmp.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/httpapi",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/ntp_global.py",
+ "name": "plugins/httpapi/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/l2_interface.py",
+ "name": "plugins/httpapi/nxos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "acfbda7df81189f8211a3d8af3d7e567bc719328e075531c154671cd97bd92be",
"format": 1
},
{
- "name": "plugins/action/snmp_traps.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/gir_profile_management.py",
+ "name": "plugins/inventory/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/acls.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "name": "plugins/doc_fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/action/reboot.py",
+ "name": "plugins/doc_fragments/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/action/ospfv3.py",
+ "name": "plugins/doc_fragments/nxos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7c78254555923b2a83bccadc6d71c48d5a84c1a67941ee3666222b163fc7a4",
+ "chksum_sha256": "75b71c234b0886b9a8c7cb54e9b1a2de2e433f8a19608a3ba45e67037c942943",
"format": 1
},
{
- "name": "plugins/doc_fragments",
+ "name": "tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/doc_fragments/__init__.py",
+ "name": "tests/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/doc_fragments/nxos.py",
+ "name": "tests/config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "75b71c234b0886b9a8c7cb54e9b1a2de2e433f8a19608a3ba45e67037c942943",
+ "chksum_sha256": "4129945061440c8d8c281590b8054b194f5958833c722aa40e878b09038c380e",
"format": 1
},
{
- "name": "plugins/filter",
+ "name": "tests/integration",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/filter/__init__.py",
+ "name": "tests/integration/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/netconf",
+ "name": "tests/integration/labs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/netconf/__init__.py",
+ "name": "tests/integration/labs/single.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fe76aae4b59ac2c628191394f5cfce79e8564ecb72be6157e14f32c7e8a97093",
"format": 1
},
{
- "name": "plugins/netconf/nxos.py",
+ "name": "tests/integration/network-integration.requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3dceb9254be3c61b493715b4d993f42ab9c088dd103eb6b59563832aec26c22e",
+ "chksum_sha256": "234d0353684f5a41c84eabb03e13539abf602dc0de061a90e129618976b3c106",
"format": 1
},
{
- "name": "plugins/httpapi",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/target-prefixes.network",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1d281d4e38e720eeeee03bdd1df2429947e358ec15ab276b0b29765d99953d63",
"format": 1
},
{
- "name": "plugins/httpapi/__init__.py",
+ "name": "tests/integration/test_integration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "a4966cf32b6505b51255468ffa72db6f36b948b07aa5b9ae3313607a93e48988",
"format": 1
},
{
- "name": "plugins/httpapi/nxos.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "acfbda7df81189f8211a3d8af3d7e567bc719328e075531c154671cd97bd92be",
+ "name": "tests/integration/targets",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/inventory",
+ "name": "tests/integration/targets/nxos_zone_zoneset",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/inventory/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_zone_zoneset/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".flake8",
+ "name": "tests/integration/targets/nxos_zone_zoneset/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f357bc604a44fe8a5912ec2b9067b734a88d1026da98d5993905d7b34ab44357",
+ "chksum_sha256": "97b10ee26b05c01b328d79887c41479bd2bf559184fb1df4af53645db6e109ba",
"format": 1
},
{
- "name": "pyproject.toml",
+ "name": "tests/integration/targets/nxos_zone_zoneset/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_zone_zoneset/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82a737e4c91699b90905fc2d347acaf5c3eb4567cc0750cf122c859d0eed78ed",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": ".isort.cfg",
+ "name": "tests/integration/targets/nxos_zone_zoneset/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3fc5fcc23727e7c5b9f1a7035303990229f796a171a84e21051dd1d242b90c52",
+ "chksum_sha256": "f47853340600c1c9b76ed00695079b203ae45896e40acfac2f48e28f2ad98b40",
"format": 1
},
{
- "name": "meta",
+ "name": "tests/integration/targets/nxos_zone_zoneset/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "meta/__init__.py",
+ "name": "tests/integration/targets/nxos_zone_zoneset/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "tests/integration/targets/nxos_zone_zoneset/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_zone_zoneset/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40c6ab1595d2252393db7e834d0de46f776beddd1bd701a49a29a9d7b1a7c876",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "changelogs",
+ "name": "tests/integration/targets/nxos_zone_zoneset/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments",
+ "name": "tests/integration/targets/nxos_zone_zoneset/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments/.keep",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "changelogs/config.yaml",
+ "name": "tests/integration/targets/nxos_zone_zoneset/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54c802c2ade537e8b71ba45d10c73a422778a62aab0093bc6805c75ab8c7f448",
+ "chksum_sha256": "781a480d71c224fb90d67eba3072492ff3f2d263df13ec8c735cabec41956751",
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "679e0f7cfe3b1161eb8979912c5997848b879a8b3fa353578f61d866b1b9490e",
+ "name": "tests/integration/targets/nxos_igmp_snooping",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".pre-commit-config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "395482dc9117c2fbc52da3a2d5b78a06f526834f5959ff6091229f4e2d0035b2",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "CHANGELOG.rst",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "138c9116b2e12a0f7804969ae129d2281aa64a30a36003593458731678e35da7",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "requirements.txt",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70053744bf8f5b698f82ad59a6daec106188d9b63c6d1246f860f21ab8cb6ac5",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": ".prettierignore",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe21cf5ca4606d16ccedb1bccc85ef06771ee697924ca50ecd8ca4c98acce5cc",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "platform_guide.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "48467b2534cf719f3925b1b62e10a587eb68c1afbc19bf69d801dbec119a9277",
+ "name": "tests/integration/targets/nxos_igmp_snooping/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".gitignore",
+ "name": "tests/integration/targets/nxos_igmp_snooping/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "179a6d0780422c32653a71b9395a3ecb8b3859346ffa3e38ca3c8d90f8063fd2",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "test-requirements.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "cf0a6736e79a79f5ea107f0b74c38957ece4a6e4b5c00b01d2c1ea679d876089",
+ "name": "tests/integration/targets/nxos_igmp_snooping/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".ansible-lint",
+ "name": "tests/integration/targets/nxos_igmp_snooping/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b47f72e159f93a5ff07ea2534752e0fa977b214e9ac05c667fa83ac13be4e50c",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "codecov.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4aa7e485dd4db6f8a55b046088c745def2b3145d9499ccda4e9a3336467dcea2",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/config.yml",
+ "name": "tests/integration/targets/nxos_igmp_snooping/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4129945061440c8d8c281590b8054b194f5958833c722aa40e878b09038c380e",
+ "chksum_sha256": "a4a73bc840e125f41372313d58b550980ef7c1642f87ba9bb8ee85d8d026ff42",
"format": 1
},
{
- "name": "tests/sanity",
+ "name": "tests/integration/targets/nxos_vpc_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.15.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
+ "name": "tests/integration/targets/nxos_vpc_interface/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.17.txt",
+ "name": "tests/integration/targets/nxos_vpc_interface/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
+ "chksum_sha256": "8d37308cf2225418b5fab604f2b171a1aafbaa84be0c933a48ec0f153d7119ad",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.18.txt",
+ "name": "tests/integration/targets/nxos_vpc_interface/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.16.txt",
+ "name": "tests/integration/targets/nxos_vpc_interface/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.14.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
+ "name": "tests/integration/targets/nxos_vpc_interface/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/__init__.py",
+ "name": "tests/integration/targets/nxos_vpc_interface/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/.gitignore",
+ "name": "tests/integration/targets/nxos_vpc_interface/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vpc_interface/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5726d3ec9335a09c124469eca039523847a6b0f08a083efaefd002b83326600",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit",
+ "name": "tests/integration/targets/nxos_vpc_interface/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules",
+ "name": "tests/integration/targets/nxos_vpc_interface/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network",
+ "name": "tests/integration/targets/nxos_vpc_interface/tests/common/sanity.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "03b61d6df7de21547cead303ca85a6e4139660eb2df5e5e2af81ddfefc206951",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_hostname",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos",
+ "name": "tests/integration/targets/nxos_hostname/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_acls.py",
+ "name": "tests/integration/targets/nxos_hostname/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a7acd1fa7f58e389510c0b12262c3ccad13fdbfa263a052df7ced07b51ae4a0",
+ "chksum_sha256": "372a997c3a0ce9aa7b42746d99a75295a66504416d94fd1b0c545e58717a6a9d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_snmp_server.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "894022761947d303c8f275cdd907dd987a18b092589e069a7719332f9efc3180",
+ "name": "tests/integration/targets/nxos_hostname/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_overlay_global.py",
+ "name": "tests/integration/targets/nxos_hostname/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "84c43269ad7e4c30ec87b50598018aa176ecc5fa6a5cf83c04163549df700c11",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vrf.py",
+ "name": "tests/integration/targets/nxos_hostname/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce6b30fdf5ea958858abea77348d845b53c2430333dbd879d9d8dbcf3f438432",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_bfd_interfaces.py",
+ "name": "tests/integration/targets/nxos_hostname/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c791a5a862edc717ae75ce62fcd53f8f2f2c629f267194482909a8350946457",
+ "chksum_sha256": "c100293cbf1e43e53f2ff874e3e1079621ae414e404db0078a26044ececac0b1",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures",
+ "name": "tests/integration/targets/nxos_hostname/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans",
+ "name": "tests/integration/targets/nxos_hostname/defaults/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_hostname/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/__init__.py",
+ "name": "tests/integration/targets/nxos_hostname/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_running-config",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6b793f9fc0770084b8babcdd914c4c42b2d1d3256be53d0cba3a78c19a616dec",
+ "name": "tests/integration/targets/nxos_hostname/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_vlan",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6ac358cca25afd3bbc1aa11515f5e31ea9d548abdbcdf0563949e446d76f0c3c",
+ "name": "tests/integration/targets/nxos_hostname/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_vlan_no_facts",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca3d163bab055381827226140568f3bef7eaac187cebd76878e0b63e9e442356",
+ "chksum_sha256": "55288117a1ba1770a3e0bcac6e550c318807316ce84c65e536ba34cc449f62b0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_running-config_no_facts",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "783b64a65f752ecfbf2ab084cb164df5d5a818d3dc9980856037a47853bcbfd8",
+ "chksum_sha256": "c48e9bf3dac84165ae9953b7ad26ecec061cc599fd5cc78290d048b36119ce19",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_hostname/tests/common/parsed.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "292e2e1750dcd0b5114b4c1b2f8fe93125754fa76c1869f64f87e6055da6338f",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep/config.cfg",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cea4ee76cbd062bd53302a5c1f37bc78584c8677950c99eb145c106a8852b278",
+ "chksum_sha256": "0b28a51b56bc716da4598490f3cb3302329f0113eb3ab32fa9a860223eae43f6",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep/__init__.py",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "b2db6ef6d129f976a7c1dd3d1543020bba577d399fa4e780311e9942d0e39a21",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_run_all",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f2a24625313932312f1d5e1c1d2c3935f99e5ab28880315c7dd39698df72daf8",
- "format": 1
- },
- {
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_default_interface",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "6cd019ca4bbc9dc9a7657ccbc89a52a8505890bda564813765bc2bb004ef3814",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_management_interface",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "8564938ffbfc1d998d5ce080e9bc6cb612cf2aeefa19ac79bf9bf6f895800a2e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_management",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca2359ed30a431281f356f0bb15f8bc2abb7169e21a6022034d8a4411fc413f6",
+ "chksum_sha256": "3c610838a6fa946c2237c8292c07b688a1ba7e8096a2d35d75f12f0b58866ed7",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_ntc",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ce390e649cf85c4eddff7df5c0d545f57e389bb36774ae580dadafec45cf2a1b",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/__init__.py",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "6e0e085de3251f1846bf350484b27a639383cf70f82effdb6d8aa0b73f105c60",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_default",
+ "name": "tests/integration/targets/nxos_hostname/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "c76970334ac8f7dea1c52d4a2e9785b43fe9754ed2cece81e49036d8efce6507",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_ntc_interface",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_snmp_user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface",
+ "name": "tests/integration/targets/nxos_snmp_user/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/config.cfg",
+ "name": "tests/integration/targets/nxos_snmp_user/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25600ba56e8e7066889af660ff87eb22aec3ce587ad60c1a7eca25c627064a5c",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/__init__.py",
+ "name": "tests/integration/targets/nxos_snmp_user/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/show_interface_eth2_1",
+ "name": "tests/integration/targets/nxos_snmp_user/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de922c7abca31dbb4d597cc485a02b9affe0c85fbaf1d268b15d38d8fe6ba069",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/sh_run_interface_eth2_1_all",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_snmp_user/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/show_ip_pim_interface_eth2_1",
+ "name": "tests/integration/targets/nxos_snmp_user/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "88ad9b709dae48e373fcf04c158d6714de1f94e808fa033204d3b7b4e48bf8bb",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface",
+ "name": "tests/integration/targets/nxos_snmp_user/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_interface_loopback0",
+ "name": "tests/integration/targets/nxos_snmp_user/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_interface_Ethernet2_1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "de922c7abca31dbb4d597cc485a02b9affe0c85fbaf1d268b15d38d8fe6ba069",
+ "name": "tests/integration/targets/nxos_snmp_user/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_run_interface_Ethernet2_1",
+ "name": "tests/integration/targets/nxos_snmp_user/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_snmp_user/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fda3eb0c249234e367f3e6d981a9957902f3443ff892ee35c3fbb87f8b2760b",
+ "chksum_sha256": "cc18bce3cf7f8a257784f7d4e432ef00b535bfe73b057d661756f364bcc9d176",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/__init__.py",
+ "name": "tests/integration/targets/nxos_udld",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_udld/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_udld/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_run_interface_loopback0",
+ "name": "tests/integration/targets/nxos_udld/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_interface",
+ "name": "tests/integration/targets/nxos_udld/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3152065a242771a7e158ce7a8a6da2f5e62f0a35807389d62d0b927830d2ab04",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface",
+ "name": "tests/integration/targets/nxos_udld/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface/show_port-channel_summary",
+ "name": "tests/integration/targets/nxos_udld/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bc24581fc9c635989ae2926270c03c4c85c642b768f6685cff7cfdcd924e70f6",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_udld/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface/show_vpc_brief",
+ "name": "tests/integration/targets/nxos_udld/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1901ac7c063992cfcebb4ba110637bc241aa0fb122959d9af40be8957b47bf54",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl",
+ "name": "tests/integration/targets/nxos_udld/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_udld/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl/show_ip_access-list.txt",
+ "name": "tests/integration/targets/nxos_udld/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61de402089db40d19ac51c2e8a52894c45d9002156027c09ab17d0cc3fea635d",
+ "chksum_sha256": "327f500b5a3e5f79399f7a93e74613eec32ee87f52ea77b89af0bb84862a89d2",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_system",
+ "name": "tests/integration/targets/nxos_static_routes",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_system/vrf_only",
+ "name": "tests/integration/targets/nxos_static_routes/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_system/vrf_only/config.cfg",
+ "name": "tests/integration/targets/nxos_static_routes/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78c6c60ac86950ccc102c733222dc40ca42ae78622fecbee3af27f61bb015adb",
+ "chksum_sha256": "3eda3cf170b296ca5e5f4996c8b03c030d87a4e3e6197f7770e2b14e739e4ab3",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_system/config.cfg",
+ "name": "tests/integration/targets/nxos_static_routes/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_static_routes/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c65fd0fea85d4421908dcb50a8778a562d62c2723ccb10520cb9c62c57f32977",
+ "chksum_sha256": "2eacf5012e1fb8a88bd7663844178dc5f6b617535d4bc2ddf0037ce474a33b1e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature",
+ "name": "tests/integration/targets/nxos_static_routes/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "40fe4fe85031868241ac9845115f925a1c7be1ad8c4f932f4896fdaad51da36c",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_static_routes/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature/show_feature.txt",
+ "name": "tests/integration/targets/nxos_static_routes/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7be73f626f2d19d384bda2c214b58505da78232c84a1b5c971495510e950db8",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_static_routes/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature/show_feature_mds.txt",
+ "name": "tests/integration/targets/nxos_static_routes/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aca22131f56ec81f704e0b11d93c03bec037cd2b33a9964143b8c7bf53fa7878",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_overlay_global_config.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "aeb566110f15e114259185610a7e1a17ba8a4a8380f590120292eceab885bccd",
+ "name": "tests/integration/targets/nxos_static_routes/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf_af",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf_af/config.cfg",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a11a202b4b688d09c6b7cb78a74d139f2b599c85973dd068b1ec18892926cbf4",
+ "chksum_sha256": "d486bec06fd92b15f491fc77298d00447520a955e2dbf3d84a5a739d1a2703c2",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf_af/__init__.py",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/deleted.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "10271b66076c4d1659a13c85ae4ba02c903ba2eb771ba8a2e31f0910c46de739",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/gathered.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f58a5e229e9cb4a2cd367d9bd3d36dfc7b8a03598bd35d77c19ea701e70b8976",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global/N9K.cfg",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/rtt.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9089fae22c9d5b25f815c43f8de1b95041c2fd012d721e29e009e47f64fa66ff",
+ "chksum_sha256": "ac26dd65c91afd1fd0e3734c2d3b9c361904f6af8fed0fbce8ee7746c96b64bf",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global/__init__.py",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/replaced.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5c23e4c252c1cd1583fd56550d8d702f0b09b867eb03c50886e83f3c24197690",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global/N7K.cfg",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/parsed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "406c13868678145de8fa80ab6540d805084b62c191a928a466fd44e6e59ccd78",
+ "chksum_sha256": "4a2cd09da60d6b0bc0904e3f313e959fc3efbba7979d14309764e11aaefab96d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_command",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/merged.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d8a7a44814c334c50589139d3473b014a953fda361f2d88075fe1538f65a580e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_command/show_version.txt",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/rendered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c45a343dc5192f50ef68b6bebee3da8cb980be9ec3cd5b75b00a20658fde6748",
+ "chksum_sha256": "b9734f5468d080e059ad191471f78497c2ce42fa6d87de56f9f2e273bc0d8e28",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_command/__init__.py",
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/overridden.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ff14829c4bb5758e9c2bcce36a6a134e5fd2d02af7586147e25ce45f5d3c6197",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_static_routes/tests/common/_remove_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "244c13d831c6419ec7e7a46e3e00e82fc1be5a476e5204561deae0efce659d93",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf/nxos",
+ "name": "tests/integration/targets/nxos_lldp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf/nxos/show_version",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "12c7c1c28fcd05fced589995a39f0a1389d970f6199fadcbf95543da330feb22",
+ "name": "tests/integration/targets/nxos_lldp_global/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf/nxos/show_inventory",
+ "name": "tests/integration/targets/nxos_lldp_global/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb14576378ab0f2fd25d13dd4c4dbc31c3940d65ecb5f2fa29b39eba7e04d7ee",
+ "chksum_sha256": "ac136582618b2844cd5bb84e0aaf52f6b4e7a6ea581a0fa0ec5466e9c9c3bd68",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf/mds",
+ "name": "tests/integration/targets/nxos_lldp_global/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf/mds/show_version",
+ "name": "tests/integration/targets/nxos_lldp_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33cb951f7e55bfc24ba68026e55c250973d49113340d4c86ec5f52d2dfb32198",
+ "chksum_sha256": "fe84894e2a9ff992a755f52f856ff23c23dd249fb9fe14dc4f264cd1a6070e12",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/cliconf/mds/show_inventory",
+ "name": "tests/integration/targets/nxos_lldp_global/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e57d9cdf30356d92715fcd45fc9fc5e6fd0dc07298682343aef0f9c1c6231d5",
+ "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/_nxos_ip_interface.cfg",
+ "name": "tests/integration/targets/nxos_lldp_global/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce28d8e36e6ed0776c23f9d57050b68a3dd4cace82f8d3909de89bf98c2b3401",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interface",
+ "name": "tests/integration/targets/nxos_lldp_global/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interface/__init__.py",
+ "name": "tests/integration/targets/nxos_lldp_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interface/show_running-config_aclmgr.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "606d5b49ef095558924b32c6faa4e06f128f4655dddd57595e945cd33a4dbe72",
+ "name": "tests/integration/targets/nxos_lldp_global/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_static_route.cfg",
+ "name": "tests/integration/targets/nxos_lldp_global/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8787db2297394f70dfa9ea3550d604fcaef84c40d973c498b147d346422b8585",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep_vni",
+ "name": "tests/integration/targets/nxos_lldp_global/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep_vni/config.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9676910539593a9a50989e688d7848de6074092a58e720ebbb437fa02859bd5f",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep_vni/__init__.py",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/_remove_config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "339639545338297ee483c8e1370eeefc88356fceb240523dbbc6d8c237444982",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_rp_address",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/deleted.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ffe0021fbc8bd39a7990267dc791d78f2571f3285efd5fd1244c68159ffca98f",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_rp_address/config.cfg",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/gathered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "358b10ada00b4a323c671eaa193bc4c62bfa6e6600042382003a8efe5708bddf",
+ "chksum_sha256": "e5f9f21d8529dd8144e8259391a04642355d3f3c9063db5a27472a5180d6b7dd",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_rp_address/__init__.py",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/replaced.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "a62a8ac1f40b48c56dbd06eb543897331236925608b859a758f1c91037f17455",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_vni_config.cfg",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/parsed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f12c0f2a38e284dde284dd4042cda86d71671289619456a3438a58f88ab929e7",
+ "chksum_sha256": "ca1cd52e0c16bb7a97f9e1aa4eb9c49600b25854f35a93b0bcfeaf3ead1e7c07",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/merged.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e82eb173fbdc3209c167bc096bf1665240e72b24a34f75c19ec1a3b7c5d4c8d1",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp/config.cfg",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/rendered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f971230c61be923533c3ea1eb63e9938cdf701f3884d511105d551d7ee531554",
+ "chksum_sha256": "18c1848ef28d20fffd52045955b5a54ca72aea45d5c2ff9060d8856aeeb7162a",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp/__init__.py",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/overridden.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "8aeec5521c22e513a4e736c9b51461285abce953173508ae28fb02ecef95952a",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp/config_32_bits_as.cfg",
+ "name": "tests/integration/targets/nxos_lldp_global/tests/common/_populate_config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "deed874c21f6472915657eed44c550ff438995b132d365fedb8b94deb510c856",
+ "chksum_sha256": "c7bacd3b7cb077dc6895ecf350711c1bf39e18739a361f6cf514e9dd4d6355f3",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface",
+ "name": "tests/integration/targets/nxos_banner",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/ethernet_noshut_multiple_ipv6",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "93f0654869c15b0670ae97f78615662b35f5f3aefccfb3c1e2e463ae3759a6d0",
- "format": 1
- },
- {
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_banner/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/ethernet_noshut",
+ "name": "tests/integration/targets/nxos_banner/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96a17611067db53f8bdee41ec48055b8ec808b2968d4f3dd784dfae1d6d85d4e",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/ethernet_noshut_ipv4_ipv6",
+ "name": "tests/integration/targets/nxos_banner/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b65b76df737c6da986a8850a5874d6f6733c26b8209c778d7d861ce0b5cb894",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/__init__.py",
+ "name": "tests/integration/targets/nxos_banner/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "c35d2ac52b14846e9fb2bba91ef7ba5f946b1cd2e47ed10c0b30941d6f9de766",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc",
+ "name": "tests/integration/targets/nxos_banner/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_vpc_config",
+ "name": "tests/integration/targets/nxos_banner/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f68fa10ece2a3162e69d8bc6ae207437f76b07a956782d6637214ae088346557",
+ "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/show_vrf_all",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a690233a51aeef35190bd4fd440a65fd819bc0153ea20b1b362c25392329e04c",
+ "name": "tests/integration/targets/nxos_banner/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_vpc",
+ "name": "tests/integration/targets/nxos_banner/meta/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5e079e4fa7499d1ee8e1f0ef98ba4908b5abe9bb146f2d07909c5e139940ff7",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_vrf_all",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f9b34cc87b238b789fa2c54dd008597f37aa0a859cf06c25e5ce60aefdd0ed12",
+ "name": "tests/integration/targets/nxos_banner/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/show_vpc",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4876569ce50a9bb5a00412d9e0e3bd17aed2d739613b28796c3224ae5397b705",
+ "name": "tests/integration/targets/nxos_banner/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/__init__.py",
+ "name": "tests/integration/targets/nxos_banner/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "dcccaebec31acf1f9adfe71c2e5067fcb6c1416a7c63b077490beedfff6078ca",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_inventory",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3d501010723eb3675a96a954862a0210ac023095947f8e09ea951e15faa96545",
+ "name": "tests/integration/targets/nxos_route_maps",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_hardware",
+ "name": "tests/integration/targets/nxos_route_maps/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_route_maps/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf21f74cb2df54e19dae270c2b973eea492c3d9925ad7a1b8b59ff9f0e575287",
+ "chksum_sha256": "2e01454e13b8195b24273afb60288d0cb4a32a1ca4e7f1fff3bfea0fd2610f18",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global",
+ "name": "tests/integration/targets/nxos_route_maps/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global/configured.cfg",
+ "name": "tests/integration/targets/nxos_route_maps/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53ca3e78de8cd1b9ec6e040ece7bec23ea29663d1eebd1aed66a2980e832dc19",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global/unconfigured.cfg",
+ "name": "tests/integration/targets/nxos_route_maps/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "84f47e5f6bde50931928544ec35970db216f2bffd80cff2d7b7350903d86a5c1",
+ "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim",
+ "name": "tests/integration/targets/nxos_route_maps/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim/config.cfg",
+ "name": "tests/integration/targets/nxos_route_maps/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0c10e6761048c3f05f7143dc844703c76c41ed38e9f0175479874c183cef2ac",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_route_maps/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi",
+ "name": "tests/integration/targets/nxos_route_maps/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n3k",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n3k/show_run_all",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db42fd8881e10bf436a6712d228152f2ca6e627e0aa07531751009df2153fc3d",
+ "chksum_sha256": "036af452cc03bdec84720356e6c7013b4f39c2a6800e1f8303120d785b2955e7",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n3k/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ff60a5603326c1ae214cbac1f9411495c0cab6360f7880234d1b2e3be1a2ffbb",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ab508eea7234bac851c3c482330fa0c4afb0e8bd2af87d91901c230754d9779d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n7k",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/gathered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "562a6817ca710cc4c1b6c4babb9b7bca97334312e19d963f8df83a98b7e94512",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n7k/show_run_all",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f487063d9698ed988f461a0f1d90be2406db893a1881d162720dd0587a0ddd9",
+ "chksum_sha256": "3b2a9d97ae8aa59ee542363b43e7051f5d8492e4f45ba0db862a9d071e1ef820",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n7k/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/_setup.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5153443ed073b0cc035110ac96d297361e22abe390c84014d0a7dbc83ede74e4",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface_ospf",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface_ospf/config.cfg",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5251c622e0f68d8c94e0215bd6a9f8c5d7ab755fc8879ceff8a9cac9b09caf9c",
+ "chksum_sha256": "09e1c1e0b37433d900624319d7037eb3672c94053d18f87f22552dceeec1dc96",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface_ospf/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "56653dab75b72e7a40ed1dbff928b5fd2dedf7651dcf2155747903b1a64803e4",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/_remove_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cdf825a17b7761cfbd225079ec5d366f4584e7f97111e0af086884f87fda0fde",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry/N9K_SGs.cfg",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af042e9ae8d0a2e6b6ddc28f13ce438e7b49050385ca546bdb5f1a1b51c1da1e",
+ "chksum_sha256": "c276d4c6888f077b84280f79606acb632995c8f1bf32e8c1c0bcb9c02055ee76",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry/N9K.cfg",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d4c2bff81ce036402bf1bfc5a01191ead2507ea6deb7e7d4930ceb35965b1e5",
+ "chksum_sha256": "4e59d332194a4bb9ce27f445771203088b05b31a191b59d0e9e049deb72552a8",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry/__init__.py",
+ "name": "tests/integration/targets/nxos_route_maps/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "d8ea6575a0570f9ce32c1d3211f075ee42f56a4ba2d87115c6fccd3409b6b5d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_config",
+ "name": "tests/integration/targets/nxos_snmp_server",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_config/config.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_snmp_server/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "03abdcaedc2f739a12fd1c31f37164f02ed471aff024ad8e654a0d7ec3e611e6",
+ "chksum_sha256": "8668c98a492f96e42640c00cc5b31c0b73f071b7a61dac9e872c31602833ade2",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_config/__init__.py",
+ "name": "tests/integration/targets/nxos_snmp_server/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_snmp_server/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_config/candidate.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3352c8002ead9454c96fa97e82bc2bf245ac57a69edbf8a934bed4ef809de3de",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport",
+ "name": "tests/integration/targets/nxos_snmp_server/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dc2a3a8f315668c3a0a94a40753f7fd1e7eec342b781bbef50ce0f8d3c4abae2",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_snmp_server/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/show_interface_ethernet",
+ "name": "tests/integration/targets/nxos_snmp_server/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3eba930c712f346d62428fc829d411a3c7f3b32d00ac0b54bee3e25f5a239876",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/show_interface_ethernet_switchport",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8b266354ff8d44bc8987e97565cba41afedb2fbe282c577c8d371516e84d01bc",
+ "name": "tests/integration/targets/nxos_snmp_server/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/__init__.py",
+ "name": "tests/integration/targets/nxos_snmp_server/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/show_vlan",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "864e06b1e9befc20919e723ee5f9bcb2f51a88e524e8ce8e112ac9ec12bf38fc",
+ "name": "tests/integration/targets/nxos_snmp_server/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzoneset_2.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08f7f3dc2d44ed61e7002bc417cea69f8407d7f4b0a62634c1f449f5338f7384",
+ "chksum_sha256": "af12998368091442ef220d6e85760264d3d8b49b44ab62a34598479e622d6d61",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zoneset_vsan.out",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc71e64aa30f5e4da11b419ac36c9b36cae8057bd8bef47b2da5bc293ebaaebb",
+ "chksum_sha256": "870e84431b7485f0d29bb5431284baf0098b3a2dc4c33433007de1b970ed5b6f",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zoneset_active_vsan.out",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eef09bfcfaae85faa6c0b10f2ac4207c73947496c77783c7aa6bac45a9d02d3a",
+ "chksum_sha256": "0a2278a138afa16699d2018af1c1e1e616a0898336bf1632352195a4d2be36ec",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_1.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46c16cd03a600d15085abf2052ed33830fed61424a02004021ce4dd82f0086e2",
+ "chksum_sha256": "98bd39c65fc6a311ba043b2cb6e5fb636bf75e1cd4976826739b8a81f1a4a6fc",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zone_vsan.out",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "74aee6c5327fabe2de027ca0907e76be2628510e125976c8918575bbf09ea703",
+ "chksum_sha256": "f8669a63ac0d96789d4722d662bf1568b86c5433817147820023d2b323b2c14e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzoneset_0.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/_setup.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cae1aca46b8d82f44a8f728182fafd1030040360725fb7367040392c1d52c018",
+ "chksum_sha256": "5153443ed073b0cc035110ac96d297361e22abe390c84014d0a7dbc83ede74e4",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_2.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6e109ddbe7ad8d251edbe3ebe5bf960c055ac9a1d3d5bc3865596bf0d65d83a5",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_4.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5005380753639cd3afd7e1b4ed90452d5d9492bb05c0852563009949e285db0e",
+ "chksum_sha256": "68295fddd79719b8ad7b74adc76ea194066abae9bd717b24c71eacb7d4b20f8e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonesetactive_0.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1836db8862dd990ea4008e3c10d16d7a5563561ac27a8f846f2d2bfee8d805f0",
+ "chksum_sha256": "f6e477bd3b8bdcdbdb4a581d99fc15745c21c823d54c364ea2b1a1fbfb43958d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_0.cfg",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "010172dc57589c93d51b95aa50feecbda06ee706fd623422a088de23ccc9e723",
+ "chksum_sha256": "5e57172ca560cf57a5a6cda0d4427ef723ff0c2041494801839810d37f2dc448",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/__init__.py",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "84c00378264bacd0d235c0b7a80ebf88d754fb93b1e85b00047fac028b29275c",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zone_status_vsan.out",
+ "name": "tests/integration/targets/nxos_snmp_server/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42ca86dc445e87a4cba4d08cbd217173af42f60181eb44017d852837c17e9a47",
+ "chksum_sha256": "7fd565877939a79d2e7d7d25da2c520c9ea6d42572d5d4fc8110716dff0f961a",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_2.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "171f743817a7ffe74398b6d78a0600ec0126dc2fe0890694a62eb6db89225a58",
+ "name": "tests/integration/targets/nxos_igmp",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_0.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "13fa7bba46e2967e5c1bc2254620c4cd5337daab92a23d8aa6d6dec1ea7905ae",
+ "name": "tests/integration/targets/nxos_igmp/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_1.cfg",
+ "name": "tests/integration/targets/nxos_igmp/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abb9d2504120327f0b55c36a6c130c9180a0f455ae397737ecfaaa4e6f891ef5",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_3.cfg",
+ "name": "tests/integration/targets/nxos_igmp/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3fee2a63052400d50f93de5abc0dfd7897a48269d71af99f53182139461c3bb0",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzoneset_1.cfg",
+ "name": "tests/integration/targets/nxos_igmp/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bcdd326b9710f2b1f56ac59ed3bdccaa6e853cfb24af1445743d3254ce458166",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_bug339.cfg",
+ "name": "tests/integration/targets/nxos_igmp/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_igmp/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c45bdddc81417a51bdb8256cc024efef8d8cf432083297902ef8fcda9850703",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interfaces",
+ "name": "tests/integration/targets/nxos_igmp/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interfaces/nxos_acl_interfaces.cfg",
+ "name": "tests/integration/targets/nxos_igmp/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "764024b1837b2491a2e87e60ba305df48bc64541f6caf244fe4eea63653ee340",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interfaces/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_igmp/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias",
+ "name": "tests/integration/targets/nxos_igmp/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdastatus_mansi.cfg",
+ "name": "tests/integration/targets/nxos_igmp/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b47fe27d3b8f630988803930db4eb009b8ef93e5f7838e4d3a49adf1859d43f5",
+ "chksum_sha256": "e3f3aa30512b55460b5e6c82887f66d7642c56b86c7eabd5801148a945ce2ef4",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdastatus.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5426bfb2d55edf2003ff250d0730cf4ea9ace4f44da02ce6664ed3bc77c7867d",
+ "name": "tests/integration/targets/nxos_pim_interface",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/__init__.py",
+ "name": "tests/integration/targets/nxos_pim_interface/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_pim_interface/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdastatuslock.cfg",
+ "name": "tests/integration/targets/nxos_pim_interface/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba24a69173bd06b6a025d176427b5619878da807c2a2b8a42aced27ac16d607c",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdadatabse.cfg",
+ "name": "tests/integration/targets/nxos_pim_interface/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e912812c9084c8676a32afd1e2522f00bfd75d164fbb86113f01a88f164ebb8",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan",
+ "name": "tests/integration/targets/nxos_pim_interface/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan/shvsan.cfg",
+ "name": "tests/integration/targets/nxos_pim_interface/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "63e5ef2ff7dfbb786b21f77d0e047b16a5af1477901433189032c59f0d2e4988",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_pim_interface/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan/shvsanmem.cfg",
+ "name": "tests/integration/targets/nxos_pim_interface/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "91b604b4bdd22922dfbfb776dc40a760428806a2f845e8c9e76d42beef14bbb8",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan",
+ "name": "tests/integration/targets/nxos_pim_interface/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/show_vlan_brief.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c60b1775765492c72f6cf744aa56574aeea9b6c57ac19c974ebc73146cf4920a",
+ "name": "tests/integration/targets/nxos_pim_interface/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/agg_show_vlan_brief.txt",
+ "name": "tests/integration/targets/nxos_pim_interface/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6ad270b50167f7b13c3e7f40f83ebc812ceb7ef7d3455c0ce7b404ede9d6238",
+ "chksum_sha256": "acc020ce7729ff57e637de9f3cd1f3aae98af404b208386a852c6c804d0006fd",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/config.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4cd4cba4fba8bd0ceca02c0cea325b38c65d82a1b01633f59776902dfc00010c",
+ "name": "tests/integration/targets/nxos_evpn_global",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_evpn_global/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_bgp_templates.py",
+ "name": "tests/integration/targets/nxos_evpn_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ef2f97c111f9f0a6155a7353fde97cf4cbea592f715578a4df20893bad2e7ff",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_ntp_global.py",
+ "name": "tests/integration/targets/nxos_evpn_global/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21b73c3764e98cb997bfd28ed8b5ce52bd4b3fa3f499feab3802ca37e5de012f",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_ospfv2.py",
+ "name": "tests/integration/targets/nxos_evpn_global/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fae6c56a4916171573a12ad3ab0071c82488062af9e6562078262a50c699b83f",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_lldp_interfaces.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d61028deb5268fdf63e8cfa7bcfa10f99738fb475e42f31911e3e264360eda34",
+ "name": "tests/integration/targets/nxos_evpn_global/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_nxapi.py",
+ "name": "tests/integration/targets/nxos_evpn_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c79a89af6839dbb8a41461dd0c68ff33e88b2d8ed3447f3b4fc7fd1be0419394",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_hostname.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0c65ec8fcd4ddd139ae9947a00ec54a04c2c4da5a5376b0d106d903c2f37a9a1",
+ "name": "tests/integration/targets/nxos_evpn_global/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_telemetry.py",
+ "name": "tests/integration/targets/nxos_evpn_global/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5a38ca859e81c05061d9ee2f6cceefe3beccaa0587eb427a040be3c1e7e00f3",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/nxos_module.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0d1a4eadc6d91ab91db07ea6ac3a5576e417f1339c1520079de065451b87fd3c",
+ "name": "tests/integration/targets/nxos_evpn_global/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_feature.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "62fc7da4353d2dd07ee8ec9822b06cd244ef12c93c65a7e53b12b9f57105a7b2",
+ "name": "tests/integration/targets/nxos_evpn_global/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vlans.py",
+ "name": "tests/integration/targets/nxos_evpn_global/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "320bd35df7abc52ebbb1a9cbb5286dcaed02a736d1051666fe5adcdf1a57a043",
+ "chksum_sha256": "9190f2d0dae47c3b29ebe68f89ec9e3b88f0af56af489b15624cf99c86415d9f",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_pim_interface.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "caa9723150e6b388c6ac57ac58e581317fe3b31757c2da99e00b1695d362358a",
+ "name": "tests/integration/targets/nxos_prefix_lists",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vpc.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fd868cf29e78bcc57d8d983ff6fcd4bbfcd1d0e34c63ab99737878827ec83d7c",
+ "name": "tests/integration/targets/nxos_prefix_lists/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_devicealias.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "213ce495ad35b863cf252fa9bf4362db85fa8f2ab2916dfa76da754661d37e4d",
+ "chksum_sha256": "f869515b8a1ec55c8346c13f87eff1b4bef627722f02ba89f143fec41ea52254",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_interfaces.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e983acf04cb96f85df2b16c5ad63601cb4b87de6e74f5003751d4090e5350ced",
+ "name": "tests/integration/targets/nxos_prefix_lists/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_route_maps.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ac86722ca4d5b62b0d586b4df8c59a207c76932b7b0596003de9722359d7872",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vxlan_vtep_vni.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb233dc0fead25086cf7310ea386b8e51b17bf00468ab2a1dbb08f44a1aa03b7",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_zone_zoneset.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f746132b73f02acee61de5e52f37e03ee6591a3410d6ec01948416009d36e441",
+ "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_evpn_vni.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "71aa392dfc446e9f3fe0af4c32cba761151a0bae41be4831d6b560e332b2bea9",
+ "name": "tests/integration/targets/nxos_prefix_lists/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_bgp_address_family.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b94c2ad86ce7c380e6080a791526f868016e43350612dfd321a92964c8f9a04a",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_banner.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "491dacb2d554279466c694b6c0266fcc0dc9be308b3bfe338cb1312ea0f2488c",
+ "name": "tests/integration/targets/nxos_prefix_lists/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_hsrp.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd0ecfc4be320078a8b5b73873598f3311e9d5b5848092a1b24b41029dde5dff",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vpc_interface.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d6e27d691e0dc50b4659403bb76593fef84cfa193d36933dd299a1e486525fe0",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_hsrp_interfaces.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c9bd24f31dec735e5b7f5862c3e9cd48d692b6b528dcfa0114830aedcad227e5",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/__init__.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "2233813d83a2cf367f99e9348cfb36ea295efcebe61d310a4580abbab8836d5c",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_ospfv3.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2759b15fdeec5305cbf7fb599142511bd89e5af0c772b3739c39c28880a49e1b",
+ "chksum_sha256": "56b14e2fd373b2211c65f09454a4c39137bb1f1a06ad96f007e377d8b2b51851",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_fc_interfaces.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "320e6449e95a55c156623eaaeeb69f81a2c66c66ec1d8a5c06702040595ec95e",
+ "chksum_sha256": "53a49d08552e563da400b6c13475d9e2a020cc039eee9985b1c5a3e930fb3843",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_pim_rp_address.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aba5914ef5c9e367924aaafd1b555e2200d2f6c0aeaad5d5a2170880bd8f2cdf",
+ "chksum_sha256": "c10395ec2b03209c79681dd09690cb615195b8adaae1cdfa3fabc0fd68ef0565",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_lacp_interfaces.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/.DS_Store",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b284eb5232ddef43ad1619acb13d73a7c48f20d736372b212a4d7352bd1e420",
+ "chksum_sha256": "5b8c3589c79b71429938a827331899d13ac16c156374f8c45ec59068bd2d2060",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_bgp_neighbor_address_family.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f23808fbf9f008c689f4e1c02b7916bd261d6118037deb6fa58977cf805cee8f",
+ "chksum_sha256": "b63842b6c47d71229bd8ad567bce61e119a9da6f44fb987d22a5348d9b67f1e7",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_prefix_lists.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "123c5209374533c41d593eba1f82b63089145ea660be4c913f197a43a434c342",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_ping.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3fd816ad49427c17166246df05ec36c188e3caead4e04285d11815d31daa9999",
+ "chksum_sha256": "f7f1e53d17acf3059197aa476e1715028d571930c8cff8eabada2af3da2e7819",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vxlan_vtep.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "92d1ef64dfcf0ea45c5294814d45a84bebce3070c429a57bda7f8f428a82f44b",
+ "chksum_sha256": "7bf73d5bf1f98884b2c8dedfcdc762af1c6542276fc649a04bf360fe10400cff",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_l3_interfaces.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9dd8c9037bd8ec303989a242280805dd82718f3a12b91f8ea34d323c819137a8",
+ "chksum_sha256": "b94ddd8f6e3aef1be0b00d88c934c971a436d3595b760e10dd388b6dacfac7d3",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_bgp_global.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df17a538471e85b869a2d0fb40789d01afdf232e7dd781977d34d1e062120268",
+ "chksum_sha256": "8c7dfc255e4fdc2e873e375b7b4a24b1098f63ebc6f80b18ddaf4bb4e62a9a46",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vsan.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8af4c5d2ddc4255350bdeea291e5661e3b5f07f5fd6cb755c9151cd8751394c5",
+ "chksum_sha256": "742244e5eb090968dbbab6b3bce1d5d52404f476371439bf2404e4467534b488",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d93cfe3962b0a2c9ba309e973bf3db9087727c2459fa64f1c2fe0c4dfe61627",
+ "chksum_sha256": "56599df6259a9ed56f2794f1171dba7cd811995b228ee29610d679201d7094f0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_ospf_interfaces.py",
+ "name": "tests/integration/targets/nxos_prefix_lists/tests/.DS_Store",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19c4359cb20e2df0f649d97f3207a5861ebcdc6a2430db3ff94019ba14a38c73",
+ "chksum_sha256": "af891bb95ebcc39dfd38131bf64fba6eb7bb8e84f59a428b6af5477588fbc6b6",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_command.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d2a3122d72313abb8ad6da5ba19c9070e95def315019bcc16cbc8ff835428411",
+ "name": "tests/integration/targets/nxos_ospfv2",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_user.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e084a512bff94fcf880a610ec5dd779c64ad7a3feea58bbc10e141e5eaf282c",
+ "name": "tests/integration/targets/nxos_ospfv2/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_system.py",
+ "name": "tests/integration/targets/nxos_ospfv2/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7ea6f26526b9e4ead960a1c161b83de2581ca9d8a90d95925340e2fa8e17c8b",
+ "chksum_sha256": "61311d9ddd2174a10efead34de8526412e4c4d438600cf5a3ff852ec8692c4f6",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_pim.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7e108bcc18c5b82ce1cb4cd46d8996147683f30a2f18d2a9eab8367dddb07bb1",
+ "name": "tests/integration/targets/nxos_ospfv2/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_logging_global.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "569405484ee944ef07a45c205f8588b8a00e4b626f64ad52c2da5f13d1d599b3",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_bfd_global.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "642bb2c06d13b8c2354bd9e46c01bef3beccde28fdbe5efa8af16cef2dc2a21f",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_vrf_af.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25bca3233974b76c3830c4d3b603c73dcb0032036a1752e0804c8cbc473a9973",
+ "chksum_sha256": "7826463ba77fb2f0f2bb32a0e02ce314ce3a06762e189598970cc26dd17828b1",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_config.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f1acec58b3eaf968e8273380ab7d9170e37605cf1a1b5f66e08e568bf7d94e04",
+ "name": "tests/integration/targets/nxos_ospfv2/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_acl_interfaces.py",
+ "name": "tests/integration/targets/nxos_ospfv2/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c9f9ef53093b66901ca6deaacd5cb8eeb43f20bf80782d5ce721a57290a7288",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_static_routes.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "319d8c2d4417de66f5a808ab30acbe1e9edf6cbe6d9c3a26c78f891031d2a233",
+ "name": "tests/integration/targets/nxos_ospfv2/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/network/nxos/test_nxos_evpn_global.py",
+ "name": "tests/integration/targets/nxos_ospfv2/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7cb868c9311b9810788e13862c9b74c809e19a9f9b57ad9e46ff4124fef884dd",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/unit/modules/network/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_ospfv2/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/modules/utils.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "414c272e9a403bb6009460ec9502c5acfaa9c80b2159c742f6cb2e225938be3e",
+ "chksum_sha256": "0a3077084e8ed39aae3bc10055bbff711e7d40af65e514c7ecd99bffaad96bfc",
"format": 1
},
{
- "name": "tests/unit/modules/conftest.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "354440d86f684a9641f329807a687fe71fc66ab901207f7a166ecfa648935488",
+ "chksum_sha256": "b1738278980bd3d3426805bcd85053668078a6b992627d9119043bc29161ea8e",
"format": 1
},
{
- "name": "tests/unit/mock",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/gathered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b8b689385d369d30980492e63bb16f13719d27452d738494899f85539130b2b5",
"format": 1
},
{
- "name": "tests/unit/mock/loader.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2343d1f644250d4b616f0bf0a826fdee8fe306243d8412d44dd0e033352b262b",
+ "chksum_sha256": "ddfff25fc88c57f108a7ed89d11d6df5c2233b27eac626512dc63709325fcf85",
"format": 1
},
{
- "name": "tests/unit/mock/path.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5561a16a302e55ea79227591afefc3a93f33001fc80155dd29b068b90502a446",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/mock/vault_helper.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3bf7834d18bd34473d0f4b898253177229a131f14364874efba584ff985e4a41",
+ "chksum_sha256": "4ad11ae9e094b6bc2d4286b05967979551104857b314aa8af9afee18cdec2d4a",
"format": 1
},
{
- "name": "tests/unit/mock/__init__.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "7a00b8609854d6a88cc33f870793ec8972cd5d81ae8dcfed33fbbca19938ce69",
"format": 1
},
{
- "name": "tests/unit/mock/yaml_helper.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc4f4d5d2968ced8e3a04653354945d720e7c6369aad67ba7724c5981c82b6ea",
+ "chksum_sha256": "c5bb202379b75e7aaffb216dd2e42b234acf1ab4fc0679f470fc01c67b309b27",
"format": 1
},
{
- "name": "tests/unit/mock/procenv.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ff983479d7aa4b90efe73337f723c14224edabcdfde4efe49f24cd905f45f1a",
+ "chksum_sha256": "69ef5f767607840c90b3cb9b80ca6eae206df8ec8a48b850dc47adbcf4b11aa5",
"format": 1
},
{
- "name": "tests/unit/__init__.py",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "77ed312700fb0471e6e108949f8b983d6c418f0187b84b569551ce3c8205ce0c",
"format": 1
},
{
- "name": "tests/unit/requirements.txt",
+ "name": "tests/integration/targets/nxos_ospfv2/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49ba996dc4735c3463e9af561344346dfae14bcc1a68096ce78364b377f0df1f",
+ "chksum_sha256": "de58bef72adb70191cc8d212367d3332359130dbaf399807962787d1df375915",
"format": 1
},
{
- "name": "tests/integration",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/labs",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/labs/single.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe76aae4b59ac2c628191394f5cfce79e8564ecb72be6157e14f32c7e8a97093",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/network-integration.requirements.txt",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "234d0353684f5a41c84eabb03e13539abf602dc0de061a90e129618976b3c106",
+ "chksum_sha256": "6fa74ca9eef193774b0257522c64344fa2c8851398dc0acd94e161192cc0015d",
"format": 1
},
{
- "name": "tests/integration/__init__.py",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/defaults/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tasks",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests/common/multisite.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "7127fe8481d55865ca7043a159fb4c4a6e02423899c7143bbb548aff0f3728e4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "81ba6bdc57dba2f0adc0b293862db415cb6aea44ae3793cbb0c755ea01c073a9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/fixtures",
+ "name": "tests/integration/targets/nxos_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_interfaces/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d00aff551522b3a0a61d5ba2b38944de36985bd3353dc67987d76d650ccbea3",
+ "chksum_sha256": "c7aa68e2e8b3bb1d6b4258ed279670b1a8e5fc5e6a63b446eab511c3963db1c1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/meta",
+ "name": "tests/integration/targets/nxos_interfaces/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/meta/main.yml",
+ "name": "tests/integration/targets/nxos_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/defaults",
+ "name": "tests/integration/targets/nxos_interfaces/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_interfaces/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "64ca5b4de950cd0d788bdd92389f7741e663d949ee6b0e343f1038c573564b64",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/vars",
+ "name": "tests/integration/targets/nxos_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/vars/main.yml",
+ "name": "tests/integration/targets/nxos_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd3723c027909807b1d6ef73222d5bfc96f98c680a9f86f3e89c0366bdd22116",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests",
+ "name": "tests/integration/targets/nxos_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe16be9cd5316c7526ea489bad81d693f40c2f2351c01f60a4074802f208164e",
+ "chksum_sha256": "eb7918401f281cb2d25ccebc214ac1fd9ae10975f3ca4f77f56a53f3cc485a40",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f9623efb3904d185246e618c1cb0726553a5556941e5c2e91b7c9025de0e89c",
+ "chksum_sha256": "c5a2d4d2fa3245673307a06eb29fd3b751fad50fde2dac24f84aaff5498a5af7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e2964f468cfe2c92dbee0504a71845636d76ff4e079dd07ad7f8656cba328221",
+ "chksum_sha256": "e26569250511575d31ea2338e2abede7fda11240bd583ac7896adccb24abd7f2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/purged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f1efb47f41f82d4be94cca66acf388791ed71b63f1f3897f769f7e4110f1be6",
+ "chksum_sha256": "832b01df3053b44b5e69f7b1e62fede8df0291af913f7b4abd1d4545439ef718",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c70b2b7642402aeef4772a4a8cc7b7abde208bfedd65fbdcf95231b2ab5a74db",
+ "chksum_sha256": "3f875bd8877a898338a0402b34179ad17f45b84d1623c72741490f52913eb064",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d0b9b581e3e45b412907eecbc4ba5ad1632595e8cb58f397365287ce091bee0",
+ "chksum_sha256": "0eb7659a55ab72845d72bf76bcac9618bf84c8d5d55c6fa8f04ff9c15c68a92a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/rtt.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65b9276bee903c86af57ed5a6afee1d84461bf6c7bf3a3cf95aa8369e0c7df57",
+ "chksum_sha256": "49898e976fb440f5b1ebc404e489b6e2511049c950c8c0d8fe75e251dc5c3671",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87888ec93930b5630192fd9f60d931700a43910cfe6a3d092b04099ac88eb612",
+ "chksum_sha256": "c8d1e0d8e50dd46dc84c73912990ffca760b0f5e1a8ad8ab2f10a90bce902369",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "69cfb989d7be0b980e8fc3eb9d33154c66e2cde5df1a9f8b17b1ea0894879212",
+ "chksum_sha256": "a639b04de3b8a2a70660b4034c291526f9e1f4bee9e24e4e543bd719a4f65a50",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5745013bf8f6e23f513cea1bb8878e4ca80ba61a331a5a5d2a7097673b786ed2",
+ "chksum_sha256": "d5ba532156bd9771e2ec35bb085d404ed049b88e1da827ab9eb6d4dd38a3c871",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vlans/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0fef283f3a139da74d869920dfc1b12b127b20e505c060d30b893d457ea980bc",
+ "chksum_sha256": "454396b9f57fd4895a1d8063b565bde8a053291bc6f6e5f934397c75e99ac6cb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_interfaces/tests/common/overridden.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "52f0f66116a7b07a9bfd39bcc2cdabc5a2db68ed24d92751f3f53f08332f0ed0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks",
+ "name": "tests/integration/targets/nxos_overlay_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/platform",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform/n7k",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/platform/n7k",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform/n7k/cleanup.yaml",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/platform/n7k/cleanup.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "6fe5dccac28d950d4f93db2d5dd8d2d53f974b51a0c07a9371165a7fd9b20585",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform/n7k/setup.yaml",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/platform/n7k/setup.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b569f1de86c96331c196b935a58a716b5314f80ebd5b9130aeaa56f44c45f047",
+ "chksum_sha256": "1f41e716a6ee3d52141e15b1cf8d8a62a6656066f9761e5438f18b03b64148ba",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/meta",
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_overlay_global/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_overlay_global/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/meta/main.yml",
+ "name": "tests/integration/targets/nxos_overlay_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/defaults",
+ "name": "tests/integration/targets/nxos_overlay_global/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_overlay_global/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tests",
+ "name": "tests/integration/targets/nxos_overlay_global/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tests/common",
+ "name": "tests/integration/targets/nxos_overlay_global/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_overlay_global/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f5876cdb7e7636f06c87e462e6002cd2694520079c83f7103450520ff14b3b9",
+ "chksum_sha256": "10d3d02705dac2e31ab7424914f1dc74d20c5cf799141c26ac2e47ec2c68e1b4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep/tests/common/multisite.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6e9df57ea06843b691dc030437dd9190193ddb70664dd0254133417e3a8b5cdc",
+ "name": "tests/integration/targets/nxos_telemetry",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf",
+ "name": "tests/integration/targets/nxos_telemetry/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tasks",
+ "name": "tests/integration/targets/nxos_telemetry/vars/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "90ee74a6d8158f5b1a5e92b16e0e044a5fae636e2742931b542355cb71ed2da4",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_telemetry/tasks/cli.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_telemetry/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "4fe8ec88364fed45512b5f8e5687038986edd9b03ab1962afa6476aed1ab2f5a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_telemetry/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/meta",
+ "name": "tests/integration/targets/nxos_telemetry/templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/meta/main.yml",
+ "name": "tests/integration/targets/nxos_telemetry/templates/populate_config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "160c96e0f5f10f0cd536d1fcd93fad3b60a8eac159a64755ac8617a702f032b7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/defaults",
+ "name": "tests/integration/targets/nxos_telemetry/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_telemetry/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tests",
+ "name": "tests/integration/targets/nxos_telemetry/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tests/common",
+ "name": "tests/integration/targets/nxos_telemetry/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tests/common/intent.yaml",
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b49015c20a19ae1f270a3b5e69d370744549a23915fb03c23f6e84ad40f8ef74",
+ "chksum_sha256": "0f66a09ea372c1fa2c05027c0a2097da33a0912daf4355ff6cab34a77e057acd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8fb2d6249a8d9311ec67a31c87ad94a8184c7351dbf190d833bb1d7bafd4e7a",
+ "chksum_sha256": "b1ac88fec698b51b1f358987e65b5793cd9488ea085606963409a7b9d9b2b4cc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner",
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tasks",
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures/server.crt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9248b8354ba01bd1fecb1d24494a6098ee9547fa34a9d8840068abbb9a80fd92",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures/local_server.crt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "29437b8420b999138ca0947aef901533fd9affbc7ae98a39b13fb6bfa0ae29cd",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures/new_server.crt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d761bf530e78d0752a7d9f05dfd5084e106245c7cecac49ca75c0c76ba495fc1",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/replaced.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "00736c9321f59eb87d3655075f655cf2244ba30caed17b3175c3f0171c94bd9f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_telemetry/tests/common/deleted.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d231b07bd5ab608ddebbdbbdbbcbed5e4648ac7ad05dfbbf1df4e2a21c063389",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_snmp_community",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_snmp_community/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_snmp_community/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_community/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c35d2ac52b14846e9fb2bba91ef7ba5f946b1cd2e47ed10c0b30941d6f9de766",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_snmp_community/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/meta",
+ "name": "tests/integration/targets/nxos_snmp_community/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/meta/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_community/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/defaults",
+ "name": "tests/integration/targets/nxos_snmp_community/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_community/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tests",
+ "name": "tests/integration/targets/nxos_snmp_community/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tests/common",
+ "name": "tests/integration/targets/nxos_snmp_community/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_banner/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_snmp_community/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dcccaebec31acf1f9adfe71c2e5067fcb6c1416a7c63b077490beedfff6078ca",
+ "chksum_sha256": "7a31444a04409e8b5f9d58864e5405ec3ded7ec744e6e731b3ca0d22ec1a6ee0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location",
+ "name": "tests/integration/targets/nxos_aaa_server_host",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tasks",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/meta",
+ "name": "tests/integration/targets/nxos_aaa_server_host/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/meta/main.yml",
+ "name": "tests/integration/targets/nxos_aaa_server_host/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/defaults",
+ "name": "tests/integration/targets/nxos_aaa_server_host/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server_host/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tests",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tests/common",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_location/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tests/common/tacacs.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e00a1c43687d39e22e8835b039f398d7bdebbcc45399d8cd01d6ca7d4ef6e96",
+ "chksum_sha256": "b669c11c0bee9b899cb3075b446411bac7b4fd7c79396e5add683a2d5ab8b725",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface",
+ "name": "tests/integration/targets/nxos_aaa_server_host/tests/common/radius.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "10dec61711d8e2e9660491a3ac36bda92ef57264b44ce2fafd5c0512b2ecfd13",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vrf_af",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tasks",
+ "name": "tests/integration/targets/nxos_vrf_af/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vrf_af/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "8d37308cf2225418b5fab604f2b171a1aafbaa84be0c933a48ec0f153d7119ad",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vrf_af/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vrf_af/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/meta",
+ "name": "tests/integration/targets/nxos_vrf_af/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vrf_af/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/defaults",
+ "name": "tests/integration/targets/nxos_vrf_af/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vrf_af/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tests",
+ "name": "tests/integration/targets/nxos_vrf_af/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tests/common",
+ "name": "tests/integration/targets/nxos_vrf_af/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_interface/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_vrf_af/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acc020ce7729ff57e637de9f3cd1f3aae98af404b208386a852c6c804d0006fd",
+ "chksum_sha256": "df6d2b3a3d7a67ce3759464bf144b6a71d61afa348cfda6faea7707ab93acf21",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces",
+ "name": "tests/integration/targets/nxos_bgp_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_bgp_address_family/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "528513c35a708d559281b5758e5c998565f110acf10803504b3cdbf2accf1988",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e9eff3b2082e2a94e384711077fcb0a67b0410a72a0d7425367798c7fe1c4c75",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_address_family/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "918e266fd84500fc8a9d7958437789683bd47b7875371afaf4941b60f4594bc7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_bgp_address_family/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/vars",
+ "name": "tests/integration/targets/nxos_bgp_address_family/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "254903ea468078a13337c27e4ffbfe0aad84710a36cdfeca8027eba5a16b48bf",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77f5af82eeb76922d66d564b1b3b713644b5154cd160ed32686613453f6ffb08",
+ "chksum_sha256": "4ad5b3d3c16b81ea4fcf40639099f5191577136f0cf3ff4970ed630d8248ec66",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa4d4d9ae4687f27e044a7c2ba6c6ffe31491031d9cfb724c69a54e4a1248b2e",
+ "chksum_sha256": "fed0782bb5669d310ba18f8a49c51be3b1c8b16b0951f388e2280ed4a1769b50",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6563a046c325a1872e9c20c3a80658c02bc684d828e1d8c0dbfbfb23ede1da1c",
+ "chksum_sha256": "29d4c709454554edeef9e3a0e394a93ff71200d5ae09e04d855d8edcedc24549",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e19f34c70b0982c9c03cc96e3fcf57b88c084597eae9036b3e4c6bbd0860dc1",
+ "chksum_sha256": "0aefe84d03b67ec239027816ac3522c7b7cfabac56bd979c781c74cab4b01955",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "22709a3fdba9d6c94de7e53132bf6178989f0ea494f7779069913fc918ffa131",
+ "chksum_sha256": "a6f21fe892ff7e532f40e0d7ecb6bba83f26bb1765ac4177b453d85f896dc633",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/_setup.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5153443ed073b0cc035110ac96d297361e22abe390c84014d0a7dbc83ede74e4",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f4d88bbce44a9ca14508aebf3a495f3ced56b859aa74951b92922d613d0addbd",
+ "chksum_sha256": "f6189829f4b10872e4553b8c55db04c742cca9c7598d067ddb3eea05e0ab2f0d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b76239c8dec85299b177fa8a11a790932f64db53e155a64222321eff524af8b7",
+ "chksum_sha256": "0a2cbefd9d104a0f9e0370a13ed7e87feb1e74175d922f33d786e604e775a498",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "772abaa4b30e45bb3a3e0b168b9b50f62e75e5db9d1ee54915707631af8bc3a9",
+ "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b880b29ecb1b59489b0d82bfaf1ebecc35e47cc4ae7d860d616a0e1c4c17b8f6",
+ "chksum_sha256": "e76deb0ccd6d015af64ec4b54fc1da88ccf401b0aa373bda8d4cfabd23b9b1d9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df5bdd37b5f9ceac0c9cba9185dfd3c1c3d13d839447eca07daca2717b7261f0",
+ "chksum_sha256": "7121942f388ee7ab8d2484101d1a980b1d666be25f8388c6f00438f36a99736d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "947100dd1e56c9ebd17cb32e1caa66f7c5a540d038dd36f4d0482271627ffa10",
+ "chksum_sha256": "ba2591fcb1bd54b023d7d45818463cb4d7efb6c7a918133129505ab24d3c9b7d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces",
+ "name": "tests/integration/targets/nxos_igmp_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_igmp_interface/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_igmp_interface/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_igmp_interface/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_igmp_interface/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/meta",
+ "name": "tests/integration/targets/nxos_igmp_interface/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_igmp_interface/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_igmp_interface/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_igmp_interface/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/vars",
+ "name": "tests/integration/targets/nxos_igmp_interface/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_igmp_interface/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_igmp_interface/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab74ba6b07626a0723ca4ba193159ba14bc7011f11acd94ca9b5f13f817def19",
+ "chksum_sha256": "3f1f412736c7af33d14c2be45d461f6f96778bc221e6abdabdc579247c32d72d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/parsed.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c7ce5887a89b7e2b67eb2b4bae794a1e0a77c77c2c98c1fbd123bca9ecbf78f3",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7125a5bf2a88b3a35ff6aa0d06f3afd1474431c753306f1390613c7892f8217",
+ "chksum_sha256": "a06be45c7fcb5fbb624b33983db74636889ef05ed32ce0f3ba56485272e3db09",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/deleted.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1ef608de6174de4a28060255b4f85b5c00beb4bf2c8c4083a85d4132fe06341f",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "57b05a37ab33678d00ceac9fa16256aa5ce862265fb330cf2045a6f06bd40331",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "903605fdce930f7ab623ddda18bc721d07632ae35cede2608f93d0178fffff86",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35032ab6b2376156d9cc4386e0c3ae950aeea92146c89106e92963e5d9f30abd",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/gathered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e8b8f05c67f53717f791d5fa39768a09589083927c1b288181a9a2b28d3f19fd",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a07b5658b8543fad6adbc03cc3dedf24b925d6c15ab96cba887e4057adde9af4",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/rendered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "879575bd8531da7b1f7752bc36e8b1d92d654dadebf3ac9e372c71ead2cd75d0",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "febb1078f232f6b64f4600db4e81e08b293f9c1b5959937b92458ea3228ac8ce",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_install_os",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/httpapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1c9ccfa8d43479520eb4cffae188c452027a694dd86a02bd92d31b5843301ad5",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/install_os.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d774b3f202b5780184d96cdb5ac41b3db3540d09f575f0c8b0840a3509fd75bb",
+ "chksum_sha256": "a313ec546dfda89fa9e4dbd468fa0940164b42c0ebce4e544f9c6eddf9c76e63",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/delete_files.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f770f6c4e52488ddd7b0c1dd5c61ef6f7f2dfb329cbae4337e0845567fa3e113",
+ "chksum_sha256": "d1ff8909b5732150a58ea8fafa7e0f20c447a8bd7e6be23fa0d7cc5655b75510",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/clear_persistent_sockets.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2fcf3f5178da21851bf3cd61610f645b242d841bb553acc94fa88c003c8849e5",
+ "chksum_sha256": "9db583ddb82a512ca231ffb3a5ba6c672aa653ec28834c17328fd56190b4a558",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/main_os_install.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bc3281050236443452b55b83c4c3f32be752eb802c09c730e36e06ee7ef7ca31",
+ "chksum_sha256": "0ef96315e1d8b08e76b5d905207c1d85c0ddea43e8efa07b54896132e7d772e1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/install_with_kick.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf39d3606053aeface47d66b776a0f8ed281c130cf0258ac09e8a0a759076df4",
+ "chksum_sha256": "da6b8a879ff9188026a3b1aa01b53289a7517026aff1a7d1954d1bdaead6dec0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/enable_scp_server.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f94537521756c0171b570da8d8012aa47441ed01f2085c5e0c4ce1c80d6dbf3",
+ "chksum_sha256": "d1c4df45348332e927abe784ea39438ab014af521a169cc07ca76ff6d8d7c52d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/copy_kick_system_images.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71f8a18402f8b4c95d4bdcb1f4fc6542ce6b9671ea246c44fe9acac70bbd562d",
+ "chksum_sha256": "ce89afffb71cc896f467a3e32919ab4a8fdcd5ea07a9175ecd7db9a7ae6f77cb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/install_system.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7225266ebf0be4737420a6d63a5cd3c77d3b6cfde7bd0f1c7eded1ebfad70b7",
+ "chksum_sha256": "6e9f0e87988c4ded07d518b67a5118d9d6894e2fab2c86c9cd90ffcf5f56beab",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/network_cli.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29b863033e05757e739075e995a668f9bcc014a0e214ac894ffa9af68bd781ce",
+ "chksum_sha256": "4f2c908e56c100ed364248792037843ebe703124cb856054bba1a7904494edd3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a0047c8f02776d1bffe8d2ced68ea4c0f823708554f48419af32b1b7172a1a9",
+ "chksum_sha256": "45a5f0f9979ffcd5b7ef73f95de39a4c6ad35b3f7f05a61b7f6770eb17166dcb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/meta",
+ "name": "tests/integration/targets/nxos_facts",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "efb275d54a7c21fd007e1d1c8b10b65d148a9121af425a4e7fbd60abacd23fc8",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_install_os/defaults",
+ "name": "tests/integration/targets/nxos_facts/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_facts/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a79b77c93a1982c2682c177681bfcef5f3fb3421e3ef10be452fc81dc3ce8871",
+ "chksum_sha256": "90b4146d19a8448ba711a28645cf964f8251d544bf1de77f552e08a1e60a9914",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests",
+ "name": "tests/integration/targets/nxos_facts/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_facts/tasks/cli.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n7k_helsinki.yaml",
+ "name": "tests/integration/targets/nxos_facts/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c023ec2f8677adcd41bd418bb5e35d3c82dc0636d9d41c45a2074b32891214be",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n5k_730_N11.yaml",
+ "name": "tests/integration/targets/nxos_facts/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "744e3327618fd03cced8667aa2dee9cd691c358777ecb1dc741abf3c8dc18484",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n7k_atherton.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f72c407c1bedf043f2a763f9db8de4c28cb40cd3f55a99180c0b54f46bc9910c",
+ "name": "tests/integration/targets/nxos_facts/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n9k_greensboro.yaml",
+ "name": "tests/integration/targets/nxos_facts/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a09f37097e9dbedf717f83715339eb5397073ac3909bfd0cae6f874d3cd6342",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_u61a.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "147279cb89618d50352ab1e86d4e939a3f3e0594cf28aec862eb867e1bdfc307",
+ "name": "tests/integration/targets/nxos_facts/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n9k_hamilton.yaml",
+ "name": "tests/integration/targets/nxos_facts/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "669ba59d9d909f0d42d6e69ae0ef3e6e1d4c33995a4658761f0b2884113cc041",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n35_greensboro.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bf8414d8d1327c0db9056739e1b39b673abe37e939356efd1ef96ade985ccffc",
+ "name": "tests/integration/targets/nxos_facts/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_u63a.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c37931cb7c45570fe30f100fb278e26b7b94b7848d59ad8b6dbdda0670534452",
+ "name": "tests/integration/targets/nxos_facts/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_u62a.yaml",
+ "name": "tests/integration/targets/nxos_facts/tests/common/default_facts.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ee1eb9e5b4f1fea2c930c8a7c48eaf4708576b9af88c29f280f3dabcec163fb",
+ "chksum_sha256": "08c3680a5a839d8e3b53515643f3773bb1d25de670124063d41ff3e0a7fd2713",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n35_62a88.yaml",
+ "name": "tests/integration/targets/nxos_facts/tests/common/all_facts.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c041886e29ede3a58d17f0e6d281d5dd458095369130bed0e617ffb200824743",
+ "chksum_sha256": "f737c75bd85a50361bfb755799335e3bf0044448e43308c6f4c3f9186a80b65f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n5k_733_N11.yaml",
+ "name": "tests/integration/targets/nxos_facts/tests/common/interface_facts.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7efa46e2021391e0fc9114f1b132861cd3614b7ec0064425b46ed876c81ea0f1",
+ "chksum_sha256": "5c7fa4749d973f3993d84e4ae324dfefa8a62167452abcbdd66de1cb668fdba0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_greensboro.yaml",
+ "name": "tests/integration/targets/nxos_facts/tests/common/invalid_subset.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f26347411b88b126bce20f80dca17f5726cc830996a1d8bd6119b73647bea681",
+ "chksum_sha256": "d7ef14aa0d9ae451bba34e5fb2952f2034793f826a441cd042b03c34e0d78422",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n9k_greensboro_force.yaml",
+ "name": "tests/integration/targets/nxos_facts/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13e87f23b456e285da0593775bfe2b876d167922ce61a4ba47a668cb0b0d82bb",
+ "chksum_sha256": "f15ccd5a6a25f5358288fb8ddb955a1b024c33c0d1f04df3811e03aabe460c32",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade.yaml",
+ "name": "tests/integration/targets/nxos_facts/tests/common/not_hardware.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee3909a9cbc7c9f42f3b146f122a61bdc2adada843b80ee111fe024dfa5c0d33",
+ "chksum_sha256": "933c388db57e0042a75290223a48d69efad1e524bce14f28f64694d34e3475e3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp",
+ "name": "tests/integration/targets/nxos_feature",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tasks",
+ "name": "tests/integration/targets/nxos_feature/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_feature/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_feature/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_feature/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/meta",
+ "name": "tests/integration/targets/nxos_feature/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/meta/main.yml",
+ "name": "tests/integration/targets/nxos_feature/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/defaults",
+ "name": "tests/integration/targets/nxos_feature/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_feature/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tests",
+ "name": "tests/integration/targets/nxos_feature/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tests/common",
+ "name": "tests/integration/targets/nxos_feature/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_feature/tests/common/configure.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a9c0076bb37472ad1bcf0815ed5845032be77683ffc6e967f2577c29cc28f8a8",
+ "chksum_sha256": "89d6028b9ede5f5589b6786b029044d9cdf739f164d0a593ecacb564588acfe5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface",
+ "name": "tests/integration/targets/nxos_feature/tests/common/invalid.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9876fc00630984bf3f7c6a0ce6e7a75b67d4c2dc198fe91c2d4dcaef40fec816",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vsan",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tasks",
+ "name": "tests/integration/targets/nxos_vsan/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vsan/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "97b10ee26b05c01b328d79887c41479bd2bf559184fb1df4af53645db6e109ba",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vsan/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vsan/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vsan/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "f47853340600c1c9b76ed00695079b203ae45896e40acfac2f48e28f2ad98b40",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/meta",
+ "name": "tests/integration/targets/nxos_vsan/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vsan/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/defaults",
+ "name": "tests/integration/targets/nxos_vsan/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vsan/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tests",
+ "name": "tests/integration/targets/nxos_vsan/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tests/common",
+ "name": "tests/integration/targets/nxos_vsan/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_interface/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_vsan/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f1f412736c7af33d14c2be45d461f6f96778bc221e6abdabdc579247c32d72d",
+ "chksum_sha256": "cb88ac95adfc25626d2c81d7dc7d45341c006ac0490dda44c675101938ec4bcc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface",
+ "name": "tests/integration/targets/nxos_vtp_domain",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tasks",
+ "name": "tests/integration/targets/nxos_vtp_domain/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vtp_domain/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vtp_domain/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vtp_domain/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d37308cf2225418b5fab604f2b171a1aafbaa84be0c933a48ec0f153d7119ad",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/meta",
+ "name": "tests/integration/targets/nxos_vtp_domain/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vtp_domain/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/defaults",
+ "name": "tests/integration/targets/nxos_vtp_domain/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vtp_domain/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tests",
+ "name": "tests/integration/targets/nxos_vtp_domain/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tests/common",
+ "name": "tests/integration/targets/nxos_vtp_domain/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc_interface/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_vtp_domain/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "03b61d6df7de21547cead303ca85a6e4139660eb2df5e5e2af81ddfefc206951",
+ "chksum_sha256": "d5ce98107789e4105c99fef3d305b6b7099cf14b564000fe960cc6861772e803",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists",
+ "name": "tests/integration/targets/nxos_udld_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tasks",
+ "name": "tests/integration/targets/nxos_udld_interface/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_udld_interface/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_udld_interface/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_udld_interface/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/meta",
+ "name": "tests/integration/targets/nxos_udld_interface/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/meta/main.yml",
+ "name": "tests/integration/targets/nxos_udld_interface/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/defaults",
+ "name": "tests/integration/targets/nxos_udld_interface/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_udld_interface/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/vars",
+ "name": "tests/integration/targets/nxos_udld_interface/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f869515b8a1ec55c8346c13f87eff1b4bef627722f02ba89f143fec41ea52254",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests",
+ "name": "tests/integration/targets/nxos_udld_interface/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/.DS_Store",
+ "name": "tests/integration/targets/nxos_udld_interface/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af891bb95ebcc39dfd38131bf64fba6eb7bb8e84f59a428b6af5477588fbc6b6",
+ "chksum_sha256": "3cd8d7eab5171ab6ad59ea7fd737a7de66bff01398491e83f7d3904da0c5e451",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common",
+ "name": "tests/integration/targets/nxos_bfd_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/.DS_Store",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5b8c3589c79b71429938a827331899d13ac16c156374f8c45ec59068bd2d2060",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/fixtures/parsed.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f7f1e53d17acf3059197aa476e1715028d571930c8cff8eabada2af3da2e7819",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/parsed.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "53a49d08552e563da400b6c13475d9e2a020cc039eee9985b1c5a3e930fb3843",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/merged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b63842b6c47d71229bd8ad567bce61e119a9da6f44fb987d22a5348d9b67f1e7",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/deleted.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8c7dfc255e4fdc2e873e375b7b4a24b1098f63ebc6f80b18ddaf4bb4e62a9a46",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/replaced.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7bf73d5bf1f98884b2c8dedfcdc762af1c6542276fc649a04bf360fe10400cff",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/overridden.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "56599df6259a9ed56f2794f1171dba7cd811995b228ee29610d679201d7094f0",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b94ddd8f6e3aef1be0b00d88c934c971a436d3595b760e10dd388b6dacfac7d3",
+ "chksum_sha256": "ab74ba6b07626a0723ca4ba193159ba14bc7011f11acd94ca9b5f13f817def19",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/gathered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c10395ec2b03209c79681dd09690cb615195b8adaae1cdfa3fabc0fd68ef0565",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56b14e2fd373b2211c65f09454a4c39137bb1f1a06ad96f007e377d8b2b51851",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "742244e5eb090968dbbab6b3bce1d5d52404f476371439bf2404e4467534b488",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_prefix_lists/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2233813d83a2cf367f99e9348cfb36ea295efcebe61d310a4580abbab8836d5c",
- "format": 1
- },
- {
- "name": "tests/integration/targets/prepare_nxos_tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/prepare_nxos_tests/tasks",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/prepare_nxos_tests/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8d49ed36175f83090c361ca918f66c751d9a55d3e57ed4097366dda3f04434ff",
- "format": 1
- },
- {
- "name": "tests/integration/targets/prepare_nxos_tests/tasks/prepare.yml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6dc1745aa3d09766233e4522d935898ecfdf07caf0b7d2a3ab7e20e40bbdc4a5",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/prepare_nxos_tests/meta",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/prepare_nxos_tests/meta/main.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "851860fe2ed1258710d7fcf89ac5b7d8c3c87755e89ca56fdd79bea022eb2b92",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tasks",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
+ "chksum_sha256": "febb1078f232f6b64f4600db4e81e08b293f9c1b5959937b92458ea3228ac8ce",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "a07b5658b8543fad6adbc03cc3dedf24b925d6c15ab96cba887e4057adde9af4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "c7ce5887a89b7e2b67eb2b4bae794a1e0a77c77c2c98c1fbd123bca9ecbf78f3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/gathered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e8b8f05c67f53717f791d5fa39768a09589083927c1b288181a9a2b28d3f19fd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b7125a5bf2a88b3a35ff6aa0d06f3afd1474431c753306f1390613c7892f8217",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/replaced.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "57b05a37ab33678d00ceac9fa16256aa5ce862265fb330cf2045a6f06bd40331",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "35032ab6b2376156d9cc4386e0c3ae950aeea92146c89106e92963e5d9f30abd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/deleted.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1ef608de6174de4a28060255b4f85b5c00beb4bf2c8c4083a85d4132fe06341f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/rendered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "879575bd8531da7b1f7752bc36e8b1d92d654dadebf3ac9e372c71ead2cd75d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rollback/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_bfd_interfaces/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "394570f3e6f1edf503b6782dd89ffa589499a778fc5336c8c610b01d473a7bb8",
+ "chksum_sha256": "903605fdce930f7ab623ddda18bc721d07632ae35cede2608f93d0178fffff86",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system",
+ "name": "tests/integration/targets/nxos_snmp_host",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tasks",
+ "name": "tests/integration/targets/nxos_snmp_host/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/meta",
+ "name": "tests/integration/targets/nxos_snmp_host/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/meta/main.yml",
+ "name": "tests/integration/targets/nxos_snmp_host/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/defaults",
+ "name": "tests/integration/targets/nxos_snmp_host/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_system/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tests/nxapi",
+ "name": "tests/integration/targets/nxos_snmp_host/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tests/nxapi/set_domain_name.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c3c9fe4f41326a05c53fb03a14ab15d9aca59b0cd5276ca5e28b7c7b7e24e1a5",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_system/tests/nxapi/set_domain_list.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4f58661081e4b16a1b96f55d176abaa163d8eeb2d530a96420da6d15de4bb0ac",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_system/tests/nxapi/set_name_servers.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bbee414220275ff9a13a61bb40c3f4aa127cf63ebb07221abf01f8995b22b252",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_system/tests/cli",
+ "name": "tests/integration/targets/nxos_snmp_host/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tests/cli/set_domain_name.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fa3289fe04fe5f8a8259ca69cee34e575e690589e3c4b5a1e8ade6a724c81246",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_system/tests/cli/set_domain_list.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v1_trap.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "494d4bd1f0afc278a5422adec3684980a21807274f3887cc64743335104fc95f",
+ "chksum_sha256": "aa6a8b70ec63534db6d01bad4ccb22de07cf1d7c0c65b148a02474964d1a4e72",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tests/cli/set_name_servers.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v2_inform.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "634ccd651c5a3aff45209b99a311b488422098e96df88ca31e1f1f6e5fcc0e2f",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_system/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "54b2c813dda14d97a1eef23c8a3b3ff2bdc194b60ec4cf15d7f6e0068802afa5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tests/common/set_hostname.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v3_trap.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f80d47b8482b052d64571ffd74d3e08219c8180e0bdfd38785cbdcb80b4dc01",
+ "chksum_sha256": "1e10b5c9c01f39edff0f76171c81720d458aca5775d0f5f97f20bd2e6c21f57f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_system/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v3_inform.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e76a24280e763ab2423dbd4062e39dd0c4b54131b361582bc4de92183c9fe32f",
+ "chksum_sha256": "0156bedc287881d6d9d767fd9720c5821708b9ad3c9c217c5c4b1df0a72a6891",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature",
+ "name": "tests/integration/targets/nxos_hsrp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tasks",
+ "name": "tests/integration/targets/nxos_hsrp/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_hsrp/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_hsrp/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_hsrp/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/meta",
+ "name": "tests/integration/targets/nxos_hsrp/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/meta/main.yml",
+ "name": "tests/integration/targets/nxos_hsrp/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/defaults",
+ "name": "tests/integration/targets/nxos_hsrp/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_hsrp/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tests",
+ "name": "tests/integration/targets/nxos_hsrp/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tests/common",
+ "name": "tests/integration/targets/nxos_hsrp/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_feature/tests/common/invalid.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9876fc00630984bf3f7c6a0ce6e7a75b67d4c2dc198fe91c2d4dcaef40fec816",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_feature/tests/common/configure.yaml",
+ "name": "tests/integration/targets/nxos_hsrp/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "89d6028b9ede5f5589b6786b029044d9cdf739f164d0a593ecacb564588acfe5",
+ "chksum_sha256": "a9c0076bb37472ad1bcf0815ed5845032be77683ffc6e967f2577c29cc28f8a8",
"format": 1
},
{
@@ -8240,52 +8555,52 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vrf_interface/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vrf_interface/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vrf_interface/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/meta",
+ "name": "tests/integration/targets/nxos_vrf_interface/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vrf_interface/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/defaults",
+ "name": "tests/integration/targets/nxos_vrf_interface/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_interface/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vrf_interface/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
@@ -8310,1179 +8625,1200 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family",
+ "name": "tests/integration/targets/nxos_acl_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tasks",
+ "name": "tests/integration/targets/nxos_acl_interfaces/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "6a97fa118eda919bd0c4d9b45fbacbb04ef221e6d264073eda5612e7b57228c9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_acl_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "918e266fd84500fc8a9d7958437789683bd47b7875371afaf4941b60f4594bc7",
+ "chksum_sha256": "c57c3ff656bb8daf9febb3c7a7bff934b952d1ccb83604d93e23ccfc3ea88724",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/meta",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7be6dc51b2f6fb5601e0d7f3498991d34f0910206fe6ed537217c8eba86a3cf5",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_acl_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/meta/main.yml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/defaults",
+ "name": "tests/integration/targets/nxos_acl_interfaces/templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/templates/populate_config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "6dd477232ff3b52e84f25f81712afb11874700566d9c52d923c012ad1d770cf0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/vars",
+ "name": "tests/integration/targets/nxos_acl_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/vars/main.yml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "528513c35a708d559281b5758e5c998565f110acf10803504b3cdbf2accf1988",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/populate_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ff18091eda8eab8839ce8b146409c956a16da2af0d78204879e2a0d844835c46",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/deleted.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6189829f4b10872e4553b8c55db04c742cca9c7598d067ddb3eea05e0ab2f0d",
+ "chksum_sha256": "3c80a977c86fd76c2bbf06ab9878e383170afc5cba3f336e2d4f8ca2de3500fa",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/gathered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29d4c709454554edeef9e3a0e394a93ff71200d5ae09e04d855d8edcedc24549",
+ "chksum_sha256": "698995afdf69835fb4ba540353a3e4f213d7954d21073c0cc754b1787cd05231",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/rtt.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6f21fe892ff7e532f40e0d7ecb6bba83f26bb1765ac4177b453d85f896dc633",
+ "chksum_sha256": "832178be08db3abba5b2d95b0c57c79f8a4d5b132f1f01f9dfe786c2a3b5cb31",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/replaced.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e76deb0ccd6d015af64ec4b54fc1da88ccf401b0aa373bda8d4cfabd23b9b1d9",
+ "chksum_sha256": "155aa10a15be7eea0049eb7035993970089c76ee53bcd152999c6055e8586aa1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/parsed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a2cbefd9d104a0f9e0370a13ed7e87feb1e74175d922f33d786e604e775a498",
+ "chksum_sha256": "ac628812931497d1846e45df02f6c9fc4ba4b8d61fba9a559793b8b217e08484",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/merged.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba2591fcb1bd54b023d7d45818463cb4d7efb6c7a918133129505ab24d3c9b7d",
+ "chksum_sha256": "8eae4ac211ea7861be4677520f4962046223747c3f400208cad64b245998d0df",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/rendered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
+ "chksum_sha256": "9ab726e93c94df77ac9671e7dfa0b8c89104ea4333e25b56f04830f9bda81942",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/overridden.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0aefe84d03b67ec239027816ac3522c7b7cfabac56bd979c781c74cab4b01955",
+ "chksum_sha256": "603d8a6ba49dd33261b0c7a152e55df28f2c3e10f09e9ec0f3eebde8d6c5c7ad",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/empty_config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fed0782bb5669d310ba18f8a49c51be3b1c8b16b0951f388e2280ed4a1769b50",
+ "chksum_sha256": "46c8e8809399bcd1bdfe1242e346ac1c340668e32fdbfcba0cfe6a4ad9ff6f4d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7121942f388ee7ab8d2484101d1a980b1d666be25f8388c6f00438f36a99736d",
+ "chksum_sha256": "4b97c05adf25534f7d6e4ac7a67e541969eb7e1060b6360f956c5dcae418e5f7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_address_family/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/populate_acl.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ad5b3d3c16b81ea4fcf40639099f5191577136f0cf3ff4970ed630d8248ec66",
+ "chksum_sha256": "d44542c7c93f7edcef7326941f254a969cc90fca962286a98a576a1979064cd0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces",
+ "name": "tests/integration/targets/nxos_lacp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_lacp/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_lacp/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "d990ef6f460be1be57ee33ce95b7fd66d05175e93e1e2716cce708d420a5d466",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
+ "name": "tests/integration/targets/nxos_lacp/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lacp/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_lacp/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_lacp/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_lacp/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/vars",
+ "name": "tests/integration/targets/nxos_lacp/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_lacp/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adb8824233c88430d3737a53e2cf1a939138f4f56b1aee31f730895793eb72b2",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests",
+ "name": "tests/integration/targets/nxos_lacp/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_lacp/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e5be532a772bae7c777ccff5a0bd7873f6d67fd9cfc19ee1148d34381a7e6a7",
+ "chksum_sha256": "a10bac016859ecead883b3a70c3eb13f70718ad9cdd2796b92294bb73bbee29d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7619a726b4e64d1bd47dadee8ec75a16d3b2bdc44b6d82d82bd56e38ada9c86b",
+ "chksum_sha256": "d05b5fe9f03352e3b8f7827b2dcf181df0e3c72fe956593e4da447c9d4f8869c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62ca1f227ad278d7c4554e39d7368212b7004426b9c43bcc2bf1456f27e6ab98",
+ "chksum_sha256": "82e18104dcb23f633ded1a694fa4ecbf1df4e14a09620d260473341a8920698a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7fba7cbbd3ce552b4dbe95b0929f2264f20fca017fd8d49dcbfcf704234fee03",
+ "chksum_sha256": "dc7919875ac197ca437f1f27d4ace434975e7b0ebc59f0b9e76779b7c13300bd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f9f2f61c2ed959758be49bd3a688b41c8a536d857d83e57cded8dbf1b1a72b2",
+ "chksum_sha256": "d7f405af0ba053d9e613e791943bc8133e6dbf994db877e876b7dbfcfead68cb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ae69e8c5e516df2c81671c0360b711f53a426f7e2a4b1f7a24b422257f304a4",
+ "chksum_sha256": "546c7e8f53d5167a0c5fb320ccd6b5e7db098829ce47de467b2e20856dd402c2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea101ba2a00ebe731b075245fdac99f96d8536a8dbd4b1f3f41aa0bf3863713e",
+ "chksum_sha256": "6c963e6f41c7e38e88e7f37dd8025ee75cd3970358d2568dc84a25bf0833c221",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c290a62babd394680c978ea09017b67507be0351b51e46c746a874320362e8ec",
+ "chksum_sha256": "5e8603749c8d553899ed39b13b3acd136e23d7bd3b09098fc55d615557712b8e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3e63a62df68b89bd619c1bd5632735df0c735ed4fa2cd9201bd1ba31faf87e7",
+ "chksum_sha256": "b9a08d012138636068569151cf4f74205676ea00cc6fccdc2e2627447110156c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_lacp/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8f3f3ddd194db7f9b22519fac0e45a4c3529b3beaf85c2f15780daaa1cac4a0",
+ "chksum_sha256": "c6245af2941f9a747ea7774bdad73686d4d453173680ea526ab20b52dba2f283",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af",
+ "name": "tests/integration/targets/nxos_lldp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/tasks",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "823ad4608bc4788ff160348fe4e6be75f267c9f2a55bf2043875eb2d76084f39",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d37308cf2225418b5fab604f2b171a1aafbaa84be0c933a48ec0f153d7119ad",
+ "chksum_sha256": "c57c3ff656bb8daf9febb3c7a7bff934b952d1ccb83604d93e23ccfc3ea88724",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/meta/main.yml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/defaults",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vrf_af/tests/common",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrf_af/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df6d2b3a3d7a67ce3759464bf144b6a71d61afa348cfda6faea7707ab93acf21",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tasks",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/deleted.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "8695d048ec2623b31a194a213ef80da6add19468e0f90a4ca97d5b8257979529",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/gathered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "d559bcc46e44d101f2fa43a97d785b4476d6053486294e513a7cafbaf67b6bae",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/rtt.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_user/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "23d20e20e06201967d2847f91f879c7741ac48c9e08bc1b9b3f63ef8c111ad29",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/meta/main.yml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/replaced.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "2c101fa94fc443f0335f4fb75c440dc5c0fe47363d3463d0eccc2c340b39e6cf",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/parsed.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e63afbeefadaaf0dcc22637aaeebfc27980cb517c68aabda8338c05976255320",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/merged.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "2399f3e2619a3b82bcf30be4b9a2be7c660d0f53450d25a46b87fd8ee4abb25b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/rendered.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "05a26c146d796dee090a8d9931d7f38b5cf3ea87682c7f50f73597a6cff1a4cb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/overridden.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b4403fc48624d3990b76b682c8b17543e81f43f152f1afe573bdde1722248ea1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_user/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc18bce3cf7f8a257784f7d4e432ef00b535bfe73b057d661756f364bcc9d176",
+ "chksum_sha256": "303bc0ba56d6f13d152987aaf4b6953d88e4461e8958c7db467d52d247a63dce",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni",
+ "name": "tests/integration/targets/nxos_rpm",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tasks",
+ "name": "tests/integration/targets/nxos_rpm/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_rpm/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_rpm/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_rpm/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/meta",
+ "name": "tests/integration/targets/nxos_rpm/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/meta/main.yml",
+ "name": "tests/integration/targets/nxos_rpm/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/defaults",
+ "name": "tests/integration/targets/nxos_rpm/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_rpm/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tests",
+ "name": "tests/integration/targets/nxos_rpm/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tests/common",
+ "name": "tests/integration/targets/nxos_rpm/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_vni/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_rpm/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "feb636a1cf337d091e427267dc9639d6fd2a5a481a9fd73cc830618990dc12af",
+ "chksum_sha256": "70447866be2605ad11b7abd82e4ae0000992bbdb0d143e90b86c0b4905ba1d19",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version",
+ "name": "tests/integration/targets/nxos_smoke",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tasks",
+ "name": "tests/integration/targets/nxos_smoke/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
+ "chksum_sha256": "62a31764ef528947cdefe8eb150e20bdaba422d40bac12a3609948ca1575cf8f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "7277ec56762ca20a3617b1da87631d8dde35556a4c004e7c82974e49fab50d92",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "849f2a2746300bf950bfe382b3f2b80990bbee773cbdbd2d1e2a897a9ed1e261",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/meta",
+ "name": "tests/integration/targets/nxos_smoke/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/meta/main.yml",
+ "name": "tests/integration/targets/nxos_smoke/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/defaults",
+ "name": "tests/integration/targets/nxos_smoke/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_smoke/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tests",
+ "name": "tests/integration/targets/nxos_smoke/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tests/common",
+ "name": "tests/integration/targets/nxos_smoke/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_version/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tests/common/common_utils.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e33039720ba0e6ccc73fe5d38aaa4c2d1284afbb89058f6fe98c1a09d2a30638",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_ospfv2",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_ospfv2/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "a60e645271b0b349228daf7eac3948d5651a32abd8cb578005202d4c1ed80236",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tests/common/misc_tests.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "ca259adfd9eabd17072e03cae98011aa82cf2e1b6d8fc88fb0ada2ebaca18d7a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tests/common/common_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7826463ba77fb2f0f2bb32a0e02ce314ce3a06762e189598970cc26dd17828b1",
+ "chksum_sha256": "dea32a9f41f8c2367181bfb2fc2821e4304e393ac781dd02e19d6cb01c6273ae",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_smoke/tests/common/caching.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "2db51e2d9c925d2773d4bcf45ee241c75cc02547793d07cad590f1d9657eb36c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/meta",
+ "name": "tests/integration/targets/nxos_smoke/tests/cli",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/meta/main.yml",
+ "name": "tests/integration/targets/nxos_smoke/tests/cli/misc_tests.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "dedfb27511bd8fde863a0aa430bdd23272cd766d0c23694764d01852c9586073",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/defaults",
+ "name": "tests/integration/targets/nxos_nxapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/defaults/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_ospfv2/vars",
+ "name": "tests/integration/targets/nxos_nxapi/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "61311d9ddd2174a10efead34de8526412e4c4d438600cf5a3ff852ec8692c4f6",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_ospfv2/tests",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_https.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4f99598e0507cfb4e0e625a4075deabb91c5de5e1c0e5531846b9467a6faeaa9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_http.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ad11ae9e094b6bc2d4286b05967979551104857b314aa8af9afee18cdec2d4a",
+ "chksum_sha256": "ed763ee2de2c730ea2783225dc132f5c28f22ea272213a36b9b3bb4471492e08",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_https_http.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1738278980bd3d3426805bcd85053668078a6b992627d9119043bc29161ea8e",
+ "chksum_sha256": "79ee713da7a48ad0b493f6e07690138be3eb4d04d890126aec488e2242e7a442",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_https_http_ports.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ddfff25fc88c57f108a7ed89d11d6df5c2233b27eac626512dc63709325fcf85",
+ "chksum_sha256": "3cef9d277e717f6956a9d5136862c72616d866eb04d8f8c0da44c0777739c92c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/deleted.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "69ef5f767607840c90b3cb9b80ca6eae206df8ec8a48b850dc47adbcf4b11aa5",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_https.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a00b8609854d6a88cc33f870793ec8972cd5d81ae8dcfed33fbbca19938ce69",
+ "chksum_sha256": "bfae052c7df36024a59df89f314dc7e03019ad1649f4132cb34bb2a8f50fe903",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_http.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de58bef72adb70191cc8d212367d3332359130dbaf399807962787d1df375915",
+ "chksum_sha256": "db4865169c3dc97c5ca5e24083550b243b685cfaeaaba16333b1f55754dd265a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_https_http.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5bb202379b75e7aaffb216dd2e42b234acf1ab4fc0679f470fc01c67b309b27",
+ "chksum_sha256": "b055af4a4779d412640cee585ef493e52c3937782e095218164d2719d5223d29",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_https_http_ports.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8b689385d369d30980492e63bb16f13719d27452d738494899f85539130b2b5",
+ "chksum_sha256": "04e8b8ae8842f40c777282e979ea824c7c886f3e8ae1d9e2710ff466d1fc2ae0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_https.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77ed312700fb0471e6e108949f8b983d6c418f0187b84b569551ce3c8205ce0c",
+ "chksum_sha256": "d826b7762b3ab39ad7d294ca171272d1ff3213c35f8040c3657228f259d331d5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv2/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_http.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a3077084e8ed39aae3bc10055bbff711e7d40af65e514c7ecd99bffaad96bfc",
+ "chksum_sha256": "35222b27a54a701db0042dd5fb6069d48d9e27a70821a1707a4aad0d19bc227f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_https_http.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "97089a108cc1eac9afae028c6da9bf6f7017afcfc3f1a85776cfe6af31f2c6af",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_https_http_ports.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5d5a7bc39789b15cf95dd49af7ab1636f3a4589a2be3ee5ca341dbb4414eb95b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "7022927c86a437ced8d1f9058b25af83150ab1d87223183b1c61bbf4c0d6523f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/meta",
+ "name": "tests/integration/targets/nxos_nxapi/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/meta/main.yml",
+ "name": "tests/integration/targets/nxos_nxapi/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/defaults",
+ "name": "tests/integration/targets/nxos_nxapi/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tests",
+ "name": "tests/integration/targets/nxos_nxapi/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tests/common",
+ "name": "tests/integration/targets/nxos_nxapi/tests/nxapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bfd_global/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tests/nxapi/badtransport.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "302caab98adbe8644dc6652426b6e9300fc4d8fbc1a479ee7aeaf54d6a23da59",
+ "chksum_sha256": "28a2dfdf7ba13f8dd757c8d9095797a0b91437b5bfd9d9c1c8bfe65508e15901",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces",
+ "name": "tests/integration/targets/nxos_nxapi/tests/cli",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_nxapi/tests/cli/configure.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b4853e02af108065809baebdb1f8e957bc48184f9a24e3510826d28f734d17a6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tests/cli/disable.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "94889efb465f8b55120a28becc23705cd11dc1bc9aebc91204105ba9f8207302",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tests/cli/nxapi_ssl.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
+ "chksum_sha256": "dae84b45affca730b7c9158216bf18dc9ab9c92bbce4701b6ba881f192ae47f4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_nxapi/tests/cli/enable.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "abdd34478d7db89533c9d3d8cc0fb2f26a5324ab85212086aa1fecc220e7d1c8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/meta",
+ "name": "tests/integration/targets/nxos_lacp_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_lacp_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "adb8824233c88430d3737a53e2cf1a939138f4f56b1aee31f730895793eb72b2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/vars",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_lacp_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80f38875da077b4094678a1b0d5fa1dbb55a586037c541d4daec3b3b8adca944",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/parsed.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3c30f97c6ae729fd7b3d88a3e7b6671c761ea07061ead87487979c013835b99c",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6d84fa1c22c5e2d37b017a18ed59903fab43314717b4d632a46200d745318c0",
+ "chksum_sha256": "d8f3f3ddd194db7f9b22519fac0e45a4c3529b3beaf85c2f15780daaa1cac4a0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c58134a7c574f9d19d3012c238b2be0d4983d6d856a11e9f6ee089c3df5ec6b4",
+ "chksum_sha256": "c290a62babd394680c978ea09017b67507be0351b51e46c746a874320362e8ec",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e1b2b7044a678c22cccca3dcc5c677fccf40a509764b7ecf5c8dfeecf4781d8",
+ "chksum_sha256": "3e5be532a772bae7c777ccff5a0bd7873f6d67fd9cfc19ee1148d34381a7e6a7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aeb7c65908e1aee314df87a9afe8c2f4dc1e1b21e41ca8a03f2991597dfd0685",
+ "chksum_sha256": "ea101ba2a00ebe731b075245fdac99f96d8536a8dbd4b1f3f41aa0bf3863713e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3644170fb7b4322fa4d272cd5d5e351480018b485a065fa7131fb3f6f5e2f353",
+ "chksum_sha256": "7619a726b4e64d1bd47dadee8ec75a16d3b2bdc44b6d82d82bd56e38ada9c86b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/rtt.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "148ed76eb41828c2417439c0b443097a6f2f9d1fa88a4abad48bd8e62de30c4e",
+ "chksum_sha256": "7fba7cbbd3ce552b4dbe95b0929f2264f20fca017fd8d49dcbfcf704234fee03",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c1ffa21ffa7a57ae8edbb0a72820f4ec4e7947ba1ee5a1548473e81efec8724",
+ "chksum_sha256": "5ae69e8c5e516df2c81671c0360b711f53a426f7e2a4b1f7a24b422257f304a4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2a0ff901006be504934a514a2f337a699091eef58ab871fb32e426af1d6f6fa",
+ "chksum_sha256": "62ca1f227ad278d7c4554e39d7368212b7004426b9c43bcc2bf1456f27e6ab98",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f018dcb8d9a42d8930b0d10f62ed931546d66f2998aa42ce95f655d299dbb35b",
+ "chksum_sha256": "d3e63a62df68b89bd619c1bd5632735df0c735ed4fa2cd9201bd1ba31faf87e7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_lacp_interfaces/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9322c7dd05de65b972eb0bb02c699c4368fc68bb97dd155a05ed98dd304448f",
+ "chksum_sha256": "0f9f2f61c2ed959758be49bd3a688b41c8a536d857d83e57cded8dbf1b1a72b2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot",
+ "name": "tests/integration/targets/nxos_user",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tasks",
+ "name": "tests/integration/targets/nxos_user/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_user/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_user/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_user/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/meta",
+ "name": "tests/integration/targets/nxos_user/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/meta/main.yml",
+ "name": "tests/integration/targets/nxos_user/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/defaults",
+ "name": "tests/integration/targets/nxos_user/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_user/meta/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tests",
+ "name": "tests/integration/targets/nxos_user/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tests/common",
+ "name": "tests/integration/targets/nxos_user/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snapshot/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_user/tests/common/auth.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "171022870f501d3dcb8c73978ee34038cb5e65c32786bbabc9f638c9ae04be4d",
+ "chksum_sha256": "db7a57ed2e804cae9aea360123df898848cc2e772fcab84d843aac01ca0b6b61",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_user/tests/common/sanity.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bd39b32f02401670bfda98c14839d0da9c76ae8446c151dc7f10131c9531ded6",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_user/tests/common/basic.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ba9e801ff2ad1ae02085f83753b4f2e51565888a0fa5822ea0f671ee8a54229f",
"format": 1
},
{
@@ -9500,52 +9836,52 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_command/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_command/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_command/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/meta",
+ "name": "tests/integration/targets/nxos_command/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/meta/main.yml",
+ "name": "tests/integration/targets/nxos_command/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/defaults",
+ "name": "tests/integration/targets/nxos_command/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_command/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
@@ -9577,34 +9913,6 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/cli",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_command/tests/cli/contains.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ac46a36b0b073fcb691e33c0602443a2bef31fac538dd68d2c9d78edce98f1d4",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_command/tests/cli/sanity.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "41b22e35084ef1e82fe5fbe12cb7eec0591d256af60a6acdea4c26138a4b4b97",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_command/tests/cli/cli_command.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c8dc21d11e5f13aacc82d1a8872ad62b00551edff6284a36a02eea970b73a9d2",
- "format": 1
- },
- {
"name": "tests/integration/targets/nxos_command/tests/common",
"ftype": "dir",
"chksum_type": null,
@@ -9612,17 +9920,17 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/output.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/notequal.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "afc13fd26c10d41c29bd89f7b874d6c06a2636cbb434069506a9f30cb9da907a",
+ "chksum_sha256": "b12ffe1eb63e9b2854d5c509145508207bb471df7e881218c014a7fab0f41bb5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/bad_operator.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/not_comparison_operator.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a929284d3616cf3d0ce228bff6a002588943bcf4a85f5403b519a1e46950a3b9",
+ "chksum_sha256": "30280c74bc0f1e1a85cafed4144bf12ba196ca4e86db30161f64b26d84951012",
"format": 1
},
{
@@ -9633,10 +9941,10 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/equal.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/invalid.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78ffc32b0c96adc09c9321e1d12fbb5c91908419e857e25e4a4ac7839cc0610c",
+ "chksum_sha256": "f8d3206de3a49dfbf253bca6957a915f6fb10c3f0411f102de10841ff6094d90",
"format": 1
},
{
@@ -9647,31 +9955,24 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/invalid.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f8d3206de3a49dfbf253bca6957a915f6fb10c3f0411f102de10841ff6094d90",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_command/tests/common/greaterthan.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/equal.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3dde04a006eb5b001ed0f80589e92b4fdaa9a939dd4ae904cd63af915bd938f5",
+ "chksum_sha256": "78ffc32b0c96adc09c9321e1d12fbb5c91908419e857e25e4a4ac7839cc0610c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/not_comparison_operator.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/output.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30280c74bc0f1e1a85cafed4144bf12ba196ca4e86db30161f64b26d84951012",
+ "chksum_sha256": "afc13fd26c10d41c29bd89f7b874d6c06a2636cbb434069506a9f30cb9da907a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/timeout.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/greaterthan.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c859bb27d5fc01d07c00c6ce3125c74666663ccd7a9b08d9cf1a1b7385115f0e",
+ "chksum_sha256": "3dde04a006eb5b001ed0f80589e92b4fdaa9a939dd4ae904cd63af915bd938f5",
"format": 1
},
{
@@ -9682,6842 +9983,6541 @@
"format": 1
},
{
- "name": "tests/integration/targets/nxos_command/tests/common/notequal.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b12ffe1eb63e9b2854d5c509145508207bb471df7e881218c014a7fab0f41bb5",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/timeout.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "c859bb27d5fc01d07c00c6ce3125c74666663ccd7a9b08d9cf1a1b7385115f0e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_domain/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/common/bad_operator.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a929284d3616cf3d0ce228bff6a002588943bcf4a85f5403b519a1e46950a3b9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_domain/meta",
+ "name": "tests/integration/targets/nxos_command/tests/cli",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_domain/meta/main.yml",
+ "name": "tests/integration/targets/nxos_command/tests/cli/contains.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "ac46a36b0b073fcb691e33c0602443a2bef31fac538dd68d2c9d78edce98f1d4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_domain/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/cli/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_domain/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "41b22e35084ef1e82fe5fbe12cb7eec0591d256af60a6acdea4c26138a4b4b97",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_domain/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_command/tests/cli/cli_command.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5ce98107789e4105c99fef3d305b6b7099cf14b564000fe960cc6861772e803",
+ "chksum_sha256": "c8dc21d11e5f13aacc82d1a8872ad62b00551edff6284a36a02eea970b73a9d2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface",
+ "name": "tests/integration/targets/nxos_snapshot",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tasks",
+ "name": "tests/integration/targets/nxos_snapshot/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_snapshot/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_snapshot/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_snapshot/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/meta",
+ "name": "tests/integration/targets/nxos_snapshot/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/meta/main.yml",
+ "name": "tests/integration/targets/nxos_snapshot/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/defaults",
+ "name": "tests/integration/targets/nxos_snapshot/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_snapshot/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tests",
+ "name": "tests/integration/targets/nxos_snapshot/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tests/common",
+ "name": "tests/integration/targets/nxos_snapshot/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld_interface/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_snapshot/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3cd8d7eab5171ab6ad59ea7fd737a7de66bff01398491e83f7d3904da0c5e451",
+ "chksum_sha256": "171022870f501d3dcb8c73978ee34038cb5e65c32786bbabc9f638c9ae04be4d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server",
+ "name": "tests/integration/targets/nxos_snmp_contact",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tasks",
+ "name": "tests/integration/targets/nxos_snmp_contact/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_snmp_contact/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_contact/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_snmp_contact/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/meta",
+ "name": "tests/integration/targets/nxos_snmp_contact/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/meta/main.yml",
+ "name": "tests/integration/targets/nxos_snmp_contact/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/defaults",
+ "name": "tests/integration/targets/nxos_snmp_contact/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_contact/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tests",
+ "name": "tests/integration/targets/nxos_snmp_contact/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tests/common",
+ "name": "tests/integration/targets/nxos_snmp_contact/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server/tests/common/tacacs.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "309cf43dacad631085bfede9a52ee2e7bdb4763488463bfb96dde11184bbe0fc",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_aaa_server/tests/common/radius.yaml",
+ "name": "tests/integration/targets/nxos_snmp_contact/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08fe50e81078371eff5d9482f9b7721eb6fd1288ccc88f11b8b42f94270df4ef",
+ "chksum_sha256": "21c997106ed806dbad5a3cdba01f5e201161020b6498a5d92ac003b702fa579a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces",
+ "name": "tests/integration/targets/nxos_bgp_templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_bgp_templates/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "320a4fb71862d3232cd18205318c3a960d55b06bb1fd7fa7afd49ff122c04605",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2418a2ada8700c06969324411c9be328b4d4ad913c6eb93fb824dff96900ff8f",
+ "name": "tests/integration/targets/nxos_bgp_templates/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_templates/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "918e266fd84500fc8a9d7958437789683bd47b7875371afaf4941b60f4594bc7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_bgp_templates/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/vars",
+ "name": "tests/integration/targets/nxos_bgp_templates/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_templates/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "24e5a4f2e7330b185775678aa68b4645786e83617b90d5a32fb4ff4b77b345ad",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e2abf21e437435b96d94e3a1c900ac1284a39092f3b0b1d19e8d5d7544a8424",
+ "chksum_sha256": "0f7753a98712bcde737fce34f1a0af8d5a655bba229b306685e5ae6f8c26257d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7555ee3d0322003861d6c2d810ec490027b8d792e8800c7e9a46fab869b1c92",
+ "chksum_sha256": "37be696dfcec49e17974100e3ea8181b391da9cdad82f8667aa2f5df0ff8aaca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1676a953c0e740cc280155ea76e6368fd5628556cdc0acae19e1834b40721681",
+ "chksum_sha256": "a1ffab53eece71058929fba7b157214cd3aeea388b6bf8aae94e146b03e5eb25",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef91b776e8c9627a55cc9759549aaa7482e0f7da4385079443b8e8f1d8d40b8c",
+ "chksum_sha256": "2326d2afd434fadf9b8d2ef427e3918e6a5947eb39d58c4fc1988df599c58510",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/multisite.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b0b3ebe6d2d7db422ec1a046818ce06ddf83ef9f32a9178691d732b0824b4da1",
+ "chksum_sha256": "09f60138ddf3d5c8a6782f9f5f937091c37a58ea72c0304384034becdf946850",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/overridden.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c670083adeabaa18419eea52a2297aeea81b1a5635718b95aebf3d1955f78385",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9dbc4efa7c9e6843b987aa6286ba0de836129c75b66249ab846fee2012fc2394",
+ "chksum_sha256": "e921d4fa370873c1b237ba6b3ee2d3c0a70520b08f5097e960152137a57ed123",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/rtt.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d660e4e5717467104076599f04b44aca40bbe45b79eb5e5554cb1ff390feb405",
+ "chksum_sha256": "d307cdaf9d9ecb2c711093d4f9879deb595a843ed2d17b6ae1036d918b22cf2f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c8ea6a0bd93a5ae1092362c88b1eb6ed13a086968841b5d68654677741714dc",
+ "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "858bc39fea4f9cedff070a248b5119124852eeb9237388ef833c414510dbf24c",
+ "chksum_sha256": "425c2ab3097932fcb46a43b4d5b8bcaeed500348e760e77a5b6f1d4e4fca9eea",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f274f91771b34f2246ee4fa58bae6acd7fafb0ea1f1efb04ffb362cfd1678de3",
+ "chksum_sha256": "56e1c2649908ef81810457cc96bc917c530d7096e907b516389ef7cdbe3c9ba3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_bgp_templates/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a640fd87c6717d5876b64797f7d0092fd6673272fc5a674bde3cf5ed78e8b79",
+ "chksum_sha256": "1197072828ecfc518dd9ac00389c239db31f18e8d566fbe5a37fd80ff479f104",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global",
+ "name": "tests/integration/targets/nxos_vlans",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tasks",
+ "name": "tests/integration/targets/nxos_vlans/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_overlay_global/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vlans/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_overlay_global/tasks/platform",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "fd3723c027909807b1d6ef73222d5bfc96f98c680a9f86f3e89c0366bdd22116",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tasks/platform/n7k",
+ "name": "tests/integration/targets/nxos_vlans/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tasks/platform/n7k/cleanup.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fe5dccac28d950d4f93db2d5dd8d2d53f974b51a0c07a9371165a7fd9b20585",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tasks/platform/n7k/setup.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f41e716a6ee3d52141e15b1cf8d8a62a6656066f9761e5438f18b03b64148ba",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/meta",
+ "name": "tests/integration/targets/nxos_vlans/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vlans/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/defaults",
+ "name": "tests/integration/targets/nxos_vlans/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vlans/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_overlay_global/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "9d00aff551522b3a0a61d5ba2b38944de36985bd3353dc67987d76d650ccbea3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tests/common",
+ "name": "tests/integration/targets/nxos_vlans/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_overlay_global/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_vlans/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10d3d02705dac2e31ab7424914f1dc74d20c5cf799141c26ac2e47ec2c68e1b4",
+ "chksum_sha256": "ec4fa30fc4a7b9e002d1c7b3932286ace72ba36e4f532e2cc79f49d07e0794c3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global",
+ "name": "tests/integration/targets/nxos_vlans/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tasks",
+ "name": "tests/integration/targets/nxos_vlans/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "0fef283f3a139da74d869920dfc1b12b127b20e505c060d30b893d457ea980bc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05625917ea950a0b177ebb25c692ba215e247b2f17a15309a86430a5871f5672",
+ "chksum_sha256": "69cfb989d7be0b980e8fc3eb9d33154c66e2cde5df1a9f8b17b1ea0894879212",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_bgp_global/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "fe16be9cd5316c7526ea489bad81d693f40c2f2351c01f60a4074802f208164e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_bgp_global/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "87888ec93930b5630192fd9f60d931700a43910cfe6a3d092b04099ac88eb612",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_bgp_global/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "65b9276bee903c86af57ed5a6afee1d84461bf6c7bf3a3cf95aa8369e0c7df57",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/vars/main.yml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5664e0d1a4c541d53f188ff988aafdf4f1802b97ff08c51fc128574ae38ce63b",
+ "chksum_sha256": "6f9623efb3904d185246e618c1cb0726553a5556941e5c2e91b7c9025de0e89c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_vlans/tests/common/replaced.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1f1efb47f41f82d4be94cca66acf388791ed71b63f1f3897f769f7e4110f1be6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_vlans/tests/common/_remove_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d0b9b581e3e45b412907eecbc4ba5ad1632595e8cb58f397365287ce091bee0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_vlans/tests/common/deleted.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e2964f468cfe2c92dbee0504a71845636d76ff4e079dd07ad7f8656cba328221",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52efea9c016168ecd2fcd4012748fdd81f2753c77b4cd322e5628278cf36a227",
+ "chksum_sha256": "5745013bf8f6e23f513cea1bb8878e4ca80ba61a331a5a5d2a7097673b786ed2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_vlans/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ad0de011701b4094ceef2432d855f96f3441d92e71e69dcd959cb05e86ba9ef",
+ "chksum_sha256": "c70b2b7642402aeef4772a4a8cc7b7abde208bfedd65fbdcf95231b2ab5a74db",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/merged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "35205173a9b4c2cdbf46f89706c24e10a7db1671892f25e7598957b6cd5424c2",
+ "name": "tests/integration/targets/nxos_evpn_vni",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/deleted.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bf494eb0d9629da4381465012ca762ab8ec270ac5b9270f7b2206f6e50eb2df6",
+ "name": "tests/integration/targets/nxos_evpn_vni/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_evpn_vni/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73a38ab49a6c4f73b8e5d6b5ff301846e40a8d55b18fe61d0647b6c54cac5688",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_evpn_vni/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6dc32ce625064b12bbb4bf361b4b9a56df6792af52c16d6bcda508de05bea6f",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/deleted_af.yaml",
+ "name": "tests/integration/targets/nxos_evpn_vni/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef14541859231625e08fe2b53fbf4b8f6e64bcea02d4bb987038b915725abeab",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/_remove_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
+ "name": "tests/integration/targets/nxos_evpn_vni/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_evpn_vni/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d96baf5dc0a51ac1a75147a0591b62dd0d0429b6f48ed375f6e41e73d43a21ed",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/empty_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b605ca79e5cc2cdb7f664f1aec94711594f2f485adfc760cbcea01b01e2b25f9",
+ "name": "tests/integration/targets/nxos_evpn_vni/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/replaced_af.yaml",
+ "name": "tests/integration/targets/nxos_evpn_vni/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf31728c79fcf0b446ffd43fe431bc399513af80a6e363931a9f16fc611bf921",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/rendered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ff3eb25121425c6e970238566923eca969ba8f67f60e6fa8ef5c1516ca5757bd",
+ "name": "tests/integration/targets/nxos_evpn_vni/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/purged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "43b3e83056042eb41ac0a74dc11eca0c07bec178548807f56d79e5d211f75360",
+ "name": "tests/integration/targets/nxos_evpn_vni/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_global/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_evpn_vni/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acd7172ee28b6a88b6f7a8a95548988e2c6ed27804288daec18676481280cb08",
+ "chksum_sha256": "feb636a1cf337d091e427267dc9639d6fd2a5a481a9fd73cc830618990dc12af",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy",
+ "name": "tests/integration/targets/nxos_vtp_version",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tasks",
+ "name": "tests/integration/targets/nxos_vtp_version/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vtp_version/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vtp_version/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6bf1e5e97ebdee9d3da5227017b650cf6de337ae6ce68cee739acf39a32ec81",
+ "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vtp_version/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/fixtures",
+ "name": "tests/integration/targets/nxos_vtp_version/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/fixtures/data.cfg",
+ "name": "tests/integration/targets/nxos_vtp_version/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef43c9adc8aeec6910b4373ba0d9fbf28048ed53ec631ab9fd6fa8ad00a8a3a0",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/meta",
+ "name": "tests/integration/targets/nxos_vtp_version/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vtp_version/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/defaults",
+ "name": "tests/integration/targets/nxos_vtp_version/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vtp_version/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vtp_version/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "e33039720ba0e6ccc73fe5d38aaa4c2d1284afbb89058f6fe98c1a09d2a30638",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests",
+ "name": "tests/integration/targets/nxos_vrrp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests/nxapi",
+ "name": "tests/integration/targets/nxos_vrrp/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests/nxapi/badtransport.yaml",
+ "name": "tests/integration/targets/nxos_vrrp/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21427f41984508b9839c2f35a72d67fd2cf22fcd68d619118a5b5515d2268668",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests/cli",
+ "name": "tests/integration/targets/nxos_vrrp/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vrrp/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_vrrp/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests/cli/negative.yaml",
+ "name": "tests/integration/targets/nxos_vrrp/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a918d9c6433c61aea340e1c0c94ef62d409893bfe7fd1858386da4422c5e208",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests/cli/sanity.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "25a997ce10f97d08a348b04eb8e123dd4b6b7a6aaa035e5f2c45ef99f985f94c",
+ "name": "tests/integration/targets/nxos_vrrp/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_file_copy/tests/cli/input_validation.yaml",
+ "name": "tests/integration/targets/nxos_vrrp/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86642c6a7b15cbac85986a618de3eea6d741cb9819a4b7896570ab07e6033a8c",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3",
+ "name": "tests/integration/targets/nxos_vrrp/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tasks",
+ "name": "tests/integration/targets/nxos_vrrp/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vrrp/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "a7766c42cc22f1bb7cc7fa396978f2db1df0d1c744e3241f371f55912f3cd610",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dd759468c5cc871fb7737ff3d8d736515fc4a391944ba48e311bd8e9cf4015bf",
+ "name": "tests/integration/targets/nxos_ntp_global",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_ntp_global/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "2c37457ec8af62c1519304e787a4e2ca6be4c156f8cf7bc2293106f38aaa2957",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/meta",
+ "name": "tests/integration/targets/nxos_ntp_global/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/meta/main.yml",
+ "name": "tests/integration/targets/nxos_ntp_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_ntp_global/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/vars",
+ "name": "tests/integration/targets/nxos_ntp_global/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/vars/main.yml",
+ "name": "tests/integration/targets/nxos_ntp_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a58f6902557b2603f0814004741aba525b7a0950e5a11aa66b37c4955c16f39e",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests",
+ "name": "tests/integration/targets/nxos_ntp_global/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common",
+ "name": "tests/integration/targets/nxos_ntp_global/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_ntp_global/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "98780d78ffa7990bb331aa22eb22a241c513c360e240824d45a7bf1de5be2ecc",
+ "chksum_sha256": "08e70b7bf07e9f6621e1389be04c654a23a2f975aa49fc4aabc7ecb593bc5cfa",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1668f415528b25d80c75d0b7d1429ad1f2998999e48a9c1103c194437b8d53c6",
+ "chksum_sha256": "f836d5207479f29e09d0669ff7c58dd23bde729eda06dce4649cdc3e60be53a9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3351bbfbf0d6d47b6d974a6520e49d2c2e37fe82c909425bc2b35fbd2a1c3619",
+ "chksum_sha256": "9bcba4c729094da821fbbcce077fce21c817d1f560df1dc97ede6c6ff9cc4565",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd6501583c6d35a54f84e67f7a3c089c3d327af513f2843227d760ec81b09f26",
+ "chksum_sha256": "060aecc4cf5c2f2c7d5785001728b97f6913dd3900aa2dcb0c37436509d9d92e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c47c9c8159c406510ed5c37a849752430af675f5f74b2dee2d4361c5a4647ada",
+ "chksum_sha256": "159d739b15a17d8f62cbb4c9dfcd2fa3b7ede51f02fa6e9ac2bde196643b60fd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d97a097d36258f90bedf8acd4609bba2c4840b38c56f3cd6517e9fc394600f61",
+ "chksum_sha256": "f7771e974db6ccc58fcb0a69ff17e00e208d57c09d96c7b8dfe8f5ecae8c879b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e1cddebab71b07ac4b62257a57ee91bb813c2d2a627f21fd1ddc848c5c71242",
+ "chksum_sha256": "6b602368f7b9b4aba2e9a3e88b73abf107d658eb335fe4ce1a4cd4c0dca3a4c6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52b75fde8bd0aeae9fa656b94f228d66f550904e311166923cfbd6ad3504ec3b",
+ "chksum_sha256": "2a828ee760fb0dfc1ce2906e7498216901248da978df3b7a90ce842c1041a157",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbe1d547c87e7fce94633ac9d2659cf83c9f705225abd47e025752589adf5206",
+ "chksum_sha256": "e4c2d740371ba62f8ce9a298147acad6ef15cb6b851a8ea32e7682934e5c4294",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fdb04181004019fce7d2aa47598c99683fa0c17efda91a40679e647ae1080926",
+ "chksum_sha256": "eb766ac5e824552d743eb36cfc738ad9abd7985d945bdea3d22a0d9130f839ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ospfv3/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_ntp_global/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cef0853f045225f3cfd3ac7b2362e20d12372005f8a175b973e60fa22c30e3b0",
+ "chksum_sha256": "1581f91ffd8c1c23bb4676eab5c8b75109777725856083ae7e8f3f2e0993b3be",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni",
+ "name": "tests/integration/targets/nxos_gir_profile_management",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fa74ca9eef193774b0257522c64344fa2c8851398dc0acd94e161192cc0015d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/meta",
+ "name": "tests/integration/targets/nxos_gir_profile_management/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/meta/main.yml",
+ "name": "tests/integration/targets/nxos_gir_profile_management/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/defaults",
+ "name": "tests/integration/targets/nxos_gir_profile_management/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_gir_profile_management/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests/common",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests/common/sanity.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "81ba6bdc57dba2f0adc0b293862db415cb6aea44ae3793cbb0c755ea01c073a9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vxlan_vtep_vni/tests/common/multisite.yaml",
+ "name": "tests/integration/targets/nxos_gir_profile_management/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7127fe8481d55865ca7043a159fb4c4a6e02423899c7143bbb548aff0f3728e4",
+ "chksum_sha256": "d0237be40a79468b006b7bc99f59a3b8b9227b61b780adcd94fef59ea697e6fb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address",
+ "name": "tests/integration/targets/nxos_config",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/tasks",
+ "name": "tests/integration/targets/nxos_config/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_config/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_config/tasks/redirection.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "a4f04d0bc1463bb36c212620e786d8ce8d18681fd03f641c1a37351b6d0dfac8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_config/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_pim_rp_address/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/meta/main.yml",
+ "name": "tests/integration/targets/nxos_config/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_pim_rp_address/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "a069f9e3d9d905e5a1d7e2b316e284944bc38cddc390470831451fb4672fbde2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_config/tasks/cli_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_pim_rp_address/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "244797b68a9ec0f73b3fecca59e21469c0bc8126f36a5b7ffe2fe20ebdf22f19",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/tests/common",
+ "name": "tests/integration/targets/nxos_config/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim_rp_address/tests/common/configure.yaml",
+ "name": "tests/integration/targets/nxos_config/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5860ed215f27147987c62a8541f96530427cfadc64b6ce13e849a6785275c8ca",
+ "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm",
+ "name": "tests/integration/targets/nxos_config/templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tasks",
+ "name": "tests/integration/targets/nxos_config/templates/basic",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/basic/configblock.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
+ "chksum_sha256": "f8cfadc33b5527be5bd82a279cbcc769a882135ce815cb6d42177cb51d325e4e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/basic/configexact2.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "b5884db197b0f69276768f1f6e5e8b4214ead32869dab90d76fae353425a4b28",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/basic/intended_running_config",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_rpm/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "b478553dd335d5085c847219eae58a8daac0902372a5e8fbbd133da3850332b2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/meta/main.yml",
+ "name": "tests/integration/targets/nxos_config/templates/basic/configstrict1.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "da30d9c2fbe9c9473f957d26568b2cb42baa620e261efbce7d604535ec532cf5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/templates/basic/setupexact.j2",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b358f8f54eb293ce2eac2ead8f8bc2e3aae7383c2981c9c192530d6bcbff3d6f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/basic/setupstrict.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "b358f8f54eb293ce2eac2ead8f8bc2e3aae7383c2981c9c192530d6bcbff3d6f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/templates/basic/base_running_config",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "85528fb2bdea51956f0b6d9a7cb07c7bc75e498553f9b5301b904db35ed0f929",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/templates/basic/acl_config.j2",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f8cfadc33b5527be5bd82a279cbcc769a882135ce815cb6d42177cb51d325e4e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_rpm/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/basic/config.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70447866be2605ad11b7abd82e4ae0000992bbdb0d143e90b86c0b4905ba1d19",
+ "chksum_sha256": "d62440fb734cd4db3dd623ab8e77214ff7f82e44c7dfca249b20dcd83d83a9af",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/templates/basic/configexact1.j2",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f8cfadc33b5527be5bd82a279cbcc769a882135ce815cb6d42177cb51d325e4e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/tasks",
+ "name": "tests/integration/targets/nxos_config/templates/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/defaults/config.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "522484a8eb35e809c1ebd645a73ca12f02b09c6cf237f1db1ed577088c149c51",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/defaults/test.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "356d51cb39cbc26b408ca0b3aeacc83634ee2f6e061811a3a8fafb63955ffa4d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_config/templates/config.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "c6d65e4fbe26ecfdf18d8eba2eac04aecfdd65e59e6b6db1c2aa2d29ed5a5847",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/meta",
+ "name": "tests/integration/targets/nxos_config/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/meta/main.yml",
+ "name": "tests/integration/targets/nxos_config/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/defaults",
+ "name": "tests/integration/targets/nxos_config/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/defaults/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_gir/tests",
+ "name": "tests/integration/targets/nxos_config/tests/redirection",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/tests/common",
+ "name": "tests/integration/targets/nxos_config/tests/redirection/cli",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/redirection/cli/shortname.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13582ac62b7614778bc62c5a6f93a36a920e8664812b73a2b0903af35c2fe1da",
+ "chksum_sha256": "00bb1bd085c826013b04f4eaf7f74ff0de45ec8330aa386f4cd0263d468df9f1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp",
+ "name": "tests/integration/targets/nxos_config/tests/nxapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/nxapi/sublevel_exact.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "694ab236d9c21462b423d507666dfe90caceca03e8a2f8fa3759f9f51b6d06e0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/nxapi/sublevel.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "dc08a002e9e00d36590098bfe308ca825954a3e43755650cb5809684512433cb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/nxapi/toplevel_after.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "7b8ad2836c7ec7360561cb0f4319bbe95feae70b134c5dac9deb2ba18a17638e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/nxapi/multilevel.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "ff4c8dc408f5b4296300c4d7d983fcbdf19ee303fc95fe44ddb3d72a8b2aa08e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/nxapi/sublevel_strict.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "acaded71377fcc8c8b288d527caf6fc777c8d0f50079c8b409d1007f821cc9d3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/meta/main.yml",
+ "name": "tests/integration/targets/nxos_config/tests/nxapi/toplevel_before.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "1d150adeed3e6e02c3adc3871d29b6a153d504ab7a861cd401b02c480008ba5b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/defaults",
+ "name": "tests/integration/targets/nxos_config/tests/cli_config",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_basic.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "7b3703086ba8415b259a2c8353dc03d9295173092ba3dc15170ae9718b10626e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_backup.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "02aa5aa3478284c141cb3741eb86bf919a560bc25b5a911f453e05ce25737bd7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_exact_match.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5eb01810e15ac6b6941a7b9b79ca5ca3be016f16ee7f534eea9be0115fb53a46",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_block_replace.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3f3aa30512b55460b5e6c82887f66d7642c56b86c7eabd5801148a945ce2ef4",
+ "chksum_sha256": "9058efd9127aa10094d23e72b8d7b8e0426d2707a113d7be2d539c15a1b9d229",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_strict_match.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6329b5bab571f5218aeff32c54936052c2188466f842b4364d80dc9c6d800450",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tasks",
+ "name": "tests/integration/targets/nxos_config/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/common/src_match_none.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "f485a516ee779d29f9c4bef94249e47c885743481d409fedaad14bd49ecbafe0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/common/save.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
+ "chksum_sha256": "bc462c07044762a41b8eddbe29be65aa51e6d6cea05d1e0630555e8cb4a9e446",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/common/src_invalid.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_ntp_global/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "000c3c67b80eab4be77858bbb29844ec788342c63d8570c1db6166ca5a9539b5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/meta/main.yml",
+ "name": "tests/integration/targets/nxos_config/tests/common/defaults.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "db431603f9429be57048a8789d1f0af71d6cdae47a26878a608c082768a1244d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/common/src_basic.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eccd369dee1af74d2b9f2888107f438904ae0c58705d800ad95e913965991608",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/common/toplevel_nonidempotent.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "48fbcde0f38c5b06062ccb0f93d50a5b7a7554edf66ee7ed4779d9d4ce524ac1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/common/sanity.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "55efec005b7a96d5dbd8739cfce99c359f047d1aacbee36ad6bc04bcf8283f26",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/vars/main.yml",
+ "name": "tests/integration/targets/nxos_config/tests/common/backup.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c37457ec8af62c1519304e787a4e2ca6be4c156f8cf7bc2293106f38aaa2957",
+ "chksum_sha256": "098fa1e9566ae02a5c089a7ab7a235ce53f1bc78b431e01595556f623f718c82",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/common/toplevel.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2bc1969eeb4a97775fd72dd6e5109d499c24cf5735c2c54bf75dcf7d49690bea",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_config/tests/common/sublevel_block.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "45748bd39c32ff0675cebcff8ed674384c0fcbd51797f2110edfbef816394801",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_config/tests/cli",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_config/tests/cli/sublevel_exact.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f7771e974db6ccc58fcb0a69ff17e00e208d57c09d96c7b8dfe8f5ecae8c879b",
+ "chksum_sha256": "b755c791e3a1b329e7da5896f70e10b01bdc5237bbe2e80316a4515d6f8d8bd8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/diff.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9bcba4c729094da821fbbcce077fce21c817d1f560df1dc97ede6c6ff9cc4565",
+ "chksum_sha256": "d890138033f9812260dfd304fb4888d77e0c78b71f0afb2f8f97ce71f11a2491",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/sublevel.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "159d739b15a17d8f62cbb4c9dfcd2fa3b7ede51f02fa6e9ac2bde196643b60fd",
+ "chksum_sha256": "937eb38ce64f2eaaab0f4c8f86a336d6fa022a96d75e285baaf7c85f45fb0b09",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/toplevel_after.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4c2d740371ba62f8ce9a298147acad6ef15cb6b851a8ea32e7682934e5c4294",
+ "chksum_sha256": "7792c748c17cf012bd7462753459725b9b2d4e8418a3edb3f4f76eb84f8b5914",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/multilevel.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b602368f7b9b4aba2e9a3e88b73abf107d658eb335fe4ce1a4cd4c0dca3a4c6",
+ "chksum_sha256": "b91edced261b04cd778d86a702fd71159bcb7e37050b2b59d66f5fbd0485ca6e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/sublevel_strict.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1581f91ffd8c1c23bb4676eab5c8b75109777725856083ae7e8f3f2e0993b3be",
+ "chksum_sha256": "057bf9a3f64bb1abb24a5dc5a530553401a32850185af62cb278c85228ac6465",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/toplevel_before.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a828ee760fb0dfc1ce2906e7498216901248da978df3b7a90ce842c1041a157",
+ "chksum_sha256": "da70c4dcf111e0de17abc736ab6b4cfef245ca202d235dd1d9e611b40bbab8b2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_config/tests/cli/replace_block.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "060aecc4cf5c2f2c7d5785001728b97f6913dd3900aa2dcb0c37436509d9d92e",
+ "chksum_sha256": "e0eef2496cf4af6b50d6274c1b6ef30dbe7955e60ab6695c4cc0660ee5e8a51e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/empty_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f836d5207479f29e09d0669ff7c58dd23bde729eda06dce4649cdc3e60be53a9",
+ "name": "tests/integration/targets/nxos_install_os",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/rendered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "eb766ac5e824552d743eb36cfc738ad9abd7985d945bdea3d22a0d9130f839ca",
+ "name": "tests/integration/targets/nxos_install_os/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_ntp_global/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/httpapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08e70b7bf07e9f6621e1389be04c654a23a2f975aa49fc4aabc7ecb593bc5cfa",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vrrp",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "1c9ccfa8d43479520eb4cffae188c452027a694dd86a02bd92d31b5843301ad5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tasks",
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/enable_scp_server.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "2f94537521756c0171b570da8d8012aa47441ed01f2085c5e0c4ce1c80d6dbf3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/main_os_install.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "bc3281050236443452b55b83c4c3f32be752eb802c09c730e36e06ee7ef7ca31",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/delete_files.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vrrp/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f770f6c4e52488ddd7b0c1dd5c61ef6f7f2dfb329cbae4337e0845567fa3e113",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/meta/main.yml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/copy_kick_system_images.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "71f8a18402f8b4c95d4bdcb1f4fc6542ce6b9671ea246c44fe9acac70bbd562d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/install_with_kick.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bf39d3606053aeface47d66b776a0f8ed281c130cf0258ac09e8a0a759076df4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/clear_persistent_sockets.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "2fcf3f5178da21851bf3cd61610f645b242d841bb553acc94fa88c003c8849e5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/install_os.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d774b3f202b5780184d96cdb5ac41b3db3540d09f575f0c8b0840a3509fd75bb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_install_os/tasks/upgrade/install_system.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d7225266ebf0be4737420a6d63a5cd3c77d3b6cfde7bd0f1c7eded1ebfad70b7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vrrp/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7766c42cc22f1bb7cc7fa396978f2db1df0d1c744e3241f371f55912f3cd610",
+ "chksum_sha256": "5a0047c8f02776d1bffe8d2ced68ea4c0f823708554f48419af32b1b7172a1a9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_install_os/tasks/network_cli.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "29b863033e05757e739075e995a668f9bcc014a0e214ac894ffa9af68bd781ce",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tasks",
+ "name": "tests/integration/targets/nxos_install_os/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_install_os/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "a79b77c93a1982c2682c177681bfcef5f3fb3421e3ef10be452fc81dc3ce8871",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
+ "name": "tests/integration/targets/nxos_install_os/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_install_os/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "efb275d54a7c21fd007e1d1c8b10b65d148a9121af425a4e7fbd60abacd23fc8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/meta",
+ "name": "tests/integration/targets/nxos_install_os/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_lacp/defaults",
+ "name": "tests/integration/targets/nxos_install_os/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n5k_733_N11.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_lacp/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "7efa46e2021391e0fc9114f1b132861cd3614b7ec0064425b46ed876c81ea0f1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/vars/main.yml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_u63a.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d990ef6f460be1be57ee33ce95b7fd66d05175e93e1e2716cce708d420a5d466",
+ "chksum_sha256": "c37931cb7c45570fe30f100fb278e26b7b94b7848d59ad8b6dbdda0670534452",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n9k_greensboro_force.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "13e87f23b456e285da0593775bfe2b876d167922ce61a4ba47a668cb0b0d82bb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n9k_greensboro.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7a09f37097e9dbedf717f83715339eb5397073ac3909bfd0cae6f874d3cd6342",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n7k_atherton.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e18104dcb23f633ded1a694fa4ecbf1df4e14a09620d260473341a8920698a",
+ "chksum_sha256": "f72c407c1bedf043f2a763f9db8de4c28cb40cd3f55a99180c0b54f46bc9910c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7f405af0ba053d9e613e791943bc8133e6dbf994db877e876b7dbfcfead68cb",
+ "chksum_sha256": "ee3909a9cbc7c9f42f3b146f122a61bdc2adada843b80ee111fe024dfa5c0d33",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n35_greensboro.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e8603749c8d553899ed39b13b3acd136e23d7bd3b09098fc55d615557712b8e",
+ "chksum_sha256": "bf8414d8d1327c0db9056739e1b39b673abe37e939356efd1ef96ade985ccffc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n9k_hamilton.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "546c7e8f53d5167a0c5fb320ccd6b5e7db098829ce47de467b2e20856dd402c2",
+ "chksum_sha256": "669ba59d9d909f0d42d6e69ae0ef3e6e1d4c33995a4658761f0b2884113cc041",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_u61a.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c6245af2941f9a747ea7774bdad73686d4d453173680ea526ab20b52dba2f283",
+ "chksum_sha256": "147279cb89618d50352ab1e86d4e939a3f3e0594cf28aec862eb867e1bdfc307",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n35_62a88.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c963e6f41c7e38e88e7f37dd8025ee75cd3970358d2568dc84a25bf0833c221",
+ "chksum_sha256": "c041886e29ede3a58d17f0e6d281d5dd458095369130bed0e617ffb200824743",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_u62a.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc7919875ac197ca437f1f27d4ace434975e7b0ebc59f0b9e76779b7c13300bd",
+ "chksum_sha256": "9ee1eb9e5b4f1fea2c930c8a7c48eaf4708576b9af88c29f280f3dabcec163fb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n3172_greensboro.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d05b5fe9f03352e3b8f7827b2dcf181df0e3c72fe956593e4da447c9d4f8869c",
+ "chksum_sha256": "f26347411b88b126bce20f80dca17f5726cc830996a1d8bd6119b73647bea681",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n7k_helsinki.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9a08d012138636068569151cf4f74205676ea00cc6fccdc2e2627447110156c",
+ "chksum_sha256": "c023ec2f8677adcd41bd418bb5e35d3c82dc0636d9d41c45a2074b32891214be",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lacp/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_install_os/tests/common/upgrade_n5k_730_N11.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a10bac016859ecead883b3a70c3eb13f70718ad9cdd2796b92294bb73bbee29d",
+ "chksum_sha256": "744e3327618fd03cced8667aa2dee9cd691c358777ecb1dc741abf3c8dc18484",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping",
+ "name": "tests/integration/targets/nxos_gir",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tasks",
+ "name": "tests/integration/targets/nxos_gir/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_gir/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_gir/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_gir/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/meta",
+ "name": "tests/integration/targets/nxos_gir/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/meta/main.yml",
+ "name": "tests/integration/targets/nxos_gir/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/defaults",
+ "name": "tests/integration/targets/nxos_gir/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_gir/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tests",
+ "name": "tests/integration/targets/nxos_gir/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tests/common",
+ "name": "tests/integration/targets/nxos_gir/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_igmp_snooping/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_gir/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4a73bc840e125f41372313d58b550980ef7c1642f87ba9bb8ee85d8d026ff42",
+ "chksum_sha256": "13582ac62b7614778bc62c5a6f93a36a920e8664812b73a2b0903af35c2fe1da",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management",
+ "name": "tests/integration/targets/nxos_bgp_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/tasks",
+ "name": "tests/integration/targets/nxos_bgp_global/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "5664e0d1a4c541d53f188ff988aafdf4f1802b97ff08c51fc128574ae38ce63b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "name": "tests/integration/targets/nxos_bgp_global/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_global/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_global/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "05625917ea950a0b177ebb25c692ba215e247b2f17a15309a86430a5871f5672",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/defaults",
+ "name": "tests/integration/targets/nxos_bgp_global/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_gir_profile_management/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/tests/common",
+ "name": "tests/integration/targets/nxos_bgp_global/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_gir_profile_management/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d0237be40a79468b006b7bc99f59a3b8b9227b61b780adcd94fef59ea697e6fb",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc",
+ "name": "tests/integration/targets/nxos_bgp_global/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tasks",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/replaced_af.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "bf31728c79fcf0b446ffd43fe431bc399513af80a6e363931a9f16fc611bf921",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "acd7172ee28b6a88b6f7a8a95548988e2c6ed27804288daec18676481280cb08",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vpc/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "b605ca79e5cc2cdb7f664f1aec94711594f2f485adfc760cbcea01b01e2b25f9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vpc/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "5ad0de011701b4094ceef2432d855f96f3441d92e71e69dcd959cb05e86ba9ef",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/deleted_af.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "ef14541859231625e08fe2b53fbf4b8f6e64bcea02d4bb987038b915725abeab",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/purged.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "43b3e83056042eb41ac0a74dc11eca0c07bec178548807f56d79e5d211f75360",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/gathered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d96baf5dc0a51ac1a75147a0591b62dd0d0429b6f48ed375f6e41e73d43a21ed",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vpc/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2df57c902caed23947093482590483584784db8850c51e6efe545b314895eaed",
+ "chksum_sha256": "e6dc32ce625064b12bbb4bf361b4b9a56df6792af52c16d6bcda508de05bea6f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/merged.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "35205173a9b4c2cdbf46f89706c24e10a7db1671892f25e7598957b6cd5424c2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tasks",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "52efea9c016168ecd2fcd4012748fdd81f2753c77b4cd322e5628278cf36a227",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "73a38ab49a6c4f73b8e5d6b5ff301846e40a8d55b18fe61d0647b6c54cac5688",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_password/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vtp_password/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "bf494eb0d9629da4381465012ca762ab8ec270ac5b9270f7b2206f6e50eb2df6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bgp_global/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "ff3eb25121425c6e970238566923eca969ba8f67f60e6fa8ef5c1516ca5757bd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tests",
+ "name": "tests/integration/targets/nxos_vxlan_vtep",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tests/common",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vtp_password/tests/common/sanity.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3146095b01817bec07dd315c90e066bd664e16b5438d62d337a7ebfc0c48884e",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_lldp_interfaces",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform/n7k",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform/n7k/cleanup.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
+ "chksum_sha256": "6fe5dccac28d950d4f93db2d5dd8d2d53f974b51a0c07a9371165a7fd9b20585",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/platform/n7k/setup.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
+ "chksum_sha256": "b569f1de86c96331c196b935a58a716b5314f80ebd5b9130aeaa56f44c45f047",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c57c3ff656bb8daf9febb3c7a7bff934b952d1ccb83604d93e23ccfc3ea88724",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/vars",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "823ad4608bc4788ff160348fe4e6be75f267c9f2a55bf2043875eb2d76084f39",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/overridden.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b4403fc48624d3990b76b682c8b17543e81f43f152f1afe573bdde1722248ea1",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/rtt.yml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tests/common/multisite.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23d20e20e06201967d2847f91f879c7741ac48c9e08bc1b9b3f63ef8c111ad29",
+ "chksum_sha256": "6e9df57ea06843b691dc030437dd9190193ddb70664dd0254133417e3a8b5cdc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/rendered.yml",
+ "name": "tests/integration/targets/nxos_vxlan_vtep/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05a26c146d796dee090a8d9931d7f38b5cf3ea87682c7f50f73597a6cff1a4cb",
+ "chksum_sha256": "6f5876cdb7e7636f06c87e462e6002cd2694520079c83f7103450520ff14b3b9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/remove_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "303bc0ba56d6f13d152987aaf4b6953d88e4461e8958c7db467d52d247a63dce",
+ "name": "tests/integration/targets/prepare_nxos_tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/gathered.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d559bcc46e44d101f2fa43a97d785b4476d6053486294e513a7cafbaf67b6bae",
+ "name": "tests/integration/targets/prepare_nxos_tests/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/merged.yml",
+ "name": "tests/integration/targets/prepare_nxos_tests/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2399f3e2619a3b82bcf30be4b9a2be7c660d0f53450d25a46b87fd8ee4abb25b",
+ "chksum_sha256": "8d49ed36175f83090c361ca918f66c751d9a55d3e57ed4097366dda3f04434ff",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/replaced.yml",
+ "name": "tests/integration/targets/prepare_nxos_tests/tasks/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c101fa94fc443f0335f4fb75c440dc5c0fe47363d3463d0eccc2c340b39e6cf",
+ "chksum_sha256": "6dc1745aa3d09766233e4522d935898ecfdf07caf0b7d2a3ab7e20e40bbdc4a5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/deleted.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8695d048ec2623b31a194a213ef80da6add19468e0f90a4ca97d5b8257979529",
+ "name": "tests/integration/targets/prepare_nxos_tests/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_interfaces/tests/common/parsed.yml",
+ "name": "tests/integration/targets/prepare_nxos_tests/meta/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e63afbeefadaaf0dcc22637aaeebfc27980cb517c68aabda8338c05976255320",
+ "chksum_sha256": "851860fe2ed1258710d7fcf89ac5b7d8c3c87755e89ca56fdd79bea022eb2b92",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global",
+ "name": "tests/integration/targets/nxos_snmp_traps",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tasks",
+ "name": "tests/integration/targets/nxos_snmp_traps/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_snmp_traps/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_traps/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_snmp_traps/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/meta",
+ "name": "tests/integration/targets/nxos_snmp_traps/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/meta/main.yml",
+ "name": "tests/integration/targets/nxos_snmp_traps/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/defaults",
+ "name": "tests/integration/targets/nxos_snmp_traps/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_snmp_traps/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tests",
+ "name": "tests/integration/targets/nxos_snmp_traps/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tests/common",
+ "name": "tests/integration/targets/nxos_snmp_traps/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_evpn_global/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_snmp_traps/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9190f2d0dae47c3b29ebe68f89ec9e3b88f0af56af489b15624cf99c86415d9f",
+ "chksum_sha256": "05f0dab9e4ec9f1e7c2800b2027b212b293b7f81a6a5a5645ded7d94b353e09f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates",
+ "name": "tests/integration/targets/nxos_ospfv3",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tasks",
+ "name": "tests/integration/targets/nxos_ospfv3/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "a58f6902557b2603f0814004741aba525b7a0950e5a11aa66b37c4955c16f39e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "918e266fd84500fc8a9d7958437789683bd47b7875371afaf4941b60f4594bc7",
+ "name": "tests/integration/targets/nxos_ospfv3/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_ospfv3/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/meta/main.yml",
+ "name": "tests/integration/targets/nxos_ospfv3/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "dd759468c5cc871fb7737ff3d8d736515fc4a391944ba48e311bd8e9cf4015bf",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/defaults",
+ "name": "tests/integration/targets/nxos_ospfv3/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/vars",
+ "name": "tests/integration/targets/nxos_ospfv3/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/vars/main.yml",
+ "name": "tests/integration/targets/nxos_ospfv3/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "320a4fb71862d3232cd18205318c3a960d55b06bb1fd7fa7afd49ff122c04605",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests",
+ "name": "tests/integration/targets/nxos_ospfv3/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/_populate_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cef0853f045225f3cfd3ac7b2362e20d12372005f8a175b973e60fa22c30e3b0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/fixtures/parsed.cfg",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e921d4fa370873c1b237ba6b3ee2d3c0a70520b08f5097e960152137a57ed123",
+ "chksum_sha256": "fbe1d547c87e7fce94633ac9d2659cf83c9f705225abd47e025752589adf5206",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1ffab53eece71058929fba7b157214cd3aeea388b6bf8aae94e146b03e5eb25",
+ "chksum_sha256": "1668f415528b25d80c75d0b7d1429ad1f2998999e48a9c1103c194437b8d53c6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "09f60138ddf3d5c8a6782f9f5f937091c37a58ea72c0304384034becdf946850",
+ "chksum_sha256": "52b75fde8bd0aeae9fa656b94f228d66f550904e311166923cfbd6ad3504ec3b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "425c2ab3097932fcb46a43b4d5b8bcaeed500348e760e77a5b6f1d4e4fca9eea",
+ "chksum_sha256": "3351bbfbf0d6d47b6d974a6520e49d2c2e37fe82c909425bc2b35fbd2a1c3619",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/replaced.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d307cdaf9d9ecb2c711093d4f9879deb595a843ed2d17b6ae1036d918b22cf2f",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1197072828ecfc518dd9ac00389c239db31f18e8d566fbe5a37fd80ff479f104",
+ "chksum_sha256": "98780d78ffa7990bb331aa22eb22a241c513c360e240824d45a7bf1de5be2ecc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
+ "chksum_sha256": "c47c9c8159c406510ed5c37a849752430af675f5f74b2dee2d4361c5a4647ada",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2326d2afd434fadf9b8d2ef427e3918e6a5947eb39d58c4fc1988df599c58510",
+ "chksum_sha256": "6e1cddebab71b07ac4b62257a57ee91bb813c2d2a627f21fd1ddc848c5c71242",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "37be696dfcec49e17974100e3ea8181b391da9cdad82f8667aa2f5df0ff8aaca",
+ "chksum_sha256": "dd6501583c6d35a54f84e67f7a3c089c3d327af513f2843227d760ec81b09f26",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56e1c2649908ef81810457cc96bc917c530d7096e907b516389ef7cdbe3c9ba3",
+ "chksum_sha256": "fdb04181004019fce7d2aa47598c99683fa0c17efda91a40679e647ae1080926",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_templates/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_ospfv3/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f7753a98712bcde737fce34f1a0af8d5a655bba229b306685e5ae6f8c26257d",
+ "chksum_sha256": "d97a097d36258f90bedf8acd4609bba2c4840b38c56f3cd6517e9fc394600f61",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim",
+ "name": "tests/integration/targets/nxos_vrf",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tasks",
+ "name": "tests/integration/targets/nxos_vrf/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vrf/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vrf/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vrf/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/meta",
+ "name": "tests/integration/targets/nxos_vrf/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vrf/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/defaults",
+ "name": "tests/integration/targets/nxos_vrf/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vrf/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tests",
+ "name": "tests/integration/targets/nxos_vrf/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tests/common",
+ "name": "tests/integration/targets/nxos_vrf/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_pim/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_vrf/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab1efb1a3ca7cc688042b32d2d0660eeb0e25ff2bb2c833e9bcdeab5d6a1e0a0",
+ "chksum_sha256": "b8fb2d6249a8d9311ec67a31c87ad94a8184c7351dbf190d833bb1d7bafd4e7a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke",
+ "name": "tests/integration/targets/nxos_vrf/tests/common/intent.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b49015c20a19ae1f270a3b5e69d370744549a23915fb03c23f6e84ad40f8ef74",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_file_copy",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tasks",
+ "name": "tests/integration/targets/nxos_file_copy/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7277ec56762ca20a3617b1da87631d8dde35556a4c004e7c82974e49fab50d92",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "849f2a2746300bf950bfe382b3f2b80990bbee773cbdbd2d1e2a897a9ed1e261",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62a31764ef528947cdefe8eb150e20bdaba422d40bac12a3609948ca1575cf8f",
+ "chksum_sha256": "f6bf1e5e97ebdee9d3da5227017b650cf6de337ae6ce68cee739acf39a32ec81",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/meta",
+ "name": "tests/integration/targets/nxos_file_copy/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/meta/main.yml",
+ "name": "tests/integration/targets/nxos_file_copy/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/defaults",
+ "name": "tests/integration/targets/nxos_file_copy/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/fixtures/data.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
+ "chksum_sha256": "ef43c9adc8aeec6910b4373ba0d9fbf28048ed53ec631ab9fd6fa8ad00a8a3a0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests",
+ "name": "tests/integration/targets/nxos_file_copy/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/cli",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_file_copy/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/cli/misc_tests.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dedfb27511bd8fde863a0aa430bdd23272cd766d0c23694764d01852c9586073",
+ "name": "tests/integration/targets/nxos_file_copy/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/common",
+ "name": "tests/integration/targets/nxos_file_copy/tests/nxapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/common/common_utils.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tests/nxapi/badtransport.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a60e645271b0b349228daf7eac3948d5651a32abd8cb578005202d4c1ed80236",
+ "chksum_sha256": "21427f41984508b9839c2f35a72d67fd2cf22fcd68d619118a5b5515d2268668",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/common/common_config.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tests/cli",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_file_copy/tests/cli/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dea32a9f41f8c2367181bfb2fc2821e4304e393ac781dd02e19d6cb01c6273ae",
+ "chksum_sha256": "25a997ce10f97d08a348b04eb8e123dd4b6b7a6aaa035e5f2c45ef99f985f94c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/common/caching.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tests/cli/negative.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2db51e2d9c925d2773d4bcf45ee241c75cc02547793d07cad590f1d9657eb36c",
+ "chksum_sha256": "0a918d9c6433c61aea340e1c0c94ef62d409893bfe7fd1858386da4422c5e208",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_smoke/tests/common/misc_tests.yaml",
+ "name": "tests/integration/targets/nxos_file_copy/tests/cli/input_validation.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca259adfd9eabd17072e03cae98011aa82cf2e1b6d8fc88fb0ada2ebaca18d7a",
+ "chksum_sha256": "86642c6a7b15cbac85986a618de3eea6d741cb9819a4b7896570ab07e6033a8c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user",
+ "name": "tests/integration/targets/nxos_rollback",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tasks",
+ "name": "tests/integration/targets/nxos_rollback/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_rollback/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_rollback/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "1e03994e82f05167e558b19e20c435e80ed92b6ff4f62f71fb93ed9082d30f94",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_rollback/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/meta",
+ "name": "tests/integration/targets/nxos_rollback/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/meta/main.yaml",
+ "name": "tests/integration/targets/nxos_rollback/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/defaults",
+ "name": "tests/integration/targets/nxos_rollback/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_rollback/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tests",
+ "name": "tests/integration/targets/nxos_rollback/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tests/common",
+ "name": "tests/integration/targets/nxos_rollback/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tests/common/sanity.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bd39b32f02401670bfda98c14839d0da9c76ae8446c151dc7f10131c9531ded6",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_user/tests/common/basic.yaml",
+ "name": "tests/integration/targets/nxos_rollback/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba9e801ff2ad1ae02085f83753b4f2e51565888a0fa5822ea0f671ee8a54229f",
+ "chksum_sha256": "394570f3e6f1edf503b6782dd89ffa589499a778fc5336c8c610b01d473a7bb8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_user/tests/common/auth.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "db7a57ed2e804cae9aea360123df898848cc2e772fcab84d843aac01ca0b6b61",
+ "name": "tests/integration/targets/nxos_logging_global",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts",
+ "name": "tests/integration/targets/nxos_logging_global/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tasks",
+ "name": "tests/integration/targets/nxos_logging_global/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "15f80d89c3629bea0c669753c52136268f11e94a83ba6e69411fc96ab8449457",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_logging_global/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/meta",
+ "name": "tests/integration/targets/nxos_logging_global/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/meta/main.yml",
+ "name": "tests/integration/targets/nxos_logging_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/defaults",
+ "name": "tests/integration/targets/nxos_logging_global/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/vars",
+ "name": "tests/integration/targets/nxos_logging_global/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/vars/main.yml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90b4146d19a8448ba711a28645cf964f8251d544bf1de77f552e08a1e60a9914",
+ "chksum_sha256": "dd0584d7e6d6a3aa5942acda6002b2e13a2bd64d59ee04dc5122274268676cc8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/empty_config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b1658713d71b118aec3c1e27ae2d2453c239d56b32e3c6d816f5187796b40c7c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/parsed.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d952ab3ac507f65ff98eb58e380db0abe1583198f653d77beaa4e384929ad698",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/gathered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7c13b226487605fb6b6c73406c97a1972ed4c1995825124cb426d64eaac1a2ec",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/merged.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "82ec13620a0c096e01a22d68b13c59798667ca68d3ddbbc1be802ff3f415a562",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common/invalid_subset.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7ef14aa0d9ae451bba34e5fb2952f2034793f826a441cd042b03c34e0d78422",
+ "chksum_sha256": "3a97c562338de257e93908f409c770a5439d362fcddcdb8d939dc7ff2e7002c6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common/default_facts.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08c3680a5a839d8e3b53515643f3773bb1d25de670124063d41ff3e0a7fd2713",
+ "chksum_sha256": "dadff9821e586c3a062c7ac4b2936903359a8fc4b2f4cceeb466f558c82b52d1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common/not_hardware.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "933c388db57e0042a75290223a48d69efad1e524bce14f28f64694d34e3475e3",
+ "chksum_sha256": "619954ad3ae24e83760fb7b9f09daaf010f96020056398fe49a04001326f72c8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common/all_facts.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f737c75bd85a50361bfb755799335e3bf0044448e43308c6f4c3f9186a80b65f",
+ "chksum_sha256": "926b1b20a7babe989cc2f1b3e34a16d08368817d3ef38d6903650a294a42cc3a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f15ccd5a6a25f5358288fb8ddb955a1b024c33c0d1f04df3811e03aabe460c32",
+ "chksum_sha256": "b4a5f8a10bd86d7609cb6688523bc611e052260c4a06bf9cf202f33eee40a1f5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_facts/tests/common/interface_facts.yaml",
+ "name": "tests/integration/targets/nxos_logging_global/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c7fa4749d973f3993d84e4ae324dfefa8a62167452abcbdd66de1cb668fdba0",
+ "chksum_sha256": "04822e2f3237fcae131842012a520b5fde51c1fd4b381ab86aa055d7c076f867",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld",
+ "name": "tests/integration/targets/nxos_bfd_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tasks",
+ "name": "tests/integration/targets/nxos_bfd_global/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_bfd_global/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_bfd_global/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_bfd_global/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/meta",
+ "name": "tests/integration/targets/nxos_bfd_global/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/meta/main.yml",
+ "name": "tests/integration/targets/nxos_bfd_global/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/defaults",
+ "name": "tests/integration/targets/nxos_bfd_global/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_bfd_global/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tests",
+ "name": "tests/integration/targets/nxos_bfd_global/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tests/common",
+ "name": "tests/integration/targets/nxos_bfd_global/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_udld/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_bfd_global/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "327f500b5a3e5f79399f7a93e74613eec32ee87f52ea77b89af0bb84862a89d2",
+ "chksum_sha256": "302caab98adbe8644dc6652426b6e9300fc4d8fbc1a479ee7aeaf54d6a23da59",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi",
+ "name": "tests/integration/targets/nxos_acls",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks",
+ "name": "tests/integration/targets/nxos_acls/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_acls/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7022927c86a437ced8d1f9058b25af83150ab1d87223183b1c61bbf4c0d6523f",
+ "chksum_sha256": "c57c3ff656bb8daf9febb3c7a7bff934b952d1ccb83604d93e23ccfc3ea88724",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_acls/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_acls/tasks/main.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dc2e669d7f6ccc6f6bd958fc52376105ef28a808ec084c11f2dfcecf1471760a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k",
+ "name": "tests/integration/targets/nxos_acls/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_https.yaml",
+ "name": "tests/integration/targets/nxos_acls/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f99598e0507cfb4e0e625a4075deabb91c5de5e1c0e5531846b9467a6faeaa9",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_https_http.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "79ee713da7a48ad0b493f6e07690138be3eb4d04d890126aec488e2242e7a442",
+ "name": "tests/integration/targets/nxos_acls/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_https_http_ports.yaml",
+ "name": "tests/integration/targets/nxos_acls/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3cef9d277e717f6956a9d5136862c72616d866eb04d8f8c0da44c0777739c92c",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n5k/assert_changes_http.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ed763ee2de2c730ea2783225dc132f5c28f22ea272213a36b9b3bb4471492e08",
+ "name": "tests/integration/targets/nxos_acls/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default",
+ "name": "tests/integration/targets/nxos_acls/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_https.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bfae052c7df36024a59df89f314dc7e03019ad1649f4132cb34bb2a8f50fe903",
+ "chksum_sha256": "ea179f0b4974165f1ad36daff838b34a663eb136543c6a908df7b8fe73027eaa",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_https_http.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/deleted.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b055af4a4779d412640cee585ef493e52c3937782e095218164d2719d5223d29",
+ "chksum_sha256": "14f12dadd4d09d23feee07007209b1eae18bc9c013a50f79ed39ae022e652226",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_https_http_ports.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/gathered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04e8b8ae8842f40c777282e979ea824c7c886f3e8ae1d9e2710ff466d1fc2ae0",
+ "chksum_sha256": "8c6126ef877c72fdb6e19f6499e4c3ac4219a5fb4cde44f4529306aac3fd7966",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/default/assert_changes_http.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/rtt.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db4865169c3dc97c5ca5e24083550b243b685cfaeaaba16333b1f55754dd265a",
+ "chksum_sha256": "4912e31a951706d32e6682e65095ad1794df0cdedeeeec3d5e9ba139e14a0409",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_acls/tests/common/replaced.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2b9ce3eb07fb30eb390fc555830b61b4118c485523bf85538a805feb06ea0fb0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_https.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/parsed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d826b7762b3ab39ad7d294ca171272d1ff3213c35f8040c3657228f259d331d5",
+ "chksum_sha256": "9c6e2f1a0b9c9d9dc2c33b7906490f5acde968592b9257f095b4cfc38eee3dd9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_https_http.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/merged.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "97089a108cc1eac9afae028c6da9bf6f7017afcfc3f1a85776cfe6af31f2c6af",
+ "chksum_sha256": "fc2db360c49a4f960de158645c43115dec175da832363eeaba86a8ea97d1e62b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_https_http_ports.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/rendered.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d5a7bc39789b15cf95dd49af7ab1636f3a4589a2be3ee5ca341dbb4414eb95b",
+ "chksum_sha256": "aa791c7d2f4a1989dd888e3880d230146cee3a448116cb224c31066a7f7573fe",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/platform/n7k/assert_changes_http.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/overridden.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35222b27a54a701db0042dd5fb6069d48d9e27a70821a1707a4aad0d19bc227f",
+ "chksum_sha256": "9e261e0e1a8b1778739d5bb1c17c75a378186c5c2efd72a351d604e0d7560318",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_acls/tests/common/remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "6b36b668815324ab75f641e80f9341c4f7b4f264585d07409b01704c5f8a018d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/meta",
+ "name": "tests/integration/targets/nxos_system",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_nxapi/defaults",
+ "name": "tests/integration/targets/nxos_system/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_system/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_nxapi/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tests/nxapi",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_system/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tests/nxapi/badtransport.yaml",
+ "name": "tests/integration/targets/nxos_system/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28a2dfdf7ba13f8dd757c8d9095797a0b91437b5bfd9d9c1c8bfe65508e15901",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tests/cli",
+ "name": "tests/integration/targets/nxos_system/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tests/cli/nxapi_ssl.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dae84b45affca730b7c9158216bf18dc9ab9c92bbce4701b6ba881f192ae47f4",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_nxapi/tests/cli/disable.yaml",
+ "name": "tests/integration/targets/nxos_system/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94889efb465f8b55120a28becc23705cd11dc1bc9aebc91204105ba9f8207302",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tests/cli/configure.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b4853e02af108065809baebdb1f8e957bc48184f9a24e3510826d28f734d17a6",
+ "name": "tests/integration/targets/nxos_system/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_nxapi/tests/cli/enable.yaml",
+ "name": "tests/integration/targets/nxos_system/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abdd34478d7db89533c9d3d8cc0fb2f26a5324ab85212086aa1fecc220e7d1c8",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family",
+ "name": "tests/integration/targets/nxos_system/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks",
+ "name": "tests/integration/targets/nxos_system/tests/nxapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_system/tests/nxapi/set_domain_name.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "c3c9fe4f41326a05c53fb03a14ab15d9aca59b0cd5276ca5e28b7c7b7e24e1a5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_system/tests/nxapi/set_name_servers.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0defcb7ce6f353926624c2c3e045b403af33c34a6bc1c49b08e71ec602cd21f2",
+ "chksum_sha256": "bbee414220275ff9a13a61bb40c3f4aa127cf63ebb07221abf01f8995b22b252",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_system/tests/nxapi/set_domain_list.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "4f58661081e4b16a1b96f55d176abaa163d8eeb2d530a96420da6d15de4bb0ac",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/meta",
+ "name": "tests/integration/targets/nxos_system/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/meta/main.yml",
+ "name": "tests/integration/targets/nxos_system/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "e76a24280e763ab2423dbd4062e39dd0c4b54131b361582bc4de92183c9fe32f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_system/tests/common/set_hostname.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "1f80d47b8482b052d64571ffd74d3e08219c8180e0bdfd38785cbdcb80b4dc01",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/vars",
+ "name": "tests/integration/targets/nxos_system/tests/cli",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/vars/main.yml",
+ "name": "tests/integration/targets/nxos_system/tests/cli/set_domain_name.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9c20222a6eea75718e72f0e12a439d0f2a36eee5fc85db00a855c0ca4a72f99",
+ "chksum_sha256": "fa3289fe04fe5f8a8259ca69cee34e575e690589e3c4b5a1e8ade6a724c81246",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_system/tests/cli/set_name_servers.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "634ccd651c5a3aff45209b99a311b488422098e96df88ca31e1f1f6e5fcc0e2f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_system/tests/cli/set_domain_list.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "494d4bd1f0afc278a5422adec3684980a21807274f3887cc64743335104fc95f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_snmp_location",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/fixtures/parsed.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b2a00e0d2590a7c6caff9c7d3d3df8eddf7a04bcb142a5900735574cf7d7ba14",
+ "name": "tests/integration/targets/nxos_snmp_location/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_snmp_location/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40d8f8cfc0bfd8a2d5da53d9e61f2d398e4ed50e2cc17915359ba02b980d16d0",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_snmp_location/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ebe2088087a911826370430ed7e8029bee349d39be051ef9ce89f96ebb2d4118",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_snmp_location/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc0718832154ed173f9ada21e4072a3f3c7fc9202d9a83e0b18c26d72991702e",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/replaced.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bc402cfd6f07870283877fcd9c3158fdef7f756c7ea88c1aeebdd73629628a00",
+ "name": "tests/integration/targets/nxos_snmp_location/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_snmp_location/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c296bdd111a832466197719e7d3a6da9215aaaa0ab6aeca20b16e5284c61b52",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/_remove_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
+ "name": "tests/integration/targets/nxos_snmp_location/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_snmp_location/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "367b5c49d11893b14160cf94e6675588ac07bf0ee93574f6a1e9a9d4c6df70d9",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/empty_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ed006b0b9dbb7b7c16264d7c9304040f7ddf7104c0aef4d084dd0380d81c0f0d",
+ "name": "tests/integration/targets/nxos_snmp_location/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/rendered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a8055703f881f09873f851f1e0c212ac66aa662b744b5cc81a6bd07a7e854643",
+ "name": "tests/integration/targets/nxos_snmp_location/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_snmp_location/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "047f1c0feea67e71c51f1eed5b1434e093a7b9f98494aa230acea01f7b68a91b",
+ "chksum_sha256": "3e00a1c43687d39e22e8835b039f398d7bdebbcc45399d8cd01d6ca7d4ef6e96",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host",
+ "name": "tests/integration/targets/nxos_vtp_password",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tasks",
+ "name": "tests/integration/targets/nxos_vtp_password/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_vtp_password/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vtp_password/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_vtp_password/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/meta",
+ "name": "tests/integration/targets/nxos_vtp_password/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/meta/main.yml",
+ "name": "tests/integration/targets/nxos_vtp_password/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/defaults",
+ "name": "tests/integration/targets/nxos_vtp_password/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_vtp_password/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tests",
+ "name": "tests/integration/targets/nxos_vtp_password/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tests/common",
+ "name": "tests/integration/targets/nxos_vtp_password/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v3_trap.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e10b5c9c01f39edff0f76171c81720d458aca5775d0f5f97f20bd2e6c21f57f",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v2_inform.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "54b2c813dda14d97a1eef23c8a3b3ff2bdc194b60ec4cf15d7f6e0068802afa5",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v3_inform.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0156bedc287881d6d9d767fd9720c5821708b9ad3c9c217c5c4b1df0a72a6891",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_host/tests/common/sanity_snmp_v1_trap.yaml",
+ "name": "tests/integration/targets/nxos_vtp_password/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa6a8b70ec63534db6d01bad4ccb22de07cf1d7c0c65b148a02474964d1a4e72",
+ "chksum_sha256": "3146095b01817bec07dd315c90e066bd664e16b5438d62d337a7ebfc0c48884e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host",
+ "name": "tests/integration/targets/nxos_pim",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tasks",
+ "name": "tests/integration/targets/nxos_pim/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_pim/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_pim/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_pim/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/meta",
+ "name": "tests/integration/targets/nxos_pim/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/meta/main.yml",
+ "name": "tests/integration/targets/nxos_pim/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/defaults",
+ "name": "tests/integration/targets/nxos_pim/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_pim/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tests",
+ "name": "tests/integration/targets/nxos_pim/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tests/common",
+ "name": "tests/integration/targets/nxos_pim/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_aaa_server_host/tests/common/tacacs.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b669c11c0bee9b899cb3075b446411bac7b4fd7c79396e5add683a2d5ab8b725",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_aaa_server_host/tests/common/radius.yaml",
+ "name": "tests/integration/targets/nxos_pim/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10dec61711d8e2e9660491a3ac36bda92ef57264b44ce2fafd5c0512b2ecfd13",
+ "chksum_sha256": "ab1efb1a3ca7cc688042b32d2d0660eeb0e25ff2bb2c833e9bcdeab5d6a1e0a0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces",
+ "name": "tests/integration/targets/nxos_lag_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_lag_interfaces/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "05455a29f1db6e4d3602a83b356d9d7353037b1ef0627d2d2943b43520af76e6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_lag_interfaces/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/meta/main.yml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b9647339f89f9eeca01a09cd4f31da21e0c0c2f10793ea3ea06c24fd8664046e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/defaults",
+ "name": "tests/integration/targets/nxos_lag_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/vars",
+ "name": "tests/integration/targets/nxos_lag_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/vars/main.yml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a06be45c7fcb5fbb624b33983db74636889ef05ed32ce0f3ba56485272e3db09",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/parsed.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9db583ddb82a512ca231ffb3a5ba6c672aa653ec28834c17328fd56190b4a558",
+ "chksum_sha256": "76ea0558de38d30dd8813738f9d92fba9867c7d37fcb2a104acaaf5aeebb12af",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da6b8a879ff9188026a3b1aa01b53289a7517026aff1a7d1954d1bdaead6dec0",
+ "chksum_sha256": "96cae582141397682d092a6b5d6de9651b6203157e891342063c439ffb130a4d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e9f0e87988c4ded07d518b67a5118d9d6894e2fab2c86c9cd90ffcf5f56beab",
+ "chksum_sha256": "c10b8ab6302c0df3b5d8c99d1f67a9d92b80b518250c11e650dde96bc8f6b886",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1c4df45348332e927abe784ea39438ab014af521a169cc07ca76ff6d8d7c52d",
+ "chksum_sha256": "2e4441e52874693cf2caa332a4639c23ad59350a6eda4e9c5109a9792a306447",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "45a5f0f9979ffcd5b7ef73f95de39a4c6ad35b3f7f05a61b7f6770eb17166dcb",
+ "chksum_sha256": "53c3cd45c91dd6078cf71ea7a9973d65069a61eaef47c49a51c7b66355970a63",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce89afffb71cc896f467a3e32919ab4a8fdcd5ea07a9175ecd7db9a7ae6f77cb",
+ "chksum_sha256": "08e830f9373a4a360949e7c56cf2c374dad94a559adbde314f3b66daa642a0c8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ef96315e1d8b08e76b5d905207c1d85c0ddea43e8efa07b54896132e7d772e1",
+ "chksum_sha256": "d6daa4d3b2cb43c7b62b7606b80b4f212348ecb1e28a516d16ac90e02f459266",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1ff8909b5732150a58ea8fafa7e0f20c447a8bd7e6be23fa0d7cc5655b75510",
+ "chksum_sha256": "4af7dc65128b02d59c3c12d5f94cfc4a77c266e867b79b172fc6425c871f9ffd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f2c908e56c100ed364248792037843ebe703124cb856054bba1a7904494edd3",
+ "chksum_sha256": "6eb88f454c810cee9cf1be9fc56eb6a574516fc49bddc50e81f329dc98cfb9eb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hsrp_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a313ec546dfda89fa9e4dbd468fa0940164b42c0ebce4e544f9c6eddf9c76e63",
+ "chksum_sha256": "96e1c7636b178b2ae705ac0d7dd2ea67a4d4c1aaf29122900549bde0da43b4de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server",
+ "name": "tests/integration/targets/nxos_reboot",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tasks",
+ "name": "tests/integration/targets/nxos_reboot/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_reboot/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_reboot/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc2a3a8f315668c3a0a94a40753f7fd1e7eec342b781bbef50ce0f8d3c4abae2",
+ "chksum_sha256": "b93ca9bd41e9b2ddb1e47c5782c7104ad85b67441154a82ac8954fdaebc62cfc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_reboot/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "0789afb380e4ee3da221b8e5904d1fc0309acaaf7afce7e63c9ba4c75c34ef57",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/meta",
+ "name": "tests/integration/targets/nxos_reboot/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/meta/main.yml",
+ "name": "tests/integration/targets/nxos_reboot/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/defaults",
+ "name": "tests/integration/targets/nxos_reboot/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_reboot/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/vars",
+ "name": "tests/integration/targets/nxos_reboot/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8668c98a492f96e42640c00cc5b31c0b73f071b7a61dac9e872c31602833ade2",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_server/tests",
+ "name": "tests/integration/targets/nxos_reboot/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_reboot/tests/common/sanity.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b4c67bb49d21e284b5a0589f01582abe664f16d3361279b26463e6db76fd2425",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_l3_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/fixtures/parsed.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "68295fddd79719b8ad7b74adc76ea194066abae9bd717b24c71eacb7d4b20f8e",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/parsed.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0a2278a138afa16699d2018af1c1e1e616a0898336bf1632352195a4d2be36ec",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/merged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f8669a63ac0d96789d4722d662bf1568b86c5433817147820023d2b323b2c14e",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/replaced.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f6e477bd3b8bdcdbdb4a581d99fc15745c21c823d54c364ea2b1a1fbfb43958d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/_setup.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5153443ed073b0cc035110ac96d297361e22abe390c84014d0a7dbc83ede74e4",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/overridden.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7fd565877939a79d2e7d7d25da2c520c9ea6d42572d5d4fc8110716dff0f961a",
+ "name": "tests/integration/targets/nxos_l3_interfaces/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e57172ca560cf57a5a6cda0d4427ef723ff0c2041494801839810d37f2dc448",
+ "chksum_sha256": "24e5a4f2e7330b185775678aa68b4645786e83617b90d5a32fb4ff4b77b345ad",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/gathered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "98bd39c65fc6a311ba043b2cb6e5fb636bf75e1cd4976826739b8a81f1a4a6fc",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "870e84431b7485f0d29bb5431284baf0098b3a2dc4c33433007de1b970ed5b6f",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/rendered.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "84c00378264bacd0d235c0b7a80ebf88d754fb93b1e85b00047fac028b29275c",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_server/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af12998368091442ef220d6e85760264d3d8b49b44ab62a34598479e622d6d61",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_telemetry",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "2418a2ada8700c06969324411c9be328b4d4ad913c6eb93fb824dff96900ff8f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tasks",
+ "name": "tests/integration/targets/nxos_l3_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_telemetry/tasks/main.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4fe8ec88364fed45512b5f8e5687038986edd9b03ab1962afa6476aed1ab2f5a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_telemetry/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/meta",
+ "name": "tests/integration/targets/nxos_l3_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/meta/main.yml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/templates",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/templates/populate_config.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "160c96e0f5f10f0cd536d1fcd93fad3b60a8eac159a64755ac8617a702f032b7",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_telemetry/defaults",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_telemetry/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "3a640fd87c6717d5876b64797f7d0092fd6673272fc5a674bde3cf5ed78e8b79",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/vars/main.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90ee74a6d8158f5b1a5e92b16e0e044a5fae636e2742931b542355cb71ed2da4",
+ "chksum_sha256": "858bc39fea4f9cedff070a248b5119124852eeb9237388ef833c414510dbf24c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/parsed.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e2abf21e437435b96d94e3a1c900ac1284a39092f3b0b1d19e8d5d7544a8424",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/gathered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4c8ea6a0bd93a5ae1092362c88b1eb6ed13a086968841b5d68654677741714dc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/rtt.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d660e4e5717467104076599f04b44aca40bbe45b79eb5e5554cb1ff390feb405",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures/new_server.crt",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/multisite.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d761bf530e78d0752a7d9f05dfd5084e106245c7cecac49ca75c0c76ba495fc1",
+ "chksum_sha256": "b0b3ebe6d2d7db422ec1a046818ce06ddf83ef9f32a9178691d732b0824b4da1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures/server.crt",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9248b8354ba01bd1fecb1d24494a6098ee9547fa34a9d8840068abbb9a80fd92",
+ "chksum_sha256": "e7555ee3d0322003861d6c2d810ec490027b8d792e8800c7e9a46fab869b1c92",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/fixtures/local_server.crt",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29437b8420b999138ca0947aef901533fd9affbc7ae98a39b13fb6bfa0ae29cd",
+ "chksum_sha256": "ef91b776e8c9627a55cc9759549aaa7482e0f7da4385079443b8e8f1d8d40b8c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/merged.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1ac88fec698b51b1f358987e65b5793cd9488ea085606963409a7b9d9b2b4cc",
+ "chksum_sha256": "9dbc4efa7c9e6843b987aa6286ba0de836129c75b66249ab846fee2012fc2394",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/deleted.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d231b07bd5ab608ddebbdbbdbbcbed5e4648ac7ad05dfbbf1df4e2a21c063389",
+ "chksum_sha256": "1676a953c0e740cc280155ea76e6368fd5628556cdc0acae19e1834b40721681",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/replaced.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00736c9321f59eb87d3655075f655cf2244ba30caed17b3175c3f0171c94bd9f",
+ "chksum_sha256": "f274f91771b34f2246ee4fa58bae6acd7fafb0ea1f1efb04ffb362cfd1678de3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_telemetry/tests/common/gathered.yaml",
+ "name": "tests/integration/targets/nxos_l3_interfaces/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f66a09ea372c1fa2c05027c0a2097da33a0912daf4355ff6cab34a77e057acd",
+ "chksum_sha256": "c670083adeabaa18419eea52a2297aeea81b1a5635718b95aebf3d1955f78385",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes",
+ "name": "tests/integration/targets/nxos_l2_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tasks",
+ "name": "tests/integration/targets/nxos_l2_interfaces/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40fe4fe85031868241ac9845115f925a1c7be1ad8c4f932f4896fdaad51da36c",
+ "chksum_sha256": "80f38875da077b4094678a1b0d5fa1dbb55a586037c541d4daec3b3b8adca944",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_l2_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2eacf5012e1fb8a88bd7663844178dc5f6b617535d4bc2ddf0037ce474a33b1e",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_l2_interfaces/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/meta/main.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "c0ff0c24a2fb4a8aa58a6356c5029065acb6cf1aba1e14add135ff69c8d1387b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/defaults",
+ "name": "tests/integration/targets/nxos_l2_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/vars",
+ "name": "tests/integration/targets/nxos_l2_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/vars/main.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3eda3cf170b296ca5e5f4996c8b03c030d87a4e3e6197f7770e2b14e739e4ab3",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/overridden.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff14829c4bb5758e9c2bcce36a6a134e5fd2d02af7586147e25ce45f5d3c6197",
+ "chksum_sha256": "b9322c7dd05de65b972eb0bb02c699c4368fc68bb97dd155a05ed98dd304448f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/rtt.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac26dd65c91afd1fd0e3734c2d3b9c361904f6af8fed0fbce8ee7746c96b64bf",
+ "chksum_sha256": "a2a0ff901006be504934a514a2f337a699091eef58ab871fb32e426af1d6f6fa",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/rendered.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9734f5468d080e059ad191471f78497c2ce42fa6d87de56f9f2e273bc0d8e28",
+ "chksum_sha256": "3c30f97c6ae729fd7b3d88a3e7b6671c761ea07061ead87487979c013835b99c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/gathered.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f58a5e229e9cb4a2cd367d9bd3d36dfc7b8a03598bd35d77c19ea701e70b8976",
+ "chksum_sha256": "6c1ffa21ffa7a57ae8edbb0a72820f4ec4e7947ba1ee5a1548473e81efec8724",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/merged.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/rtt.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8a7a44814c334c50589139d3473b014a953fda361f2d88075fe1538f65a580e",
+ "chksum_sha256": "148ed76eb41828c2417439c0b443097a6f2f9d1fa88a4abad48bd8e62de30c4e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "244c13d831c6419ec7e7a46e3e00e82fc1be5a476e5204561deae0efce659d93",
+ "chksum_sha256": "b6d84fa1c22c5e2d37b017a18ed59903fab43314717b4d632a46200d745318c0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/replaced.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c23e4c252c1cd1583fd56550d8d702f0b09b867eb03c50886e83f3c24197690",
+ "chksum_sha256": "9e1b2b7044a678c22cccca3dcc5c677fccf40a509764b7ecf5c8dfeecf4781d8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/deleted.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10271b66076c4d1659a13c85ae4ba02c903ba2eb771ba8a2e31f0910c46de739",
+ "chksum_sha256": "3644170fb7b4322fa4d272cd5d5e351480018b485a065fa7131fb3f6f5e2f353",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/parsed.yml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4a2cd09da60d6b0bc0904e3f313e959fc3efbba7979d14309764e11aaefab96d",
+ "chksum_sha256": "c58134a7c574f9d19d3012c238b2be0d4983d6d856a11e9f6ee089c3df5ec6b4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_static_routes/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d486bec06fd92b15f491fc77298d00447520a955e2dbf3d84a5a739d1a2703c2",
+ "chksum_sha256": "f018dcb8d9a42d8930b0d10f62ed931546d66f2998aa42ce95f655d299dbb35b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config",
+ "name": "tests/integration/targets/nxos_l2_interfaces/tests/common/overridden.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aeb7c65908e1aee314df87a9afe8c2f4dc1e1b21e41ca8a03f2991597dfd0685",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_pim_rp_address",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tasks",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a069f9e3d9d905e5a1d7e2b316e284944bc38cddc390470831451fb4672fbde2",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tasks/redirection.yaml",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4f04d0bc1463bb36c212620e786d8ce8d18681fd03f641c1a37351b6d0dfac8",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tasks/cli_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "244797b68a9ec0f73b3fecca59e21469c0bc8126f36a5b7ffe2fe20ebdf22f19",
+ "name": "tests/integration/targets/nxos_pim_rp_address/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_pim_rp_address/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/meta",
+ "name": "tests/integration/targets/nxos_pim_rp_address/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/meta/main.yml",
+ "name": "tests/integration/targets/nxos_pim_rp_address/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/acl_config.j2",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f8cfadc33b5527be5bd82a279cbcc769a882135ce815cb6d42177cb51d325e4e",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_config/templates/basic/base_running_config",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "85528fb2bdea51956f0b6d9a7cb07c7bc75e498553f9b5301b904db35ed0f929",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_config/templates/basic/intended_running_config",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b478553dd335d5085c847219eae58a8daac0902372a5e8fbbd133da3850332b2",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_config/templates/basic/configstrict1.j2",
+ "name": "tests/integration/targets/nxos_pim_rp_address/tests/common/configure.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da30d9c2fbe9c9473f957d26568b2cb42baa620e261efbce7d604535ec532cf5",
+ "chksum_sha256": "5860ed215f27147987c62a8541f96530427cfadc64b6ce13e849a6785275c8ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/setupstrict.j2",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b358f8f54eb293ce2eac2ead8f8bc2e3aae7383c2981c9c192530d6bcbff3d6f",
+ "name": "tests/integration/targets/nxos_ospf_interfaces",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/config.j2",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d62440fb734cd4db3dd623ab8e77214ff7f82e44c7dfca249b20dcd83d83a9af",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/setupexact.j2",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b358f8f54eb293ce2eac2ead8f8bc2e3aae7383c2981c9c192530d6bcbff3d6f",
+ "chksum_sha256": "254903ea468078a13337c27e4ffbfe0aad84710a36cdfeca8027eba5a16b48bf",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/configexact2.j2",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b5884db197b0f69276768f1f6e5e8b4214ead32869dab90d76fae353425a4b28",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/configblock.j2",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8cfadc33b5527be5bd82a279cbcc769a882135ce815cb6d42177cb51d325e4e",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/basic/configexact1.j2",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8cfadc33b5527be5bd82a279cbcc769a882135ce815cb6d42177cb51d325e4e",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/config.js",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c6d65e4fbe26ecfdf18d8eba2eac04aecfdd65e59e6b6db1c2aa2d29ed5a5847",
+ "chksum_sha256": "e9eff3b2082e2a94e384711077fcb0a67b0410a72a0d7425367798c7fe1c4c75",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/defaults",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/templates/defaults/test.j2",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "356d51cb39cbc26b408ca0b3aeacc83634ee2f6e061811a3a8fafb63955ffa4d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_config/templates/defaults/config.j2",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "522484a8eb35e809c1ebd645a73ca12f02b09c6cf237f1db1ed577088c149c51",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/defaults",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "665288590cd4226da42511817f09ccdfc92df36c9621583cd2c82919af295c5a",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli_config",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_basic.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b3703086ba8415b259a2c8353dc03d9295173092ba3dc15170ae9718b10626e",
+ "chksum_sha256": "947100dd1e56c9ebd17cb32e1caa66f7c5a540d038dd36f4d0482271627ffa10",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_exact_match.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5eb01810e15ac6b6941a7b9b79ca5ca3be016f16ee7f534eea9be0115fb53a46",
+ "chksum_sha256": "b880b29ecb1b59489b0d82bfaf1ebecc35e47cc4ae7d860d616a0e1c4c17b8f6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_backup.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "02aa5aa3478284c141cb3741eb86bf919a560bc25b5a911f453e05ce25737bd7",
+ "chksum_sha256": "aa4d4d9ae4687f27e044a7c2ba6c6ffe31491031d9cfb724c69a54e4a1248b2e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_strict_match.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6329b5bab571f5218aeff32c54936052c2188466f842b4364d80dc9c6d800450",
+ "chksum_sha256": "772abaa4b30e45bb3a3e0b168b9b50f62e75e5db9d1ee54915707631af8bc3a9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli_config/cli_block_replace.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9058efd9127aa10094d23e72b8d7b8e0426d2707a113d7be2d539c15a1b9d229",
+ "chksum_sha256": "6563a046c325a1872e9c20c3a80658c02bc684d828e1d8c0dbfbfb23ede1da1c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/_setup.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5153443ed073b0cc035110ac96d297361e22abe390c84014d0a7dbc83ede74e4",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi/sublevel_strict.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acaded71377fcc8c8b288d527caf6fc777c8d0f50079c8b409d1007f821cc9d3",
+ "chksum_sha256": "77f5af82eeb76922d66d564b1b3b713644b5154cd160ed32686613453f6ffb08",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi/sublevel.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc08a002e9e00d36590098bfe308ca825954a3e43755650cb5809684512433cb",
+ "chksum_sha256": "22709a3fdba9d6c94de7e53132bf6178989f0ea494f7779069913fc918ffa131",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi/toplevel_after.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b8ad2836c7ec7360561cb0f4319bbe95feae70b134c5dac9deb2ba18a17638e",
+ "chksum_sha256": "b76239c8dec85299b177fa8a11a790932f64db53e155a64222321eff524af8b7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi/multilevel.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/deleted.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff4c8dc408f5b4296300c4d7d983fcbdf19ee303fc95fe44ddb3d72a8b2aa08e",
+ "chksum_sha256": "3e19f34c70b0982c9c03cc96e3fcf57b88c084597eae9036b3e4c6bbd0860dc1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi/sublevel_exact.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/rendered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "694ab236d9c21462b423d507666dfe90caceca03e8a2f8fa3759f9f51b6d06e0",
+ "chksum_sha256": "df5bdd37b5f9ceac0c9cba9185dfd3c1c3d13d839447eca07daca2717b7261f0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/nxapi/toplevel_before.yaml",
+ "name": "tests/integration/targets/nxos_ospf_interfaces/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d150adeed3e6e02c3adc3871d29b6a153d504ab7a861cd401b02c480008ba5b",
+ "chksum_sha256": "f4d88bbce44a9ca14508aebf3a495f3ced56b859aa74951b92922d613d0addbd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/sublevel_strict.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "057bf9a3f64bb1abb24a5dc5a530553401a32850185af62cb278c85228ac6465",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/diff.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d890138033f9812260dfd304fb4888d77e0c78b71f0afb2f8f97ce71f11a2491",
+ "chksum_sha256": "c9c20222a6eea75718e72f0e12a439d0f2a36eee5fc85db00a855c0ca4a72f99",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/sublevel.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "937eb38ce64f2eaaab0f4c8f86a336d6fa022a96d75e285baaf7c85f45fb0b09",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/toplevel_after.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7792c748c17cf012bd7462753459725b9b2d4e8418a3edb3f4f76eb84f8b5914",
+ "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/replace_block.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e0eef2496cf4af6b50d6274c1b6ef30dbe7955e60ab6695c4cc0660ee5e8a51e",
+ "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/multilevel.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b91edced261b04cd778d86a702fd71159bcb7e37050b2b59d66f5fbd0485ca6e",
+ "chksum_sha256": "0defcb7ce6f353926624c2c3e045b403af33c34a6bc1c49b08e71ec602cd21f2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/sublevel_exact.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b755c791e3a1b329e7da5896f70e10b01bdc5237bbe2e80316a4515d6f8d8bd8",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/cli/toplevel_before.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da70c4dcf111e0de17abc736ab6b4cfef245ca202d235dd1d9e611b40bbab8b2",
+ "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/src_invalid.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "000c3c67b80eab4be77858bbb29844ec788342c63d8570c1db6166ca5a9539b5",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/src_basic.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "eccd369dee1af74d2b9f2888107f438904ae0c58705d800ad95e913965991608",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/save.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/_populate_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bc462c07044762a41b8eddbe29be65aa51e6d6cea05d1e0630555e8cb4a9e446",
+ "chksum_sha256": "047f1c0feea67e71c51f1eed5b1434e093a7b9f98494aa230acea01f7b68a91b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/empty_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "55efec005b7a96d5dbd8739cfce99c359f047d1aacbee36ad6bc04bcf8283f26",
+ "chksum_sha256": "ed006b0b9dbb7b7c16264d7c9304040f7ddf7104c0aef4d084dd0380d81c0f0d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/toplevel_nonidempotent.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/parsed.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48fbcde0f38c5b06062ccb0f93d50a5b7a7554edf66ee7ed4779d9d4ce524ac1",
+ "chksum_sha256": "40d8f8cfc0bfd8a2d5da53d9e61f2d398e4ed50e2cc17915359ba02b980d16d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/backup.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/gathered.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "098fa1e9566ae02a5c089a7ab7a235ce53f1bc78b431e01595556f623f718c82",
+ "chksum_sha256": "367b5c49d11893b14160cf94e6675588ac07bf0ee93574f6a1e9a9d4c6df70d9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/toplevel.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/merged.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2bc1969eeb4a97775fd72dd6e5109d499c24cf5735c2c54bf75dcf7d49690bea",
+ "chksum_sha256": "ebe2088087a911826370430ed7e8029bee349d39be051ef9ce89f96ebb2d4118",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/defaults.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/fixtures",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/fixtures/parsed.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db431603f9429be57048a8789d1f0af71d6cdae47a26878a608c082768a1244d",
+ "chksum_sha256": "b2a00e0d2590a7c6caff9c7d3d3df8eddf7a04bcb142a5900735574cf7d7ba14",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/sublevel_block.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/replaced.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "45748bd39c32ff0675cebcff8ed674384c0fcbd51797f2110edfbef816394801",
+ "chksum_sha256": "bc402cfd6f07870283877fcd9c3158fdef7f756c7ea88c1aeebdd73629628a00",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/common/src_match_none.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/_remove_config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f485a516ee779d29f9c4bef94249e47c885743481d409fedaad14bd49ecbafe0",
+ "chksum_sha256": "c07047642c717ad5e564697143a11e7cc93a18376d91ee56dc5f44f3d19e22b5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/redirection",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/deleted.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dc0718832154ed173f9ada21e4072a3f3c7fc9202d9a83e0b18c26d72991702e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/redirection/cli",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/rendered.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a8055703f881f09873f851f1e0c212ac66aa662b744b5cc81a6bd07a7e854643",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_config/tests/redirection/cli/shortname.yaml",
+ "name": "tests/integration/targets/nxos_bgp_neighbor_address_family/tests/common/overridden.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00bb1bd085c826013b04f4eaf7f74ff0de45ec8330aa386f4cd0263d468df9f1",
+ "chksum_sha256": "9c296bdd111a832466197719e7d3a6da9215aaaa0ab6aeca20b16e5284c61b52",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps",
+ "name": "tests/integration/targets/nxos_devicealias",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tasks",
+ "name": "tests/integration/targets/nxos_devicealias/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_route_maps/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_devicealias/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tasks/cli.yaml",
+ "name": "tests/integration/targets/nxos_devicealias/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "f47853340600c1c9b76ed00695079b203ae45896e40acfac2f48e28f2ad98b40",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/meta",
+ "name": "tests/integration/targets/nxos_devicealias/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/meta/main.yml",
+ "name": "tests/integration/targets/nxos_devicealias/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/defaults",
+ "name": "tests/integration/targets/nxos_devicealias/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_devicealias/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/vars",
+ "name": "tests/integration/targets/nxos_devicealias/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2e01454e13b8195b24273afb60288d0cb4a32a1ca4e7f1fff3bfea0fd2610f18",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_route_maps/tests",
+ "name": "tests/integration/targets/nxos_devicealias/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_devicealias/tests/common/sanity.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a93b103d12183553437c6808898ac57aa27e80ef5d1547e496e4ce5d5039baa9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/fixtures",
+ "name": "tests/integration/targets/nxos_vpc",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/fixtures/parsed.cfg",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "09e1c1e0b37433d900624319d7037eb3672c94053d18f87f22552dceeec1dc96",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_route_maps/tests/common/parsed.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ab508eea7234bac851c3c482330fa0c4afb0e8bd2af87d91901c230754d9779d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_route_maps/tests/common/merged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3b2a9d97ae8aa59ee542363b43e7051f5d8492e4f45ba0db862a9d071e1ef820",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_route_maps/tests/common/deleted.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c276d4c6888f077b84280f79606acb632995c8f1bf32e8c1c0bcb9c02055ee76",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_route_maps/tests/common/replaced.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "56653dab75b72e7a40ed1dbff928b5fd2dedf7651dcf2155747903b1a64803e4",
+ "name": "tests/integration/targets/nxos_vpc/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/_setup.yaml",
+ "name": "tests/integration/targets/nxos_vpc/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5153443ed073b0cc035110ac96d297361e22abe390c84014d0a7dbc83ede74e4",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/overridden.yaml",
+ "name": "tests/integration/targets/nxos_vpc/tasks/nxapi.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8ea6575a0570f9ce32c1d3211f075ee42f56a4ba2d87115c6fccd3409b6b5d0",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/_remove_config.yaml",
+ "name": "tests/integration/targets/nxos_vpc/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cdf825a17b7761cfbd225079ec5d366f4584e7f97111e0af086884f87fda0fde",
+ "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/gathered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "562a6817ca710cc4c1b6c4babb9b7bca97334312e19d963f8df83a98b7e94512",
+ "name": "tests/integration/targets/nxos_vpc/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/empty_config.yaml",
+ "name": "tests/integration/targets/nxos_vpc/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff60a5603326c1ae214cbac1f9411495c0cab6360f7880234d1b2e3be1a2ffbb",
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/rendered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4e59d332194a4bb9ce27f445771203088b05b31a191b59d0e9e049deb72552a8",
+ "name": "tests/integration/targets/nxos_vpc/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_route_maps/tests/common/_populate_config.yaml",
+ "name": "tests/integration/targets/nxos_vpc/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "036af452cc03bdec84720356e6c7013b4f39c2a6800e1f8303120d785b2955e7",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset",
+ "name": "tests/integration/targets/nxos_vpc/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/tasks",
+ "name": "tests/integration/targets/nxos_vpc/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_vpc/tests/common/sanity.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f47853340600c1c9b76ed00695079b203ae45896e40acfac2f48e28f2ad98b40",
+ "chksum_sha256": "2df57c902caed23947093482590483584784db8850c51e6efe545b314895eaed",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/tasks/cli.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "name": "tests/integration/targets/nxos_aaa_server",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/meta",
+ "name": "tests/integration/targets/nxos_aaa_server/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/meta/main.yml",
+ "name": "tests/integration/targets/nxos_aaa_server/tasks/cli.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/nxos_aaa_server/tasks/nxapi.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/defaults/main.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server/tasks/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/vars",
+ "name": "tests/integration/targets/nxos_aaa_server/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/vars/main.yml",
+ "name": "tests/integration/targets/nxos_aaa_server/defaults/main.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "97b10ee26b05c01b328d79887c41479bd2bf559184fb1df4af53645db6e109ba",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_zone_zoneset/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/tests/common",
+ "name": "tests/integration/targets/nxos_aaa_server/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_zone_zoneset/tests/common/sanity.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "781a480d71c224fb90d67eba3072492ff3f2d263df13ec8c735cabec41956751",
+ "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces",
+ "name": "tests/integration/targets/nxos_aaa_server/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tasks",
+ "name": "tests/integration/targets/nxos_aaa_server/tests/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tasks/nxapi.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server/tests/common/tacacs.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
+ "chksum_sha256": "309cf43dacad631085bfede9a52ee2e7bdb4763488463bfb96dde11184bbe0fc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tasks/main.yaml",
+ "name": "tests/integration/targets/nxos_aaa_server/tests/common/radius.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7be6dc51b2f6fb5601e0d7f3498991d34f0910206fe6ed537217c8eba86a3cf5",
+ "chksum_sha256": "08fe50e81078371eff5d9482f9b7721eb6fd1288ccc88f11b8b42f94270df4ef",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tasks/cli.yaml",
+ "name": "tests/.gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c57c3ff656bb8daf9febb3c7a7bff934b952d1ccb83604d93e23ccfc3ea88724",
+ "chksum_sha256": "b5726d3ec9335a09c124469eca039523847a6b0f08a083efaefd002b83326600",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/meta",
+ "name": "tests/unit",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/meta/main.yml",
+ "name": "tests/unit/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/templates",
+ "name": "tests/unit/modules",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/templates/populate_config.cfg",
+ "name": "tests/unit/modules/conftest.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6dd477232ff3b52e84f25f81712afb11874700566d9c52d923c012ad1d770cf0",
+ "chksum_sha256": "354440d86f684a9641f329807a687fe71fc66ab901207f7a166ecfa648935488",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/defaults/main.yaml",
+ "name": "tests/unit/modules/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "414c272e9a403bb6009460ec9502c5acfaa9c80b2159c742f6cb2e225938be3e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/vars",
+ "name": "tests/unit/modules/network",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/vars/main.yml",
+ "name": "tests/unit/modules/network/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a97fa118eda919bd0c4d9b45fbacbb04ef221e6d264073eda5612e7b57228c9",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests",
+ "name": "tests/unit/modules/network/nxos",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_evpn_vni.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "71aa392dfc446e9f3fe0af4c32cba761151a0bae41be4831d6b560e332b2bea9",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/populate_acl.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_ping.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d44542c7c93f7edcef7326941f254a969cc90fca962286a98a576a1979064cd0",
+ "chksum_sha256": "3fd816ad49427c17166246df05ec36c188e3caead4e04285d11815d31daa9999",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/overridden.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_bfd_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "603d8a6ba49dd33261b0c7a152e55df28f2c3e10f09e9ec0f3eebde8d6c5c7ad",
+ "chksum_sha256": "5c791a5a862edc717ae75ce62fcd53f8f2f2c629f267194482909a8350946457",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/empty_config.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_ospfv3.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46c8e8809399bcd1bdfe1242e346ac1c340668e32fdbfcba0cfe6a4ad9ff6f4d",
+ "chksum_sha256": "2759b15fdeec5305cbf7fb599142511bd89e5af0c772b3739c39c28880a49e1b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/rtt.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_bfd_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "832178be08db3abba5b2d95b0c57c79f8a4d5b132f1f01f9dfe786c2a3b5cb31",
+ "chksum_sha256": "642bb2c06d13b8c2354bd9e46c01bef3beccde28fdbe5efa8af16cef2dc2a21f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/rendered.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_prefix_lists.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ab726e93c94df77ac9671e7dfa0b8c89104ea4333e25b56f04830f9bda81942",
+ "chksum_sha256": "123c5209374533c41d593eba1f82b63089145ea660be4c913f197a43a434c342",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/remove_config.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b97c05adf25534f7d6e4ac7a67e541969eb7e1060b6360f956c5dcae418e5f7",
+ "chksum_sha256": "1e084a512bff94fcf880a610ec5dd779c64ad7a3feea58bbc10e141e5eaf282c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/gathered.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_hsrp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "698995afdf69835fb4ba540353a3e4f213d7954d21073c0cc754b1787cd05231",
+ "chksum_sha256": "c9bd24f31dec735e5b7f5862c3e9cd48d692b6b528dcfa0114830aedcad227e5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/merged.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_bgp_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8eae4ac211ea7861be4677520f4962046223747c3f400208cad64b245998d0df",
+ "chksum_sha256": "df17a538471e85b869a2d0fb40789d01afdf232e7dd781977d34d1e062120268",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/replaced.yml",
+ "name": "tests/unit/modules/network/nxos/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "155aa10a15be7eea0049eb7035993970089c76ee53bcd152999c6055e8586aa1",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/deleted.yml",
+ "name": "tests/unit/modules/network/nxos/nxos_module.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c80a977c86fd76c2bbf06ab9878e383170afc5cba3f336e2d4f8ca2de3500fa",
+ "chksum_sha256": "0d1a4eadc6d91ab91db07ea6ac3a5576e417f1339c1520079de065451b87fd3c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/parsed.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_bgp_templates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac628812931497d1846e45df02f6c9fc4ba4b8d61fba9a559793b8b217e08484",
+ "chksum_sha256": "9ef2f97c111f9f0a6155a7353fde97cf4cbea592f715578a4df20893bad2e7ff",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acl_interfaces/tests/common/populate_config.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_vrf_af.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff18091eda8eab8839ce8b146409c956a16da2af0d78204879e2a0d844835c46",
+ "chksum_sha256": "25bca3233974b76c3830c4d3b603c73dcb0032036a1752e0804c8cbc473a9973",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_acl_interfaces.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6c9f9ef53093b66901ca6deaacd5cb8eeb43f20bf80782d5ce721a57290a7288",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_command.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d2a3122d72313abb8ad6da5ba19c9070e95def315019bcc16cbc8ff835428411",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_vrf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "ce6b30fdf5ea958858abea77348d845b53c2430333dbd879d9d8dbcf3f438432",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_vlans.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c100293cbf1e43e53f2ff874e3e1079621ae414e404db0078a26044ececac0b1",
+ "chksum_sha256": "320bd35df7abc52ebbb1a9cbb5286dcaed02a736d1051666fe5adcdf1a57a043",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_acls.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "5a7acd1fa7f58e389510c0b12262c3ccad13fdbfa263a052df7ced07b51ae4a0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_vxlan_vtep.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "92d1ef64dfcf0ea45c5294814d45a84bebce3070c429a57bda7f8f428a82f44b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_nxapi.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "c79a89af6839dbb8a41461dd0c68ff33e88b2d8ed3447f3b4fc7fd1be0419394",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_config.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f1acec58b3eaf968e8273380ab7d9170e37605cf1a1b5f66e08e568bf7d94e04",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "e983acf04cb96f85df2b16c5ad63601cb4b87de6e74f5003751d4090e5350ced",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_ospf_interfaces.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "19c4359cb20e2df0f649d97f3207a5861ebcdc6a2430db3ff94019ba14a38c73",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/vars/main.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_lacp_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "372a997c3a0ce9aa7b42746d99a75295a66504416d94fd1b0c545e58717a6a9d",
+ "chksum_sha256": "8b284eb5232ddef43ad1619acb13d73a7c48f20d736372b212a4d7352bd1e420",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_pim_rp_address.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aba5914ef5c9e367924aaafd1b555e2200d2f6c0aeaad5d5a2170880bd8f2cdf",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_banner.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "491dacb2d554279466c694b6c0266fcc0dc9be308b3bfe338cb1312ea0f2488c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_route_maps.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6310235666de6553845a8b9e1110ee0fb174b38fcd4568fa8df50bb18659f4e8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/fixtures/parsed.cfg",
+ "name": "tests/unit/modules/network/nxos/test_nxos_bgp_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6cd019ca4bbc9dc9a7657ccbc89a52a8505890bda564813765bc2bb004ef3814",
+ "chksum_sha256": "b94c2ad86ce7c380e6080a791526f868016e43350612dfd321a92964c8f9a04a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/parsed.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_pim_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "292e2e1750dcd0b5114b4c1b2f8fe93125754fa76c1869f64f87e6055da6338f",
+ "chksum_sha256": "caa9723150e6b388c6ac57ac58e581317fe3b31757c2da99e00b1695d362358a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/merged.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_ospfv2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2db6ef6d129f976a7c1dd3d1543020bba577d399fa4e780311e9942d0e39a21",
+ "chksum_sha256": "fae6c56a4916171573a12ad3ab0071c82488062af9e6562078262a50c699b83f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/deleted.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_devicealias.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce390e649cf85c4eddff7df5c0d545f57e389bb36774ae580dadafec45cf2a1b",
+ "chksum_sha256": "213ce495ad35b863cf252fa9bf4362db85fa8f2ab2916dfa76da754661d37e4d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/replaced.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_system.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8564938ffbfc1d998d5ce080e9bc6cb612cf2aeefa19ac79bf9bf6f895800a2e",
+ "chksum_sha256": "a7ea6f26526b9e4ead960a1c161b83de2581ca9d8a90d95925340e2fa8e17c8b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/overridden.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_feature.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c76970334ac8f7dea1c52d4a2e9785b43fe9754ed2cece81e49036d8efce6507",
+ "chksum_sha256": "62fc7da4353d2dd07ee8ec9822b06cd244ef12c93c65a7e53b12b9f57105a7b2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/_remove_config.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_pim.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c610838a6fa946c2237c8292c07b688a1ba7e8096a2d35d75f12f0b58866ed7",
+ "chksum_sha256": "7e108bcc18c5b82ce1cb4cd46d8996147683f30a2f18d2a9eab8367dddb07bb1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/gathered.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_static_routes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b28a51b56bc716da4598490f3cb3302329f0113eb3ab32fa9a860223eae43f6",
+ "chksum_sha256": "1c53987ad9ade88c6bc5458c8baa3ccb1df3d1e214173a0e6beaea3b7d734a26",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/empty_config.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_evpn_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c48e9bf3dac84165ae9953b7ad26ecec061cc599fd5cc78290d048b36119ce19",
+ "chksum_sha256": "7cb868c9311b9810788e13862c9b74c809e19a9f9b57ad9e46ff4124fef884dd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/rendered.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_vxlan_vtep_vni.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e0e085de3251f1846bf350484b27a639383cf70f82effdb6d8aa0b73f105c60",
+ "chksum_sha256": "eb233dc0fead25086cf7310ea386b8e51b17bf00468ab2a1dbb08f44a1aa03b7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_hostname/tests/common/_populate_config.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_fc_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "55288117a1ba1770a3e0bcac6e550c318807316ce84c65e536ba34cc449f62b0",
+ "chksum_sha256": "320e6449e95a55c156623eaaeeb69f81a2c66c66ec1d8a5c06702040595ec95e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_hsrp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cd0ecfc4be320078a8b5b73873598f3311e9d5b5848092a1b24b41029dde5dff",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_telemetry.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d5a38ca859e81c05061d9ee2f6cceefe3beccaa0587eb427a040be3c1e7e00f3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_vpc.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f47853340600c1c9b76ed00695079b203ae45896e40acfac2f48e28f2ad98b40",
+ "chksum_sha256": "fd868cf29e78bcc57d8d983ff6fcd4bbfcd1d0e34c63ab99737878827ec83d7c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_vsan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "8af4c5d2ddc4255350bdeea291e5661e3b5f07f5fd6cb755c9151cd8751394c5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_vpc_interface.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d6e27d691e0dc50b4659403bb76593fef84cfa193d36933dd299a1e486525fe0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_logging_global.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "569405484ee944ef07a45c205f8588b8a00e4b626f64ad52c2da5f13d1d599b3",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_lldp_interfaces.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d61028deb5268fdf63e8cfa7bcfa10f99738fb475e42f31911e3e264360eda34",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_l3_interfaces.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "9dd8c9037bd8ec303989a242280805dd82718f3a12b91f8ea34d323c819137a8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/tests",
+ "name": "tests/unit/modules/network/nxos/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/tests/common",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_devicealias/tests/common/sanity.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_3.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a93b103d12183553437c6808898ac57aa27e80ef5d1547e496e4ce5d5039baa9",
+ "chksum_sha256": "3fee2a63052400d50f93de5abc0dfd7897a48269d71af99f53182139461c3bb0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonesetactive_0.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1836db8862dd990ea4008e3c10d16d7a5563561ac27a8f846f2d2bfee8d805f0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzoneset_1.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bcdd326b9710f2b1f56ac59ed3bdccaa6e853cfb24af1445743d3254ce458166",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_bug339.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
+ "chksum_sha256": "6c45bdddc81417a51bdb8256cc024efef8d8cf432083297902ef8fcda9850703",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zoneset_active_vsan.out",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc2e669d7f6ccc6f6bd958fc52376105ef28a808ec084c11f2dfcecf1471760a",
+ "chksum_sha256": "eef09bfcfaae85faa6c0b10f2ac4207c73947496c77783c7aa6bac45a9d02d3a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c57c3ff656bb8daf9febb3c7a7bff934b952d1ccb83604d93e23ccfc3ea88724",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_0.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "010172dc57589c93d51b95aa50feecbda06ee706fd623422a088de23ccc9e723",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zone_vsan.out",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "74aee6c5327fabe2de027ca0907e76be2628510e125976c8918575bbf09ea703",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_2.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "171f743817a7ffe74398b6d78a0600ec0126dc2fe0890694a62eb6db89225a58",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zone_status_vsan.out",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "42ca86dc445e87a4cba4d08cbd217173af42f60181eb44017d852837c17e9a47",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzoneset_0.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cae1aca46b8d82f44a8f728182fafd1030040360725fb7367040392c1d52c018",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_2.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6e109ddbe7ad8d251edbe3ebe5bf960c055ac9a1d3d5bc3865596bf0d65d83a5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/overridden.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzoneset_2.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e261e0e1a8b1778739d5bb1c17c75a378186c5c2efd72a351d604e0d7560318",
+ "chksum_sha256": "08f7f3dc2d44ed61e7002bc417cea69f8407d7f4b0a62634c1f449f5338f7384",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/rtt.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_4.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4912e31a951706d32e6682e65095ad1794df0cdedeeeec3d5e9ba139e14a0409",
+ "chksum_sha256": "5005380753639cd3afd7e1b4ed90452d5d9492bb05c0852563009949e285db0e",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/rendered.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/show_zoneset_vsan.out",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa791c7d2f4a1989dd888e3880d230146cee3a448116cb224c31066a7f7573fe",
+ "chksum_sha256": "fc71e64aa30f5e4da11b419ac36c9b36cae8057bd8bef47b2da5bc293ebaaebb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/remove_config.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzonestatus_1.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b36b668815324ab75f641e80f9341c4f7b4f264585d07409b01704c5f8a018d",
+ "chksum_sha256": "46c16cd03a600d15085abf2052ed33830fed61424a02004021ce4dd82f0086e2",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/gathered.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_1.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c6126ef877c72fdb6e19f6499e4c3ac4219a5fb4cde44f4529306aac3fd7966",
+ "chksum_sha256": "abb9d2504120327f0b55c36a6c130c9180a0f455ae397737ecfaaa4e6f891ef5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/merged.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_zone_zoneset/shzone_0.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc2db360c49a4f960de158645c43115dec175da832363eeaba86a8ea97d1e62b",
+ "chksum_sha256": "13fa7bba46e2967e5c1bc2254620c4cd5337daab92a23d8aa6d6dec1ea7905ae",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/replaced.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2b9ce3eb07fb30eb390fc555830b61b4118c485523bf85538a805feb06ea0fb0",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/deleted.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "14f12dadd4d09d23feee07007209b1eae18bc9c013a50f79ed39ae022e652226",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/parsed.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface/show_port-channel_summary",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c6e2f1a0b9c9d9dc2c33b7906490f5acde968592b9257f095b4cfc38eee3dd9",
+ "chksum_sha256": "bc24581fc9c635989ae2926270c03c4c85c642b768f6685cff7cfdcd924e70f6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_acls/tests/common/populate_config.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc_interface/show_vpc_brief",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea179f0b4974165f1ad36daff838b34a663eb136543c6a908df7b8fe73027eaa",
+ "chksum_sha256": "1901ac7c063992cfcebb4ba110637bc241aa0fb122959d9af40be8957b47bf54",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interface/show_running-config_aclmgr.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "606d5b49ef095558924b32c6faa4e06f128f4655dddd57595e945cd33a4dbe72",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interface/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_static_route.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "8787db2297394f70dfa9ea3550d604fcaef84c40d973c498b147d346422b8585",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/meta",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/agg_show_vlan_brief.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "a6ad270b50167f7b13c3e7f40f83ebc812ceb7ef7d3455c0ce7b404ede9d6238",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "4cd4cba4fba8bd0ceca02c0cea325b38c65d82a1b01633f59776902dfc00010c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlan/show_vlan_brief.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c60b1775765492c72f6cf744aa56574aeea9b6c57ac19c974ebc73146cf4920a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tests/common",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_contact/tests/common/sanity.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21c997106ed806dbad5a3cdba01f5e201161020b6498a5d92ac003b702fa579a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_reboot",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/sh_run_interface_eth2_1_all",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/show_ip_pim_interface_eth2_1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b93ca9bd41e9b2ddb1e47c5782c7104ad85b67441154a82ac8954fdaebc62cfc",
+ "chksum_sha256": "88ad9b709dae48e373fcf04c158d6714de1f94e808fa033204d3b7b4e48bf8bb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/show_interface_eth2_1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0789afb380e4ee3da221b8e5904d1fc0309acaaf7afce7e63c9ba4c75c34ef57",
+ "chksum_sha256": "de922c7abca31dbb4d597cc485a02b9affe0c85fbaf1d268b15d38d8fe6ba069",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_interface/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "25600ba56e8e7066889af660ff87eb22aec3ce587ad60c1a7eca25c627064a5c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/meta",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global/configured.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "53ca3e78de8cd1b9ec6e040ece7bec23ea29663d1eebd1aed66a2980e832dc19",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_global/unconfigured.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "84f47e5f6bde50931928544ec35970db216f2bffd80cff2d7b7350903d86a5c1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tests",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep_vni",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep_vni/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_reboot/tests/common/sanity.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep_vni/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4c67bb49d21e284b5a0589f01582abe664f16d3361279b26463e6db76fd2425",
+ "chksum_sha256": "9676910539593a9a50989e688d7848de6074092a58e720ebbb437fa02859bd5f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry/N9K.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8d4c2bff81ce036402bf1bfc5a01191ead2507ea6deb7e7d4930ceb35965b1e5",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_telemetry/N9K_SGs.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "af042e9ae8d0a2e6b6ddc28f13ce438e7b49050385ca546bdb5f1a1b51c1da1e",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf_af",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf_af/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf_af/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "a11a202b4b688d09c6b7cb78a74d139f2b599c85973dd068b1ec18892926cbf4",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_overlay_global_config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "aeb566110f15e114259185610a7e1a17ba8a4a8380f590120292eceab885bccd",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/meta",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature/show_feature.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d7be73f626f2d19d384bda2c214b58505da78232c84a1b5c971495510e950db8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_feature/show_feature_mds.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "aca22131f56ec81f704e0b11d93c03bec037cd2b33a9964143b8c7bf53fa7878",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tests",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_traps/tests/common/sanity.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan/shvsan.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05f0dab9e4ec9f1e7c2800b2027b212b293b7f81a6a5a5645ded7d94b353e09f",
+ "chksum_sha256": "63e5ef2ff7dfbb786b21f77d0e047b16a5af1477901433189032c59f0d2e4988",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vsan/shvsanmem.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "91b604b4bdd22922dfbfb776dc40a760428806a2f845e8c9e76d42beef14bbb8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/ethernet_noshut_ipv4_ipv6",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "9b65b76df737c6da986a8850a5874d6f6733c26b8209c778d7d861ce0b5cb894",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2396a1bb62aa2d673208deb59766a787971d061630b8467afa525b7fdcb2e95e",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/ethernet_noshut_multiple_ipv6",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "93f0654869c15b0670ae97f78615662b35f5f3aefccfb3c1e2e463ae3759a6d0",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/meta",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_l3_interface/ethernet_noshut",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96a17611067db53f8bdee41ec48055b8ec808b2968d4f3dd784dfae1d6d85d4e",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interfaces",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interfaces/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/defaults",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl_interfaces/nxos_acl_interfaces.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "764024b1837b2491a2e87e60ba305df48bc64541f6caf244fe4eea63653ee340",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/vars",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n3k",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/vars/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n3k/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "15f80d89c3629bea0c669753c52136268f11e94a83ba6e69411fc96ab8449457",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n3k/show_run_all",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db42fd8881e10bf436a6712d228152f2ca6e627e0aa07531751009df2153fc3d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n7k",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/fixtures",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n7k/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_nxapi/n7k/show_run_all",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0f487063d9698ed988f461a0f1d90be2406db893a1881d162720dd0587a0ddd9",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface_ospf",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/fixtures/parsed.cfg",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface_ospf/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a97c562338de257e93908f409c770a5439d362fcddcdb8d939dc7ff2e7002c6",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/parsed.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface_ospf/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d952ab3ac507f65ff98eb58e380db0abe1583198f653d77beaa4e384929ad698",
+ "chksum_sha256": "5251c622e0f68d8c94e0215bd6a9f8c5d7ab755fc8879ceff8a9cac9b09caf9c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/merged.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "82ec13620a0c096e01a22d68b13c59798667ca68d3ddbbc1be802ff3f415a562",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_command",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/deleted.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_command/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "926b1b20a7babe989cc2f1b3e34a16d08368817d3ef38d6903650a294a42cc3a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/replaced.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_command/show_version.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dadff9821e586c3a062c7ac4b2936903359a8fc4b2f4cceeb466f558c82b52d1",
+ "chksum_sha256": "c45a343dc5192f50ef68b6bebee3da8cb980be9ec3cd5b75b00a20658fde6748",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/overridden.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_evpn_vni_config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04822e2f3237fcae131842012a520b5fde51c1fd4b381ab86aa055d7c076f867",
+ "chksum_sha256": "f12c0f2a38e284dde284dd4042cda86d71671289619456a3438a58f88ab929e7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/_remove_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "619954ad3ae24e83760fb7b9f09daaf010f96020056398fe49a04001326f72c8",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/gathered.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_vlan_no_facts",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c13b226487605fb6b6c73406c97a1972ed4c1995825124cb426d64eaac1a2ec",
+ "chksum_sha256": "ca3d163bab055381827226140568f3bef7eaac187cebd76878e0b63e9e442356",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/empty_config.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1658713d71b118aec3c1e27ae2d2453c239d56b32e3c6d816f5187796b40c7c",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/rendered.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_running-config_no_facts",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4a5f8a10bd86d7609cb6688523bc611e052260c4a06bf9cf202f33eee40a1f5",
+ "chksum_sha256": "783b64a65f752ecfbf2ab084cb164df5d5a818d3dc9980856037a47853bcbfd8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_logging_global/tests/common/_populate_config.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_running-config",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd0584d7e6d6a3aa5942acda6002b2e13a2bd64d59ee04dc5122274268676cc8",
+ "chksum_sha256": "6b793f9fc0770084b8babcdd914c4c42b2d1d3256be53d0cba3a78c19a616dec",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vlans/show_vlan",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6ac358cca25afd3bbc1aa11515f5e31ea9d548abdbcdf0563949e446d76f0c3c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280d285e1a9b531b25d8b880d3a032933d7135004c4658b7f1eb4872c60cc666",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4138a366efebe8659a4237d00d38110163d05b728a38c3514dce547a6d16a9d",
+ "chksum_sha256": "f971230c61be923533c3ea1eb63e9938cdf701f3884d511105d551d7ee531554",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bgp/config_32_bits_as.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe84894e2a9ff992a755f52f856ff23c23dd249fb9fe14dc4f264cd1a6070e12",
+ "chksum_sha256": "deed874c21f6472915657eed44c550ff438995b132d365fedb8b94deb510c856",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/meta",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_config",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_config/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_config/config.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "03abdcaedc2f739a12fd1c31f37164f02ed471aff024ad8e654a0d7ec3e611e6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_config/candidate.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "3352c8002ead9454c96fa97e82bc2bf245ac57a69edbf8a934bed4ef809de3de",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/vars",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/vars/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac136582618b2844cd5bb84e0aaf52f6b4e7a6ea581a0fa0ec5466e9c9c3bd68",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vxlan_vtep/config.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cea4ee76cbd062bd53302a5c1f37bc78584c8677950c99eb145c106a8852b278",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/overridden.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8aeec5521c22e513a4e736c9b51461285abce953173508ae28fb02ecef95952a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/rendered.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18c1848ef28d20fffd52045955b5a54ca72aea45d5c2ff9060d8856aeeb7162a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/gathered.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_run_all",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5f9f21d8529dd8144e8259391a04642355d3f3c9063db5a27472a5180d6b7dd",
+ "chksum_sha256": "f2a24625313932312f1d5e1c1d2c3935f99e5ab28880315c7dd39698df72daf8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/merged.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_ntc",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e82eb173fbdc3209c167bc096bf1665240e72b24a34f75c19ec1a3b7c5d4c8d1",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/_remove_config.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_default_interface",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "339639545338297ee483c8e1370eeefc88356fceb240523dbbc6d8c237444982",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/replaced.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_management",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a62a8ac1f40b48c56dbd06eb543897331236925608b859a758f1c91037f17455",
+ "chksum_sha256": "ca2359ed30a431281f356f0bb15f8bc2abb7169e21a6022034d8a4411fc413f6",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/deleted.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_ntc_interface",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ffe0021fbc8bd39a7990267dc791d78f2571f3285efd5fd1244c68159ffca98f",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/parsed.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_management_interface",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca1cd52e0c16bb7a97f9e1aa4eb9c49600b25854f35a93b0bcfeaf3ead1e7c07",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lldp_global/tests/common/_populate_config.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vrf/show_vrf_default",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7bacd3b7cb077dc6895ecf350711c1bf39e18739a361f6cf514e9dd4d6355f3",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_interfaces",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/show_interface_ethernet_switchport",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adcabdbc005b34ef70ef70c9a7a3b0bfc9b94ff93d2f3ea18bf1f19fbc2d2201",
+ "chksum_sha256": "8b266354ff8d44bc8987e97565cba41afedb2fbe282c577c8d371516e84d01bc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64ca5b4de950cd0d788bdd92389f7741e663d949ee6b0e343f1038c573564b64",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/show_interface_ethernet",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "3eba930c712f346d62428fc829d411a3c7f3b32d00ac0b54bee3e25f5a239876",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_switchport/show_vlan",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "864e06b1e9befc20919e723ee5f9bcb2f51a88e524e8ce8e112ac9ec12bf38fc",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/defaults",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global/N7K.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "406c13868678145de8fa80ab6540d805084b62c191a928a466fd44e6e59ccd78",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/vars/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_bfd_global/N9K.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7aa68e2e8b3bb1d6b4258ed279670b1a8e5fc5e6a63b446eab511c3963db1c1",
+ "chksum_sha256": "9089fae22c9d5b25f815c43f8de1b95041c2fd012d721e29e009e47f64fa66ff",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/parsed.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_run_interface_loopback0",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e26569250511575d31ea2338e2abede7fda11240bd583ac7896adccb24abd7f2",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/merged.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_run_interface_Ethernet2_1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49898e976fb440f5b1ebc404e489b6e2511049c950c8c0d8fe75e251dc5c3671",
+ "chksum_sha256": "6fda3eb0c249234e367f3e6d981a9957902f3443ff892ee35c3fbb87f8b2760b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/deleted.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_interface_loopback0",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5ba532156bd9771e2ec35bb085d404ed049b88e1da827ab9eb6d4dd38a3c871",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/replaced.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_interface_Ethernet2_1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8d1e0d8e50dd46dc84c73912990ffca760b0f5e1a8ad8ab2f10a90bce902369",
+ "chksum_sha256": "de922c7abca31dbb4d597cc485a02b9affe0c85fbaf1d268b15d38d8fe6ba069",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/overridden.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_interface/show_interface",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52f0f66116a7b07a9bfd39bcc2cdabc5a2db68ed24d92751f3f53f08332f0ed0",
+ "chksum_sha256": "3152065a242771a7e158ce7a8a6da2f5e62f0a35807389d62d0b927830d2ab04",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/_remove_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a639b04de3b8a2a70660b4034c291526f9e1f4bee9e24e4e543bd719a4f65a50",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/rtt.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0eb7659a55ab72845d72bf76bcac9618bf84c8d5d55c6fa8f04ff9c15c68a92a",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf/mds",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/gathered.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf/mds/show_inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f875bd8877a898338a0402b34179ad17f45b84d1623c72741490f52913eb064",
+ "chksum_sha256": "6e57d9cdf30356d92715fcd45fc9fc5e6fd0dc07298682343aef0f9c1c6231d5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/empty_config.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf/mds/show_version",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5a2d4d2fa3245673307a06eb29fd3b751fad50fde2dac24f84aaff5498a5af7",
+ "chksum_sha256": "33cb951f7e55bfc24ba68026e55c250973d49113340d4c86ec5f52d2dfb32198",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/rendered.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "454396b9f57fd4895a1d8063b565bde8a053291bc6f6e5f934397c75e99ac6cb",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf/nxos",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/purged.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf/nxos/show_inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "832b01df3053b44b5e69f7b1e62fede8df0291af913f7b4abd1d4545439ef718",
+ "chksum_sha256": "cb14576378ab0f2fd25d13dd4c4dbc31c3940d65ecb5f2fa29b39eba7e04d7ee",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/cliconf/nxos/show_version",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb7918401f281cb2d25ccebc214ac1fd9ae10975f3ca4f77f56a53f3cc485a40",
+ "chksum_sha256": "12c7c1c28fcd05fced589995a39f0a1389d970f6199fadcbf95543da330feb22",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_system",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_system/vrf_only",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/tasks/nxapi.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_snmp_community/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_system/vrf_only/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb83a5490c188c20d9a8632bdb13043146a87d5dfc555cd2e389ee30885bf7ca",
+ "chksum_sha256": "78c6c60ac86950ccc102c733222dc40ca42ae78622fecbee3af27f61bb015adb",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_system/config.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "c65fd0fea85d4421908dcb50a8778a562d62c2723ccb10520cb9c62c57f32977",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/meta",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/defaults",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim/config.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0c10e6761048c3f05f7143dc844703c76c41ed38e9f0175479874c183cef2ac",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl/show_ip_access-list.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "61de402089db40d19ac51c2e8a52894c45d9002156027c09ab17d0cc3fea635d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_acl/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/tests/common",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_rp_address",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_snmp_community/tests/common/sanity.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_rp_address/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a31444a04409e8b5f9d58864e5405ec3ded7ec744e6e731b3ca0d22ec1a6ee0",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_pim_rp_address/config.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "358b10ada00b4a323c671eaa193bc4c62bfa6e6600042382003a8efe5708bddf",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/tasks",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdadatabse.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f47853340600c1c9b76ed00695079b203ae45896e40acfac2f48e28f2ad98b40",
+ "chksum_sha256": "2e912812c9084c8676a32afd1e2522f00bfd75d164fbb86113f01a88f164ebb8",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ac0ca4032d1756f572a45038a7dfbf52c9ba9762815aa3a900b7c425036120",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdastatus.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5426bfb2d55edf2003ff250d0730cf4ea9ace4f44da02ce6664ed3bc77c7867d",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdastatus_mansi.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "b47fe27d3b8f630988803930db4eb009b8ef93e5f7838e4d3a49adf1859d43f5",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_devicealias/shdastatuslock.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ba24a69173bd06b6a025d176427b5619878da807c2a2b8a42aced27ac16d607c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/_nxos_ip_interface.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6cec8117492a3110c2e9066aa77a54abd2b9774cea08d60eb42b01c51c3e032",
+ "chksum_sha256": "ce28d8e36e6ed0776c23f9d57050b68a3dd4cace82f8d3909de89bf98c2b3401",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/vars",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/vars/main.yml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/show_vrf_all",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "97b10ee26b05c01b328d79887c41479bd2bf559184fb1df4af53645db6e109ba",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nxos_vsan/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "a690233a51aeef35190bd4fd440a65fd819bc0153ea20b1b362c25392329e04c",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/tests/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_vsan/tests/common/sanity.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb88ac95adfc25626d2c81d7dc7d45341c006ac0490dda44c675101938ec4bcc",
+ "chksum_sha256": "3d501010723eb3675a96a954862a0210ac023095947f8e09ea951e15faa96545",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_vrf_all",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f9b34cc87b238b789fa2c54dd008597f37aa0a859cf06c25e5ce60aefdd0ed12",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_vpc",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d5e079e4fa7499d1ee8e1f0ef98ba4908b5abe9bb146f2d07909c5e139940ff7",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tasks/nxapi.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/show_vpc",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228491b275c71b123f5bbb1dbac0093d55bd77f2a291bec15d452b489ef1674d",
+ "chksum_sha256": "4876569ce50a9bb5a00412d9e0e3bd17aed2d739613b28796c3224ae5397b705",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tasks/main.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_vpc_config",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9647339f89f9eeca01a09cd4f31da21e0c0c2f10793ea3ea06c24fd8664046e",
+ "chksum_sha256": "f68fa10ece2a3162e69d8bc6ae207437f76b07a956782d6637214ae088346557",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tasks/cli.yaml",
+ "name": "tests/unit/modules/network/nxos/fixtures/nxos_vpc/vrf_test_show_hardware",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82e7cf4c490409760e85c9516e174b27c8d67fb419e829353d94c4d022016245",
+ "chksum_sha256": "cf21f74cb2df54e19dae270c2b973eea492c3d9925ad7a1b8b59ff9f0e575287",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2d93cfe3962b0a2c9ba309e973bf3db9087727c2459fa64f1c2fe0c4dfe61627",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/meta/main.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_bgp_neighbor_address_family.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "306ef26bd70b58285bef6b50a83c64b26b9077181c6823ef20ac63252373d5de",
+ "chksum_sha256": "f23808fbf9f008c689f4e1c02b7916bd261d6118037deb6fa58977cf805cee8f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_ntp_global.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "21b73c3764e98cb997bfd28ed8b5ce52bd4b3fa3f499feab3802ca37e5de012f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/defaults/main.yaml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_zone_zoneset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d754db246e92eda93bade9b741e09afdc7c5ba2754a4ea5d874f92a479b65d0",
+ "chksum_sha256": "f746132b73f02acee61de5e52f37e03ee6591a3410d6ec01948416009d36e441",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_overlay_global.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "84c43269ad7e4c30ec87b50598018aa176ecc5fa6a5cf83c04163549df700c11",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/vars/main.yml",
+ "name": "tests/unit/modules/network/nxos/test_nxos_snmp_server.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05455a29f1db6e4d3602a83b356d9d7353037b1ef0627d2d2943b43520af76e6",
+ "chksum_sha256": "894022761947d303c8f275cdd907dd987a18b092589e069a7719332f9efc3180",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/unit/modules/network/nxos/test_nxos_hostname.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0c65ec8fcd4ddd139ae9947a00ec54a04c2c4da5a5376b0d106d903c2f37a9a1",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common",
+ "name": "tests/unit/mock",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/parsed.yaml",
+ "name": "tests/unit/mock/vault_helper.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c10b8ab6302c0df3b5d8c99d1f67a9d92b80b518250c11e650dde96bc8f6b886",
+ "chksum_sha256": "3bf7834d18bd34473d0f4b898253177229a131f14364874efba584ff985e4a41",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/merged.yaml",
+ "name": "tests/unit/mock/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53c3cd45c91dd6078cf71ea7a9973d65069a61eaef47c49a51c7b66355970a63",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/deleted.yaml",
+ "name": "tests/unit/mock/path.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4af7dc65128b02d59c3c12d5f94cfc4a77c266e867b79b172fc6425c871f9ffd",
+ "chksum_sha256": "5561a16a302e55ea79227591afefc3a93f33001fc80155dd29b068b90502a446",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/replaced.yaml",
+ "name": "tests/unit/mock/procenv.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08e830f9373a4a360949e7c56cf2c374dad94a559adbde314f3b66daa642a0c8",
+ "chksum_sha256": "3ff983479d7aa4b90efe73337f723c14224edabcdfde4efe49f24cd905f45f1a",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/overridden.yaml",
+ "name": "tests/unit/mock/yaml_helper.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96e1c7636b178b2ae705ac0d7dd2ea67a4d4c1aaf29122900549bde0da43b4de",
+ "chksum_sha256": "fc4f4d5d2968ced8e3a04653354945d720e7c6369aad67ba7724c5981c82b6ea",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/_remove_config.yaml",
+ "name": "tests/unit/mock/loader.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d6daa4d3b2cb43c7b62b7606b80b4f212348ecb1e28a516d16ac90e02f459266",
+ "chksum_sha256": "2343d1f644250d4b616f0bf0a826fdee8fe306243d8412d44dd0e033352b262b",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/gathered.yaml",
+ "name": "tests/unit/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e4441e52874693cf2caa332a4639c23ad59350a6eda4e9c5109a9792a306447",
+ "chksum_sha256": "49ba996dc4735c3463e9af561344346dfae14bcc1a68096ce78364b377f0df1f",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/empty_config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "96cae582141397682d092a6b5d6de9651b6203157e891342063c439ffb130a4d",
+ "name": "tests/sanity",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/rendered.yaml",
+ "name": "tests/sanity/ignore-2.16.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6eb88f454c810cee9cf1be9fc56eb6a574516fc49bddc50e81f329dc98cfb9eb",
+ "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
"format": 1
},
{
- "name": "tests/integration/targets/nxos_lag_interfaces/tests/common/_populate_config.yaml",
+ "name": "tests/sanity/ignore-2.18.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76ea0558de38d30dd8813738f9d92fba9867c7d37fcb2a104acaaf5aeebb12af",
+ "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
"format": 1
},
{
- "name": "tests/integration/target-prefixes.network",
+ "name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d281d4e38e720eeeee03bdd1df2429947e358ec15ab276b0b29765d99953d63",
+ "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
"format": 1
},
{
- "name": "tests/integration/test_integration.py",
+ "name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4966cf32b6505b51255468ffa72db6f36b948b07aa5b9ae3313607a93e48988",
+ "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
"format": 1
},
{
- "name": "tox-ansible.ini",
+ "name": "tests/sanity/ignore-2.17.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da802bfc2dc2671ca7caba028e8a635b459b131874041d119b698717333a7a49",
+ "chksum_sha256": "935199601f3c767ca623e1a8c6f73032f3cfee0536e4aed1cf5842b157956671",
"format": 1
}
],
diff --git a/ansible_collections/cisco/nxos/MANIFEST.json b/ansible_collections/cisco/nxos/MANIFEST.json
index dd68be244..4a73e0974 100644
--- a/ansible_collections/cisco/nxos/MANIFEST.json
+++ b/ansible_collections/cisco/nxos/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "cisco",
"name": "nxos",
- "version": "8.0.0",
+ "version": "8.1.0",
"authors": [
"Ansible Network Community (ansible-network)"
],
@@ -29,7 +29,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7c60a9cfda9d9f1ae29b578d7a8f10bf08f80cfb94eb05f7eb016110eaa973d",
+ "chksum_sha256": "f4cdd8422ed652986bd9713b4c82655e1a14b6757239803e9af032e10a32539b",
"format": 1
},
"format": 1
diff --git a/ansible_collections/cisco/nxos/README.md b/ansible_collections/cisco/nxos/README.md
index 82ce821c8..bceee4950 100644
--- a/ansible_collections/cisco/nxos/README.md
+++ b/ansible_collections/cisco/nxos/README.md
@@ -1,5 +1,4 @@
# Cisco NX-OS Collection
-[![CI](https://zuul-ci.org/gated.svg)](https://dashboard.zuul.ansible.com/t/ansible/project/github.com/ansible-collections/cisco.nxos)
[![Codecov](https://codecov.io/gh/ansible-collections/cisco.nxos/branch/main/graph/badge.svg)](https://codecov.io/gh/ansible-collections/cisco.nxos)
[![CI](https://github.com/ansible-collections/cisco.nxos/actions/workflows/tests.yml/badge.svg?branch=main&event=schedule)](https://github.com/ansible-collections/cisco.nxos/actions/workflows/tests.yml)
diff --git a/ansible_collections/cisco/nxos/changelogs/changelog.yaml b/ansible_collections/cisco/nxos/changelogs/changelog.yaml
index c0b40cc7f..6054f87e9 100644
--- a/ansible_collections/cisco/nxos/changelogs/changelog.yaml
+++ b/ansible_collections/cisco/nxos/changelogs/changelog.yaml
@@ -1091,3 +1091,17 @@ releases:
- fix_771.yaml
- trivial.yaml
release_date: "2024-04-19"
+ 8.1.0:
+ changes:
+ bugfixes:
+ - nxos_l3_interfaces - fail if encapsulation exists on a different sub-interface.
+ - nxos_static_routes - correctly generate command when track parameter is specified.
+ minor_changes:
+ - route_maps - support simple route-maps that do not contain set or match statements.
+ It allows for the creation and management of purely basic route-map entries
+ like 'route-map test-1 permit 10'.
+ fragments:
+ - ana_442.yaml
+ - nxos_l3_interfaces.yaml
+ - route_maps.yaml
+ release_date: "2024-05-22"
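
Illustrative only: a minimal playbook sketch of two of the 8.1.0 changes noted in the changelog above, with option names taken from the unit tests added later in this patch (nxos_route_maps entries without match/set statements, and nxos_static_routes with the track parameter).

- name: Manage a bare route-map entry (no match or set statements)
  cisco.nxos.nxos_route_maps:
    config:
      - route_map: test-1
        entries:
          - action: permit
            sequence: 10   # renders simply "route-map test-1 permit 10"
    state: merged

- name: Static route with an object track
  cisco.nxos.nxos_static_routes:
    config:
      - address_families:
          - afi: ipv4
            routes:
              - dest: 192.0.2.49/28
                next_hops:
                  - forward_router_address: 192.0.2.15
                    track: 1             # now emitted before the name/tag keywords
                    route_name: new_route
    state: merged
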
diff --git a/ansible_collections/cisco/nxos/plugins/cliconf/nxos.py b/ansible_collections/cisco/nxos/plugins/cliconf/nxos.py
index 3a1ae1c6c..b3e15b80e 100644
--- a/ansible_collections/cisco/nxos/plugins/cliconf/nxos.py
+++ b/ansible_collections/cisco/nxos/plugins/cliconf/nxos.py
@@ -210,38 +210,65 @@ class Cliconf(CliconfBase):
return self.send_command(cmd)
- def edit_config(self, candidate=None, commit=True, replace=None, comment=None):
+ def edit_config(
+ self,
+ candidate=None,
+ commit=True,
+ replace=None,
+ comment=None,
+ err_responses=None,
+ ):
resp = {}
operations = self.get_device_operations()
- self.check_edit_config_capability(operations, candidate, commit, replace, comment)
+ self.check_edit_config_capability(
+ operations,
+ candidate,
+ commit,
+ replace,
+ comment,
+ )
results = []
requests = []
+ if err_responses:
+ # update platform default stderr regexes to include module-specific ones
+ err_responses = [re.compile(to_bytes(err_re)) for err_re in err_responses]
+ current_stderr_re = self._connection._get_terminal_std_re(
+ "terminal_stderr_re",
+ )
+ current_stderr_re.extend(err_responses)
+
if replace:
- device_info = self.get_device_info()
# not all NX-OS versions support `config replace`
# we let the device throw the invalid command error
- candidate = "config replace {0}".format(replace)
+ candidate = f"config replace {replace}"
- if commit:
- self.send_command("configure terminal")
+ try:
+ if commit:
+ self.send_command("configure terminal")
- for line in to_list(candidate):
- if not isinstance(line, Mapping):
- line = {"command": line}
+ for line in to_list(candidate):
+ if not isinstance(line, Mapping):
+ line = {"command": line}
- cmd = line["command"]
- if cmd != "end":
- results.append(self.send_command(**line))
- requests.append(cmd)
+ cmd = line["command"]
+ if cmd != "end":
+ results.append(self.send_command(**line))
+ requests.append(cmd)
- self.send_command("end")
- else:
- raise ValueError("check mode is not supported")
+ self.send_command("end")
+ else:
+ raise ValueError("check mode is not supported")
+
+ resp["request"] = requests
+ resp["response"] = results
+ return resp
- resp["request"] = requests
- resp["response"] = results
- return resp
+ finally:
+ # always reset terminal regexes to platform default
+ if err_responses:
+ for x in err_responses:
+ current_stderr_re.remove(x)
def get(
self,
diff --git a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/acls/acls.py b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/acls/acls.py
index 5e6f3c34a..a62489b28 100644
--- a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/acls/acls.py
+++ b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/acls/acls.py
@@ -246,13 +246,13 @@ class Acls(ConfigBase):
end = int(ace[x]["port_protocol"]["range"]["end"])
if st in port_protocol.keys():
- ace[x]["port_protocol"]["range"][
- "start"
- ] = port_protocol[st]
+ ace[x]["port_protocol"]["range"]["start"] = (
+ port_protocol[st]
+ )
if end in port_protocol.keys():
- ace[x]["port_protocol"]["range"][
- "end"
- ] = port_protocol[end]
+ ace[x]["port_protocol"]["range"]["end"] = (
+ port_protocol[end]
+ )
return want
def set_state(self, want, have):
diff --git a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py
index d4da881f5..e9aa2ee02 100644
--- a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py
+++ b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/l3_interfaces/l3_interfaces.py
@@ -46,6 +46,8 @@ class L3_interfaces(ConfigBase):
exclude_params = []
+ err_responses = [r"encap in use by another sub-interface"]
+
def __init__(self, module):
super(L3_interfaces, self).__init__(module)
@@ -70,7 +72,7 @@ class L3_interfaces(ConfigBase):
return l3_interfaces_facts
def edit_config(self, commands):
- return self._connection.edit_config(commands)
+ return self._connection.edit_config(candidate=commands, err_responses=self.err_responses)
def execute_module(self):
"""Execute the module
diff --git a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/route_maps/route_maps.py b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/route_maps/route_maps.py
index fa7d57b1b..77f8f02be 100644
--- a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/route_maps/route_maps.py
+++ b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/config/route_maps/route_maps.py
@@ -47,6 +47,7 @@ class Route_maps(ResourceModule):
tmplt=Route_mapsTemplate(),
)
self.linear_parsers = [
+ "route_map",
"description",
"continue_sequence",
"set.as_path.prepend.last_as",
diff --git a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/facts/static_routes/static_routes.py b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/facts/static_routes/static_routes.py
index 991b552ff..51f805f2d 100644
--- a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/facts/static_routes/static_routes.py
+++ b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/facts/static_routes/static_routes.py
@@ -10,6 +10,7 @@ It is in this file the configuration is collected from the device
for a given resource, parsed, and the facts tree is populated
based on the configuration.
"""
+
from __future__ import absolute_import, division, print_function
@@ -28,7 +29,7 @@ from ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.rm_templat
class Static_routesFacts(object):
"""The nxos static_routes fact class"""
- def __init__(self, module, subspec="config", options="options"):
+ def __init__(self, module):
self._module = module
self.argument_spec = Static_routesArgs.argument_spec
@@ -84,9 +85,13 @@ class Static_routesFacts(object):
_triv_static_route = {"address_families": []}
if afi_v4:
- _triv_static_route["address_families"].append({"afi": "ipv4", "routes": afi_v4})
+ _triv_static_route["address_families"].append(
+ {"afi": "ipv4", "routes": afi_v4},
+ )
if afi_v6:
- _triv_static_route["address_families"].append({"afi": "ipv6", "routes": afi_v6})
+ _triv_static_route["address_families"].append(
+ {"afi": "ipv6", "routes": afi_v6},
+ )
_static_route_facts.append(_triv_static_route)
@@ -100,9 +105,13 @@ class Static_routesFacts(object):
}
if afi_v4:
- _vrf_static_route["address_families"].append({"afi": "ipv4", "routes": afi_v4})
+ _vrf_static_route["address_families"].append(
+ {"afi": "ipv4", "routes": afi_v4},
+ )
if afi_v6:
- _vrf_static_route["address_families"].append({"afi": "ipv6", "routes": afi_v6})
+ _vrf_static_route["address_families"].append(
+ {"afi": "ipv6", "routes": afi_v6},
+ )
_static_route_facts.append(_vrf_static_route)
return _static_route_facts
@@ -125,7 +134,10 @@ class Static_routesFacts(object):
data = self.get_static_routes_data(connection)
# parse native config using the Static_routes template
- static_routes_parser = Static_routesTemplate(lines=data.splitlines(), module=self._module)
+ static_routes_parser = Static_routesTemplate(
+ lines=data.splitlines(),
+ module=self._module,
+ )
objs = static_routes_parser.parse()
strout = self.process_static_routes(objs)
@@ -134,7 +146,11 @@ class Static_routesFacts(object):
ansible_facts["ansible_network_resources"].pop("static_routes", None)
params = utils.remove_empties(
- static_routes_parser.validate_config(self.argument_spec, {"config": objs}, redact=True),
+ static_routes_parser.validate_config(
+ self.argument_spec,
+ {"config": objs},
+ redact=True,
+ ),
)
facts["static_routes"] = params.get("config")
diff --git a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/rm_templates/static_routes.py b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/rm_templates/static_routes.py
index a70bc4130..65a72a390 100644
--- a/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/rm_templates/static_routes.py
+++ b/ansible_collections/cisco/nxos/plugins/module_utils/network/nxos/rm_templates/static_routes.py
@@ -24,7 +24,11 @@ from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.r
class Static_routesTemplate(NetworkTemplate):
def __init__(self, lines=None, module=None):
- super(Static_routesTemplate, self).__init__(lines=lines, tmplt=self, module=module)
+ super(Static_routesTemplate, self).__init__(
+ lines=lines,
+ tmplt=self,
+ module=module,
+ )
# fmt: off
PARSERS = [
@@ -49,9 +53,9 @@ class Static_routesTemplate(NetworkTemplate):
(\s(?P<interface>(Ethernet|loopback|mgmt|Null|port-channel)\S+))?
(\s(?P<forward_router_address>\S+))?
(\svrf\s(?P<dest_vrf>\S+))?
+ (\strack\s(?P<track>\d+))?
(\sname\s(?P<route_name>\S+))?
(\stag\s(?P<tag>\d+))?
- (\strack\s(?P<track>\d+))?
(\s(?P<admin_distance>\d+))?
$""", re.VERBOSE,
),
@@ -60,9 +64,9 @@ class Static_routesTemplate(NetworkTemplate):
"{{ (' ' + ipv4.interface) if ipv4.interface is defined else '' }}"
"{{ (' ' + ipv4.forward_router_address) if ipv4.forward_router_address is defined else '' }}"
"{{ (' vrf ' + ipv4.dest_vrf) if ipv4.dest_vrf is defined else '' }}"
+ "{{ (' track ' + ipv4.track|string) if ipv4.track is defined else '' }}"
"{{ (' name ' + ipv4.route_name) if ipv4.route_name is defined else '' }}"
"{{ (' tag ' + ipv4.tag|string) if ipv4.tag is defined else '' }}"
- "{{ (' track ' + ipv4.track|string) if ipv4.track is defined else '' }}"
"{{ (' ' + ipv4.admin_distance|string) if ipv4.admin_distance is defined else '' }}",
"result": {
"{{ dest }}_{{ namevrf|d() }}_ipv4": [
@@ -90,9 +94,9 @@ class Static_routesTemplate(NetworkTemplate):
(\s(?P<interface>(Ethernet|loopback|mgmt|Null|port-channel)\S+))?
(\s(?P<forward_router_address>\S+))?
(\svrf\s(?P<dest_vrf>\S+))?
+ (\strack\s(?P<track>\d+))?
(\sname\s(?P<route_name>\S+))?
(\stag\s(?P<tag>\d+))?
- (\strack\s(?P<track>\d+))?
(\s(?P<admin_distance>\d+))?
$""", re.VERBOSE,
),
@@ -101,9 +105,9 @@ class Static_routesTemplate(NetworkTemplate):
"{{ (' ' + ipv6.interface) if ipv6.interface is defined else '' }}"
"{{ (' ' + ipv6.forward_router_address) if ipv6.forward_router_address is defined else '' }}"
"{{ (' vrf ' + ipv6.dest_vrf) if ipv6.dest_vrf is defined else '' }}"
+ "{{ (' track ' + ipv6.track|string) if ipv6.track is defined else '' }}"
"{{ (' name ' + ipv6.route_name) if ipv6.route_name is defined else '' }}"
"{{ (' tag ' + ipv6.tag|string) if ipv6.tag is defined else '' }}"
- "{{ (' track ' + ipv6.track|string) if ipv6.track is defined else '' }}"
"{{ (' ' + ipv6.admin_distance|string) if ipv6.admin_distance is defined else '' }}",
"result": {
"{{ dest }}_{{ namevrf|d() }}_ipv6": [
diff --git a/ansible_collections/cisco/nxos/plugins/modules/nxos_file_copy.py b/ansible_collections/cisco/nxos/plugins/modules/nxos_file_copy.py
index 4847a446e..b32576a1f 100644
--- a/ansible_collections/cisco/nxos/plugins/modules/nxos_file_copy.py
+++ b/ansible_collections/cisco/nxos/plugins/modules/nxos_file_copy.py
@@ -418,9 +418,9 @@ class FilePull(FileCopy):
self.result["copy_cmd"] = copy_cmd
pulled = self._connection.pull_file(command=copy_cmd, remotepassword=rserverpassword)
if pulled:
- self.result[
- "transfer_status"
- ] = "Received: File copied/pulled to nxos device from remote scp server."
+ self.result["transfer_status"] = (
+ "Received: File copied/pulled to nxos device from remote scp server."
+ )
else:
self.result["failed"] = True
diff --git a/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_route_maps.py b/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_route_maps.py
index d247c02b4..45a64775e 100644
--- a/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_route_maps.py
+++ b/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_route_maps.py
@@ -1493,3 +1493,92 @@ class TestNxosRouteMapsModule(TestNxosModule):
]
result = self.execute_module(changed=True)
self.assertEqual(set(result["commands"]), set(commands))
+
+ def test_nxos_route_maps_without_match_and_set_merged(self):
+ self.get_config.return_value = dedent(
+ """\
+ route-map test-1 permit 10
+ """,
+ )
+ set_module_args(
+ dict(
+ config=[
+ dict(
+ route_map="test-1",
+ entries=[
+ dict(
+ action="permit",
+ sequence=20,
+ ),
+ ],
+ ),
+ ],
+ state="merged",
+ ),
+ )
+ commands = [
+ "route-map test-1 permit 20",
+ ]
+ result = self.execute_module(changed=True)
+ self.assertEqual(set(result["commands"]), set(commands))
+
+ def test_nxos_route_maps_without_match_and_set_overridden(self):
+ self.get_config.return_value = dedent(
+ """\
+ route-map test-1 permit 10
+ """,
+ )
+ set_module_args(
+ dict(
+ config=[
+ dict(
+ route_map="test-2",
+ entries=[
+ dict(
+ action="permit",
+ sequence=10,
+ ),
+ ],
+ ),
+ ],
+ state="overridden",
+ ),
+ )
+ commands = [
+ "no route-map test-1 permit 10",
+ "route-map test-2 permit 10",
+ ]
+ result = self.execute_module(changed=True)
+ self.assertEqual(set(result["commands"]), set(commands))
+
+ def test_nxos_route_maps_without_match_and_set_replaced(self):
+ self.get_config.return_value = dedent(
+ """\
+ route-map test-1 permit 10
+ route-map test-1 permit 20
+ route-map test-2 permit 10
+ """,
+ )
+ set_module_args(
+ dict(
+ config=[
+ dict(
+ route_map="test-1",
+ entries=[
+ dict(
+ action="permit",
+ sequence=30,
+ ),
+ ],
+ ),
+ ],
+ state="replaced",
+ ),
+ )
+ commands = [
+ "no route-map test-1 permit 10",
+ "no route-map test-1 permit 20",
+ "route-map test-1 permit 30",
+ ]
+ result = self.execute_module(changed=True)
+ self.assertEqual(set(result["commands"]), set(commands))
diff --git a/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_static_routes.py b/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_static_routes.py
index 7cd021314..e328ba652 100644
--- a/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_static_routes.py
+++ b/ansible_collections/cisco/nxos/tests/unit/modules/network/nxos/test_nxos_static_routes.py
@@ -90,7 +90,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"dest": "192.0.2.16/28",
},
{
- "next_hops": [{"forward_router_address": "192.0.2.26", "tag": 12}],
+ "next_hops": [
+ {"forward_router_address": "192.0.2.26", "tag": 12},
+ ],
"dest": "192.0.2.80/28",
},
],
@@ -124,7 +126,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"afi": "ipv4",
"routes": [
{
- "next_hops": [{"forward_router_address": "192.168.255.1"}],
+ "next_hops": [
+ {"forward_router_address": "192.168.255.1"},
+ ],
"dest": "0.0.0.0/0",
},
],
@@ -268,7 +272,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
},
{
"next_hops": [
- {"forward_router_address": "192.0.2.26", "tag": 12},
+ {
+ "forward_router_address": "192.0.2.26",
+ "tag": 12,
+ },
],
"dest": "192.0.2.80/28",
},
@@ -303,7 +310,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"afi": "ipv4",
"routes": [
{
- "next_hops": [{"forward_router_address": "192.168.255.1"}],
+ "next_hops": [
+ {"forward_router_address": "192.168.255.1"},
+ ],
"dest": "0.0.0.0/0",
},
],
@@ -352,6 +361,16 @@ class TestNxosStaticRoutesModule(TestNxosModule):
],
"dest": "192.0.2.48/28",
},
+ {
+ "next_hops": [
+ {
+ "forward_router_address": "192.0.2.15",
+ "track": 1,
+ "route_name": "new_route",
+ },
+ ],
+ "dest": "192.0.2.49/28",
+ },
],
},
{
@@ -395,7 +414,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"routes": [
{
"next_hops": [
- {"forward_router_address": "192.0.2.22", "tag": 4},
+ {
+ "forward_router_address": "192.0.2.22",
+ "tag": 4,
+ },
{
"forward_router_address": "192.0.2.23",
"admin_distance": 1,
@@ -420,6 +442,7 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"ip route 10.0.11.0/25 10.0.11.10 tag 22 11",
"ip route 10.0.11.0/25 10.0.11.12 vrf Test tag 22 11",
"ip route 192.0.2.48/28 loopback22 192.0.2.13",
+ "ip route 192.0.2.49/28 192.0.2.15 track 1 name new_route",
"ipv6 route 2200:10::/36 2048:ae12::1 vrf dest 5",
"ipv6 route 2200:10::/36 mgmt0 2048:ae12::1 tag 22 11",
"ipv6 route 2200:10::/36 port-channel22 2048:ae12::1",
@@ -487,7 +510,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
},
{
"next_hops": [
- {"forward_router_address": "192.0.2.27", "tag": 13},
+ {
+ "forward_router_address": "192.0.2.27",
+ "tag": 13,
+ },
],
"dest": "192.0.2.80/28",
},
@@ -582,7 +608,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
},
{
"next_hops": [
- {"forward_router_address": "192.0.2.27", "tag": 13},
+ {
+ "forward_router_address": "192.0.2.27",
+ "tag": 13,
+ },
],
"dest": "192.0.2.80/28",
},
@@ -749,7 +778,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
config=[
dict(
vrf="Test",
- address_families=[dict(afi="ipv4", routes=[dict(dest="192.0.2.48/28")])],
+ address_families=[
+ dict(afi="ipv4", routes=[dict(dest="192.0.2.48/28")]),
+ ],
),
],
state="deleted",
@@ -815,7 +846,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
},
{
"next_hops": [
- {"forward_router_address": "192.0.2.26", "tag": 12},
+ {
+ "forward_router_address": "192.0.2.26",
+ "tag": 12,
+ },
],
"dest": "192.0.2.80/28",
},
@@ -948,7 +982,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"dest": "192.0.2.16/28",
},
{
- "next_hops": [{"forward_router_address": "192.0.2.26", "tag": 12}],
+ "next_hops": [
+ {"forward_router_address": "192.0.2.26", "tag": 12},
+ ],
"dest": "192.0.2.80/28",
},
],
@@ -964,7 +1000,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
{
"next_hops": [
{"forward_router_address": "192.0.2.13"},
- {"forward_router_address": "192.0.2.14", "admin_distance": 5},
+ {
+ "forward_router_address": "192.0.2.14",
+ "admin_distance": 5,
+ },
],
"dest": "192.0.2.48/28",
},
@@ -979,7 +1018,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"afi": "ipv4",
"routes": [
{
- "next_hops": [{"forward_router_address": "192.168.255.1"}],
+ "next_hops": [
+ {"forward_router_address": "192.168.255.1"},
+ ],
"dest": "0.0.0.0/0",
},
],
@@ -1138,7 +1179,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"dest": "192.0.2.17/28",
},
{
- "next_hops": [{"forward_router_address": "192.0.2.26", "tag": 12}],
+ "next_hops": [
+ {"forward_router_address": "192.0.2.26", "tag": 12},
+ ],
"dest": "192.0.2.79/28",
},
],
@@ -1154,7 +1197,10 @@ class TestNxosStaticRoutesModule(TestNxosModule):
{
"next_hops": [
{"forward_router_address": "192.0.2.13"},
- {"forward_router_address": "192.0.2.14", "admin_distance": 5},
+ {
+ "forward_router_address": "192.0.2.14",
+ "admin_distance": 5,
+ },
],
"dest": "192.0.2.48/28",
},
@@ -1169,7 +1215,9 @@ class TestNxosStaticRoutesModule(TestNxosModule):
"afi": "ipv4",
"routes": [
{
- "next_hops": [{"forward_router_address": "192.168.255.1"}],
+ "next_hops": [
+ {"forward_router_address": "192.168.255.1"},
+ ],
"dest": "0.0.0.0/0",
},
],
diff --git a/ansible_collections/community/dns/.github/workflows/ansible-test.yml b/ansible_collections/community/dns/.github/workflows/ansible-test.yml
index d8121c8f7..69005c325 100644
--- a/ansible_collections/community/dns/.github/workflows/ansible-test.yml
+++ b/ansible_collections/community/dns/.github/workflows/ansible-test.yml
@@ -96,6 +96,7 @@ jobs:
- "3.10"
- "3.11"
- "3.12"
+ - "3.13"
include:
# 2.14
- ansible: stable-2.14
diff --git a/ansible_collections/community/dns/CHANGELOG.md b/ansible_collections/community/dns/CHANGELOG.md
index 95a940b1c..94c21810a 100644
--- a/ansible_collections/community/dns/CHANGELOG.md
+++ b/ansible_collections/community/dns/CHANGELOG.md
@@ -2,198 +2,214 @@
**Topics**
-- <a href="#v3-0-0">v3\.0\.0</a>
+- <a href="#v3-0-1">v3\.0\.1</a>
- <a href="#release-summary">Release Summary</a>
+ - <a href="#bugfixes">Bugfixes</a>
+- <a href="#v3-0-0">v3\.0\.0</a>
+ - <a href="#release-summary-1">Release Summary</a>
- <a href="#major-changes">Major Changes</a>
- <a href="#minor-changes">Minor Changes</a>
- <a href="#breaking-changes--porting-guide">Breaking Changes / Porting Guide</a>
- <a href="#removed-features-previously-deprecated">Removed Features \(previously deprecated\)</a>
- - <a href="#bugfixes">Bugfixes</a>
-- <a href="#v2-9-0">v2\.9\.0</a>
- - <a href="#release-summary-1">Release Summary</a>
- <a href="#bugfixes-1">Bugfixes</a>
+- <a href="#v2-9-0">v2\.9\.0</a>
+ - <a href="#release-summary-2">Release Summary</a>
+ - <a href="#bugfixes-2">Bugfixes</a>
- <a href="#new-plugins">New Plugins</a>
- <a href="#filter">Filter</a>
- <a href="#v2-8-3">v2\.8\.3</a>
- - <a href="#release-summary-2">Release Summary</a>
- - <a href="#bugfixes-2">Bugfixes</a>
-- <a href="#v2-8-2">v2\.8\.2</a>
- <a href="#release-summary-3">Release Summary</a>
- - <a href="#security-fixes">Security Fixes</a>
- <a href="#bugfixes-3">Bugfixes</a>
-- <a href="#v2-8-1">v2\.8\.1</a>
+- <a href="#v2-8-2">v2\.8\.2</a>
- <a href="#release-summary-4">Release Summary</a>
+ - <a href="#security-fixes">Security Fixes</a>
- <a href="#bugfixes-4">Bugfixes</a>
-- <a href="#v2-8-0">v2\.8\.0</a>
+- <a href="#v2-8-1">v2\.8\.1</a>
- <a href="#release-summary-5">Release Summary</a>
- - <a href="#minor-changes-1">Minor Changes</a>
- - <a href="#deprecated-features">Deprecated Features</a>
- <a href="#bugfixes-5">Bugfixes</a>
-- <a href="#v2-7-0">v2\.7\.0</a>
+- <a href="#v2-8-0">v2\.8\.0</a>
- <a href="#release-summary-6">Release Summary</a>
- - <a href="#minor-changes-2">Minor Changes</a>
+ - <a href="#minor-changes-1">Minor Changes</a>
+ - <a href="#deprecated-features">Deprecated Features</a>
- <a href="#bugfixes-6">Bugfixes</a>
-- <a href="#v2-6-4">v2\.6\.4</a>
+- <a href="#v2-7-0">v2\.7\.0</a>
- <a href="#release-summary-7">Release Summary</a>
+ - <a href="#minor-changes-2">Minor Changes</a>
- <a href="#bugfixes-7">Bugfixes</a>
-- <a href="#v2-6-3">v2\.6\.3</a>
+- <a href="#v2-6-4">v2\.6\.4</a>
- <a href="#release-summary-8">Release Summary</a>
- <a href="#bugfixes-8">Bugfixes</a>
-- <a href="#v2-6-2">v2\.6\.2</a>
+- <a href="#v2-6-3">v2\.6\.3</a>
- <a href="#release-summary-9">Release Summary</a>
- <a href="#bugfixes-9">Bugfixes</a>
-- <a href="#v2-6-1">v2\.6\.1</a>
+- <a href="#v2-6-2">v2\.6\.2</a>
- <a href="#release-summary-10">Release Summary</a>
- <a href="#bugfixes-10">Bugfixes</a>
-- <a href="#v2-6-0">v2\.6\.0</a>
+- <a href="#v2-6-1">v2\.6\.1</a>
- <a href="#release-summary-11">Release Summary</a>
- - <a href="#minor-changes-3">Minor Changes</a>
- <a href="#bugfixes-11">Bugfixes</a>
+- <a href="#v2-6-0">v2\.6\.0</a>
+ - <a href="#release-summary-12">Release Summary</a>
+ - <a href="#minor-changes-3">Minor Changes</a>
+ - <a href="#bugfixes-12">Bugfixes</a>
- <a href="#new-plugins-1">New Plugins</a>
- <a href="#lookup">Lookup</a>
- <a href="#new-modules">New Modules</a>
- <a href="#v2-5-7">v2\.5\.7</a>
- - <a href="#release-summary-12">Release Summary</a>
- - <a href="#bugfixes-12">Bugfixes</a>
-- <a href="#v2-5-6">v2\.5\.6</a>
- <a href="#release-summary-13">Release Summary</a>
+ - <a href="#bugfixes-13">Bugfixes</a>
+- <a href="#v2-5-6">v2\.5\.6</a>
+ - <a href="#release-summary-14">Release Summary</a>
- <a href="#known-issues">Known Issues</a>
- <a href="#v2-5-5">v2\.5\.5</a>
- - <a href="#release-summary-14">Release Summary</a>
- - <a href="#bugfixes-13">Bugfixes</a>
-- <a href="#v2-5-4">v2\.5\.4</a>
- <a href="#release-summary-15">Release Summary</a>
- <a href="#bugfixes-14">Bugfixes</a>
-- <a href="#v2-5-3">v2\.5\.3</a>
+- <a href="#v2-5-4">v2\.5\.4</a>
- <a href="#release-summary-16">Release Summary</a>
- <a href="#bugfixes-15">Bugfixes</a>
-- <a href="#v2-5-2">v2\.5\.2</a>
+- <a href="#v2-5-3">v2\.5\.3</a>
- <a href="#release-summary-17">Release Summary</a>
- <a href="#bugfixes-16">Bugfixes</a>
-- <a href="#v2-5-1">v2\.5\.1</a>
+- <a href="#v2-5-2">v2\.5\.2</a>
- <a href="#release-summary-18">Release Summary</a>
- <a href="#bugfixes-17">Bugfixes</a>
-- <a href="#v2-5-0">v2\.5\.0</a>
+- <a href="#v2-5-1">v2\.5\.1</a>
- <a href="#release-summary-19">Release Summary</a>
- - <a href="#minor-changes-4">Minor Changes</a>
- - <a href="#deprecated-features-1">Deprecated Features</a>
- <a href="#bugfixes-18">Bugfixes</a>
-- <a href="#v2-4-2">v2\.4\.2</a>
+- <a href="#v2-5-0">v2\.5\.0</a>
- <a href="#release-summary-20">Release Summary</a>
+ - <a href="#minor-changes-4">Minor Changes</a>
+ - <a href="#deprecated-features-1">Deprecated Features</a>
- <a href="#bugfixes-19">Bugfixes</a>
-- <a href="#v2-4-1">v2\.4\.1</a>
+- <a href="#v2-4-2">v2\.4\.2</a>
- <a href="#release-summary-21">Release Summary</a>
- <a href="#bugfixes-20">Bugfixes</a>
-- <a href="#v2-4-0">v2\.4\.0</a>
+- <a href="#v2-4-1">v2\.4\.1</a>
- <a href="#release-summary-22">Release Summary</a>
- - <a href="#minor-changes-5">Minor Changes</a>
- <a href="#bugfixes-21">Bugfixes</a>
-- <a href="#v2-3-4">v2\.3\.4</a>
+- <a href="#v2-4-0">v2\.4\.0</a>
- <a href="#release-summary-23">Release Summary</a>
+ - <a href="#minor-changes-5">Minor Changes</a>
- <a href="#bugfixes-22">Bugfixes</a>
-- <a href="#v2-3-3">v2\.3\.3</a>
+- <a href="#v2-3-4">v2\.3\.4</a>
- <a href="#release-summary-24">Release Summary</a>
- <a href="#bugfixes-23">Bugfixes</a>
-- <a href="#v2-3-2">v2\.3\.2</a>
+- <a href="#v2-3-3">v2\.3\.3</a>
- <a href="#release-summary-25">Release Summary</a>
- <a href="#bugfixes-24">Bugfixes</a>
-- <a href="#v2-3-1">v2\.3\.1</a>
+- <a href="#v2-3-2">v2\.3\.2</a>
- <a href="#release-summary-26">Release Summary</a>
- - <a href="#minor-changes-6">Minor Changes</a>
- <a href="#bugfixes-25">Bugfixes</a>
-- <a href="#v2-3-0">v2\.3\.0</a>
+- <a href="#v2-3-1">v2\.3\.1</a>
- <a href="#release-summary-27">Release Summary</a>
- - <a href="#minor-changes-7">Minor Changes</a>
+ - <a href="#minor-changes-6">Minor Changes</a>
- <a href="#bugfixes-26">Bugfixes</a>
-- <a href="#v2-2-1">v2\.2\.1</a>
+- <a href="#v2-3-0">v2\.3\.0</a>
- <a href="#release-summary-28">Release Summary</a>
+ - <a href="#minor-changes-7">Minor Changes</a>
- <a href="#bugfixes-27">Bugfixes</a>
-- <a href="#v2-2-0">v2\.2\.0</a>
+- <a href="#v2-2-1">v2\.2\.1</a>
- <a href="#release-summary-29">Release Summary</a>
- - <a href="#minor-changes-8">Minor Changes</a>
- <a href="#bugfixes-28">Bugfixes</a>
-- <a href="#v2-1-1">v2\.1\.1</a>
+- <a href="#v2-2-0">v2\.2\.0</a>
- <a href="#release-summary-30">Release Summary</a>
+ - <a href="#minor-changes-8">Minor Changes</a>
- <a href="#bugfixes-29">Bugfixes</a>
-- <a href="#v2-1-0">v2\.1\.0</a>
+- <a href="#v2-1-1">v2\.1\.1</a>
- <a href="#release-summary-31">Release Summary</a>
- - <a href="#minor-changes-9">Minor Changes</a>
- <a href="#bugfixes-30">Bugfixes</a>
-- <a href="#v2-0-9">v2\.0\.9</a>
+- <a href="#v2-1-0">v2\.1\.0</a>
- <a href="#release-summary-32">Release Summary</a>
+ - <a href="#minor-changes-9">Minor Changes</a>
- <a href="#bugfixes-31">Bugfixes</a>
-- <a href="#v2-0-8">v2\.0\.8</a>
+- <a href="#v2-0-9">v2\.0\.9</a>
- <a href="#release-summary-33">Release Summary</a>
- <a href="#bugfixes-32">Bugfixes</a>
-- <a href="#v2-0-7">v2\.0\.7</a>
+- <a href="#v2-0-8">v2\.0\.8</a>
- <a href="#release-summary-34">Release Summary</a>
- <a href="#bugfixes-33">Bugfixes</a>
-- <a href="#v2-0-6">v2\.0\.6</a>
+- <a href="#v2-0-7">v2\.0\.7</a>
- <a href="#release-summary-35">Release Summary</a>
- <a href="#bugfixes-34">Bugfixes</a>
-- <a href="#v2-0-5">v2\.0\.5</a>
+- <a href="#v2-0-6">v2\.0\.6</a>
- <a href="#release-summary-36">Release Summary</a>
- <a href="#bugfixes-35">Bugfixes</a>
-- <a href="#v2-0-4">v2\.0\.4</a>
+- <a href="#v2-0-5">v2\.0\.5</a>
- <a href="#release-summary-37">Release Summary</a>
- <a href="#bugfixes-36">Bugfixes</a>
-- <a href="#v2-0-3">v2\.0\.3</a>
+- <a href="#v2-0-4">v2\.0\.4</a>
- <a href="#release-summary-38">Release Summary</a>
+ - <a href="#bugfixes-37">Bugfixes</a>
+- <a href="#v2-0-3">v2\.0\.3</a>
+ - <a href="#release-summary-39">Release Summary</a>
- <a href="#minor-changes-10">Minor Changes</a>
- <a href="#v2-0-2">v2\.0\.2</a>
- - <a href="#release-summary-39">Release Summary</a>
- - <a href="#bugfixes-37">Bugfixes</a>
-- <a href="#v2-0-1">v2\.0\.1</a>
- <a href="#release-summary-40">Release Summary</a>
- <a href="#bugfixes-38">Bugfixes</a>
-- <a href="#v2-0-0">v2\.0\.0</a>
+- <a href="#v2-0-1">v2\.0\.1</a>
- <a href="#release-summary-41">Release Summary</a>
+ - <a href="#bugfixes-39">Bugfixes</a>
+- <a href="#v2-0-0">v2\.0\.0</a>
+ - <a href="#release-summary-42">Release Summary</a>
- <a href="#minor-changes-11">Minor Changes</a>
- <a href="#breaking-changes--porting-guide-1">Breaking Changes / Porting Guide</a>
- <a href="#deprecated-features-2">Deprecated Features</a>
- - <a href="#bugfixes-39">Bugfixes</a>
+ - <a href="#bugfixes-40">Bugfixes</a>
- <a href="#new-plugins-2">New Plugins</a>
- <a href="#inventory">Inventory</a>
- <a href="#new-modules-1">New Modules</a>
- <a href="#v1-2-0">v1\.2\.0</a>
- - <a href="#release-summary-42">Release Summary</a>
- - <a href="#minor-changes-12">Minor Changes</a>
- - <a href="#bugfixes-40">Bugfixes</a>
-- <a href="#v1-1-0">v1\.1\.0</a>
- <a href="#release-summary-43">Release Summary</a>
- - <a href="#minor-changes-13">Minor Changes</a>
+ - <a href="#minor-changes-12">Minor Changes</a>
- <a href="#bugfixes-41">Bugfixes</a>
-- <a href="#v1-0-1">v1\.0\.1</a>
+- <a href="#v1-1-0">v1\.1\.0</a>
- <a href="#release-summary-44">Release Summary</a>
+ - <a href="#minor-changes-13">Minor Changes</a>
- <a href="#bugfixes-42">Bugfixes</a>
-- <a href="#v1-0-0">v1\.0\.0</a>
+- <a href="#v1-0-1">v1\.0\.1</a>
- <a href="#release-summary-45">Release Summary</a>
- <a href="#bugfixes-43">Bugfixes</a>
-- <a href="#v0-3-0">v0\.3\.0</a>
+- <a href="#v1-0-0">v1\.0\.0</a>
- <a href="#release-summary-46">Release Summary</a>
- - <a href="#minor-changes-14">Minor Changes</a>
- <a href="#bugfixes-44">Bugfixes</a>
+- <a href="#v0-3-0">v0\.3\.0</a>
+ - <a href="#release-summary-47">Release Summary</a>
+ - <a href="#minor-changes-14">Minor Changes</a>
+ - <a href="#bugfixes-45">Bugfixes</a>
- <a href="#new-modules-2">New Modules</a>
- <a href="#v0-2-0">v0\.2\.0</a>
- - <a href="#release-summary-47">Release Summary</a>
+ - <a href="#release-summary-48">Release Summary</a>
- <a href="#major-changes-1">Major Changes</a>
- <a href="#minor-changes-15">Minor Changes</a>
- <a href="#breaking-changes--porting-guide-2">Breaking Changes / Porting Guide</a>
- - <a href="#bugfixes-45">Bugfixes</a>
+ - <a href="#bugfixes-46">Bugfixes</a>
- <a href="#new-modules-3">New Modules</a>
- <a href="#v0-1-0">v0\.1\.0</a>
- - <a href="#release-summary-48">Release Summary</a>
+ - <a href="#release-summary-49">Release Summary</a>
- <a href="#new-plugins-3">New Plugins</a>
- <a href="#filter-1">Filter</a>
- <a href="#new-modules-4">New Modules</a>
+<a id="v3-0-1"></a>
+## v3\.0\.1
+
+<a id="release-summary"></a>
+### Release Summary
+
+Maintenance release with updated PSL\.
+
+<a id="bugfixes"></a>
+### Bugfixes
+
+* Update Public Suffix List\.
+
<a id="v3-0-0"></a>
## v3\.0\.0
-<a id="release-summary"></a>
+<a id="release-summary-1"></a>
### Release Summary
-\.\.\.
+New major release\.
<a id="major-changes"></a>
### Major Changes
@@ -217,11 +233,11 @@
<a id="removed-features-previously-deprecated"></a>
### Removed Features \(previously deprecated\)
-* The collection no longer supports Ansible\, ansible\-base\, and ansible\-core releases that are currently End of Life at the time of the 3\.0\.0 release\. This means that Ansible 2\.9\, ansible\-base 2\.10\, ansible\-core 2\.11\, ansible\-core 2\.12\, ansible\-core 2\.13\, and ansible\-core 2\.14 are no longer supported\. The collection might still work with these versions\, but it can stop working at any moment without advance notice\, and this will not be considered a bug \([https\://github\.com/ansible\-collections/community\.dns/pull/196](https\://github\.com/ansible\-collections/community\.dns/pull/196)\)\.
+* The collection no longer supports Ansible\, ansible\-base\, and ansible\-core releases that are currently End of Life at the time of the 3\.0\.0 release\. This means that Ansible 2\.9\, ansible\-base 2\.10\, ansible\-core 2\.11\, ansible\-core 2\.12\, and ansible\-core 2\.13 are no longer supported\. The collection might still work with these versions\, but it can stop working at any moment without advance notice\, and this will not be considered a bug \([https\://github\.com/ansible\-collections/community\.dns/pull/196](https\://github\.com/ansible\-collections/community\.dns/pull/196)\)\.
* hetzner\_dns\_record\_set\, hetzner\_dns\_record \- the deprecated alias <code>name</code> of the prefix option was removed \([https\://github\.com/ansible\-collections/community\.dns/pull/196](https\://github\.com/ansible\-collections/community\.dns/pull/196)\)\.
* hosttech\_dns\_records \- the redirect to the <code>hosttech\_dns\_record\_sets</code> module has been removed \([https\://github\.com/ansible\-collections/community\.dns/pull/196](https\://github\.com/ansible\-collections/community\.dns/pull/196)\)\.
-<a id="bugfixes"></a>
+<a id="bugfixes-1"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -229,12 +245,12 @@
<a id="v2-9-0"></a>
## v2\.9\.0
-<a id="release-summary-1"></a>
+<a id="release-summary-2"></a>
### Release Summary
Feature and bugfix release\.
-<a id="bugfixes-1"></a>
+<a id="bugfixes-2"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -252,12 +268,12 @@ Feature and bugfix release\.
<a id="v2-8-3"></a>
## v2\.8\.3
-<a id="release-summary-2"></a>
+<a id="release-summary-3"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-2"></a>
+<a id="bugfixes-3"></a>
### Bugfixes
* DNS record modules\, inventory plugins \- fix the TXT entry encoder to avoid splitting up escape sequences for quotes and backslashes over multiple TXT strings \([https\://github\.com/ansible\-collections/community\.dns/issues/190](https\://github\.com/ansible\-collections/community\.dns/issues/190)\, [https\://github\.com/ansible\-collections/community\.dns/pull/191](https\://github\.com/ansible\-collections/community\.dns/pull/191)\)\.
@@ -266,7 +282,7 @@ Bugfix release\.
<a id="v2-8-2"></a>
## v2\.8\.2
-<a id="release-summary-3"></a>
+<a id="release-summary-4"></a>
### Release Summary
Bugfix release\.
@@ -276,7 +292,7 @@ Bugfix release\.
* hosttech\_dns\_records and hetzner\_dns\_records inventory plugins \- make sure all data received from the remote servers is marked as unsafe\, so remote code execution by obtaining texts that can be evaluated as templates is not possible \([https\://www\.die\-welt\.net/2024/03/remote\-code\-execution\-in\-ansible\-dynamic\-inventory\-plugins/](https\://www\.die\-welt\.net/2024/03/remote\-code\-execution\-in\-ansible\-dynamic\-inventory\-plugins/)\, [https\://github\.com/ansible\-collections/community\.dns/pull/189](https\://github\.com/ansible\-collections/community\.dns/pull/189)\)\.
-<a id="bugfixes-3"></a>
+<a id="bugfixes-4"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -284,12 +300,12 @@ Bugfix release\.
<a id="v2-8-1"></a>
## v2\.8\.1
-<a id="release-summary-4"></a>
+<a id="release-summary-5"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-4"></a>
+<a id="bugfixes-5"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -297,7 +313,7 @@ Maintenance release with updated PSL\.
<a id="v2-8-0"></a>
## v2\.8\.0
-<a id="release-summary-5"></a>
+<a id="release-summary-6"></a>
### Release Summary
Feature and maintenance release with updated PSL\.
@@ -312,7 +328,7 @@ Feature and maintenance release with updated PSL\.
* hetzner\_dns\_records and hosttech\_dns\_records inventory plugins \- the <code>filters</code> option has been renamed to <code>simple\_filters</code>\. The old name will stop working in community\.hrobot 2\.0\.0 \([https\://github\.com/ansible\-collections/community\.dns/pull/181](https\://github\.com/ansible\-collections/community\.dns/pull/181)\)\.
-<a id="bugfixes-5"></a>
+<a id="bugfixes-6"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -320,7 +336,7 @@ Feature and maintenance release with updated PSL\.
<a id="v2-7-0"></a>
## v2\.7\.0
-<a id="release-summary-6"></a>
+<a id="release-summary-7"></a>
### Release Summary
Bugfix and feature release with updated PSL\.
@@ -331,7 +347,7 @@ Bugfix and feature release with updated PSL\.
* nameserver\_info and nameserver\_record\_info \- add <code>server</code> parameter to specify custom DNS servers \([https\://github\.com/ansible\-collections/community\.dns/pull/168](https\://github\.com/ansible\-collections/community\.dns/pull/168)\, [https\://github\.com/ansible\-collections/community\.dns/pull/178](https\://github\.com/ansible\-collections/community\.dns/pull/178)\)\.
* wait\_for\_txt \- add <code>server</code> parameter to specify custom DNS servers \([https\://github\.com/ansible\-collections/community\.dns/pull/178](https\://github\.com/ansible\-collections/community\.dns/pull/178)\)\.
-<a id="bugfixes-6"></a>
+<a id="bugfixes-7"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -340,12 +356,12 @@ Bugfix and feature release with updated PSL\.
<a id="v2-6-4"></a>
## v2\.6\.4
-<a id="release-summary-7"></a>
+<a id="release-summary-8"></a>
### Release Summary
Bugfix and maintenance version\.
-<a id="bugfixes-7"></a>
+<a id="bugfixes-8"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -354,12 +370,12 @@ Bugfix and maintenance version\.
<a id="v2-6-3"></a>
## v2\.6\.3
-<a id="release-summary-8"></a>
+<a id="release-summary-9"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-8"></a>
+<a id="bugfixes-9"></a>
### Bugfixes
* HTTP module utils \- make compatible with ansible\-core 2\.17 \([https\://github\.com/ansible\-collections/community\.dns/pull/165](https\://github\.com/ansible\-collections/community\.dns/pull/165)\)\.
@@ -368,12 +384,12 @@ Maintenance release with updated PSL\.
<a id="v2-6-2"></a>
## v2\.6\.2
-<a id="release-summary-9"></a>
+<a id="release-summary-10"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-9"></a>
+<a id="bugfixes-10"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -381,12 +397,12 @@ Maintenance release with updated PSL\.
<a id="v2-6-1"></a>
## v2\.6\.1
-<a id="release-summary-10"></a>
+<a id="release-summary-11"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-10"></a>
+<a id="bugfixes-11"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -394,7 +410,7 @@ Maintenance release with updated PSL\.
<a id="v2-6-0"></a>
## v2\.6\.0
-<a id="release-summary-11"></a>
+<a id="release-summary-12"></a>
### Release Summary
Feature release with an updated Public Suffix List\.
@@ -405,7 +421,7 @@ Feature release with an updated Public Suffix List\.
* wait\_for\_txt \- add <code>servfail\_retries</code> parameter that allows retrying after SERVFAIL errors \([https\://github\.com/ansible\-collections/community\.dns/pull/159](https\://github\.com/ansible\-collections/community\.dns/pull/159)\)\.
* wait\_for\_txt\, resolver module utils \- use [EDNS](https\://en\.wikipedia\.org/wiki/Extension\_Mechanisms\_for\_DNS) \([https\://github\.com/ansible\-collections/community\.dns/pull/158](https\://github\.com/ansible\-collections/community\.dns/pull/158)\)\.
-<a id="bugfixes-11"></a>
+<a id="bugfixes-12"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -429,12 +445,12 @@ Feature release with an updated Public Suffix List\.
<a id="v2-5-7"></a>
## v2\.5\.7
-<a id="release-summary-12"></a>
+<a id="release-summary-13"></a>
### Release Summary
Regular maintenance release with updated Public Suffix List\.
-<a id="bugfixes-12"></a>
+<a id="bugfixes-13"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -442,7 +458,7 @@ Regular maintenance release with updated Public Suffix List\.
<a id="v2-5-6"></a>
## v2\.5\.6
-<a id="release-summary-13"></a>
+<a id="release-summary-14"></a>
### Release Summary
Maintenance release\.
@@ -463,12 +479,12 @@ for the rendered HTML version of the documentation of the latest release\.
<a id="v2-5-5"></a>
## v2\.5\.5
-<a id="release-summary-14"></a>
+<a id="release-summary-15"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-13"></a>
+<a id="bugfixes-14"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -476,12 +492,12 @@ Maintenance release with updated PSL\.
<a id="v2-5-4"></a>
## v2\.5\.4
-<a id="release-summary-15"></a>
+<a id="release-summary-16"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-14"></a>
+<a id="bugfixes-15"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -489,12 +505,12 @@ Maintenance release with updated PSL\.
<a id="v2-5-3"></a>
## v2\.5\.3
-<a id="release-summary-16"></a>
+<a id="release-summary-17"></a>
### Release Summary
Maintenance release with updated PSL\.
-<a id="bugfixes-15"></a>
+<a id="bugfixes-16"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -502,12 +518,12 @@ Maintenance release with updated PSL\.
<a id="v2-5-2"></a>
## v2\.5\.2
-<a id="release-summary-17"></a>
+<a id="release-summary-18"></a>
### Release Summary
Maintenance release with improved documentation and updated PSL\.
-<a id="bugfixes-16"></a>
+<a id="bugfixes-17"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -515,12 +531,12 @@ Maintenance release with improved documentation and updated PSL\.
<a id="v2-5-1"></a>
## v2\.5\.1
-<a id="release-summary-18"></a>
+<a id="release-summary-19"></a>
### Release Summary
Maintenance release \(updated PSL\)\.
-<a id="bugfixes-17"></a>
+<a id="bugfixes-18"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -528,7 +544,7 @@ Maintenance release \(updated PSL\)\.
<a id="v2-5-0"></a>
## v2\.5\.0
-<a id="release-summary-19"></a>
+<a id="release-summary-20"></a>
### Release Summary
Feature and bugfix release with updated PSL\.
@@ -544,7 +560,7 @@ Feature and bugfix release with updated PSL\.
* The default of the newly added option <code>txt\_character\_encoding</code> will change from <code>octal</code> to <code>decimal</code> in community\.dns 3\.0\.0\. The new default will be compatible with [RFC 1035](https\://www\.ietf\.org/rfc/rfc1035\.txt) \([https\://github\.com/ansible\-collections/community\.dns/pull/134](https\://github\.com/ansible\-collections/community\.dns/pull/134)\)\.
-<a id="bugfixes-18"></a>
+<a id="bugfixes-19"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -553,12 +569,12 @@ Feature and bugfix release with updated PSL\.
<a id="v2-4-2"></a>
## v2\.4\.2
-<a id="release-summary-20"></a>
+<a id="release-summary-21"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-19"></a>
+<a id="bugfixes-20"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -566,12 +582,12 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-4-1"></a>
## v2\.4\.1
-<a id="release-summary-21"></a>
+<a id="release-summary-22"></a>
### Release Summary
Regular maintenance release\.
-<a id="bugfixes-20"></a>
+<a id="bugfixes-21"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -580,7 +596,7 @@ Regular maintenance release\.
<a id="v2-4-0"></a>
## v2\.4\.0
-<a id="release-summary-22"></a>
+<a id="release-summary-23"></a>
### Release Summary
Feature and maintenance release\.
@@ -592,7 +608,7 @@ Feature and maintenance release\.
* Added a <code>community\.dns\.hosttech</code> module defaults group / action group\. Use with <code>group/community\.dns\.hosttech</code> to provide options for all Hosttech DNS modules \([https\://github\.com/ansible\-collections/community\.dns/pull/119](https\://github\.com/ansible\-collections/community\.dns/pull/119)\)\.
* wait\_for\_txt \- the module now supports check mode\. The only practical change in behavior is that in check mode\, the module is now executed instead of skipped\. Since the module does not change anything\, it should have been marked as supporting check mode since it was originally added \([https\://github\.com/ansible\-collections/community\.dns/pull/119](https\://github\.com/ansible\-collections/community\.dns/pull/119)\)\.
-<a id="bugfixes-21"></a>
+<a id="bugfixes-22"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -600,12 +616,12 @@ Feature and maintenance release\.
<a id="v2-3-4"></a>
## v2\.3\.4
-<a id="release-summary-23"></a>
+<a id="release-summary-24"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-22"></a>
+<a id="bugfixes-23"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -613,12 +629,12 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-3-3"></a>
## v2\.3\.3
-<a id="release-summary-24"></a>
+<a id="release-summary-25"></a>
### Release Summary
Maintenance release including an updated Public Suffix List\.
-<a id="bugfixes-23"></a>
+<a id="bugfixes-24"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -626,12 +642,12 @@ Maintenance release including an updated Public Suffix List\.
<a id="v2-3-2"></a>
## v2\.3\.2
-<a id="release-summary-25"></a>
+<a id="release-summary-26"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-24"></a>
+<a id="bugfixes-25"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -639,7 +655,7 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-3-1"></a>
## v2\.3\.1
-<a id="release-summary-26"></a>
+<a id="release-summary-27"></a>
### Release Summary
Maintenance release including an updated Public Suffix List\.
@@ -649,7 +665,7 @@ Maintenance release including an updated Public Suffix List\.
* The collection repository conforms to the [REUSE specification](https\://reuse\.software/spec/) except for the changelog fragments \([https\://github\.com/ansible\-collections/community\.dns/pull/112](https\://github\.com/ansible\-collections/community\.dns/pull/112)\)\.
-<a id="bugfixes-25"></a>
+<a id="bugfixes-26"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -657,7 +673,7 @@ Maintenance release including an updated Public Suffix List\.
<a id="v2-3-0"></a>
## v2\.3\.0
-<a id="release-summary-27"></a>
+<a id="release-summary-28"></a>
### Release Summary
Maintenance release including an updated Public Suffix List\.
@@ -667,7 +683,7 @@ Maintenance release including an updated Public Suffix List\.
* All software licenses are now in the <code>LICENSES/</code> directory of the collection root\. Moreover\, <code>SPDX\-License\-Identifier\:</code> is used to declare the applicable license for every file that is not automatically generated \([https\://github\.com/ansible\-collections/community\.dns/pull/109](https\://github\.com/ansible\-collections/community\.dns/pull/109)\)\.
-<a id="bugfixes-26"></a>
+<a id="bugfixes-27"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -675,12 +691,12 @@ Maintenance release including an updated Public Suffix List\.
<a id="v2-2-1"></a>
## v2\.2\.1
-<a id="release-summary-28"></a>
+<a id="release-summary-29"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-27"></a>
+<a id="bugfixes-28"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -688,7 +704,7 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-2-0"></a>
## v2\.2\.0
-<a id="release-summary-29"></a>
+<a id="release-summary-30"></a>
### Release Summary
Feature release\.
@@ -699,7 +715,7 @@ Feature release\.
* hetzner\_dns\_records and hosttech\_dns\_records inventory plugins \- allow to template provider\-specific credentials and the <code>zone\_name</code>\, <code>zone\_id</code> options \([https\://github\.com/ansible\-collections/community\.dns/pull/106](https\://github\.com/ansible\-collections/community\.dns/pull/106)\)\.
* wait\_for\_txt \- improve error messages so that in case of SERVFAILs or other DNS errors it is clear which record was queried from which DNS server \([https\://github\.com/ansible\-collections/community\.dns/pull/105](https\://github\.com/ansible\-collections/community\.dns/pull/105)\)\.
-<a id="bugfixes-28"></a>
+<a id="bugfixes-29"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -707,12 +723,12 @@ Feature release\.
<a id="v2-1-1"></a>
## v2\.1\.1
-<a id="release-summary-30"></a>
+<a id="release-summary-31"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-29"></a>
+<a id="bugfixes-30"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -720,7 +736,7 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-1-0"></a>
## v2\.1\.0
-<a id="release-summary-31"></a>
+<a id="release-summary-32"></a>
### Release Summary
Feature and maintenance release with updated PSL\.
@@ -730,7 +746,7 @@ Feature and maintenance release with updated PSL\.
* Prepare collection for inclusion in an Execution Environment by declaring its dependencies \([https\://github\.com/ansible\-collections/community\.dns/pull/93](https\://github\.com/ansible\-collections/community\.dns/pull/93)\)\.
-<a id="bugfixes-30"></a>
+<a id="bugfixes-31"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -738,12 +754,12 @@ Feature and maintenance release with updated PSL\.
<a id="v2-0-9"></a>
## v2\.0\.9
-<a id="release-summary-32"></a>
+<a id="release-summary-33"></a>
### Release Summary
Maintenance release with updated Public Suffix List and added collection links file\.
-<a id="bugfixes-31"></a>
+<a id="bugfixes-32"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -751,12 +767,12 @@ Maintenance release with updated Public Suffix List and added collection links f
<a id="v2-0-8"></a>
## v2\.0\.8
-<a id="release-summary-33"></a>
+<a id="release-summary-34"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-32"></a>
+<a id="bugfixes-33"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -764,12 +780,12 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-0-7"></a>
## v2\.0\.7
-<a id="release-summary-34"></a>
+<a id="release-summary-35"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-33"></a>
+<a id="bugfixes-34"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -777,12 +793,12 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-0-6"></a>
## v2\.0\.6
-<a id="release-summary-35"></a>
+<a id="release-summary-36"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-34"></a>
+<a id="bugfixes-35"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -791,12 +807,12 @@ Bugfix release\.
<a id="v2-0-5"></a>
## v2\.0\.5
-<a id="release-summary-36"></a>
+<a id="release-summary-37"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-35"></a>
+<a id="bugfixes-36"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -804,12 +820,12 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-0-4"></a>
## v2\.0\.4
-<a id="release-summary-37"></a>
+<a id="release-summary-38"></a>
### Release Summary
Maintenance release with updated Public Suffix List\.
-<a id="bugfixes-36"></a>
+<a id="bugfixes-37"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -817,7 +833,7 @@ Maintenance release with updated Public Suffix List\.
<a id="v2-0-3"></a>
## v2\.0\.3
-<a id="release-summary-38"></a>
+<a id="release-summary-39"></a>
### Release Summary
Bugfix release\.
@@ -830,12 +846,12 @@ Bugfix release\.
<a id="v2-0-2"></a>
## v2\.0\.2
-<a id="release-summary-39"></a>
+<a id="release-summary-40"></a>
### Release Summary
Regular maintenance release\.
-<a id="bugfixes-37"></a>
+<a id="bugfixes-38"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -843,12 +859,12 @@ Regular maintenance release\.
<a id="v2-0-1"></a>
## v2\.0\.1
-<a id="release-summary-40"></a>
+<a id="release-summary-41"></a>
### Release Summary
Maintenance release with Public Suffix List updates\.
-<a id="bugfixes-38"></a>
+<a id="bugfixes-39"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -856,7 +872,7 @@ Maintenance release with Public Suffix List updates\.
<a id="v2-0-0"></a>
## v2\.0\.0
-<a id="release-summary-41"></a>
+<a id="release-summary-42"></a>
### Release Summary
This release contains many new features\, modules and plugins\, but also has several breaking changes to the 1\.x\.y versions\. Please read the changelog carefully to determine what to change if you used an earlier version of this collection\.
@@ -896,7 +912,7 @@ This release contains many new features\, modules and plugins\, but also has sev
* The hosttech\_dns\_records module has been renamed to hosttech\_dns\_record\_sets\. The old name will stop working in community\.dns 3\.0\.0 \([https\://github\.com/ansible\-collections/community\.dns/pull/31](https\://github\.com/ansible\-collections/community\.dns/pull/31)\)\.
-<a id="bugfixes-39"></a>
+<a id="bugfixes-40"></a>
### Bugfixes
* Hetzner API \- interpret missing TTL as 300\, which is what the web console also does \([https\://github\.com/ansible\-collections/community\.dns/pull/42](https\://github\.com/ansible\-collections/community\.dns/pull/42)\)\.
@@ -934,7 +950,7 @@ This release contains many new features\, modules and plugins\, but also has sev
<a id="v1-2-0"></a>
## v1\.2\.0
-<a id="release-summary-42"></a>
+<a id="release-summary-43"></a>
### Release Summary
Last minor 1\.x\.0 version\. The 2\.0\.0 version will have some backwards incompatible changes to the <code>hosttech\_dns\_record</code> and <code>hosttech\_dns\_records</code> modules which will require user intervention\. These changes should result in a better UX\.
@@ -946,7 +962,7 @@ Last minor 1\.x\.0 version\. The 2\.0\.0 version will have some backwards incomp
* hosttech\_dns\_record \- in <code>diff</code> mode\, also return <code>diff</code> data structure when <code>changed</code> is <code>false</code> \([https\://github\.com/ansible\-collections/community\.dns/pull/28](https\://github\.com/ansible\-collections/community\.dns/pull/28)\)\.
* module utils \- add default implementation for some zone/record API functions\, and move common JSON API code to helper class \([https\://github\.com/ansible\-collections/community\.dns/pull/26](https\://github\.com/ansible\-collections/community\.dns/pull/26)\)\.
-<a id="bugfixes-40"></a>
+<a id="bugfixes-41"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -955,7 +971,7 @@ Last minor 1\.x\.0 version\. The 2\.0\.0 version will have some backwards incomp
<a id="v1-1-0"></a>
## v1\.1\.0
-<a id="release-summary-43"></a>
+<a id="release-summary-44"></a>
### Release Summary
Regular maintenance release\.
@@ -965,7 +981,7 @@ Regular maintenance release\.
* Avoid internal ansible\-core module\_utils in favor of equivalent public API available since at least Ansible 2\.9 \([https\://github\.com/ansible\-collections/community\.dns/pull/24](https\://github\.com/ansible\-collections/community\.dns/pull/24)\)\.
-<a id="bugfixes-41"></a>
+<a id="bugfixes-42"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -973,12 +989,12 @@ Regular maintenance release\.
<a id="v1-0-1"></a>
## v1\.0\.1
-<a id="release-summary-44"></a>
+<a id="release-summary-45"></a>
### Release Summary
Regular maintenance release\.
-<a id="bugfixes-42"></a>
+<a id="bugfixes-43"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -986,12 +1002,12 @@ Regular maintenance release\.
<a id="v1-0-0"></a>
## v1\.0\.0
-<a id="release-summary-45"></a>
+<a id="release-summary-46"></a>
### Release Summary
First stable release\.
-<a id="bugfixes-43"></a>
+<a id="bugfixes-44"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -999,7 +1015,7 @@ First stable release\.
<a id="v0-3-0"></a>
## v0\.3\.0
-<a id="release-summary-46"></a>
+<a id="release-summary-47"></a>
### Release Summary
Fixes bugs\, adds rate limiting for Hosttech JSON API\, and adds a new bulk synchronization module\.
@@ -1009,7 +1025,7 @@ Fixes bugs\, adds rate limiting for Hosttech JSON API\, and adds a new bulk sync
* hosttech\_dns\_\* \- handle <code>419 Too Many Requests</code> with proper rate limiting for JSON API \([https\://github\.com/ansible\-collections/community\.dns/pull/14](https\://github\.com/ansible\-collections/community\.dns/pull/14)\)\.
-<a id="bugfixes-44"></a>
+<a id="bugfixes-45"></a>
### Bugfixes
* Avoid converting ASCII labels which contain underscores or other printable ASCII characters outside <code>\[a\-zA\-Z0\-9\-\]</code> to alabels during normalization \([https\://github\.com/ansible\-collections/community\.dns/pull/13](https\://github\.com/ansible\-collections/community\.dns/pull/13)\)\.
@@ -1023,7 +1039,7 @@ Fixes bugs\, adds rate limiting for Hosttech JSON API\, and adds a new bulk sync
<a id="v0-2-0"></a>
## v0\.2\.0
-<a id="release-summary-47"></a>
+<a id="release-summary-48"></a>
### Release Summary
Major refactoring release\, which adds a zone information module and supports HostTech\'s new REST API\.
@@ -1048,7 +1064,7 @@ Major refactoring release\, which adds a zone information module and supports Ho
* hosttech\_\* module\_utils \- completely rewrite and refactor to support new JSON API and allow to reuse provider\-independent module logic \([https\://github\.com/ansible\-collections/community\.dns/pull/4](https\://github\.com/ansible\-collections/community\.dns/pull/4)\)\.
-<a id="bugfixes-45"></a>
+<a id="bugfixes-46"></a>
### Bugfixes
* Update Public Suffix List\.
@@ -1063,7 +1079,7 @@ Major refactoring release\, which adds a zone information module and supports Ho
<a id="v0-1-0"></a>
## v0\.1\.0
-<a id="release-summary-48"></a>
+<a id="release-summary-49"></a>
### Release Summary
Initial public release\.
diff --git a/ansible_collections/community/dns/CHANGELOG.rst b/ansible_collections/community/dns/CHANGELOG.rst
index 9aef9afb1..e6b701cd5 100644
--- a/ansible_collections/community/dns/CHANGELOG.rst
+++ b/ansible_collections/community/dns/CHANGELOG.rst
@@ -4,13 +4,26 @@ Community DNS Collection Release Notes
.. contents:: Topics
+v3.0.1
+======
+
+Release Summary
+---------------
+
+Maintenance release with updated PSL.
+
+Bugfixes
+--------
+
+- Update Public Suffix List.
+
v3.0.0
======
Release Summary
---------------
-...
+New major release.
Major Changes
-------------
@@ -34,7 +47,7 @@ Breaking Changes / Porting Guide
Removed Features (previously deprecated)
----------------------------------------
-- The collection no longer supports Ansible, ansible-base, and ansible-core releases that are currently End of Life at the time of the 3.0.0 release. This means that Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, and ansible-core 2.14 are no longer supported. The collection might still work with these versions, but it can stop working at any moment without advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.dns/pull/196).
+- The collection no longer supports Ansible, ansible-base, and ansible-core releases that are currently End of Life at the time of the 3.0.0 release. This means that Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core 2.12, and ansible-core 2.13 are no longer supported. The collection might still work with these versions, but it can stop working at any moment without advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.dns/pull/196).
- hetzner_dns_record_set, hetzner_dns_record - the deprecated alias ``name`` of the prefix option was removed (https://github.com/ansible-collections/community.dns/pull/196).
- hosttech_dns_records - the redirect to the ``hosttech_dns_record_sets`` module has been removed (https://github.com/ansible-collections/community.dns/pull/196).
diff --git a/ansible_collections/community/dns/FILES.json b/ansible_collections/community/dns/FILES.json
index 32915c500..a85f2dd35 100644
--- a/ansible_collections/community/dns/FILES.json
+++ b/ansible_collections/community/dns/FILES.json
@@ -25,7 +25,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "02ff9da8ae9991541407a1f05a5b2bafa3d558213b44c63287665d370c4091da",
+ "chksum_sha256": "09ce4d45716794335067d16a46c785116ade5bbaa5c72a96cbec55e20aff241d",
"format": 1
},
{
@@ -151,7 +151,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f73967d9bae45a0341f90dd810cd70afa17786df1d296e5803f18f4d5c4d33c",
+ "chksum_sha256": "c73945d30fb57acbba2ec70923bfee2e09b09d15b02bc5e9e367241d856cabf4",
"format": 1
},
{
@@ -165,7 +165,7 @@
"name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e43f412de1a581ff888e1c037718691d86545bf11e07aaf66719f51233fc3d9b",
+ "chksum_sha256": "972a0999d91ec276eef7c2ccc72576026858e8058fac99ec44a843744b6480f4",
"format": 1
},
{
@@ -865,7 +865,7 @@
"name": "plugins/public_suffix_list.dat",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "437ffb5d25e5dee3fa4fef0109aac0e37f2f67b03a6787eeb22db831e308efe9",
+ "chksum_sha256": "1eded65439237a8a4ad9fda2c6ffbaf9967ec4859279783f7a4e40bfb18eb77a",
"format": 1
},
{
@@ -1117,7 +1117,7 @@
"name": "tests/integration/targets/lookup_lookup/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "34db5ec41377c4c79e1cf83b7f4ac88e29fd0b3c467ddf26104ee21eec89024e",
+ "chksum_sha256": "41a89304105ac17c7696d2c3e9dd422256a2055e14e170f1426ba52b57d8c2d0",
"format": 1
},
{
@@ -1145,7 +1145,7 @@
"name": "tests/integration/targets/lookup_lookup_as_dict/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4d40eff9ec56b5236626dafd6db61fb0ef2c54f15af48dcd0aedf584cabea0c",
+ "chksum_sha256": "22ffe81a2f0571564fe97fab44d18afd0585ec8f8979214d0f8c8c27d8302f3f",
"format": 1
},
{
@@ -1971,7 +1971,7 @@
"name": "CHANGELOG.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1489e680b9d05205a48e068d84dc804325584d49005222f1f506f42ad49ae63a",
+ "chksum_sha256": "f046d34028a1d9d185983d0894d5a9f7266d42f841f2e9350315575cbd2906f3",
"format": 1
},
{
@@ -1985,7 +1985,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d243919ca88dbc2725a0b5a94ee622f405d42abcef4d6072f76c6539d1eef5a",
+ "chksum_sha256": "ad77c5068402f4ba9567514fd2f84a0fcf4e848ee2516441e2985e79b0dad45d",
"format": 1
},
{
diff --git a/ansible_collections/community/dns/MANIFEST.json b/ansible_collections/community/dns/MANIFEST.json
index 08e4d9c66..96ff26597 100644
--- a/ansible_collections/community/dns/MANIFEST.json
+++ b/ansible_collections/community/dns/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "dns",
- "version": "3.0.0",
+ "version": "3.0.1",
"authors": [
"Felix Fontein (github.com/felixfontein)",
"Markus Bergholz (github.com/markuman)"
@@ -32,7 +32,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba0f38847eafe7b48453fb5a53d01010332adb7982622a61061c9df456b18e68",
+ "chksum_sha256": "cb1e71b18fdd0a007c80538a85f41bc966e033b4d2726595a4310471f787862d",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/dns/changelogs/changelog.yaml b/ansible_collections/community/dns/changelogs/changelog.yaml
index 230527b2b..4adb7145b 100644
--- a/ansible_collections/community/dns/changelogs/changelog.yaml
+++ b/ansible_collections/community/dns/changelogs/changelog.yaml
@@ -859,16 +859,14 @@ releases:
should be treated as an absolute domain name (``search=false``), or potentially
as a relative domain name (``search=true``) (https://github.com/ansible-collections/community.dns/issues/200,
https://github.com/ansible-collections/community.dns/pull/201).
- release_summary: '...
-
- '
+ release_summary: New major release.
removed_features:
- The collection no longer supports Ansible, ansible-base, and ansible-core
releases that are currently End of Life at the time of the 3.0.0 release.
This means that Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core
- 2.12, ansible-core 2.13, and ansible-core 2.14 are no longer supported. The
- collection might still work with these versions, but it can stop working at
- any moment without advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.dns/pull/196).
+ 2.12, and ansible-core 2.13 are no longer supported. The collection might
+ still work with these versions, but it can stop working at any moment without
+ advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.dns/pull/196).
- hetzner_dns_record_set, hetzner_dns_record - the deprecated alias ``name``
of the prefix option was removed (https://github.com/ansible-collections/community.dns/pull/196).
- hosttech_dns_records - the redirect to the ``hosttech_dns_record_sets`` module
@@ -878,3 +876,12 @@ releases:
- 3.0.0.yml
- update-psl.yml
release_date: '2024-05-19'
+ 3.0.1:
+ changes:
+ bugfixes:
+ - Update Public Suffix List.
+ release_summary: Maintenance release with updated PSL.
+ fragments:
+ - 3.0.1.yml
+ - update-psl.yml
+ release_date: '2024-06-16'
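The changelog.yaml hunk above mentions a ``search`` option that decides whether a name without a trailing dot is treated as an absolute domain name (``search=false``) or as a potentially relative one (``search=true``). A minimal sketch of passing that option, under the assumption that ``search`` is accepted by the collection's ``community.dns.lookup`` plugin as the entry suggests (not confirmed elsewhere in this diff):

    - name: Treat the name as absolute, never expand it against a search list (assumed option)
      ansible.builtin.debug:
        msg: "{{ query('community.dns.lookup', 'ansible.com', type='A', search=false) }}"

    - name: Allow the resolver to treat the name as potentially relative (assumed option)
      ansible.builtin.debug:
        msg: "{{ query('community.dns.lookup', 'example', type='A', search=true) }}"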
diff --git a/ansible_collections/community/dns/changelogs/config.yaml b/ansible_collections/community/dns/changelogs/config.yaml
index 06dc33cc8..600c62598 100644
--- a/ansible_collections/community/dns/changelogs/config.yaml
+++ b/ansible_collections/community/dns/changelogs/config.yaml
@@ -36,3 +36,4 @@ sections:
title: Community DNS Collection
trivial_section_name: trivial
use_fqcn: true
+add_plugin_period: true
diff --git a/ansible_collections/community/dns/plugins/public_suffix_list.dat b/ansible_collections/community/dns/plugins/public_suffix_list.dat
index 05df0af89..8aa631ac8 100644
--- a/ansible_collections/community/dns/plugins/public_suffix_list.dat
+++ b/ansible_collections/community/dns/plugins/public_suffix_list.dat
@@ -675,7 +675,6 @@ mil.by
// second-level domain, but it's being used as one (see www.google.com.by and
// www.yahoo.com.by, for example), so we list it here for safety's sake.
com.by
-
// http://hoster.by/
of.by
@@ -6710,7 +6709,7 @@ org.zw
// newGTLDs
-// List of new gTLDs imported from https://www.icann.org/resources/registries/gtlds/v2/gtlds.json on 2024-05-04T15:12:50Z
+// List of new gTLDs imported from https://www.icann.org/resources/registries/gtlds/v2/gtlds.json on 2024-06-13T15:15:16Z
// This list is auto-generated, don't edit it manually.
// aaa : American Automobile Association, Inc.
// https://www.iana.org/domains/root/db/aaa.html
@@ -8976,7 +8975,7 @@ lotte
// https://www.iana.org/domains/root/db/lotto.html
lotto
-// love : Merchant Law Group LLP
+// love : Waterford Limited
// https://www.iana.org/domains/root/db/love.html
love
@@ -9228,10 +9227,6 @@ nab
// https://www.iana.org/domains/root/db/nagoya.html
nagoya
-// natura : NATURA COSMÉTICOS S.A.
-// https://www.iana.org/domains/root/db/natura.html
-natura
-
// navy : Dog Beach, LLC
// https://www.iana.org/domains/root/db/navy.html
navy
@@ -11387,9 +11382,9 @@ auth-fips.us-west-2.amazoncognito.com
// Amazon EC2
// Submitted by Luke Wells <psl-maintainers@amazon.com>
// Reference: 4c38fa71-58ac-4768-99e5-689c1767e537
+*.compute.amazonaws.com.cn
*.compute.amazonaws.com
*.compute-1.amazonaws.com
-*.compute.amazonaws.com.cn
us-east-1.amazonaws.com
// Amazon EMR
@@ -12288,26 +12283,26 @@ ju.mp
// CentralNic : http://www.centralnic.com/names/domains
// Submitted by registry <gavin.brown@centralnic.com>
-ae.org
+za.bz
br.com
cn.com
-com.de
-com.se
de.com
eu.com
-gb.net
-hu.net
-jp.net
jpn.com
mex.com
ru.com
sa.com
-se.net
uk.com
-uk.net
us.com
-za.bz
za.com
+com.de
+gb.net
+hu.net
+jp.net
+se.net
+uk.net
+ae.org
+com.se
// No longer operated by CentralNic, these entries should be adopted and/or removed by current operators
// Submitted by Gavin Brown <gavin.brown@centralnic.com>
@@ -12328,8 +12323,8 @@ gr.com
// Radix FZC : http://domains.in.net
// Submitted by Gavin Brown <gavin.brown@centralnic.com>
-in.net
web.in
+in.net
// US REGISTRY LLC : http://us.org
// Submitted by Gavin Brown <gavin.brown@centralnic.com>
@@ -12401,8 +12396,8 @@ cloudaccess.net
// cloudControl : https://www.cloudcontrol.com/
// Submitted by Tobias Wilken <tw@cloudcontrol.com>
-cloudcontrolled.com
cloudcontrolapp.com
+cloudcontrolled.com
// Cloudera, Inc. : https://www.cloudera.com/
// Submitted by Kedarnath Waikar <security@cloudera.com>
@@ -12416,6 +12411,11 @@ trycloudflare.com
pages.dev
r2.dev
workers.dev
+cdn.cloudflareanycast.net
+cdn.cloudflarecn.net
+cdn.cloudflareglobal.net
+cloudflare.net
+cdn.cloudflare.net
// cloudscale.ch AG : https://www.cloudscale.ch/
// Submitted by Gaudenz Steinlin <support@cloudscale.ch>
@@ -12442,11 +12442,11 @@ co.cz
// Submitted by Jan Krpes <jan.krpes@cdn77.com>
cdn77-storage.com
rsc.contentproxy9.cz
-cdn77-ssl.net
r.cdn77.net
-ssl.origin.cdn77-secure.org
+cdn77-ssl.net
c.cdn77.org
rsc.cdn77.org
+ssl.origin.cdn77-secure.org
// Cloud DNS Ltd : http://www.cloudns.net
// Submitted by Aleksander Hristov <noc@cloudns.net> & Boyan Peychev <boyan@cloudns.net>
@@ -12509,8 +12509,8 @@ test.ru
// COSIMO GmbH : http://www.cosimo.de
// Submitted by Rene Marticke <rmarticke@cosimo.de>
dyn.cosidns.de
-dynamisches-dns.de
dnsupdater.de
+dynamisches-dns.de
internet-dns.de
l-o-g-i-n.de
dynamic-dns.info
@@ -12564,9 +12564,9 @@ cyon.site
// Danger Science Group: https://dangerscience.com/
// Submitted by Skylar MacDonald <skylar@dangerscience.com>
+platform0.app
fnwk.site
folionetwork.site
-platform0.app
// Daplie, Inc : https://daplie.com
// Submitted by AJ ONeal <aj@daplie.com>
@@ -12696,6 +12696,26 @@ dy.fi
tunk.org
// DynDNS.com : http://www.dyndns.com/services/dns/dyndns/
+dyndns.biz
+for-better.biz
+for-more.biz
+for-some.biz
+for-the.biz
+selfip.biz
+webhop.biz
+ftpaccess.cc
+game-server.cc
+myphotos.cc
+scrapping.cc
+blogdns.com
+cechire.com
+dnsalias.com
+dnsdojo.com
+doesntexist.com
+dontexist.com
+doomdns.com
+dyn-o-saur.com
+dynalias.com
dyndns-at-home.com
dyndns-at-work.com
dyndns-blog.com
@@ -12710,64 +12730,14 @@ dyndns-server.com
dyndns-web.com
dyndns-wiki.com
dyndns-work.com
-dyndns.biz
-dyndns.info
-dyndns.org
-dyndns.tv
-at-band-camp.net
-ath.cx
-barrel-of-knowledge.info
-barrell-of-knowledge.info
-better-than.tv
-blogdns.com
-blogdns.net
-blogdns.org
-blogsite.org
-boldlygoingnowhere.org
-broke-it.net
-buyshouses.net
-cechire.com
-dnsalias.com
-dnsalias.net
-dnsalias.org
-dnsdojo.com
-dnsdojo.net
-dnsdojo.org
-does-it.net
-doesntexist.com
-doesntexist.org
-dontexist.com
-dontexist.net
-dontexist.org
-doomdns.com
-doomdns.org
-dvrdns.org
-dyn-o-saur.com
-dynalias.com
-dynalias.net
-dynalias.org
-dynathome.net
-dyndns.ws
-endofinternet.net
-endofinternet.org
-endoftheinternet.org
est-a-la-maison.com
est-a-la-masion.com
est-le-patron.com
est-mon-blogueur.com
-for-better.biz
-for-more.biz
-for-our.info
-for-some.biz
-for-the.biz
-forgot.her.name
-forgot.his.name
from-ak.com
from-al.com
from-ar.com
-from-az.net
from-ca.com
-from-co.net
from-ct.com
from-dc.com
from-de.com
@@ -12780,10 +12750,8 @@ from-il.com
from-in.com
from-ks.com
from-ky.com
-from-la.net
from-ma.com
from-md.com
-from-me.org
from-mi.com
from-mn.com
from-mo.com
@@ -12796,7 +12764,6 @@ from-nh.com
from-nj.com
from-nm.com
from-nv.com
-from-ny.net
from-oh.com
from-ok.com
from-or.com
@@ -12814,45 +12781,18 @@ from-wa.com
from-wi.com
from-wv.com
from-wy.com
-ftpaccess.cc
-fuettertdasnetz.de
-game-host.org
-game-server.cc
getmyip.com
-gets-it.net
-go.dyndns.org
gotdns.com
-gotdns.org
-groks-the.info
-groks-this.info
-ham-radio-op.net
-here-for-more.info
hobby-site.com
-hobby-site.org
-home.dyndns.org
-homedns.org
-homeftp.net
-homeftp.org
-homeip.net
homelinux.com
-homelinux.net
-homelinux.org
homeunix.com
-homeunix.net
-homeunix.org
iamallama.com
-in-the-band.net
is-a-anarchist.com
is-a-blogger.com
is-a-bookkeeper.com
-is-a-bruinsfan.org
is-a-bulls-fan.com
-is-a-candidate.org
is-a-caterer.com
-is-a-celticsfan.org
is-a-chef.com
-is-a-chef.net
-is-a-chef.org
is-a-conservative.com
is-a-cpa.com
is-a-cubicle-slave.com
@@ -12861,31 +12801,25 @@ is-a-designer.com
is-a-doctor.com
is-a-financialadvisor.com
is-a-geek.com
-is-a-geek.net
-is-a-geek.org
is-a-green.com
is-a-guru.com
is-a-hard-worker.com
is-a-hunter.com
-is-a-knight.org
is-a-landscaper.com
is-a-lawyer.com
is-a-liberal.com
is-a-libertarian.com
-is-a-linux-user.org
is-a-llama.com
is-a-musician.com
is-a-nascarfan.com
is-a-nurse.com
is-a-painter.com
-is-a-patsfan.org
is-a-personaltrainer.com
is-a-photographer.com
is-a-player.com
is-a-republican.com
is-a-rockstar.com
is-a-socialist.com
-is-a-soxfan.org
is-a-student.com
is-a-teacher.com
is-a-techie.com
@@ -12897,92 +12831,158 @@ is-an-anarchist.com
is-an-artist.com
is-an-engineer.com
is-an-entertainer.com
-is-by.us
is-certified.com
-is-found.org
is-gone.com
is-into-anime.com
is-into-cars.com
is-into-cartoons.com
is-into-games.com
is-leet.com
-is-lost.org
is-not-certified.com
-is-saved.org
is-slick.com
is-uberleet.com
-is-very-bad.org
-is-very-evil.org
-is-very-good.org
-is-very-nice.org
-is-very-sweet.org
is-with-theband.com
isa-geek.com
-isa-geek.net
-isa-geek.org
isa-hockeynut.com
issmarterthanyou.com
+likes-pie.com
+likescandy.com
+neat-url.com
+saves-the-whales.com
+selfip.com
+sells-for-less.com
+sells-for-u.com
+servebbs.com
+simple-url.com
+space-to-rent.com
+teaches-yoga.com
+writesthisblog.com
+ath.cx
+fuettertdasnetz.de
isteingeek.de
istmein.de
-kicks-ass.net
-kicks-ass.org
-knowsitall.info
-land-4-sale.us
lebtimnetz.de
leitungsen.de
-likes-pie.com
-likescandy.com
+traeumtgerade.de
+barrel-of-knowledge.info
+barrell-of-knowledge.info
+dyndns.info
+for-our.info
+groks-the.info
+groks-this.info
+here-for-more.info
+knowsitall.info
+selfip.info
+webhop.info
+forgot.her.name
+forgot.his.name
+at-band-camp.net
+blogdns.net
+broke-it.net
+buyshouses.net
+dnsalias.net
+dnsdojo.net
+does-it.net
+dontexist.net
+dynalias.net
+dynathome.net
+endofinternet.net
+from-az.net
+from-co.net
+from-la.net
+from-ny.net
+gets-it.net
+ham-radio-op.net
+homeftp.net
+homeip.net
+homelinux.net
+homeunix.net
+in-the-band.net
+is-a-chef.net
+is-a-geek.net
+isa-geek.net
+kicks-ass.net
+office-on-the.net
+podzone.net
+scrapper-site.net
+selfip.net
+sells-it.net
+servebbs.net
+serveftp.net
+thruhere.net
+webhop.net
merseine.nu
mine.nu
+shacknet.nu
+blogdns.org
+blogsite.org
+boldlygoingnowhere.org
+dnsalias.org
+dnsdojo.org
+doesntexist.org
+dontexist.org
+doomdns.org
+dvrdns.org
+dynalias.org
+dyndns.org
+go.dyndns.org
+home.dyndns.org
+endofinternet.org
+endoftheinternet.org
+from-me.org
+game-host.org
+gotdns.org
+hobby-site.org
+homedns.org
+homeftp.org
+homelinux.org
+homeunix.org
+is-a-bruinsfan.org
+is-a-candidate.org
+is-a-celticsfan.org
+is-a-chef.org
+is-a-geek.org
+is-a-knight.org
+is-a-linux-user.org
+is-a-patsfan.org
+is-a-soxfan.org
+is-found.org
+is-lost.org
+is-saved.org
+is-very-bad.org
+is-very-evil.org
+is-very-good.org
+is-very-nice.org
+is-very-sweet.org
+isa-geek.org
+kicks-ass.org
misconfused.org
-mypets.ws
-myphotos.cc
-neat-url.com
-office-on-the.net
-on-the-web.tv
-podzone.net
podzone.org
readmyblog.org
-saves-the-whales.com
-scrapper-site.net
-scrapping.cc
-selfip.biz
-selfip.com
-selfip.info
-selfip.net
selfip.org
-sells-for-less.com
-sells-for-u.com
-sells-it.net
sellsyourhome.org
-servebbs.com
-servebbs.net
servebbs.org
-serveftp.net
serveftp.org
servegame.org
-shacknet.nu
-simple-url.com
-space-to-rent.com
stuff-4-sale.org
-stuff-4-sale.us
-teaches-yoga.com
-thruhere.net
-traeumtgerade.de
-webhop.biz
-webhop.info
-webhop.net
webhop.org
+better-than.tv
+dyndns.tv
+on-the-web.tv
worse-than.tv
-writesthisblog.com
+is-by.us
+land-4-sale.us
+stuff-4-sale.us
+dyndns.ws
+mypets.ws
// ddnss.de : https://www.ddnss.de/
// Submitted by Robert Niedziela <webmaster@ddnss.de>
ddnss.de
dyn.ddnss.de
dyndns.ddnss.de
-dyndns1.de
dyn-ip24.de
+dyndns1.de
home-webserver.de
dyn.home-webserver.de
myhome-server.de
@@ -12990,8 +12990,8 @@ ddnss.org
// Definima : http://www.definima.com/
// Submitted by Maxence Bitterli <maxence@definima.com>
-definima.net
definima.io
+definima.net
// DigitalOcean App Platform : https://www.digitalocean.com/products/app-platform/
// Submitted by Braxton Huggins <psl-maintainers@digitalocean.com>
@@ -13046,6 +13046,14 @@ easypanel.host
// Submitted by <infracloudteam@namecheap.com>
*.ewp.live
+// eDirect Corp. : https://hosting.url.com.tw/
+// Submitted by C.S. chang <cschang@corp.url.com.tw>
+twmail.cc
+twmail.net
+twmail.org
+mymailer.com.tw
+url.tw
+
// Electromagnetic Field : https://www.emfcamp.org
// Submitted by <noc@emfcamp.org>
at.emf.camp
@@ -13064,7 +13072,7 @@ elementor.cool
en-root.fr
// Enalean SAS: https://www.enalean.com
-// Submitted by Thomas Cottier <thomas.cottier@enalean.com>
+// Submitted by Enalean Security Team <security@enalean.com>
mytuleap.com
tuleap-partners.com
@@ -13156,19 +13164,16 @@ us-2.evennode.com
us-3.evennode.com
us-4.evennode.com
+// Evervault : https://evervault.com
+// Submitted by Hannah Neary <engineering@evervault.com>
+relay.evervault.app
+relay.evervault.dev
+
// Expo : https://expo.dev/
// Submitted by James Ide <psl@expo.dev>
expo.app
staging.expo.app
-// eDirect Corp. : https://hosting.url.com.tw/
-// Submitted by C.S. chang <cschang@corp.url.com.tw>
-twmail.cc
-twmail.net
-twmail.org
-mymailer.com.tw
-url.tw
-
// Fabrica Technologies, Inc. : https://www.fabrica.dev/
// Submitted by Eric Jiang <eric@fabrica.dev>
onfabrica.com
@@ -13259,8 +13264,6 @@ u.channelsdvr.net
edgecompute.app
fastly-edge.com
fastly-terrarium.com
-fastlylb.net
-map.fastlylb.net
freetls.fastly.net
map.fastly.net
a.prod.fastly.net
@@ -13268,6 +13271,8 @@ global.prod.fastly.net
a.ssl.fastly.net
b.ssl.fastly.net
global.ssl.fastly.net
+fastlylb.net
+map.fastlylb.net
// Fastmail : https://www.fastmail.com/
// Submitted by Marc Bradshaw <marc@fastmailteam.com>
@@ -13337,8 +13342,8 @@ flutterflow.app
// fly.io: https://fly.io
// Submitted by Kurt Mackey <kurt@fly.io>
fly.dev
-edgeapp.net
shw.io
+edgeapp.net
// Flynn : https://flynn.io
// Submitted by Jonathan Rudenberg <jonathan@flynn.io>
@@ -13427,6 +13432,8 @@ aliases121.com
// GDS : https://www.gov.uk/service-manual/technology/managing-domain-names
// Submitted by Stephen Ford <hostmaster@digital.cabinet-office.gov.uk>
+campaign.gov.uk
+service.gov.uk
independent-commission.uk
independent-inquest.uk
independent-inquiry.uk
@@ -13434,8 +13441,6 @@ independent-panel.uk
independent-review.uk
public-inquiry.uk
royal-commission.uk
-campaign.gov.uk
-service.gov.uk
// CDDO : https://www.gov.uk/guidance/get-an-api-domain-on-govuk
// Submitted by Jamie Tanna <jamie.tanna@digital.cabinet-office.gov.uk>
@@ -13624,12 +13629,30 @@ goip.de
// Google, Inc.
// Submitted by Shannon McCabe <public-suffix-editors@google.com>
+blogspot.ae
+blogspot.al
+blogspot.am
*.hosted.app
*.run.app
web.app
+blogspot.com.ar
+blogspot.co.at
+blogspot.com.au
+blogspot.ba
+blogspot.be
+blogspot.bg
+blogspot.bj
+blogspot.com.br
+blogspot.com.by
+blogspot.ca
+blogspot.cf
+blogspot.ch
+blogspot.cl
+blogspot.com.co
*.0emm.com
appspot.com
*.r.appspot.com
+blogspot.com
codespot.com
googleapis.com
googlecode.com
@@ -13637,58 +13660,32 @@ pagespeedmobilizer.com
publishproxy.com
withgoogle.com
withyoutube.com
-*.gateway.dev
-cloud.goog
-translate.goog
-*.usercontent.goog
-cloudfunctions.net
-blogspot.ae
-blogspot.al
-blogspot.am
-blogspot.ba
-blogspot.be
-blogspot.bg
-blogspot.bj
-blogspot.ca
-blogspot.cf
-blogspot.ch
-blogspot.cl
-blogspot.co.at
-blogspot.co.id
-blogspot.co.il
-blogspot.co.ke
-blogspot.co.nz
-blogspot.co.uk
-blogspot.co.za
-blogspot.com
-blogspot.com.ar
-blogspot.com.au
-blogspot.com.br
-blogspot.com.by
-blogspot.com.co
-blogspot.com.cy
-blogspot.com.ee
-blogspot.com.eg
-blogspot.com.es
-blogspot.com.mt
-blogspot.com.ng
-blogspot.com.tr
-blogspot.com.uy
blogspot.cv
+blogspot.com.cy
blogspot.cz
blogspot.de
+*.gateway.dev
blogspot.dk
+blogspot.com.ee
+blogspot.com.eg
+blogspot.com.es
blogspot.fi
blogspot.fr
+cloud.goog
+translate.goog
+*.usercontent.goog
blogspot.gr
blogspot.hk
blogspot.hr
blogspot.hu
+blogspot.co.id
blogspot.ie
+blogspot.co.il
blogspot.in
blogspot.is
blogspot.it
blogspot.jp
+blogspot.co.ke
blogspot.kr
blogspot.li
blogspot.lt
@@ -13696,10 +13693,14 @@ blogspot.lu
blogspot.md
blogspot.mk
blogspot.mr
+blogspot.com.mt
blogspot.mx
blogspot.my
+cloudfunctions.net
+blogspot.com.ng
blogspot.nl
blogspot.no
+blogspot.co.nz
blogspot.pe
blogspot.pt
blogspot.qa
@@ -13713,9 +13714,13 @@ blogspot.si
blogspot.sk
blogspot.sn
blogspot.td
+blogspot.com.tr
blogspot.tw
blogspot.ug
+blogspot.co.uk
+blogspot.com.uy
blogspot.vn
+blogspot.co.za
// Goupile : https://goupile.fr
// Submitted by Niels Martignene <hello@goupile.fr>
@@ -13748,8 +13753,8 @@ conf.se
// Handshake : https://handshake.org
// Submitted by Mike Damm <md@md.vc>
-hs.zone
hs.run
+hs.zone
// Hashbang : https://hashbang.sh
hashbang.sh
@@ -13768,6 +13773,10 @@ pages.it.hs-heilbronn.de
helioho.st
heliohost.us
+// HeiyuSpace: https://lazycat.cloud
+// Submitted by Xia Bin <admin@lazycat.cloud>
+heiyu.space
+
// Hepforge : https://www.hepforge.org
// Submitted by David Grellscheid <admin@hepforge.org>
hepforge.org
@@ -13822,6 +13831,10 @@ ie.ua
// HostyHosting (hostyhosting.com)
hostyhosting.io
+// Hypernode B.V. : https://www.hypernode.com/
+// Submitted by Cipriano Groenendal <security@nl.team.blue>
+hypernode.io
+
// Häkkinen.fi
// Submitted by Eero Häkkinen <Eero+psl@Häkkinen.fi>
häkkinen.fi
@@ -13842,8 +13855,8 @@ iliadboxos.it
// Impertrix Solutions : <https://impertrixcdn.com>
// Submitted by Zhixiang Zhao <csuite@impertrix.com>
-impertrixcdn.com
impertrix.com
+impertrixcdn.com
// Incsub, LLC: https://incsub.com/
// Submitted by Aaron Edwards <sysadmins@incsub.com>
@@ -13860,10 +13873,10 @@ in-berlin.de
in-brb.de
in-butter.de
in-dsl.de
-in-dsl.net
-in-dsl.org
in-vpn.de
+in-dsl.net
in-vpn.net
+in-dsl.org
in-vpn.org
// info.at : http://www.info.at/
@@ -14038,13 +14051,13 @@ jotelulu.cloud
// JouwWeb B.V. : https://www.jouwweb.nl
// Submitted by Camilo Sperberg <tech@webador.com>
-jouwweb.site
webadorsite.com
+jouwweb.site
// Joyent : https://www.joyent.com/
// Submitted by Brian Bennett <brian.bennett@joyent.com>
-*.triton.zone
*.cns.joyent.com
+*.triton.zone
// JS.ORG : http://dns.js.org
// Submitted by Stefan Keim <admin@js.org>
@@ -14086,8 +14099,8 @@ oya.to
// Katholieke Universiteit Leuven: https://www.kuleuven.be
// Submitted by Abuse KU Leuven <abuse@kuleuven.be>
-kuleuven.cloud
ezproxy.kuleuven.be
+kuleuven.cloud
// .KRD : http://nic.krd/data/krd/Registration%20Policy.pdf
co.krd
@@ -14095,8 +14108,8 @@ edu.krd
// Krellian Ltd. : https://krellian.com
// Submitted by Ben Francis <ben@krellian.com>
-krellian.net
webthings.io
+krellian.net
// LCube - Professional hosting e.K. : https://www.lcube-webhosting.de
// Submitted by Lars Laehn <info@lcube.de>
@@ -14130,8 +14143,8 @@ co.technology
// linkyard ldt: https://www.linkyard.ch/
// Submitted by Mario Siegenthaler <mario.siegenthaler@linkyard.ch>
-linkyard.cloud
linkyard-cloud.ch
+linkyard.cloud
// Linode : https://linode.com
// Submitted by <security@linode.com>
@@ -14186,18 +14199,19 @@ lugs.org.uk
// Lukanet Ltd : https://lukanet.com
// Submitted by Anton Avramov <register@lukanet.com>
barsy.bg
-barsy.co.uk
-barsyonline.co.uk
+barsy.club
barsycenter.com
barsyonline.com
-barsy.club
barsy.de
+barsy.dev
barsy.eu
+barsy.gr
barsy.in
barsy.info
barsy.io
barsy.me
barsy.menu
+barsyonline.menu
barsy.mobi
barsy.net
barsy.online
@@ -14205,10 +14219,15 @@ barsy.org
barsy.pro
barsy.pub
barsy.ro
+barsy.rs
barsy.shop
+barsyonline.shop
barsy.site
+barsy.store
barsy.support
barsy.uk
+barsy.co.uk
+barsyonline.co.uk
// Magento Commerce
// Submitted by Damien Tournoud <dtournoud@magento.cloud>
@@ -14239,8 +14258,8 @@ mcpe.me
// Submitted by Evgeniy Subbotin <e.subbotin@mchost.ru>
mcdir.me
mcdir.ru
-mcpre.ru
vps.mcdir.ru
+mcpre.ru
// Mediatech : https://mediatech.by
// Submitted by Evgeniy Kozhuhovskiy <ugenk@mediatech.by>
@@ -14289,10 +14308,9 @@ co.pl
// Microsoft Azure : https://home.azure
*.azurecontainer.io
azure-api.net
+azure-mobile.net
azureedge.net
azurefd.net
-azurewebsites.net
-azure-mobile.net
azurestaticapps.net
1.azurestaticapps.net
2.azurestaticapps.net
@@ -14306,6 +14324,7 @@ eastasia.azurestaticapps.net
eastus2.azurestaticapps.net
westeurope.azurestaticapps.net
westus2.azurestaticapps.net
+azurewebsites.net
cloudapp.net
trafficmanager.net
blob.core.windows.net
@@ -14344,8 +14363,8 @@ pp.ru
// Mythic Beasts : https://www.mythic-beasts.com
// Submitted by Paul Cammish <kelduum@mythic-beasts.com>
hostedpi.com
-customer.mythic-beasts.com
caracal.mythic-beasts.com
+customer.mythic-beasts.com
fentiger.mythic-beasts.com
lynx.mythic-beasts.com
ocelot.mythic-beasts.com
@@ -14466,91 +14485,91 @@ nerdpol.ovh
// No-IP.com : https://noip.com/
// Submitted by Deven Reza <publicsuffixlist@noip.com>
+mmafan.biz
+myftp.biz
+no-ip.biz
+no-ip.ca
+fantasyleague.cc
+gotdns.ch
+3utilities.com
blogsyte.com
-brasilia.me
-cable-modem.org
ciscofreak.com
-collegefan.org
-couchpotatofries.org
damnserver.com
-ddns.me
+ddnsking.com
ditchyourip.com
-dnsfor.me
dnsiskinky.com
-dvrcam.info
dynns.com
-eating-organic.net
-fantasyleague.cc
geekgalaxy.com
-golffan.us
health-carereform.com
homesecuritymac.com
homesecuritypc.com
-hopto.me
-ilovecollege.info
-loginto.me
-mlbfan.org
-mmafan.biz
myactivedirectory.com
-mydissent.net
-myeffect.net
-mymediapc.net
-mypsx.net
mysecuritycamera.com
-mysecuritycamera.net
-mysecuritycamera.org
+myvnc.com
net-freaks.com
-nflfan.org
-nhlfan.net
-no-ip.ca
-no-ip.co.uk
-no-ip.net
-noip.us
onthewifi.com
-pgafan.net
point2this.com
-pointto.us
-privatizehealthinsurance.net
quicksytes.com
-read-books.org
securitytactics.com
+servebeer.com
+servecounterstrike.com
serveexchange.com
+serveftp.com
+servegame.com
+servehalflife.com
+servehttp.com
servehumour.com
+serveirc.com
+servemp3.com
servep2p.com
+servepics.com
+servequake.com
servesarcasm.com
stufftoread.com
-ufcfan.org
unusualperson.com
workisboring.com
-3utilities.com
-bounceme.net
-ddns.net
-ddnsking.com
-gotdns.ch
-hopto.org
-myftp.biz
-myftp.org
-myvnc.com
-no-ip.biz
+dvrcam.info
+ilovecollege.info
no-ip.info
-no-ip.org
+brasilia.me
+ddns.me
+dnsfor.me
+hopto.me
+loginto.me
noip.me
+webhop.me
+bounceme.net
+ddns.net
+eating-organic.net
+mydissent.net
+myeffect.net
+mymediapc.net
+mypsx.net
+mysecuritycamera.net
+nhlfan.net
+no-ip.net
+pgafan.net
+privatizehealthinsurance.net
redirectme.net
-servebeer.com
serveblog.net
-servecounterstrike.com
-serveftp.com
-servegame.com
-servehalflife.com
-servehttp.com
-serveirc.com
serveminecraft.net
-servemp3.com
-servepics.com
-servequake.com
sytes.net
-webhop.me
+cable-modem.org
+collegefan.org
+couchpotatofries.org
+hopto.org
+mlbfan.org
+myftp.org
+mysecuritycamera.org
+nflfan.org
+no-ip.org
+read-books.org
+ufcfan.org
zapto.org
+no-ip.co.uk
+golffan.us
+noip.us
+pointto.us
// NodeArt : https://nodeart.io
// Submitted by Konstantin Nosov <Nosov@nodeart.io>
@@ -14591,25 +14610,25 @@ omniwe.site
// One.com: https://www.one.com/
// Submitted by Jacob Bunk Nielsen <jbn@one.com>
-123hjemmeside.dk
-123hjemmeside.no
-123homepage.it
-123kotisivu.fi
-123minsida.se
-123miweb.es
-123paginaweb.pt
-123siteweb.fr
123webseite.at
-123webseite.de
123website.be
+simplesite.com.br
123website.ch
+simplesite.com
+123webseite.de
+123hjemmeside.dk
+123miweb.es
+123kotisivu.fi
+123siteweb.fr
+simplesite.gr
+123homepage.it
123website.lu
123website.nl
+123hjemmeside.no
service.one
-simplesite.com
-simplesite.com.br
-simplesite.gr
simplesite.pl
+123paginaweb.pt
+123minsida.se
// One Fold Media : http://www.onefoldmedia.com/
// Submitted by Eddie Jones <eddie@onefoldmedia.com>
@@ -14659,8 +14678,8 @@ outsystemscloud.com
// OVHcloud: https://ovhcloud.com
// Submitted by Vincent Cassé <vincent.casse@ovhcloud.com>
-*.webpaas.ovh.net
*.hosting.ovh.net
+*.webpaas.ovh.net
// OwnProvider GmbH: http://www.ownprovider.com
// Submitted by Jan Moennich <jan.moennich@ownprovider.com>
@@ -14699,6 +14718,7 @@ pagexl.com
// Submitted by Pierre Carrier <pc@rrier.ca>
*.xmit.co
xmit.dev
+madethis.site
srv.us
gh.srv.us
gl.srv.us
@@ -14713,8 +14733,8 @@ zakopane.pl
// Pantheon Systems, Inc. : https://pantheon.io/
// Submitted by Gary Dylina <gary@pantheon.io>
-pantheonsite.io
gotpantheon.com
+pantheonsite.io
// Peplink | Pepwave : http://peplink.com/
// Submitted by Steve Leung <steveleung@peplink.com>
@@ -14750,9 +14770,9 @@ platterp.us
// Plesk : https://www.plesk.com/
// Submitted by Anton Akhtyamov <program-managers@plesk.com>
+pleskns.com
pdns.page
plesk.page
-pleskns.com
// Pley AB : https://www.pley.com/
// Submitted by Henning Pohl <infra@pley.com>
@@ -14883,8 +14903,8 @@ g.vbrplsbx.io
// Rancher Labs, Inc : https://rancher.com
// Submitted by Vincent Fiduccia <domains@rancher.com>
-*.on-k3s.io
*.on-rancher.cloud
+*.on-k3s.io
*.on-rio.io
// Read The Docs, Inc : https://www.readthedocs.org
@@ -14897,8 +14917,8 @@ rhcloud.com
// Render : https://render.com
// Submitted by Anurag Goel <dev@render.com>
-app.render.com
onrender.com
+app.render.com
// Repl.it : https://repl.it
// Submitted by Lincoln Bergeson <psl@repl.it>
@@ -15043,8 +15063,8 @@ sandcats.io
// SBE network solutions GmbH : https://www.sbe.de/
// Submitted by Norman Meilick <nm@sbe.de>
-logoip.de
logoip.com
+logoip.de
// Scaleway : https://www.scaleway.com/
// Submitted by Rémy Léone <rleone@scaleway.com>
@@ -15133,6 +15153,10 @@ biz.ua
co.ua
pp.ua
+// Shanghai Accounting Society : https://www.sasf.org.cn
+// Submitted by Information Administration <info@sasf.org.cn>
+as.sh.cn
+
// Sheezy.Art : https://sheezy.art
// Submitted by Nyoom <admin@sheezy.art>
sheezy.games
@@ -15221,6 +15245,10 @@ try-snowplow.com
// Submitted by Drew DeVault <sir@cmpwn.com>
srht.site
+// SparrowHost : https://sparrowhost.in/
+// Submitted by Anant Pandey <info@sparrowhost.in>
+ind.mom
+
// StackBlitz : https://stackblitz.com
// Submitted by Dominic Elm <hello@stackblitz.com>
w-corp-staticblitz.com
@@ -15241,8 +15269,8 @@ stackit.zone
// Staclar : https://staclar.com
// Submitted by Q Misell <q@staclar.com>
-musician.io
// Submitted by Matthias Merkel <matthias.merkel@staclar.com>
+musician.io
novecore.site
// staticland : https://static.land
@@ -15255,6 +15283,11 @@ sites.static.land
// Submitted by Tony Schirmer <tony@storebase.io>
storebase.store
+// Strapi : https://strapi.io/
+// Submitted by Florent Baldino <security@strapi.io>
+strapiapp.com
+media.strapiapp.com
+
// Strategic System Consulting (eApps Hosting): https://www.eapps.com/
// Submitted by Alex Oancea <aoancea@cloudscale365.com>
vps-host.net
@@ -15354,8 +15387,8 @@ su.paba.se
// Symfony, SAS : https://symfony.com/
// Submitted by Fabien Potencier <fabien@symfony.com>
-*.s5y.io
*.sensiosite.cloud
+*.s5y.io
// Syncloud : https://syncloud.org
// Submitted by Boris Rybalkin <syncloud@syncloud.it>
@@ -15377,14 +15410,14 @@ dsmynas.net
familyds.net
dsmynas.org
familyds.org
-vpnplus.to
direct.quickconnect.to
+vpnplus.to
// Tabit Technologies Ltd. : https://tabit.cloud/
// Submitted by Oren Agiv <oren@tabit.cloud>
-tabitorder.co.il
-mytabit.co.il
mytabit.com
+mytabit.co.il
+tabitorder.co.il
// TAIFUN Software AG : http://taifun-software.de
// Submitted by Bjoern Henke <dev-server@taifun-software.de>
@@ -15425,11 +15458,11 @@ telebit.io
reservd.com
thingdustdata.com
cust.dev.thingdust.io
+reservd.dev.thingdust.io
cust.disrec.thingdust.io
+reservd.disrec.thingdust.io
cust.prod.thingdust.io
cust.testing.thingdust.io
-reservd.dev.thingdust.io
-reservd.disrec.thingdust.io
reservd.testing.thingdust.io
// ticket i/O GmbH : https://ticket.io
@@ -15491,8 +15524,6 @@ tuxfamily.org
// TwoDNS : https://www.twodns.de/
// Submitted by TwoDNS-Support <support@two-dns.de>
dd-dns.de
-diskstation.eu
-diskstation.org
dray-dns.de
draydns.de
dyn-vpn.de
@@ -15503,6 +15534,8 @@ my-wan.de
syno-ds.de
synology-diskstation.de
synology-ds.de
+diskstation.eu
+diskstation.org
// Typedream : https://typedream.com
// Submitted by Putri Karunia <putri@typedream.com>
@@ -15514,15 +15547,15 @@ pro.typeform.com
// Uberspace : https://uberspace.de
// Submitted by Moritz Werner <mwerner@jonaspasche.com>
-uber.space
*.uberspace.de
+uber.space
// UDR Limited : http://www.udr.hk.com
// Submitted by registry <hostmaster@udr.hk.com>
hk.com
-hk.org
-ltd.hk
inc.hk
+ltd.hk
+hk.org
// UK Intis Telecom LTD : https://it.com
// Submitted by ITComdomains <to@it.com>
@@ -15543,8 +15576,8 @@ org.yt
// United Gameserver GmbH : https://united-gameserver.de
// Submitted by Stefan Schwarz <sysadm@united-gameserver.de>
-virtualuser.de
virtual-user.de
+virtualuser.de
// Upli : https://upli.io
// Submitted by Lenny Bakkalian <lenny.bakkalian@gmail.com>
@@ -15609,10 +15642,10 @@ webflowtest.io
// WebHotelier Technologies Ltd: https://www.webhotelier.net/
// Submitted by Apostolos Tsakpinis <apostolos.tsakpinis@gmail.com>
-reserve-online.net
-reserve-online.com
bookonline.app
hotelwithflight.com
+reserve-online.com
+reserve-online.net
// WebWaddle Ltd: https://webwaddle.com/
// Submitted by Merlin Glander <hostmaster@webwaddle.com>
@@ -15638,15 +15671,23 @@ pages.wiardweb.com
// Wikimedia Labs : https://wikitech.wikimedia.org
// Submitted by Arturo Borrero Gonzalez <aborrero@wikimedia.org>
-wmflabs.org
toolforge.org
wmcloud.org
+wmflabs.org
// WISP : https://wisp.gg
// Submitted by Stepan Fedotov <stepan@wisp.gg>
panel.gg
daemon.panel.gg
+// Wix.com, Inc. : https://www.wix.com
+// Submitted by Shahar Talmi / Alon Kochba <publicsuffixlist@wix.com>
+wixsite.com
+wixstudio.com
+editorx.io
+wixstudio.io
+wix.run
+
// Wizard Zines : https://wizardzines.com
// Submitted by Julia Evans <julia@wizardzines.com>
messwithdns.com
@@ -15672,13 +15713,6 @@ weeklylottery.org.uk
wpenginepowered.com
js.wpenginepowered.com
-// Wix.com, Inc. : https://www.wix.com
-// Submitted by Shahar Talmi <shahar@wix.com>
-wixsite.com
-editorx.io
-wixstudio.io
-wix.run
-
// XenonCloud GbR: https://xenoncloud.net
// Submitted by Julian Uphoff <publicsuffixlist@xenoncloud.net>
half.host
diff --git a/ansible_collections/community/dns/tests/integration/targets/lookup_lookup/tasks/main.yml b/ansible_collections/community/dns/tests/integration/targets/lookup_lookup/tasks/main.yml
index 52ba9a46c..d5fa9f052 100644
--- a/ansible_collections/community/dns/tests/integration/targets/lookup_lookup/tasks/main.yml
+++ b/ansible_collections/community/dns/tests/integration/targets/lookup_lookup/tasks/main.yml
@@ -8,7 +8,7 @@
ansible_a: >-
{{ query('community.dns.lookup', 'ansible.com', type='A') }}
ansible_aaaa: >-
- {{ query('community.dns.lookup', 'ansible.com', type='AAAA', server='9.9.9.9') }}
+ {{ query('community.dns.lookup', 'www.ansible.com', type='AAAA', server='9.9.9.9') }}
ansible_txt: >-
{{ query('community.dns.lookup', 'ansible.com', type='TXT', server=['1.1.1.1', '8.8.8.8', 'dns9.quad9.net.']) }}
ansible_empty: >-
diff --git a/ansible_collections/community/dns/tests/integration/targets/lookup_lookup_as_dict/tasks/main.yml b/ansible_collections/community/dns/tests/integration/targets/lookup_lookup_as_dict/tasks/main.yml
index 3ba6f52f0..faa9ba2ba 100644
--- a/ansible_collections/community/dns/tests/integration/targets/lookup_lookup_as_dict/tasks/main.yml
+++ b/ansible_collections/community/dns/tests/integration/targets/lookup_lookup_as_dict/tasks/main.yml
@@ -8,7 +8,7 @@
ansible_a: >-
{{ query('community.dns.lookup_as_dict', 'ansible.com', type='A') }}
ansible_aaaa: >-
- {{ query('community.dns.lookup_as_dict', 'ansible.com', type='AAAA', server='9.9.9.9') }}
+ {{ query('community.dns.lookup_as_dict', 'www.ansible.com', type='AAAA', server='9.9.9.9') }}
ansible_txt: >-
{{ query('community.dns.lookup_as_dict', 'ansible.com', type='TXT', server=['1.1.1.1', '8.8.8.8', 'dns9.quad9.net.']) }}
ansible_empty: >-
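The two integration-test hunks above switch the AAAA lookups from ansible.com to www.ansible.com. For reference, a minimal playbook sketch exercising the same lookups outside the test targets; the plugin names, record types, and ``server`` argument are taken directly from the tasks above, while the play wrapper itself is illustrative only:

    - hosts: localhost
      gather_facts: false
      tasks:
        - name: Query A and AAAA records with the community.dns.lookup plugin
          ansible.builtin.debug:
            msg:
              a_records: "{{ query('community.dns.lookup', 'ansible.com', type='A') }}"
              aaaa_records: "{{ query('community.dns.lookup', 'www.ansible.com', type='AAAA', server='9.9.9.9') }}"

        - name: Same AAAA query, returned as dictionaries with per-record fields
          ansible.builtin.debug:
            msg: "{{ query('community.dns.lookup_as_dict', 'www.ansible.com', type='AAAA', server='9.9.9.9') }}"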
diff --git a/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml b/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml
index ae4197a4a..4f22d28b9 100644
--- a/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml
+++ b/ansible_collections/community/docker/.azure-pipelines/azure-pipelines.yml
@@ -46,7 +46,7 @@ variables:
resources:
containers:
- container: default
- image: quay.io/ansible/azure-pipelines-test-container:4.0.1
+ image: quay.io/ansible/azure-pipelines-test-container:6.0.0
pool: Standard
diff --git a/ansible_collections/community/docker/.github/workflows/ansible-test.yml b/ansible_collections/community/docker/.github/workflows/ansible-test.yml
index d4778e091..3afb663ac 100644
--- a/ansible_collections/community/docker/.github/workflows/ansible-test.yml
+++ b/ansible_collections/community/docker/.github/workflows/ansible-test.yml
@@ -120,18 +120,6 @@ jobs:
include:
# 2.11
- ansible: '2.11'
- docker: fedora32
- python: ''
- target: azp/4/
- - ansible: '2.11'
- docker: fedora32
- python: ''
- target: azp/5/
- - ansible: '2.11'
- docker: fedora32
- python: ''
- target: azp/6/
- - ansible: '2.11'
docker: alpine3
python: ''
target: azp/4/
diff --git a/ansible_collections/community/docker/.github/workflows/ee.yml b/ansible_collections/community/docker/.github/workflows/ee.yml
index d884f56ef..adc5f5550 100644
--- a/ansible_collections/community/docker/.github/workflows/ee.yml
+++ b/ansible_collections/community/docker/.github/workflows/ee.yml
@@ -70,14 +70,6 @@ jobs:
package_system: python39 python39-pip python39-wheel python39-cryptography
base_image: docker.io/redhat/ubi8:latest
pre_base: '"#"'
- - name: ansible-core 2.12 @ CentOS Stream 8
- ansible_core: https://github.com/ansible/ansible/archive/stable-2.12.tar.gz
- ansible_runner: ansible-runner
- other_deps: |2
- python_interpreter:
- package_system: python39 python39-pip python39-wheel python39-cryptography
- base_image: quay.io/centos/centos:stream8
- pre_base: '"#"'
runs-on: ubuntu-latest
steps:
- name: Check out code
diff --git a/ansible_collections/community/docker/CHANGELOG.md b/ansible_collections/community/docker/CHANGELOG.md
index 13d287f2e..da43b356c 100644
--- a/ansible_collections/community/docker/CHANGELOG.md
+++ b/ansible_collections/community/docker/CHANGELOG.md
@@ -2,248 +2,251 @@
**Topics**
-- <a href="#v3-10-3">v3\.10\.3</a>
+- <a href="#v3-10-4">v3\.10\.4</a>
- <a href="#release-summary">Release Summary</a>
- <a href="#bugfixes">Bugfixes</a>
-- <a href="#v3-10-2">v3\.10\.2</a>
+- <a href="#v3-10-3">v3\.10\.3</a>
- <a href="#release-summary-1">Release Summary</a>
- <a href="#bugfixes-1">Bugfixes</a>
-- <a href="#v3-10-1">v3\.10\.1</a>
+- <a href="#v3-10-2">v3\.10\.2</a>
- <a href="#release-summary-2">Release Summary</a>
- <a href="#bugfixes-2">Bugfixes</a>
+- <a href="#v3-10-1">v3\.10\.1</a>
+ - <a href="#release-summary-3">Release Summary</a>
+ - <a href="#bugfixes-3">Bugfixes</a>
- <a href="#known-issues">Known Issues</a>
- <a href="#v3-10-0">v3\.10\.0</a>
- - <a href="#release-summary-3">Release Summary</a>
+ - <a href="#release-summary-4">Release Summary</a>
- <a href="#minor-changes">Minor Changes</a>
- <a href="#deprecated-features">Deprecated Features</a>
- <a href="#v3-9-0">v3\.9\.0</a>
- - <a href="#release-summary-4">Release Summary</a>
+ - <a href="#release-summary-5">Release Summary</a>
- <a href="#minor-changes-1">Minor Changes</a>
- - <a href="#bugfixes-3">Bugfixes</a>
+ - <a href="#bugfixes-4">Bugfixes</a>
- <a href="#v3-8-1">v3\.8\.1</a>
- - <a href="#release-summary-5">Release Summary</a>
+ - <a href="#release-summary-6">Release Summary</a>
- <a href="#security-fixes">Security Fixes</a>
- - <a href="#bugfixes-4">Bugfixes</a>
+ - <a href="#bugfixes-5">Bugfixes</a>
- <a href="#v3-8-0">v3\.8\.0</a>
- - <a href="#release-summary-6">Release Summary</a>
+ - <a href="#release-summary-7">Release Summary</a>
- <a href="#minor-changes-2">Minor Changes</a>
- - <a href="#bugfixes-5">Bugfixes</a>
+ - <a href="#bugfixes-6">Bugfixes</a>
- <a href="#v3-7-0">v3\.7\.0</a>
- - <a href="#release-summary-7">Release Summary</a>
+ - <a href="#release-summary-8">Release Summary</a>
- <a href="#minor-changes-3">Minor Changes</a>
- - <a href="#bugfixes-6">Bugfixes</a>
+ - <a href="#bugfixes-7">Bugfixes</a>
- <a href="#new-modules">New Modules</a>
- <a href="#v3-6-0">v3\.6\.0</a>
- - <a href="#release-summary-8">Release Summary</a>
+ - <a href="#release-summary-9">Release Summary</a>
- <a href="#major-changes">Major Changes</a>
- <a href="#minor-changes-4">Minor Changes</a>
- - <a href="#bugfixes-7">Bugfixes</a>
+ - <a href="#bugfixes-8">Bugfixes</a>
- <a href="#new-modules-1">New Modules</a>
- <a href="#v3-5-0">v3\.5\.0</a>
- - <a href="#release-summary-9">Release Summary</a>
+ - <a href="#release-summary-10">Release Summary</a>
- <a href="#minor-changes-5">Minor Changes</a>
- <a href="#deprecated-features-1">Deprecated Features</a>
- - <a href="#bugfixes-8">Bugfixes</a>
-- <a href="#v3-4-11">v3\.4\.11</a>
- - <a href="#release-summary-10">Release Summary</a>
- <a href="#bugfixes-9">Bugfixes</a>
-- <a href="#v3-4-10">v3\.4\.10</a>
+- <a href="#v3-4-11">v3\.4\.11</a>
- <a href="#release-summary-11">Release Summary</a>
- <a href="#bugfixes-10">Bugfixes</a>
-- <a href="#v3-4-9">v3\.4\.9</a>
+- <a href="#v3-4-10">v3\.4\.10</a>
- <a href="#release-summary-12">Release Summary</a>
- <a href="#bugfixes-11">Bugfixes</a>
-- <a href="#v3-4-8">v3\.4\.8</a>
+- <a href="#v3-4-9">v3\.4\.9</a>
- <a href="#release-summary-13">Release Summary</a>
+ - <a href="#bugfixes-12">Bugfixes</a>
+- <a href="#v3-4-8">v3\.4\.8</a>
+ - <a href="#release-summary-14">Release Summary</a>
- <a href="#known-issues-1">Known Issues</a>
- <a href="#v3-4-7">v3\.4\.7</a>
- - <a href="#release-summary-14">Release Summary</a>
- - <a href="#bugfixes-12">Bugfixes</a>
-- <a href="#v3-4-6">v3\.4\.6</a>
- <a href="#release-summary-15">Release Summary</a>
- <a href="#bugfixes-13">Bugfixes</a>
- - <a href="#known-issues-2">Known Issues</a>
-- <a href="#v3-4-5">v3\.4\.5</a>
+- <a href="#v3-4-6">v3\.4\.6</a>
- <a href="#release-summary-16">Release Summary</a>
- <a href="#bugfixes-14">Bugfixes</a>
-- <a href="#v3-4-4">v3\.4\.4</a>
+ - <a href="#known-issues-2">Known Issues</a>
+- <a href="#v3-4-5">v3\.4\.5</a>
- <a href="#release-summary-17">Release Summary</a>
+ - <a href="#bugfixes-15">Bugfixes</a>
+- <a href="#v3-4-4">v3\.4\.4</a>
+ - <a href="#release-summary-18">Release Summary</a>
- <a href="#minor-changes-6">Minor Changes</a>
- <a href="#known-issues-3">Known Issues</a>
- <a href="#v3-4-3">v3\.4\.3</a>
- - <a href="#release-summary-18">Release Summary</a>
-- <a href="#v3-4-2">v3\.4\.2</a>
- <a href="#release-summary-19">Release Summary</a>
- - <a href="#bugfixes-15">Bugfixes</a>
-- <a href="#v3-4-1">v3\.4\.1</a>
+- <a href="#v3-4-2">v3\.4\.2</a>
- <a href="#release-summary-20">Release Summary</a>
- <a href="#bugfixes-16">Bugfixes</a>
-- <a href="#v3-4-0">v3\.4\.0</a>
+- <a href="#v3-4-1">v3\.4\.1</a>
- <a href="#release-summary-21">Release Summary</a>
- - <a href="#minor-changes-7">Minor Changes</a>
- <a href="#bugfixes-17">Bugfixes</a>
- - <a href="#new-modules-2">New Modules</a>
-- <a href="#v3-3-2">v3\.3\.2</a>
+- <a href="#v3-4-0">v3\.4\.0</a>
- <a href="#release-summary-22">Release Summary</a>
+ - <a href="#minor-changes-7">Minor Changes</a>
- <a href="#bugfixes-18">Bugfixes</a>
-- <a href="#v3-3-1">v3\.3\.1</a>
+ - <a href="#new-modules-2">New Modules</a>
+- <a href="#v3-3-2">v3\.3\.2</a>
- <a href="#release-summary-23">Release Summary</a>
- <a href="#bugfixes-19">Bugfixes</a>
-- <a href="#v3-3-0">v3\.3\.0</a>
+- <a href="#v3-3-1">v3\.3\.1</a>
- <a href="#release-summary-24">Release Summary</a>
- - <a href="#minor-changes-8">Minor Changes</a>
- <a href="#bugfixes-20">Bugfixes</a>
-- <a href="#v3-2-2">v3\.2\.2</a>
+- <a href="#v3-3-0">v3\.3\.0</a>
- <a href="#release-summary-25">Release Summary</a>
+ - <a href="#minor-changes-8">Minor Changes</a>
- <a href="#bugfixes-21">Bugfixes</a>
-- <a href="#v3-2-1">v3\.2\.1</a>
+- <a href="#v3-2-2">v3\.2\.2</a>
- <a href="#release-summary-26">Release Summary</a>
-- <a href="#v3-2-0">v3\.2\.0</a>
+ - <a href="#bugfixes-22">Bugfixes</a>
+- <a href="#v3-2-1">v3\.2\.1</a>
- <a href="#release-summary-27">Release Summary</a>
+- <a href="#v3-2-0">v3\.2\.0</a>
+ - <a href="#release-summary-28">Release Summary</a>
- <a href="#minor-changes-9">Minor Changes</a>
- <a href="#deprecated-features-2">Deprecated Features</a>
- <a href="#v3-1-0">v3\.1\.0</a>
- - <a href="#release-summary-28">Release Summary</a>
+ - <a href="#release-summary-29">Release Summary</a>
- <a href="#minor-changes-10">Minor Changes</a>
- <a href="#v3-0-2">v3\.0\.2</a>
- - <a href="#release-summary-29">Release Summary</a>
- - <a href="#bugfixes-22">Bugfixes</a>
-- <a href="#v3-0-1">v3\.0\.1</a>
- <a href="#release-summary-30">Release Summary</a>
- <a href="#bugfixes-23">Bugfixes</a>
-- <a href="#v3-0-0">v3\.0\.0</a>
+- <a href="#v3-0-1">v3\.0\.1</a>
- <a href="#release-summary-31">Release Summary</a>
+ - <a href="#bugfixes-24">Bugfixes</a>
+- <a href="#v3-0-0">v3\.0\.0</a>
+ - <a href="#release-summary-32">Release Summary</a>
- <a href="#major-changes-1">Major Changes</a>
- <a href="#minor-changes-11">Minor Changes</a>
- <a href="#breaking-changes--porting-guide">Breaking Changes / Porting Guide</a>
- <a href="#removed-features-previously-deprecated">Removed Features \(previously deprecated\)</a>
- <a href="#security-fixes-1">Security Fixes</a>
- - <a href="#bugfixes-24">Bugfixes</a>
+ - <a href="#bugfixes-25">Bugfixes</a>
- <a href="#v2-7-0">v2\.7\.0</a>
- - <a href="#release-summary-32">Release Summary</a>
+ - <a href="#release-summary-33">Release Summary</a>
- <a href="#minor-changes-12">Minor Changes</a>
- <a href="#deprecated-features-3">Deprecated Features</a>
- - <a href="#bugfixes-25">Bugfixes</a>
+ - <a href="#bugfixes-26">Bugfixes</a>
- <a href="#v2-6-0">v2\.6\.0</a>
- - <a href="#release-summary-33">Release Summary</a>
+ - <a href="#release-summary-34">Release Summary</a>
- <a href="#minor-changes-13">Minor Changes</a>
- <a href="#deprecated-features-4">Deprecated Features</a>
- - <a href="#bugfixes-26">Bugfixes</a>
-- <a href="#v2-5-1">v2\.5\.1</a>
- - <a href="#release-summary-34">Release Summary</a>
- <a href="#bugfixes-27">Bugfixes</a>
-- <a href="#v2-5-0">v2\.5\.0</a>
+- <a href="#v2-5-1">v2\.5\.1</a>
- <a href="#release-summary-35">Release Summary</a>
+ - <a href="#bugfixes-28">Bugfixes</a>
+- <a href="#v2-5-0">v2\.5\.0</a>
+ - <a href="#release-summary-36">Release Summary</a>
- <a href="#minor-changes-14">Minor Changes</a>
- <a href="#v2-4-0">v2\.4\.0</a>
- - <a href="#release-summary-36">Release Summary</a>
- - <a href="#minor-changes-15">Minor Changes</a>
- - <a href="#bugfixes-28">Bugfixes</a>
-- <a href="#v2-3-0">v2\.3\.0</a>
- <a href="#release-summary-37">Release Summary</a>
- - <a href="#minor-changes-16">Minor Changes</a>
+ - <a href="#minor-changes-15">Minor Changes</a>
- <a href="#bugfixes-29">Bugfixes</a>
-- <a href="#v2-2-1">v2\.2\.1</a>
+- <a href="#v2-3-0">v2\.3\.0</a>
- <a href="#release-summary-38">Release Summary</a>
+ - <a href="#minor-changes-16">Minor Changes</a>
- <a href="#bugfixes-30">Bugfixes</a>
-- <a href="#v2-2-0">v2\.2\.0</a>
+- <a href="#v2-2-1">v2\.2\.1</a>
- <a href="#release-summary-39">Release Summary</a>
- - <a href="#minor-changes-17">Minor Changes</a>
- <a href="#bugfixes-31">Bugfixes</a>
-- <a href="#v2-1-1">v2\.1\.1</a>
+- <a href="#v2-2-0">v2\.2\.0</a>
- <a href="#release-summary-40">Release Summary</a>
+ - <a href="#minor-changes-17">Minor Changes</a>
- <a href="#bugfixes-32">Bugfixes</a>
-- <a href="#v2-1-0">v2\.1\.0</a>
+- <a href="#v2-1-1">v2\.1\.1</a>
- <a href="#release-summary-41">Release Summary</a>
- - <a href="#minor-changes-18">Minor Changes</a>
- <a href="#bugfixes-33">Bugfixes</a>
-- <a href="#v2-0-2">v2\.0\.2</a>
+- <a href="#v2-1-0">v2\.1\.0</a>
- <a href="#release-summary-42">Release Summary</a>
+ - <a href="#minor-changes-18">Minor Changes</a>
- <a href="#bugfixes-34">Bugfixes</a>
-- <a href="#v2-0-1">v2\.0\.1</a>
+- <a href="#v2-0-2">v2\.0\.2</a>
- <a href="#release-summary-43">Release Summary</a>
-- <a href="#v2-0-0">v2\.0\.0</a>
+ - <a href="#bugfixes-35">Bugfixes</a>
+- <a href="#v2-0-1">v2\.0\.1</a>
- <a href="#release-summary-44">Release Summary</a>
+- <a href="#v2-0-0">v2\.0\.0</a>
+ - <a href="#release-summary-45">Release Summary</a>
- <a href="#breaking-changes--porting-guide-1">Breaking Changes / Porting Guide</a>
- <a href="#deprecated-features-5">Deprecated Features</a>
- <a href="#removed-features-previously-deprecated-1">Removed Features \(previously deprecated\)</a>
- <a href="#v1-10-0">v1\.10\.0</a>
- - <a href="#release-summary-45">Release Summary</a>
+ - <a href="#release-summary-46">Release Summary</a>
- <a href="#minor-changes-19">Minor Changes</a>
- <a href="#v1-9-1">v1\.9\.1</a>
- - <a href="#release-summary-46">Release Summary</a>
- - <a href="#bugfixes-35">Bugfixes</a>
-- <a href="#v1-9-0">v1\.9\.0</a>
- <a href="#release-summary-47">Release Summary</a>
+ - <a href="#bugfixes-36">Bugfixes</a>
+- <a href="#v1-9-0">v1\.9\.0</a>
+ - <a href="#release-summary-48">Release Summary</a>
- <a href="#minor-changes-20">Minor Changes</a>
- <a href="#deprecated-features-6">Deprecated Features</a>
- - <a href="#bugfixes-36">Bugfixes</a>
+ - <a href="#bugfixes-37">Bugfixes</a>
- <a href="#new-plugins">New Plugins</a>
- <a href="#connection">Connection</a>
- <a href="#v1-8-0">v1\.8\.0</a>
- - <a href="#release-summary-48">Release Summary</a>
+ - <a href="#release-summary-49">Release Summary</a>
- <a href="#minor-changes-21">Minor Changes</a>
- - <a href="#bugfixes-37">Bugfixes</a>
+ - <a href="#bugfixes-38">Bugfixes</a>
- <a href="#v1-7-0">v1\.7\.0</a>
- - <a href="#release-summary-49">Release Summary</a>
+ - <a href="#release-summary-50">Release Summary</a>
- <a href="#minor-changes-22">Minor Changes</a>
- <a href="#v1-6-1">v1\.6\.1</a>
- - <a href="#release-summary-50">Release Summary</a>
- - <a href="#bugfixes-38">Bugfixes</a>
-- <a href="#v1-6-0">v1\.6\.0</a>
- <a href="#release-summary-51">Release Summary</a>
+ - <a href="#bugfixes-39">Bugfixes</a>
+- <a href="#v1-6-0">v1\.6\.0</a>
+ - <a href="#release-summary-52">Release Summary</a>
- <a href="#minor-changes-23">Minor Changes</a>
- <a href="#deprecated-features-7">Deprecated Features</a>
- - <a href="#bugfixes-39">Bugfixes</a>
+ - <a href="#bugfixes-40">Bugfixes</a>
- <a href="#v1-5-0">v1\.5\.0</a>
- - <a href="#release-summary-52">Release Summary</a>
+ - <a href="#release-summary-53">Release Summary</a>
- <a href="#minor-changes-24">Minor Changes</a>
- - <a href="#bugfixes-40">Bugfixes</a>
+ - <a href="#bugfixes-41">Bugfixes</a>
- <a href="#new-modules-3">New Modules</a>
- <a href="#v1-4-0">v1\.4\.0</a>
- - <a href="#release-summary-53">Release Summary</a>
+ - <a href="#release-summary-54">Release Summary</a>
- <a href="#minor-changes-25">Minor Changes</a>
- <a href="#breaking-changes--porting-guide-2">Breaking Changes / Porting Guide</a>
- <a href="#security-fixes-2">Security Fixes</a>
- - <a href="#bugfixes-41">Bugfixes</a>
+ - <a href="#bugfixes-42">Bugfixes</a>
- <a href="#v1-3-0">v1\.3\.0</a>
- - <a href="#release-summary-54">Release Summary</a>
+ - <a href="#release-summary-55">Release Summary</a>
- <a href="#minor-changes-26">Minor Changes</a>
- - <a href="#bugfixes-42">Bugfixes</a>
+ - <a href="#bugfixes-43">Bugfixes</a>
- <a href="#new-modules-4">New Modules</a>
- <a href="#v1-2-2">v1\.2\.2</a>
- - <a href="#release-summary-55">Release Summary</a>
+ - <a href="#release-summary-56">Release Summary</a>
- <a href="#security-fixes-3">Security Fixes</a>
- <a href="#v1-2-1">v1\.2\.1</a>
- - <a href="#release-summary-56">Release Summary</a>
- - <a href="#bugfixes-43">Bugfixes</a>
-- <a href="#v1-2-0">v1\.2\.0</a>
- <a href="#release-summary-57">Release Summary</a>
- - <a href="#minor-changes-27">Minor Changes</a>
- <a href="#bugfixes-44">Bugfixes</a>
-- <a href="#v1-1-0">v1\.1\.0</a>
+- <a href="#v1-2-0">v1\.2\.0</a>
- <a href="#release-summary-58">Release Summary</a>
+ - <a href="#minor-changes-27">Minor Changes</a>
+ - <a href="#bugfixes-45">Bugfixes</a>
+- <a href="#v1-1-0">v1\.1\.0</a>
+ - <a href="#release-summary-59">Release Summary</a>
- <a href="#minor-changes-28">Minor Changes</a>
- <a href="#deprecated-features-8">Deprecated Features</a>
- - <a href="#bugfixes-45">Bugfixes</a>
+ - <a href="#bugfixes-46">Bugfixes</a>
- <a href="#new-plugins-1">New Plugins</a>
- <a href="#connection-1">Connection</a>
- <a href="#inventory">Inventory</a>
- <a href="#new-modules-5">New Modules</a>
- <a href="#v1-0-1">v1\.0\.1</a>
- - <a href="#release-summary-59">Release Summary</a>
- - <a href="#bugfixes-46">Bugfixes</a>
-- <a href="#v1-0-0">v1\.0\.0</a>
- <a href="#release-summary-60">Release Summary</a>
+ - <a href="#bugfixes-47">Bugfixes</a>
+- <a href="#v1-0-0">v1\.0\.0</a>
+ - <a href="#release-summary-61">Release Summary</a>
- <a href="#minor-changes-29">Minor Changes</a>
- <a href="#v0-1-0">v0\.1\.0</a>
- - <a href="#release-summary-61">Release Summary</a>
+ - <a href="#release-summary-62">Release Summary</a>
- <a href="#minor-changes-30">Minor Changes</a>
- <a href="#removed-features-previously-deprecated-2">Removed Features \(previously deprecated\)</a>
- - <a href="#bugfixes-47">Bugfixes</a>
+ - <a href="#bugfixes-48">Bugfixes</a>
-<a id="v3-10-3"></a>
-## v3\.10\.3
+<a id="v3-10-4"></a>
+## v3\.10\.4
<a id="release-summary"></a>
### Release Summary
@@ -253,17 +256,31 @@ Bugfix release\.
<a id="bugfixes"></a>
### Bugfixes
+* docker\_compose \- make sure that the module uses the <code>api\_version</code> parameter \([https\://github\.com/ansible\-collections/community\.docker/pull/881](https\://github\.com/ansible\-collections/community\.docker/pull/881)\)\.
+* docker\_compose\_v2\* modules \- there was no check to make sure that one of <code>project\_src</code> and <code>definition</code> is provided\. The modules crashed if neither was provided \([https\://github\.com/ansible\-collections/community\.docker/issues/885](https\://github\.com/ansible\-collections/community\.docker/issues/885)\, [https\://github\.com/ansible\-collections/community\.docker/pull/886](https\://github\.com/ansible\-collections/community\.docker/pull/886)\)\.
+
+<a id="v3-10-3"></a>
+## v3\.10\.3
+
+<a id="release-summary-1"></a>
+### Release Summary
+
+Bugfix release\.
+
+<a id="bugfixes-1"></a>
+### Bugfixes
+
* docker and nsenter connection plugins\, docker\_container\_exec module \- avoid using the deprecated <code>ansible\.module\_utils\.compat\.selectors</code> module util with Python 3 \([https\://github\.com/ansible\-collections/community\.docker/issues/870](https\://github\.com/ansible\-collections/community\.docker/issues/870)\, [https\://github\.com/ansible\-collections/community\.docker/pull/871](https\://github\.com/ansible\-collections/community\.docker/pull/871)\)\.
<a id="v3-10-2"></a>
## v3\.10\.2
-<a id="release-summary-1"></a>
+<a id="release-summary-2"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-1"></a>
+<a id="bugfixes-2"></a>
### Bugfixes
* vendored Docker SDK for Python \- include a fix for requests 2\.32\.2\+ compatibility \([https\://github\.com/ansible\-collections/community\.docker/issues/860](https\://github\.com/ansible\-collections/community\.docker/issues/860)\, [https\://github\.com/psf/requests/issues/6707](https\://github\.com/psf/requests/issues/6707)\, [https\://github\.com/ansible\-collections/community\.docker/pull/864](https\://github\.com/ansible\-collections/community\.docker/pull/864)\)\.
@@ -271,12 +288,12 @@ Bugfix release\.
<a id="v3-10-1"></a>
## v3\.10\.1
-<a id="release-summary-2"></a>
+<a id="release-summary-3"></a>
### Release Summary
Hotfix release for requests 2\.32\.0 compatibility\.
-<a id="bugfixes-2"></a>
+<a id="bugfixes-3"></a>
### Bugfixes
* vendored Docker SDK for Python \- include a hotfix for requests 2\.32\.0 compatibility \([https\://github\.com/ansible\-collections/community\.docker/issues/860](https\://github\.com/ansible\-collections/community\.docker/issues/860)\, [https\://github\.com/docker/docker\-py/issues/3256](https\://github\.com/docker/docker\-py/issues/3256)\, [https\://github\.com/ansible\-collections/community\.docker/pull/861](https\://github\.com/ansible\-collections/community\.docker/pull/861)\)\.
@@ -295,7 +312,7 @@ Hotfix release for requests 2\.32\.0 compatibility\.
<a id="v3-10-0"></a>
## v3\.10\.0
-<a id="release-summary-3"></a>
+<a id="release-summary-4"></a>
### Release Summary
Feature release\.
@@ -321,7 +338,7 @@ Feature release\.
<a id="v3-9-0"></a>
## v3\.9\.0
-<a id="release-summary-4"></a>
+<a id="release-summary-5"></a>
### Release Summary
Bugfix and feature release\.
@@ -334,7 +351,7 @@ Bugfix and feature release\.
* docker\_compose\_v2\* modules \- allow to provide an inline definition of the compose content instead of having to provide a <code>project\_src</code> directory with the compose file written into it \([https\://github\.com/ansible\-collections/community\.docker/issues/829](https\://github\.com/ansible\-collections/community\.docker/issues/829)\, [https\://github\.com/ansible\-collections/community\.docker/pull/832](https\://github\.com/ansible\-collections/community\.docker/pull/832)\)\.
* vendored Docker SDK for Python \- remove unused code that relies on functionality deprecated in Python 3\.12 \([https\://github\.com/ansible\-collections/community\.docker/pull/834](https\://github\.com/ansible\-collections/community\.docker/pull/834)\)\.
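The inline <code>definition</code> support described in the first bullet above can be exercised roughly as follows; a minimal sketch with an illustrative single-service project \(project name, image, and ports are placeholders, not taken from the upstream docs\):

```yaml
# Sketch: deploy a Compose project from an inline definition instead of project_src
- name: Deploy demo project from an inline definition
  community.docker.docker_compose_v2:
    project_name: demo            # needed because no project_src directory is given
    definition:
      services:
        web:
          image: nginx:latest
          ports:
            - "8080:80"
    state: present
```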
-<a id="bugfixes-3"></a>
+<a id="bugfixes-4"></a>
### Bugfixes
* docker\_compose\_v2\* \- allow <code>project\_src</code> to be a relative path\, by converting it to an absolute path before using it \([https\://github\.com/ansible\-collections/community\.docker/issues/827](https\://github\.com/ansible\-collections/community\.docker/issues/827)\, [https\://github\.com/ansible\-collections/community\.docker/pull/828](https\://github\.com/ansible\-collections/community\.docker/pull/828)\)\.
@@ -344,7 +361,7 @@ Bugfix and feature release\.
<a id="v3-8-1"></a>
## v3\.8\.1
-<a id="release-summary-5"></a>
+<a id="release-summary-6"></a>
### Release Summary
Bugfix release
@@ -354,7 +371,7 @@ Bugfix release
* docker\_containers\, docker\_machine\, and docker\_swarm inventory plugins \- make sure all data received from the Docker daemon / Docker machine is marked as unsafe\, so remote code execution by obtaining texts that can be evaluated as templates is not possible \([https\://www\.die\-welt\.net/2024/03/remote\-code\-execution\-in\-ansible\-dynamic\-inventory\-plugins/](https\://www\.die\-welt\.net/2024/03/remote\-code\-execution\-in\-ansible\-dynamic\-inventory\-plugins/)\, [https\://github\.com/ansible\-collections/community\.docker/pull/815](https\://github\.com/ansible\-collections/community\.docker/pull/815)\)\.
-<a id="bugfixes-4"></a>
+<a id="bugfixes-5"></a>
### Bugfixes
* docker\_compose\_v2 \- do not fail when non\-fatal errors occur\. This can happen when pulling an image fails\, but then the image can be built for another service\. Docker Compose emits an error in that case\, but <code>docker compose up</code> still completes successfully \([https\://github\.com/ansible\-collections/community\.docker/issues/807](https\://github\.com/ansible\-collections/community\.docker/issues/807)\, [https\://github\.com/ansible\-collections/community\.docker/pull/810](https\://github\.com/ansible\-collections/community\.docker/pull/810)\, [https\://github\.com/ansible\-collections/community\.docker/pull/811](https\://github\.com/ansible\-collections/community\.docker/pull/811)\)\.
@@ -365,7 +382,7 @@ Bugfix release
<a id="v3-8-0"></a>
## v3\.8\.0
-<a id="release-summary-6"></a>
+<a id="release-summary-7"></a>
### Release Summary
Bugfix and feature release\.
@@ -377,7 +394,7 @@ Bugfix and feature release\.
* docker\_container \- the <code>pull\_check\_mode\_behavior</code> option now allows to control the module\'s behavior in check mode when <code>pull\=always</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/792](https\://github\.com/ansible\-collections/community\.docker/issues/792)\, [https\://github\.com/ansible\-collections/community\.docker/pull/797](https\://github\.com/ansible\-collections/community\.docker/pull/797)\)\.
* docker\_container \- the <code>pull</code> option now accepts the three values <code>always</code>\, <code>missing\_image</code> \(default\)\, and <code>never</code>\, next to the previously valid values <code>true</code> \(equivalent to <code>always</code>\) and <code>false</code> \(equivalent to <code>missing\_image</code>\)\. This allows the equivalent of <code>\-\-pull\=never</code> from the Docker command line \([https\://github\.com/ansible\-collections/community\.docker/issues/783](https\://github\.com/ansible\-collections/community\.docker/issues/783)\, [https\://github\.com/ansible\-collections/community\.docker/pull/797](https\://github\.com/ansible\-collections/community\.docker/pull/797)\)\.
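A minimal sketch of the new <code>pull\=never</code> behaviour described above \(container and image names are illustrative\):

```yaml
# Sketch: never pull the image; fail instead if it is not already present locally
- name: Ensure the web container runs without ever pulling the image
  community.docker.docker_container:
    name: web
    image: nginx:latest
    pull: never
    state: started
```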
-<a id="bugfixes-5"></a>
+<a id="bugfixes-6"></a>
### Bugfixes
* docker\_compose\_v2 \- do not consider a <code>Waiting</code> event as an action/change \([https\://github\.com/ansible\-collections/community\.docker/pull/804](https\://github\.com/ansible\-collections/community\.docker/pull/804)\)\.
@@ -387,7 +404,7 @@ Bugfix and feature release\.
<a id="v3-7-0"></a>
## v3\.7\.0
-<a id="release-summary-7"></a>
+<a id="release-summary-8"></a>
### Release Summary
Bugfix and feature release\.
@@ -398,7 +415,7 @@ Bugfix and feature release\.
* docker\_compose\_v2 \- add <code>scale</code> option to allow to explicitly scale services \([https\://github\.com/ansible\-collections/community\.docker/pull/776](https\://github\.com/ansible\-collections/community\.docker/pull/776)\)\.
* docker\_compose\_v2\, docker\_compose\_v2\_pull \- support <code>files</code> parameter to specify multiple Compose files \([https\://github\.com/ansible\-collections/community\.docker/issues/772](https\://github\.com/ansible\-collections/community\.docker/issues/772)\, [https\://github\.com/ansible\-collections/community\.docker/pull/775](https\://github\.com/ansible\-collections/community\.docker/pull/775)\)\.
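The two additions above can be combined roughly like this; a sketch assuming the Compose files already exist under <code>project\_src</code> \(paths and service names are illustrative\):

```yaml
# Sketch: multiple Compose files plus explicit scaling of one service
- name: Bring the stack up from two Compose files and scale the web service
  community.docker.docker_compose_v2:
    project_src: /opt/app
    files:
      - docker-compose.yml
      - docker-compose.override.yml
    scale:
      web: 3
    state: present
```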
-<a id="bugfixes-6"></a>
+<a id="bugfixes-7"></a>
### Bugfixes
* docker\_compose\_v2 \- properly parse dry\-run build events from <code>stderr</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/778](https\://github\.com/ansible\-collections/community\.docker/issues/778)\, [https\://github\.com/ansible\-collections/community\.docker/pull/779](https\://github\.com/ansible\-collections/community\.docker/pull/779)\)\.
@@ -412,7 +429,7 @@ Bugfix and feature release\.
<a id="v3-6-0"></a>
## v3\.6\.0
-<a id="release-summary-8"></a>
+<a id="release-summary-9"></a>
### Release Summary
Bugfix and feature release\.
@@ -442,7 +459,7 @@ so the main difference is that instead of some Python requirements\, they depend
* docker\_image \- allow to specify memory size and swap memory size in other units than bytes \([https\://github\.com/ansible\-collections/community\.docker/pull/727](https\://github\.com/ansible\-collections/community\.docker/pull/727)\)\.
* inventory plugins \- add <code>filter</code> option which allows to include and exclude hosts based on Jinja2 conditions \([https\://github\.com/ansible\-collections/community\.docker/pull/698](https\://github\.com/ansible\-collections/community\.docker/pull/698)\, [https\://github\.com/ansible\-collections/community\.docker/issues/610](https\://github\.com/ansible\-collections/community\.docker/issues/610)\)\.
-<a id="bugfixes-7"></a>
+<a id="bugfixes-8"></a>
### Bugfixes
* Use <code>unix\:///var/run/docker\.sock</code> instead of the legacy <code>unix\://var/run/docker\.sock</code> as default for <code>docker\_host</code> \([https\://github\.com/ansible\-collections/community\.docker/pull/736](https\://github\.com/ansible\-collections/community\.docker/pull/736)\)\.
@@ -462,7 +479,7 @@ so the main difference is that instead of some Python requirements\, they depend
<a id="v3-5-0"></a>
## v3\.5\.0
-<a id="release-summary-9"></a>
+<a id="release-summary-10"></a>
### Release Summary
Bugfix and feature release\.
@@ -478,7 +495,7 @@ Bugfix and feature release\.
* docker\_container \- the default <code>ignore</code> for the <code>image\_name\_mismatch</code> parameter has been deprecated and will switch to <code>recreate</code> in community\.docker 4\.0\.0\. A deprecation warning will be printed in situations where the default value is used and where a behavior would change once the default changes \([https\://github\.com/ansible\-collections/community\.docker/pull/703](https\://github\.com/ansible\-collections/community\.docker/pull/703)\)\.
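Setting the option explicitly avoids the deprecation warning; a minimal sketch opting in to the future default \(container and image names are illustrative\):

```yaml
# Sketch: opt in to the future default instead of relying on the deprecated implicit 'ignore'
- name: Recreate the container if the image name no longer matches
  community.docker.docker_container:
    name: web
    image: nginx:latest
    image_name_mismatch: recreate
    state: started
```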
-<a id="bugfixes-8"></a>
+<a id="bugfixes-9"></a>
### Bugfixes
* modules and plugins using the Docker SDK for Python \- remove <code>ssl\_version</code> from the parameters passed to Docker SDK for Python 7\.0\.0\+\. Explicitly fail with a nicer error message if it was explicitly set in this case \([https\://github\.com/ansible\-collections/community\.docker/pull/715](https\://github\.com/ansible\-collections/community\.docker/pull/715)\)\.
@@ -488,12 +505,12 @@ Bugfix and feature release\.
<a id="v3-4-11"></a>
## v3\.4\.11
-<a id="release-summary-10"></a>
+<a id="release-summary-11"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-9"></a>
+<a id="bugfixes-10"></a>
### Bugfixes
* docker\_volume \- fix crash caused by accessing an empty dictionary\. The <code>has\_different\_config\(\)</code> method was raising an <code>AttributeError</code> because the <code>self\.existing\_volume\[\"Labels\"\]</code> dictionary was <code>None</code> \([https\://github\.com/ansible\-collections/community\.docker/pull/702](https\://github\.com/ansible\-collections/community\.docker/pull/702)\)\.
@@ -501,12 +518,12 @@ Bugfix release\.
<a id="v3-4-10"></a>
## v3\.4\.10
-<a id="release-summary-11"></a>
+<a id="release-summary-12"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-10"></a>
+<a id="bugfixes-11"></a>
### Bugfixes
* docker\_swarm \- make init and join operations work again with Docker SDK for Python before 4\.0\.0 \([https\://github\.com/ansible\-collections/community\.docker/issues/695](https\://github\.com/ansible\-collections/community\.docker/issues/695)\, [https\://github\.com/ansible\-collections/community\.docker/pull/696](https\://github\.com/ansible\-collections/community\.docker/pull/696)\)\.
@@ -514,12 +531,12 @@ Bugfix release\.
<a id="v3-4-9"></a>
## v3\.4\.9
-<a id="release-summary-12"></a>
+<a id="release-summary-13"></a>
### Release Summary
Maintenance release with updated documentation and vendored Docker SDK for Python code\.
-<a id="bugfixes-11"></a>
+<a id="bugfixes-12"></a>
### Bugfixes
* vendored Docker SDK for Python code \- cherry\-pick changes from the Docker SDK for Python code to align code\. These changes should not affect the parts used by the collection\'s code \([https\://github\.com/ansible\-collections/community\.docker/pull/694](https\://github\.com/ansible\-collections/community\.docker/pull/694)\)\.
@@ -527,7 +544,7 @@ Maintenance release with updated documentation and vendored Docker SDK for Pytho
<a id="v3-4-8"></a>
## v3\.4\.8
-<a id="release-summary-13"></a>
+<a id="release-summary-14"></a>
### Release Summary
Maintenance release with updated documentation\.
@@ -548,12 +565,12 @@ for the rendered HTML version of the documentation of the latest release\.
<a id="v3-4-7"></a>
## v3\.4\.7
-<a id="release-summary-14"></a>
+<a id="release-summary-15"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-12"></a>
+<a id="bugfixes-13"></a>
### Bugfixes
* docker\_swarm\_info \- if <code>service\=true</code> is used\, do not crash when a service without an endpoint spec is encountered \([https\://github\.com/ansible\-collections/community\.docker/issues/636](https\://github\.com/ansible\-collections/community\.docker/issues/636)\, [https\://github\.com/ansible\-collections/community\.docker/pull/637](https\://github\.com/ansible\-collections/community\.docker/pull/637)\)\.
@@ -561,12 +578,12 @@ Bugfix release\.
<a id="v3-4-6"></a>
## v3\.4\.6
-<a id="release-summary-15"></a>
+<a id="release-summary-16"></a>
### Release Summary
Bugfix release with documentation warnings about using certain functionality when connecting to the Docker daemon with TCP TLS\.
-<a id="bugfixes-13"></a>
+<a id="bugfixes-14"></a>
### Bugfixes
* socket\_handler module utils \- make sure this fully works when Docker SDK for Python is not available \([https\://github\.com/ansible\-collections/community\.docker/pull/620](https\://github\.com/ansible\-collections/community\.docker/pull/620)\)\.
@@ -583,12 +600,12 @@ Bugfix release with documentation warnings about using certain functionality whe
<a id="v3-4-5"></a>
## v3\.4\.5
-<a id="release-summary-16"></a>
+<a id="release-summary-17"></a>
### Release Summary
Maintenance release which adds compatibility with requests 2\.29\.0 and 2\.30\.0 and urllib3 2\.0\.
-<a id="bugfixes-14"></a>
+<a id="bugfixes-15"></a>
### Bugfixes
* Make vendored Docker SDK for Python code compatible with requests 2\.29\.0 and urllib3 2\.0 \([https\://github\.com/ansible\-collections/community\.docker/pull/613](https\://github\.com/ansible\-collections/community\.docker/pull/613)\)\.
@@ -596,7 +613,7 @@ Maintenance release which adds compatibility with requests 2\.29\.0 and 2\.30\.0
<a id="v3-4-4"></a>
## v3\.4\.4
-<a id="release-summary-17"></a>
+<a id="release-summary-18"></a>
### Release Summary
Maintenance release with updated EE requirements and updated documentation\.
@@ -614,7 +631,7 @@ Maintenance release with updated EE requirements and updated documentation\.
<a id="v3-4-3"></a>
## v3\.4\.3
-<a id="release-summary-18"></a>
+<a id="release-summary-19"></a>
### Release Summary
Maintenance release with improved documentation\.
@@ -622,12 +639,12 @@ Maintenance release with improved documentation\.
<a id="v3-4-2"></a>
## v3\.4\.2
-<a id="release-summary-19"></a>
+<a id="release-summary-20"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-15"></a>
+<a id="bugfixes-16"></a>
### Bugfixes
* docker\_prune \- return correct value for <code>changed</code>\. So far the module always claimed that nothing changed \([https\://github\.com/ansible\-collections/community\.docker/pull/593](https\://github\.com/ansible\-collections/community\.docker/pull/593)\)\.
@@ -635,12 +652,12 @@ Bugfix release\.
<a id="v3-4-1"></a>
## v3\.4\.1
-<a id="release-summary-20"></a>
+<a id="release-summary-21"></a>
### Release Summary
Regular bugfix release\.
-<a id="bugfixes-16"></a>
+<a id="bugfixes-17"></a>
### Bugfixes
* docker\_api connection plugin\, docker\_container\_exec\, docker\_container\_copy\_into \- properly close socket to Daemon after executing commands in containers \([https\://github\.com/ansible\-collections/community\.docker/pull/582](https\://github\.com/ansible\-collections/community\.docker/pull/582)\)\.
@@ -650,7 +667,7 @@ Regular bugfix release\.
<a id="v3-4-0"></a>
## v3\.4\.0
-<a id="release-summary-21"></a>
+<a id="release-summary-22"></a>
### Release Summary
Regular bugfix and feature release\.
@@ -661,7 +678,7 @@ Regular bugfix and feature release\.
* docker\_api connection plugin \- when copying files to/from a container\, stream the file contents instead of first reading them to memory \([https\://github\.com/ansible\-collections/community\.docker/pull/545](https\://github\.com/ansible\-collections/community\.docker/pull/545)\)\.
* docker\_host\_info \- allow to list all containers with new option <code>containers\_all</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/535](https\://github\.com/ansible\-collections/community\.docker/issues/535)\, [https\://github\.com/ansible\-collections/community\.docker/pull/538](https\://github\.com/ansible\-collections/community\.docker/pull/538)\)\.
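A minimal sketch of the new <code>containers\_all</code> option in use \(the register variable name is illustrative\):

```yaml
# Sketch: list every container, including stopped ones
- name: Gather information about all containers
  community.docker.docker_host_info:
    containers: true
    containers_all: true
  register: docker_host_facts
```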
-<a id="bugfixes-17"></a>
+<a id="bugfixes-18"></a>
### Bugfixes
* docker\_api connection plugin \- fix error handling when 409 Conflict is returned by the Docker daemon in case of a stopped container \([https\://github\.com/ansible\-collections/community\.docker/pull/546](https\://github\.com/ansible\-collections/community\.docker/pull/546)\)\.
@@ -677,12 +694,12 @@ Regular bugfix and feature release\.
<a id="v3-3-2"></a>
## v3\.3\.2
-<a id="release-summary-22"></a>
+<a id="release-summary-23"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-18"></a>
+<a id="bugfixes-19"></a>
### Bugfixes
* docker\_container \- when <code>detach\=false</code>\, wait indefinitely and not at most one minute\. This was the behavior with Docker SDK for Python\, and was accidentally changed in 3\.0\.0 \([https\://github\.com/ansible\-collections/community\.docker/issues/526](https\://github\.com/ansible\-collections/community\.docker/issues/526)\, [https\://github\.com/ansible\-collections/community\.docker/pull/527](https\://github\.com/ansible\-collections/community\.docker/pull/527)\)\.
@@ -690,12 +707,12 @@ Bugfix release\.
<a id="v3-3-1"></a>
## v3\.3\.1
-<a id="release-summary-23"></a>
+<a id="release-summary-24"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-19"></a>
+<a id="bugfixes-20"></a>
### Bugfixes
* current\_container\_facts \- make container detection work better in more cases \([https\://github\.com/ansible\-collections/community\.docker/pull/522](https\://github\.com/ansible\-collections/community\.docker/pull/522)\)\.
@@ -703,7 +720,7 @@ Bugfix release\.
<a id="v3-3-0"></a>
## v3\.3\.0
-<a id="release-summary-24"></a>
+<a id="release-summary-25"></a>
### Release Summary
Feature and bugfix release\.
@@ -714,7 +731,7 @@ Feature and bugfix release\.
* current\_container\_facts \- make work with current Docker version\, also support Podman \([https\://github\.com/ansible\-collections/community\.docker/pull/510](https\://github\.com/ansible\-collections/community\.docker/pull/510)\)\.
* docker\_image \- when using <code>archive\_path</code>\, detect whether changes are necessary based on the image ID \(hash\)\. If the existing tar archive matches the source\, do nothing\. Previously\, each task execution re\-created the archive \([https\://github\.com/ansible\-collections/community\.docker/pull/500](https\://github\.com/ansible\-collections/community\.docker/pull/500)\)\.
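A minimal sketch of the now idempotent <code>archive\_path</code> handling \(image name and path are illustrative\):

```yaml
# Sketch: the tar archive is only rewritten when the image ID has changed
- name: Pull an image and keep an up-to-date archive of it
  community.docker.docker_image:
    name: nginx:latest
    source: pull
    archive_path: /tmp/nginx.tar
```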
-<a id="bugfixes-20"></a>
+<a id="bugfixes-21"></a>
### Bugfixes
* docker\_container\_exec \- fix <code>chdir</code> option which was ignored since community\.docker 3\.0\.0 \([https\://github\.com/ansible\-collections/community\.docker/issues/517](https\://github\.com/ansible\-collections/community\.docker/issues/517)\, [https\://github\.com/ansible\-collections/community\.docker/pull/518](https\://github\.com/ansible\-collections/community\.docker/pull/518)\)\.
@@ -723,12 +740,12 @@ Feature and bugfix release\.
<a id="v3-2-2"></a>
## v3\.2\.2
-<a id="release-summary-25"></a>
+<a id="release-summary-26"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-21"></a>
+<a id="bugfixes-22"></a>
### Bugfixes
* docker\_container \- the <code>kill\_signal</code> option erroneously did not accept strings anymore since 3\.0\.0 \([https\://github\.com/ansible\-collections/community\.docker/issues/505](https\://github\.com/ansible\-collections/community\.docker/issues/505)\, [https\://github\.com/ansible\-collections/community\.docker/pull/506](https\://github\.com/ansible\-collections/community\.docker/pull/506)\)\.
@@ -736,7 +753,7 @@ Bugfix release\.
<a id="v3-2-1"></a>
## v3\.2\.1
-<a id="release-summary-26"></a>
+<a id="release-summary-27"></a>
### Release Summary
Maintenance release with improved documentation\.
@@ -744,7 +761,7 @@ Maintenance release with improved documentation\.
<a id="v3-2-0"></a>
## v3\.2\.0
-<a id="release-summary-27"></a>
+<a id="release-summary-28"></a>
### Release Summary
Feature and deprecation release\.
@@ -763,7 +780,7 @@ Feature and deprecation release\.
<a id="v3-1-0"></a>
## v3\.1\.0
-<a id="release-summary-28"></a>
+<a id="release-summary-29"></a>
### Release Summary
Feature release\.
@@ -777,12 +794,12 @@ Feature release\.
<a id="v3-0-2"></a>
## v3\.0\.2
-<a id="release-summary-29"></a>
+<a id="release-summary-30"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-22"></a>
+<a id="bugfixes-23"></a>
### Bugfixes
* docker\_image \- fix build argument handling \([https\://github\.com/ansible\-collections/community\.docker/issues/455](https\://github\.com/ansible\-collections/community\.docker/issues/455)\, [https\://github\.com/ansible\-collections/community\.docker/pull/456](https\://github\.com/ansible\-collections/community\.docker/pull/456)\)\.
@@ -790,12 +807,12 @@ Bugfix release\.
<a id="v3-0-1"></a>
## v3\.0\.1
-<a id="release-summary-30"></a>
+<a id="release-summary-31"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-23"></a>
+<a id="bugfixes-24"></a>
### Bugfixes
* docker\_container \- fix handling of <code>env\_file</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/451](https\://github\.com/ansible\-collections/community\.docker/issues/451)\, [https\://github\.com/ansible\-collections/community\.docker/pull/452](https\://github\.com/ansible\-collections/community\.docker/pull/452)\)\.
@@ -803,7 +820,7 @@ Bugfix release\.
<a id="v3-0-0"></a>
## v3\.0\.0
-<a id="release-summary-31"></a>
+<a id="release-summary-32"></a>
### Release Summary
The 3\.0\.0 release features a rewrite of the <code>docker\_container</code> module\, and many modules and plugins no longer depend on the Docker SDK for Python\.
@@ -870,7 +887,7 @@ The 3\.0\.0 release features a rewrite of the <code>docker\_container</code> mod
* modules and plugins communicating directly with the Docker daemon \- when connecting by SSH and not using <code>use\_ssh\_client\=true</code>\, reject unknown host keys instead of accepting them\. This is only a change relative to older community\.docker 3\.0\.0 pre\-releases or with respect to Docker SDK for Python \< 6\.0\.0\. Docker SDK for Python 6\.0\.0 will also include this change \([https\://github\.com/ansible\-collections/community\.docker/pull/434](https\://github\.com/ansible\-collections/community\.docker/pull/434)\)\.
-<a id="bugfixes-24"></a>
+<a id="bugfixes-25"></a>
### Bugfixes
* docker\_image \- when composing the build context\, trim trailing whitespace from <code>\.dockerignore</code> entries\. This is only a change relative to older community\.docker 3\.0\.0 pre\-releases or with respect to Docker SDK for Python \< 6\.0\.0\. Docker SDK for Python 6\.0\.0 will also include this change \([https\://github\.com/ansible\-collections/community\.docker/pull/434](https\://github\.com/ansible\-collections/community\.docker/pull/434)\)\.
@@ -884,7 +901,7 @@ The 3\.0\.0 release features a rewrite of the <code>docker\_container</code> mod
<a id="v2-7-0"></a>
## v2\.7\.0
-<a id="release-summary-32"></a>
+<a id="release-summary-33"></a>
### Release Summary
Bugfix and deprecation release\. The next 2\.x\.y releases will only be bugfix releases\, the next expected minor/major release will be 3\.0\.0 with some major changes\.
@@ -900,7 +917,7 @@ Bugfix and deprecation release\. The next 2\.x\.y releases will only be bugfix r
* Support for Docker API version 1\.20 to 1\.24 has been deprecated and will be removed in community\.docker 3\.0\.0\. The first Docker version supporting API version 1\.25 was Docker 1\.13\, released in January 2017\. This affects the modules <code>docker\_container</code>\, <code>docker\_container\_exec</code>\, <code>docker\_container\_info</code>\, <code>docker\_compose</code>\, <code>docker\_login</code>\, <code>docker\_image</code>\, <code>docker\_image\_info</code>\, <code>docker\_image\_load</code>\, <code>docker\_host\_info</code>\, <code>docker\_network</code>\, <code>docker\_network\_info</code>\, <code>docker\_node\_info</code>\, <code>docker\_swarm\_info</code>\, <code>docker\_swarm\_service</code>\, <code>docker\_swarm\_service\_info</code>\, <code>docker\_volume\_info</code>\, and <code>docker\_volume</code>\, whose minimally supported API version is between 1\.20 and 1\.24 \([https\://github\.com/ansible\-collections/community\.docker/pull/396](https\://github\.com/ansible\-collections/community\.docker/pull/396)\)\.
* Support for Python 2\.6 is deprecated and will be removed in the next major release \(community\.docker 3\.0\.0\)\. Some modules might still work with Python 2\.6\, but we will no longer try to ensure compatibility \([https\://github\.com/ansible\-collections/community\.docker/pull/388](https\://github\.com/ansible\-collections/community\.docker/pull/388)\)\.
-<a id="bugfixes-25"></a>
+<a id="bugfixes-26"></a>
### Bugfixes
* Docker SDK for Python based modules and plugins \- if the API version is specified as an option\, use that one to validate API version requirements of module/plugin options instead of the latest API version supported by the Docker daemon\. This also avoids one unnecessary API call per module/plugin \([https\://github\.com/ansible\-collections/community\.docker/pull/389](https\://github\.com/ansible\-collections/community\.docker/pull/389)\)\.
@@ -908,7 +925,7 @@ Bugfix and deprecation release\. The next 2\.x\.y releases will only be bugfix r
<a id="v2-6-0"></a>
## v2\.6\.0
-<a id="release-summary-33"></a>
+<a id="release-summary-34"></a>
### Release Summary
Bugfix and feature release\.
@@ -926,7 +943,7 @@ Bugfix and feature release\.
* Various modules \- the default of <code>tls\_hostname</code> that was supposed to be removed in community\.docker 2\.0\.0 will now be removed in version 3\.0\.0 \([https\://github\.com/ansible\-collections/community\.docker/pull/362](https\://github\.com/ansible\-collections/community\.docker/pull/362)\)\.
* docker\_stack \- the return values <code>out</code> and <code>err</code> that were supposed to be removed in community\.docker 2\.0\.0 will now be removed in version 3\.0\.0 \([https\://github\.com/ansible\-collections/community\.docker/pull/362](https\://github\.com/ansible\-collections/community\.docker/pull/362)\)\.
-<a id="bugfixes-26"></a>
+<a id="bugfixes-27"></a>
### Bugfixes
* docker\_container \- fail with a meaningful message instead of crashing if a port is specified with more than three colon\-separated parts \([https\://github\.com/ansible\-collections/community\.docker/pull/367](https\://github\.com/ansible\-collections/community\.docker/pull/367)\, [https\://github\.com/ansible\-collections/community\.docker/issues/365](https\://github\.com/ansible\-collections/community\.docker/issues/365)\)\.
@@ -935,12 +952,12 @@ Bugfix and feature release\.
<a id="v2-5-1"></a>
## v2\.5\.1
-<a id="release-summary-34"></a>
+<a id="release-summary-35"></a>
### Release Summary
Maintenance release\.
-<a id="bugfixes-27"></a>
+<a id="bugfixes-28"></a>
### Bugfixes
* Include <code>PSF\-license\.txt</code> file for <code>plugins/module\_utils/\_version\.py</code>\.
@@ -948,7 +965,7 @@ Maintenance release\.
<a id="v2-5-0"></a>
## v2\.5\.0
-<a id="release-summary-35"></a>
+<a id="release-summary-36"></a>
### Release Summary
Regular feature release\.
@@ -962,7 +979,7 @@ Regular feature release\.
<a id="v2-4-0"></a>
## v2\.4\.0
-<a id="release-summary-36"></a>
+<a id="release-summary-37"></a>
### Release Summary
Regular feature and bugfix release\.
@@ -974,7 +991,7 @@ Regular feature and bugfix release\.
* current\_container\_facts \- add detection for GitHub Actions \([https\://github\.com/ansible\-collections/community\.docker/pull/336](https\://github\.com/ansible\-collections/community\.docker/pull/336)\)\.
* docker\_container \- support returning Docker container log output when using Docker\'s <code>local</code> logging driver\, an optimized local logging driver introduced in Docker 18\.09 \([https\://github\.com/ansible\-collections/community\.docker/pull/337](https\://github\.com/ansible\-collections/community\.docker/pull/337)\)\.
-<a id="bugfixes-28"></a>
+<a id="bugfixes-29"></a>
### Bugfixes
* docker connection plugin \- make sure that <code>docker\_extra\_args</code> is used for querying the Docker version\. Also ensures that the Docker version is only queried when needed\. This is currently the case if a remote user is specified \([https\://github\.com/ansible\-collections/community\.docker/issues/325](https\://github\.com/ansible\-collections/community\.docker/issues/325)\, [https\://github\.com/ansible\-collections/community\.docker/pull/327](https\://github\.com/ansible\-collections/community\.docker/pull/327)\)\.
@@ -982,7 +999,7 @@ Regular feature and bugfix release\.
<a id="v2-3-0"></a>
## v2\.3\.0
-<a id="release-summary-37"></a>
+<a id="release-summary-38"></a>
### Release Summary
Regular feature and bugfix release\.
@@ -996,7 +1013,7 @@ Regular feature and bugfix release\.
* docker\_api connection plugin \- implement connection reset by clearing internal container user/group ID cache \([https\://github\.com/ansible\-collections/community\.docker/pull/312](https\://github\.com/ansible\-collections/community\.docker/pull/312)\)\.
* docker\_api connection plugin \- the plugin supports new ways to define the timeout\. These are the <code>ANSIBLE\_DOCKER\_TIMEOUT</code> environment variable\, the <code>timeout</code> setting in the <code>docker\_connection</code> section of <code>ansible\.cfg</code>\, and the <code>ansible\_docker\_timeout</code> variable \([https\://github\.com/ansible\-collections/community\.docker/pull/308](https\://github\.com/ansible\-collections/community\.docker/pull/308)\)\.
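One of the three mechanisms mentioned above, sketched as a host variable file; the host name and timeout value are illustrative, not from the upstream docs:

```yaml
# Sketch: host_vars/app-container.yml - per-host timeout for the docker_api connection plugin
ansible_connection: community.docker.docker_api
ansible_docker_timeout: 120
```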
-<a id="bugfixes-29"></a>
+<a id="bugfixes-30"></a>
### Bugfixes
* docker connection plugin \- fix option handling to be compatible with ansible\-core 2\.13 \([https\://github\.com/ansible\-collections/community\.docker/pull/297](https\://github\.com/ansible\-collections/community\.docker/pull/297)\, [https\://github\.com/ansible\-collections/community\.docker/issues/307](https\://github\.com/ansible\-collections/community\.docker/issues/307)\)\.
@@ -1005,12 +1022,12 @@ Regular feature and bugfix release\.
<a id="v2-2-1"></a>
## v2\.2\.1
-<a id="release-summary-38"></a>
+<a id="release-summary-39"></a>
### Release Summary
Regular bugfix release\.
-<a id="bugfixes-30"></a>
+<a id="bugfixes-31"></a>
### Bugfixes
* docker\_compose \- fix Python 3 type error when extracting warnings or errors from docker\-compose\'s output \([https\://github\.com/ansible\-collections/community\.docker/pull/305](https\://github\.com/ansible\-collections/community\.docker/pull/305)\)\.
@@ -1018,7 +1035,7 @@ Regular bugfix release\.
<a id="v2-2-0"></a>
## v2\.2\.0
-<a id="release-summary-39"></a>
+<a id="release-summary-40"></a>
### Release Summary
Regular feature and bugfix release\.
@@ -1030,7 +1047,7 @@ Regular feature and bugfix release\.
* docker\_secret \- add support for rolling update\, set <code>rolling\_versions</code> to <code>true</code> to enable \([https\://github\.com/ansible\-collections/community\.docker/pull/293](https\://github\.com/ansible\-collections/community\.docker/pull/293)\, [https\://github\.com/ansible\-collections/community\.docker/issues/21](https\://github\.com/ansible\-collections/community\.docker/issues/21)\)\.
* docker\_swarm\_service \- add support for setting capabilities with the <code>cap\_add</code> and <code>cap\_drop</code> parameters\. Usage is the same as with the <code>capabilities</code> and <code>cap\_drop</code> parameters for <code>docker\_container</code> \([https\://github\.com/ansible\-collections/community\.docker/pull/294](https\://github\.com/ansible\-collections/community\.docker/pull/294)\)\.
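A minimal sketch of rolling secret updates with the new <code>rolling\_versions</code> switch \(secret name and payload are illustrative\):

```yaml
# Sketch: each changed run creates a new versioned secret instead of failing on an in-use one
- name: Create or roll the application secret
  community.docker.docker_secret:
    name: app_secret
    data: "{{ 'super-secret-value' | b64encode }}"
    data_is_b64: true
    rolling_versions: true
    state: present
```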
-<a id="bugfixes-31"></a>
+<a id="bugfixes-32"></a>
### Bugfixes
* docker\_container\, docker\_image \- adjust image finding code to peculiarities of <code>podman\-docker</code>\'s API emulation when Docker short names like <code>redis</code> are used \([https\://github\.com/ansible\-collections/community\.docker/issues/292](https\://github\.com/ansible\-collections/community\.docker/issues/292)\)\.
@@ -1038,12 +1055,12 @@ Regular feature and bugfix release\.
<a id="v2-1-1"></a>
## v2\.1\.1
-<a id="release-summary-40"></a>
+<a id="release-summary-41"></a>
### Release Summary
Emergency release to amend breaking change in previous release\.
-<a id="bugfixes-32"></a>
+<a id="bugfixes-33"></a>
### Bugfixes
* Fix unintended breaking change caused by [an earlier fix](https\://github\.com/ansible\-collections/community\.docker/pull/258) by vendoring the deprecated Python standard library <code>distutils\.version</code> until this collection stops supporting Ansible 2\.9 and ansible\-base 2\.10 \([https\://github\.com/ansible\-collections/community\.docker/issues/267](https\://github\.com/ansible\-collections/community\.docker/issues/267)\, [https\://github\.com/ansible\-collections/community\.docker/pull/269](https\://github\.com/ansible\-collections/community\.docker/pull/269)\)\.
@@ -1051,7 +1068,7 @@ Emergency release to amend breaking change in previous release\.
<a id="v2-1-0"></a>
## v2\.1\.0
-<a id="release-summary-41"></a>
+<a id="release-summary-42"></a>
### Release Summary
Feature and bugfix release\.
@@ -1062,7 +1079,7 @@ Feature and bugfix release\.
* docker\_container\_exec \- add <code>detach</code> parameter \([https\://github\.com/ansible\-collections/community\.docker/issues/250](https\://github\.com/ansible\-collections/community\.docker/issues/250)\, [https\://github\.com/ansible\-collections/community\.docker/pull/255](https\://github\.com/ansible\-collections/community\.docker/pull/255)\)\.
* docker\_container\_exec \- add <code>env</code> option \([https\://github\.com/ansible\-collections/community\.docker/issues/248](https\://github\.com/ansible\-collections/community\.docker/issues/248)\, [https\://github\.com/ansible\-collections/community\.docker/pull/254](https\://github\.com/ansible\-collections/community\.docker/pull/254)\)\.
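A minimal sketch of the new <code>env</code> option \(container name, variable, and register name are illustrative\):

```yaml
# Sketch: pass an extra environment variable into the command executed in the container
- name: Print an injected environment variable inside the running container
  community.docker.docker_container_exec:
    container: web
    command: printenv GREETING
    env:
      GREETING: hello
  register: exec_result
```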
-<a id="bugfixes-33"></a>
+<a id="bugfixes-34"></a>
### Bugfixes
* Various modules and plugins \- use vendored version of <code>distutils\.version</code> included in ansible\-core 2\.12 if available\. This avoids breakage when <code>distutils</code> is removed from the standard library of Python 3\.12\. Note that ansible\-core 2\.11\, ansible\-base 2\.10 and Ansible 2\.9 are right now not compatible with Python 3\.12\, hence this fix does not target these ansible\-core/\-base/2\.9 versions \([https\://github\.com/ansible\-collections/community\.docker/pull/258](https\://github\.com/ansible\-collections/community\.docker/pull/258)\)\.
@@ -1072,12 +1089,12 @@ Feature and bugfix release\.
<a id="v2-0-2"></a>
## v2\.0\.2
-<a id="release-summary-42"></a>
+<a id="release-summary-43"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-34"></a>
+<a id="bugfixes-35"></a>
### Bugfixes
* docker\_api connection plugin \- avoid passing an unnecessary argument to a Docker SDK for Python call that is only supported by version 3\.0\.0 or later \([https\://github\.com/ansible\-collections/community\.docker/pull/243](https\://github\.com/ansible\-collections/community\.docker/pull/243)\)\.
@@ -1088,7 +1105,7 @@ Bugfix release\.
<a id="v2-0-1"></a>
## v2\.0\.1
-<a id="release-summary-43"></a>
+<a id="release-summary-44"></a>
### Release Summary
Maintenance release with some documentation fixes\.
@@ -1096,7 +1113,7 @@ Maintenance release with some documentation fixes\.
<a id="v2-0-0"></a>
## v2\.0\.0
-<a id="release-summary-44"></a>
+<a id="release-summary-45"></a>
### Release Summary
New major release with some deprecations removed and a breaking change in the <code>docker\_compose</code> module regarding the <code>timeout</code> parameter\.
@@ -1122,7 +1139,7 @@ New major release with some deprecations removed and a breaking change in the <c
<a id="v1-10-0"></a>
## v1\.10\.0
-<a id="release-summary-45"></a>
+<a id="release-summary-46"></a>
### Release Summary
Regular feature and bugfix release\.
@@ -1137,12 +1154,12 @@ Regular feature and bugfix release\.
<a id="v1-9-1"></a>
## v1\.9\.1
-<a id="release-summary-46"></a>
+<a id="release-summary-47"></a>
### Release Summary
Regular bugfix release\.
-<a id="bugfixes-35"></a>
+<a id="bugfixes-36"></a>
### Bugfixes
* docker\_compose \- fixed incorrect <code>changed</code> status for services with <code>profiles</code> defined\, but none enabled \([https\://github\.com/ansible\-collections/community\.docker/pull/192](https\://github\.com/ansible\-collections/community\.docker/pull/192)\)\.
@@ -1150,7 +1167,7 @@ Regular bugfix release\.
<a id="v1-9-0"></a>
## v1\.9\.0
-<a id="release-summary-47"></a>
+<a id="release-summary-48"></a>
### Release Summary
New bugfixes and features release\.
@@ -1169,7 +1186,7 @@ New bugfixes and features release\.
* docker\_container \- the new <code>command\_handling</code>\'s default value\, <code>compatibility</code>\, is deprecated and will change to <code>correct</code> in community\.docker 3\.0\.0\. A deprecation warning is emitted by the module in cases where the behavior will change\. Please note that ansible\-core will output a deprecation warning only once\, so if it is shown for an earlier task\, there could be more tasks with this warning where it is not shown \([https\://github\.com/ansible\-collections/community\.docker/pull/186](https\://github\.com/ansible\-collections/community\.docker/pull/186)\)\.
-<a id="bugfixes-36"></a>
+<a id="bugfixes-37"></a>
### Bugfixes
* docker\_compose \- fixes task failures when bringing up services while using <code>docker\-compose \<1\.17\.0</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/180](https\://github\.com/ansible\-collections/community\.docker/issues/180)\)\.
@@ -1188,7 +1205,7 @@ New bugfixes and features release\.
<a id="v1-8-0"></a>
## v1\.8\.0
-<a id="release-summary-48"></a>
+<a id="release-summary-49"></a>
### Release Summary
Regular bugfix and feature release\.
@@ -1202,7 +1219,7 @@ Regular bugfix and feature release\.
* docker\_host\_info \- allow values for keys in <code>containers\_filters</code>\, <code>images\_filters</code>\, <code>networks\_filters</code>\, and <code>volumes\_filters</code> to be passed as YAML lists \([https\://github\.com/ansible\-collections/community\.docker/pull/160](https\://github\.com/ansible\-collections/community\.docker/pull/160)\)\.
* docker\_plugin \- added <code>alias</code> option to specify local names for docker plugins \([https\://github\.com/ansible\-collections/community\.docker/pull/161](https\://github\.com/ansible\-collections/community\.docker/pull/161)\)\.
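A minimal sketch of the new <code>alias</code> option \(plugin name and alias are illustrative\):

```yaml
# Sketch: install a Docker plugin under a shorter local name
- name: Install the sshfs volume plugin under a local alias
  community.docker.docker_plugin:
    plugin_name: vieux/sshfs
    alias: sshfs
    state: present
```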
-<a id="bugfixes-37"></a>
+<a id="bugfixes-38"></a>
### Bugfixes
* docker\_compose \- fix idempotence bug when using <code>stopped\: true</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/142](https\://github\.com/ansible\-collections/community\.docker/issues/142)\, [https\://github\.com/ansible\-collections/community\.docker/pull/159](https\://github\.com/ansible\-collections/community\.docker/pull/159)\)\.
@@ -1210,7 +1227,7 @@ Regular bugfix and feature release\.
<a id="v1-7-0"></a>
## v1\.7\.0
-<a id="release-summary-49"></a>
+<a id="release-summary-50"></a>
### Release Summary
Small feature and bugfix release\.
@@ -1223,12 +1240,12 @@ Small feature and bugfix release\.
<a id="v1-6-1"></a>
## v1\.6\.1
-<a id="release-summary-50"></a>
+<a id="release-summary-51"></a>
### Release Summary
Bugfix release to reduce deprecation warning spam\.
-<a id="bugfixes-38"></a>
+<a id="bugfixes-39"></a>
### Bugfixes
* docker\_\* modules and plugins\, except <code>docker\_swarm</code> connection plugin and <code>docker\_compose</code> and <code>docker\_stack\*</code> modules \- only emit the <code>tls\_hostname</code> deprecation message if TLS is actually used \([https\://github\.com/ansible\-collections/community\.docker/pull/143](https\://github\.com/ansible\-collections/community\.docker/pull/143)\)\.
@@ -1236,7 +1253,7 @@ Bugfix release to reduce deprecation warning spam\.
<a id="v1-6-0"></a>
## v1\.6\.0
-<a id="release-summary-51"></a>
+<a id="release-summary-52"></a>
### Release Summary
Regular bugfix and feature release\.
@@ -1252,7 +1269,7 @@ Regular bugfix and feature release\.
* docker\_\* modules and plugins\, except <code>docker\_swarm</code> connection plugin and <code>docker\_compose</code> and <code>docker\_stack\*</code> modules \- the current default <code>localhost</code> for <code>tls\_hostname</code> is deprecated\. In community\.docker 2\.0\.0 it will be computed from <code>docker\_host</code> instead \([https\://github\.com/ansible\-collections/community\.docker/pull/134](https\://github\.com/ansible\-collections/community\.docker/pull/134)\)\.
-<a id="bugfixes-39"></a>
+<a id="bugfixes-40"></a>
### Bugfixes
* docker\_compose \- fix not pulling when <code>state\: present</code> and <code>stopped\: true</code> \([https\://github\.com/ansible\-collections/community\.docker/issues/12](https\://github\.com/ansible\-collections/community\.docker/issues/12)\, [https\://github\.com/ansible\-collections/community\.docker/pull/119](https\://github\.com/ansible\-collections/community\.docker/pull/119)\)\.
@@ -1262,7 +1279,7 @@ Regular bugfix and feature release\.
<a id="v1-5-0"></a>
## v1\.5\.0
-<a id="release-summary-52"></a>
+<a id="release-summary-53"></a>
### Release Summary
Regular feature release\.
@@ -1272,7 +1289,7 @@ Regular feature release\.
* Add the <code>use\_ssh\_client</code> option to most docker modules and plugins \([https\://github\.com/ansible\-collections/community\.docker/issues/108](https\://github\.com/ansible\-collections/community\.docker/issues/108)\, [https\://github\.com/ansible\-collections/community\.docker/pull/114](https\://github\.com/ansible\-collections/community\.docker/pull/114)\)\.
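A minimal sketch of <code>use\_ssh\_client</code> against an SSH Docker host \(the host URL is illustrative\):

```yaml
# Sketch: talk to a remote daemon through the ssh CLI instead of the built-in SSH transport
- name: Query a remote Docker host over SSH
  community.docker.docker_host_info:
    docker_host: ssh://deploy@docker-host.example.com
    use_ssh_client: true
```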
-<a id="bugfixes-40"></a>
+<a id="bugfixes-41"></a>
### Bugfixes
* all modules \- use <code>to\_native</code> to convert exceptions to strings \([https\://github\.com/ansible\-collections/community\.docker/pull/121](https\://github\.com/ansible\-collections/community\.docker/pull/121)\)\.
@@ -1285,7 +1302,7 @@ Regular feature release\.
<a id="v1-4-0"></a>
## v1\.4\.0
-<a id="release-summary-53"></a>
+<a id="release-summary-54"></a>
### Release Summary
Security release to address another potential secret leak\. Also includes regular bugfixes and features\.
@@ -1305,7 +1322,7 @@ Security release to address another potential secret leak\. Also includes regula
* docker\_swarm \- the <code>join\_token</code> option is now marked as <code>no\_log</code> so it is no longer written into logs \([https\://github\.com/ansible\-collections/community\.docker/pull/103](https\://github\.com/ansible\-collections/community\.docker/pull/103)\)\.
-<a id="bugfixes-41"></a>
+<a id="bugfixes-42"></a>
### Bugfixes
* <code>docker\_swarm\_service</code> \- fix KeyError caused by a reference to the deprecated option <code>update\_failure\_action</code> \([https\://github\.com/ansible\-collections/community\.docker/pull/100](https\://github\.com/ansible\-collections/community\.docker/pull/100)\)\.
@@ -1314,7 +1331,7 @@ Security release to address another potential secret leak\. Also includes regula
<a id="v1-3-0"></a>
## v1\.3\.0
-<a id="release-summary-54"></a>
+<a id="release-summary-55"></a>
### Release Summary
Regular feature and bugfix release\.
@@ -1327,7 +1344,7 @@ Regular feature and bugfix release\.
* docker\_image \- properly support image IDs \(hashes\) for loading and tagging images \([https\://github\.com/ansible\-collections/community\.docker/issues/86](https\://github\.com/ansible\-collections/community\.docker/issues/86)\, [https\://github\.com/ansible\-collections/community\.docker/pull/87](https\://github\.com/ansible\-collections/community\.docker/pull/87)\)\.
* docker\_swarm\_service \- adding support for maximum number of tasks per node \(<code>replicas\_max\_per\_node</code>\) when running swarm service in replicated mode\. Introduced in API 1\.40 \([https\://github\.com/ansible\-collections/community\.docker/issues/7](https\://github\.com/ansible\-collections/community\.docker/issues/7)\, [https\://github\.com/ansible\-collections/community\.docker/pull/92](https\://github\.com/ansible\-collections/community\.docker/pull/92)\)\.
-<a id="bugfixes-42"></a>
+<a id="bugfixes-43"></a>
### Bugfixes
* docker\_container \- fix healthcheck disabling idempotency issue with strict comparison \([https\://github\.com/ansible\-collections/community\.docker/issues/85](https\://github\.com/ansible\-collections/community\.docker/issues/85)\)\.
@@ -1345,7 +1362,7 @@ Regular feature and bugfix release\.
<a id="v1-2-2"></a>
## v1\.2\.2
-<a id="release-summary-55"></a>
+<a id="release-summary-56"></a>
### Release Summary
Security bugfix release to address CVE\-2021\-20191\.
@@ -1358,12 +1375,12 @@ Security bugfix release to address CVE\-2021\-20191\.
<a id="v1-2-1"></a>
## v1\.2\.1
-<a id="release-summary-56"></a>
+<a id="release-summary-57"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-43"></a>
+<a id="bugfixes-44"></a>
### Bugfixes
* docker connection plugin \- fix Docker version parsing\, as some docker versions have a leading <code>v</code> in the output of the command <code>docker version \-\-format \"\{\{\.Server\.Version\}\}\"</code> \([https\://github\.com/ansible\-collections/community\.docker/pull/76](https\://github\.com/ansible\-collections/community\.docker/pull/76)\)\.
@@ -1371,7 +1388,7 @@ Bugfix release\.
<a id="v1-2-0"></a>
## v1\.2\.0
-<a id="release-summary-57"></a>
+<a id="release-summary-58"></a>
### Release Summary
Feature release with one new feature and two bugfixes\.
@@ -1381,7 +1398,7 @@ Feature release with one new feature and two bugfixes\.
* docker\_container \- added <code>default\_host\_ip</code> option which allows explicitly setting the default IP string for published ports without explicitly specified IPs\. When using IPv6 binds with Docker 20\.10\.2 or newer\, this needs to be set to an empty string \(<code>\"\"</code>\) \([https\://github\.com/ansible\-collections/community\.docker/issues/70](https\://github\.com/ansible\-collections/community\.docker/issues/70)\, [https\://github\.com/ansible\-collections/community\.docker/pull/71](https\://github\.com/ansible\-collections/community\.docker/pull/71)\)\.
-<a id="bugfixes-44"></a>
+<a id="bugfixes-45"></a>
### Bugfixes
* docker\_container \- allow IPv6 zones \(RFC 4007\) in bind IPs \([https\://github\.com/ansible\-collections/community\.docker/pull/66](https\://github\.com/ansible\-collections/community\.docker/pull/66)\)\.
@@ -1390,7 +1407,7 @@ Feature release with one new feature and two bugfixes\.
<a id="v1-1-0"></a>
## v1\.1\.0
-<a id="release-summary-58"></a>
+<a id="release-summary-59"></a>
### Release Summary
Feature release with three new plugins and modules\.
@@ -1407,7 +1424,7 @@ Feature release with three new plugins and modules\.
* docker\_container \- currently <code>published\_ports</code> can contain port mappings next to the special value <code>all</code>\, in which case the port mappings are ignored\. This behavior is deprecated for community\.docker 2\.0\.0\, at which point it will either be forbidden\, or this behavior will be properly implemented similar to how the Docker CLI tool handles this \([https\://github\.com/ansible\-collections/community\.docker/issues/8](https\://github\.com/ansible\-collections/community\.docker/issues/8)\, [https\://github\.com/ansible\-collections/community\.docker/pull/60](https\://github\.com/ansible\-collections/community\.docker/pull/60)\)\.
-<a id="bugfixes-45"></a>
+<a id="bugfixes-46"></a>
### Bugfixes
* docker\_image \- if <code>push\=true</code> is used with <code>repository</code>\, and the image does not need to be tagged\, still push\. This can happen if <code>repository</code> and <code>name</code> are equal \([https\://github\.com/ansible\-collections/community\.docker/issues/52](https\://github\.com/ansible\-collections/community\.docker/issues/52)\, [https\://github\.com/ansible\-collections/community\.docker/pull/53](https\://github\.com/ansible\-collections/community\.docker/pull/53)\)\.
@@ -1435,12 +1452,12 @@ Feature release with three new plugins and modules\.
<a id="v1-0-1"></a>
## v1\.0\.1
-<a id="release-summary-59"></a>
+<a id="release-summary-60"></a>
### Release Summary
Maintenance release with a bugfix for <code>docker\_container</code>\.
-<a id="bugfixes-46"></a>
+<a id="bugfixes-47"></a>
### Bugfixes
* docker\_container \- the validation for <code>capabilities</code> in <code>device\_requests</code> was incorrect \([https\://github\.com/ansible\-collections/community\.docker/issues/42](https\://github\.com/ansible\-collections/community\.docker/issues/42)\, [https\://github\.com/ansible\-collections/community\.docker/pull/43](https\://github\.com/ansible\-collections/community\.docker/pull/43)\)\.
@@ -1448,7 +1465,7 @@ Maintenance release with a bugfix for <code>docker\_container</code>\.
<a id="v1-0-0"></a>
## v1\.0\.0
-<a id="release-summary-60"></a>
+<a id="release-summary-61"></a>
### Release Summary
This is the first production \(non\-prerelease\) release of <code>community\.docker</code>\.
@@ -1463,7 +1480,7 @@ This is the first production \(non\-prerelease\) release of <code>community\.doc
<a id="v0-1-0"></a>
## v0\.1\.0
-<a id="release-summary-61"></a>
+<a id="release-summary-62"></a>
### Release Summary
The <code>community\.docker</code> collection continues the work on the Ansible docker modules and plugins from their state in <code>community\.general</code> 1\.2\.0\. The changes listed here are thus relative to the modules and plugins <code>community\.general\.docker\*</code>\.
@@ -1501,7 +1518,7 @@ All deprecation removals planned for <code>community\.general</code> 2\.0\.0 hav
* docker\_volume \- no longer returns <code>ansible\_facts</code> \([https\://github\.com/ansible\-collections/community\.docker/pull/1](https\://github\.com/ansible\-collections/community\.docker/pull/1)\)\.
* docker\_volume \- the <code>force</code> option has been removed\. Use <code>recreate</code> instead \([https\://github\.com/ansible\-collections/community\.docker/pull/1](https\://github\.com/ansible\-collections/community\.docker/pull/1)\)\.
-<a id="bugfixes-47"></a>
+<a id="bugfixes-48"></a>
### Bugfixes
* docker\_login \- fix internal config file storage to handle credentials for more than one registry \([https\://github\.com/ansible\-collections/community\.general/issues/1117](https\://github\.com/ansible\-collections/community\.general/issues/1117)\)\.
diff --git a/ansible_collections/community/docker/CHANGELOG.rst b/ansible_collections/community/docker/CHANGELOG.rst
index dff8b150b..7543fd867 100644
--- a/ansible_collections/community/docker/CHANGELOG.rst
+++ b/ansible_collections/community/docker/CHANGELOG.rst
@@ -4,6 +4,20 @@ Docker Community Collection Release Notes
.. contents:: Topics
+v3.10.4
+=======
+
+Release Summary
+---------------
+
+Bugfix release.
+
+Bugfixes
+--------
+
+- docker_compose - make sure that the module uses the ``api_version`` parameter (https://github.com/ansible-collections/community.docker/pull/881).
+- docker_compose_v2* modules - there was no check to make sure that one of ``project_src`` and ``definition`` is provided. The modules crashed if none were provided (https://github.com/ansible-collections/community.docker/issues/885, https://github.com/ansible-collections/community.docker/pull/886).
+
v3.10.3
=======
diff --git a/ansible_collections/community/docker/FILES.json b/ansible_collections/community/docker/FILES.json
index c2f6a28c3..b84bef8d7 100644
--- a/ansible_collections/community/docker/FILES.json
+++ b/ansible_collections/community/docker/FILES.json
@@ -109,7 +109,7 @@
"name": ".azure-pipelines/azure-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eed726931f1b030bc5f6c4165c0251704a098760de49fe58cad4c56aa91522e0",
+ "chksum_sha256": "87305f1c245e1c52e7222b67f827cd5f801ed78a6208dfed5e29d0aa0e845c25",
"format": 1
},
{
@@ -130,7 +130,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b36f9ccf4acf3c6d9358268de4c973d43406516c7f2b09f4303fa7c9672ae69a",
+ "chksum_sha256": "619dd32a8e2dc4fc8f6fc052854345adfec190772705e3150b1129ca5f94bf5e",
"format": 1
},
{
@@ -151,7 +151,7 @@
"name": ".github/workflows/ee.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f7bad2dd69eaeb6413cc12e92a6bf9c962ed705563a8803a09c76669c39ca501",
+ "chksum_sha256": "2f30e1de609ea367102f9414192b3d490b57b0623f8a13dc7b639f8e12f853a4",
"format": 1
},
{
@@ -242,7 +242,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e678850bfb28299b51297b445b2319e25d84b492364f236ee0154baefcb7d8ce",
+ "chksum_sha256": "b070d4c6bf2b6514ee5e0c36abc759afc24c7d81d42d814c65ddc765f89bff92",
"format": 1
},
{
@@ -410,7 +410,7 @@
"name": "plugins/doc_fragments/compose_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b92c9446b36c3a2d8faa368a8036561d65cf5dae443d5f6a26f8d800a767a9a5",
+ "chksum_sha256": "b933078742267339f91deefafecdb5b5734b981c9fe25981c1e708ee69a67635",
"format": 1
},
{
@@ -760,7 +760,7 @@
"name": "plugins/module_utils/compose_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "010f71093f38f18e9fcd1b5cd78a5babe601d2dd79ceb16642b999d299719b6f",
+ "chksum_sha256": "416f931290cec2b13d46496c2357fe43b383ba5e240f826e141f0302df36b62c",
"format": 1
},
{
@@ -837,14 +837,14 @@
"name": "plugins/modules/docker_compose.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "973b8aee68fcb76dd620c243904c64dee7f234ca1f69019b87c83ca72a8efc48",
+ "chksum_sha256": "65293128ce247f1ee759daa55dd234b07b3ef720e7ab4a2dd8aea7a5f596135e",
"format": 1
},
{
"name": "plugins/modules/docker_compose_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "299fc207fcda246f73bbb7213f0a3f1eca8c5ebf0a6344e571c867fff0eba258",
+ "chksum_sha256": "5a1ceb2917ba2515f62c8f2fb1c16046a6622bccd3d32a1550df60c1febb9b16",
"format": 1
},
{
@@ -1285,7 +1285,7 @@
"name": "tests/integration/targets/connection_docker/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b05e80ee60e868b634ac29fbe44a9453f109867892361b4ecf61e7016dcb75f",
+ "chksum_sha256": "ee629dbe7fa3be75e076aef7245c3ab5be9fc1846a907233061f7f08f5eae653",
"format": 1
},
{
@@ -1341,7 +1341,7 @@
"name": "tests/integration/targets/connection_docker_api/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e79404445ba135ee4250d7e0789d95b19f94cd316702b30f6d3082a02eb3d30",
+ "chksum_sha256": "c24e24cd3fa7f24e89352bcade32a0695c78653cfc949a30a10268af7a28777d",
"format": 1
},
{
@@ -1516,7 +1516,7 @@
"name": "tests/integration/targets/docker_compose/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b8ed3ac1dcbc2e57090222c0122e12eed47715945aa536b910fb924733b57ee",
+ "chksum_sha256": "ba201578bf7c2802525e8aaaf87a7019d60542d6a372826559fc1413a5b0d052",
"format": 1
},
{
@@ -4904,7 +4904,7 @@
"name": "tests/integration/targets/setup_docker_compose_v1/vars/Archlinux.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10152adf3d38b6a56495180b4585ffea1ca37943561b97ce2fb504dcedcb6339",
+ "chksum_sha256": "80ff30ed65e73b2a13b7cf04eab0b0208205f055ae23b1abd2b14b15efd9f7b9",
"format": 1
},
{
@@ -6416,7 +6416,7 @@
"name": "CHANGELOG.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42e61d2b2354fbf9c5f2da9873c0477a1c16afede7b23e7fa222f25a158f7590",
+ "chksum_sha256": "0f2f209b91bd3f38f3ac844605a9a920180a7cbf241d0ca8881d990d72114aed",
"format": 1
},
{
@@ -6430,7 +6430,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f823c49d14587f6dfa983c8f2359a9a5ff36e0e34c73615f01eb6504d9d0f9b",
+ "chksum_sha256": "a5df87a1bab6df6d3dc2f0d9e43cfaedc31e633df6fe4bd82f46d9d8b4a1b415",
"format": 1
},
{
diff --git a/ansible_collections/community/docker/MANIFEST.json b/ansible_collections/community/docker/MANIFEST.json
index 69ffc1b62..fe2dadd5b 100644
--- a/ansible_collections/community/docker/MANIFEST.json
+++ b/ansible_collections/community/docker/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "docker",
- "version": "3.10.3",
+ "version": "3.10.4",
"authors": [
"Ansible Docker Working Group"
],
@@ -28,7 +28,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5b11456e6a1ea1ee5144451aa5a1c46f7f835b3102db5ad06d16a005b4af551",
+ "chksum_sha256": "438fa53b8b53c9b7566343ad04bc98237e3706a0f989c4f2e9a9310f7f0af89b",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/docker/changelogs/changelog.yaml b/ansible_collections/community/docker/changelogs/changelog.yaml
index ba40629e7..85a0b07a4 100644
--- a/ansible_collections/community/docker/changelogs/changelog.yaml
+++ b/ansible_collections/community/docker/changelogs/changelog.yaml
@@ -1098,6 +1098,20 @@ releases:
- 3.10.3.yml
- 871-selectors.yml
release_date: '2024-05-26'
+ 3.10.4:
+ changes:
+ bugfixes:
+ - docker_compose - make sure that the module uses the ``api_version`` parameter
+ (https://github.com/ansible-collections/community.docker/pull/881).
+ - docker_compose_v2* modules - there was no check to make sure that one of ``project_src``
+ and ``definition`` is provided. The modules crashed if none were provided
+ (https://github.com/ansible-collections/community.docker/issues/885, https://github.com/ansible-collections/community.docker/pull/886).
+ release_summary: Bugfix release.
+ fragments:
+ - 3.10.4.yml
+ - 881-docker-compose-v1-api-version.yml
+ - 886-compose-v2-req.yml
+ release_date: '2024-06-16'
3.2.0:
changes:
deprecated_features:
diff --git a/ansible_collections/community/docker/plugins/doc_fragments/compose_v2.py b/ansible_collections/community/docker/plugins/doc_fragments/compose_v2.py
index 234a94fe2..58ea7dc28 100644
--- a/ansible_collections/community/docker/plugins/doc_fragments/compose_v2.py
+++ b/ansible_collections/community/docker/plugins/doc_fragments/compose_v2.py
@@ -18,7 +18,7 @@ options:
- Path to a directory containing a Compose file
(C(compose.yml), C(compose.yaml), C(docker-compose.yml), or C(docker-compose.yaml)).
- If O(files) is provided, will look for these files in this directory instead.
- - Mutually exclusive with O(definition).
+ - Mutually exclusive with O(definition). One of O(project_src) and O(definition) must be provided.
type: path
project_name:
description:
@@ -37,7 +37,7 @@ options:
definition:
description:
- Compose file describing one or more services, networks and volumes.
- - Mutually exclusive with O(project_src) and O(files).
+ - Mutually exclusive with O(project_src) and O(files). One of O(project_src) and O(definition) must be provided.
- If provided, PyYAML must be available to this module, and O(project_name) must be specified.
- Note that a temporary directory will be created and deleted afterwards when using this option.
type: dict
diff --git a/ansible_collections/community/docker/plugins/module_utils/compose_v2.py b/ansible_collections/community/docker/plugins/module_utils/compose_v2.py
index 85fbd5d80..80d6be66f 100644
--- a/ansible_collections/community/docker/plugins/module_utils/compose_v2.py
+++ b/ansible_collections/community/docker/plugins/module_utils/compose_v2.py
@@ -518,6 +518,9 @@ def common_compose_argspec_ex():
('definition', 'project_src'),
('definition', 'files')
],
+ required_one_of=[
+ ('definition', 'project_src'),
+ ],
required_by={
'definition': ('project_name', ),
},
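Taken together, the doc-fragment and argument-spec hunks above make the compose_v2 modules require exactly one of `project_src` or `definition`; calling them with neither now fails argument validation up front instead of crashing. A minimal sketch of the two accepted call shapes, assuming a hypothetical project directory `/opt/flask` and a service named `web`:

```yaml
# Option 1: point the module at an existing Compose project on disk.
- name: Deploy from a Compose project directory
  community.docker.docker_compose_v2:
    project_src: /opt/flask          # illustrative path
    state: present

# Option 2: pass the Compose definition inline. `project_name` is required
# in this case, and PyYAML must be available on the target host.
- name: Deploy from an inline definition
  community.docker.docker_compose_v2:
    project_name: flask              # required together with `definition`
    definition:
      services:
        web:
          image: nginx:alpine        # illustrative image
    state: present
```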
diff --git a/ansible_collections/community/docker/plugins/modules/docker_compose.py b/ansible_collections/community/docker/plugins/modules/docker_compose.py
index 3af3bebb1..dcba5adfb 100644
--- a/ansible_collections/community/docker/plugins/modules/docker_compose.py
+++ b/ansible_collections/community/docker/plugins/modules/docker_compose.py
@@ -675,6 +675,9 @@ class ContainerManager(DockerBaseClass):
for key, value in client.module.params.items():
setattr(self, key, value)
+ if self.api_version:
+ os.environ['COMPOSE_API_VERSION'] = self.api_version
+
self.check_mode = client.check_mode
if not self.debug:
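The change to the legacy `docker_compose` (v1) module above exports the module's `api_version` parameter as the `COMPOSE_API_VERSION` environment variable so the value is actually honored by the underlying docker-compose library (the fix noted in the 3.10.4 changelog). A hedged usage sketch, with an illustrative path and version string:

```yaml
- name: Run Compose v1 against a pinned Docker API version
  community.docker.docker_compose:
    project_src: /opt/flask        # illustrative path
    api_version: "1.44"            # now exported as COMPOSE_API_VERSION
    state: present
```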
diff --git a/ansible_collections/community/docker/plugins/modules/docker_compose_v2.py b/ansible_collections/community/docker/plugins/modules/docker_compose_v2.py
index 9972d45f6..921294bf9 100644
--- a/ansible_collections/community/docker/plugins/modules/docker_compose_v2.py
+++ b/ansible_collections/community/docker/plugins/modules/docker_compose_v2.py
@@ -169,7 +169,7 @@ EXAMPLES = '''
ansible.builtin.debug:
var: output
- - name: Run `docker-compose up` again
+ - name: Run `docker compose up` again
community.docker.docker_compose_v2:
project_src: flask
register: output
diff --git a/ansible_collections/community/docker/tests/integration/targets/connection_docker/runme.sh b/ansible_collections/community/docker/tests/integration/targets/connection_docker/runme.sh
index 0965c5d72..4ebbf22e2 100755
--- a/ansible_collections/community/docker/tests/integration/targets/connection_docker/runme.sh
+++ b/ansible_collections/community/docker/tests/integration/targets/connection_docker/runme.sh
@@ -42,12 +42,12 @@ trap cleanup INT TERM EXIT
echo "Start containers"
for CONTAINER in ${DOCKER_CONTAINERS}; do
if [ "${ANSIBLE_TEST_COVERAGE:-}" == "" ]; then
- docker run --rm --name ${CONTAINER} --detach "${IMAGE}" /bin/sh -c 'sleep 10m'
+ docker run --rm --name "${CONTAINER}" --detach "${IMAGE}" /bin/sh -c 'sleep 10m'
else
- docker run --rm --name ${CONTAINER} --detach -v /tmp:/tmp "${IMAGE}" /bin/sh -c 'sleep 10m'
- docker exec ${CONTAINER} pip3 install coverage
+ docker run --rm --name "${CONTAINER}" --detach -v /tmp:/tmp "${IMAGE}" /bin/sh -c 'sleep 10m'
+ docker exec "${CONTAINER}" pip3 install coverage
fi
- echo ${CONTAINER}
+ echo "${CONTAINER}"
done
cat > test_connection.inventory << EOF
diff --git a/ansible_collections/community/docker/tests/integration/targets/connection_docker_api/runme.sh b/ansible_collections/community/docker/tests/integration/targets/connection_docker_api/runme.sh
index 893b019ad..ea5588a6a 100755
--- a/ansible_collections/community/docker/tests/integration/targets/connection_docker_api/runme.sh
+++ b/ansible_collections/community/docker/tests/integration/targets/connection_docker_api/runme.sh
@@ -42,12 +42,12 @@ trap cleanup INT TERM EXIT
echo "Start containers"
for CONTAINER in ${DOCKER_CONTAINERS}; do
if [ "${ANSIBLE_TEST_COVERAGE:-}" == "" ]; then
- docker run --rm --name ${CONTAINER} --detach "${IMAGE}" /bin/sh -c 'sleep 10m'
+ docker run --rm --name "${CONTAINER}" --detach "${IMAGE}" /bin/sh -c 'sleep 10m'
else
- docker run --rm --name ${CONTAINER} --detach -v /tmp:/tmp "${IMAGE}" /bin/sh -c 'sleep 10m'
- docker exec ${CONTAINER} pip3 install coverage
+ docker run --rm --name "${CONTAINER}" --detach -v /tmp:/tmp "${IMAGE}" /bin/sh -c 'sleep 10m'
+ docker exec "${CONTAINER}" pip3 install coverage
fi
- echo ${CONTAINER}
+ echo "${CONTAINER}"
done
cat > test_connection.inventory << EOF
diff --git a/ansible_collections/community/docker/tests/integration/targets/docker_compose/tasks/main.yml b/ansible_collections/community/docker/tests/integration/targets/docker_compose/tasks/main.yml
index 8ea59f865..34d518872 100644
--- a/ansible_collections/community/docker/tests/integration/targets/docker_compose/tasks/main.yml
+++ b/ansible_collections/community/docker/tests/integration/targets/docker_compose/tasks/main.yml
@@ -19,7 +19,11 @@
msg: "Using container name prefix {{ cname_prefix }}"
# Run the tests
-- block:
+- module_defaults:
+ community.docker.docker_compose:
+ api_version: '{{ omit if docker_api_version is version("1.45", "<") else "1.44" }}'
+
+ block:
- include_tasks: run-test.yml
with_fileglob:
- "tests/*.yml"
diff --git a/ansible_collections/community/docker/tests/integration/targets/setup_docker_compose_v1/vars/Archlinux.yml b/ansible_collections/community/docker/tests/integration/targets/setup_docker_compose_v1/vars/Archlinux.yml
index f0698a3e4..bd619ad5b 100644
--- a/ansible_collections/community/docker/tests/integration/targets/setup_docker_compose_v1/vars/Archlinux.yml
+++ b/ansible_collections/community/docker/tests/integration/targets/setup_docker_compose_v1/vars/Archlinux.yml
@@ -7,3 +7,5 @@ docker_compose_pip_packages:
- docker-compose
# Force PyYAML to 5.3.1
- PyYAML==5.3.1
+ # Force requests to < 2.32.0 (https://github.com/docker/docker-py/issues/3256)
+ - requests<2.32.0
diff --git a/ansible_collections/community/general/.azure-pipelines/azure-pipelines.yml b/ansible_collections/community/general/.azure-pipelines/azure-pipelines.yml
index 7dc438ad3..3f9293ac1 100644
--- a/ansible_collections/community/general/.azure-pipelines/azure-pipelines.yml
+++ b/ansible_collections/community/general/.azure-pipelines/azure-pipelines.yml
@@ -53,7 +53,7 @@ variables:
resources:
containers:
- container: default
- image: quay.io/ansible/azure-pipelines-test-container:4.0.1
+ image: quay.io/ansible/azure-pipelines-test-container:6.0.0
pool: Standard
@@ -127,6 +127,7 @@ stages:
- test: '3.10'
- test: '3.11'
- test: '3.12'
+ - test: '3.13'
- stage: Units_2_17
displayName: Units 2.17
dependsOn: []
@@ -354,6 +355,7 @@ stages:
targets:
- test: '3.8'
- test: '3.11'
+ - test: '3.13'
- stage: Generic_2_17
displayName: Generic 2.17
dependsOn: []
diff --git a/ansible_collections/community/general/.github/BOTMETA.yml b/ansible_collections/community/general/.github/BOTMETA.yml
index add324935..faedb4260 100644
--- a/ansible_collections/community/general/.github/BOTMETA.yml
+++ b/ansible_collections/community/general/.github/BOTMETA.yml
@@ -157,6 +157,8 @@ files:
$filters/jc.py:
maintainers: kellyjonbrazil
$filters/json_query.py: {}
+ $filters/keep_keys.py:
+ maintainers: vbotka
$filters/lists.py:
maintainers: cfiehe
$filters/lists_difference.yml:
@@ -170,6 +172,10 @@ files:
$filters/lists_union.yml:
maintainers: cfiehe
$filters/random_mac.py: {}
+ $filters/remove_keys.py:
+ maintainers: vbotka
+ $filters/replace_keys.py:
+ maintainers: vbotka
$filters/time.py:
maintainers: resmo
$filters/to_days.yml:
@@ -502,12 +508,16 @@ files:
maintainers: tintoy
$modules/discord.py:
maintainers: cwollinger
+ $modules/django_check.py:
+ maintainers: russoz
+ $modules/django_command.py:
+ maintainers: russoz
+ $modules/django_createcachetable.py:
+ maintainers: russoz
$modules/django_manage.py:
ignore: scottanderson42 tastychutney
labels: django_manage
maintainers: russoz
- $modules/django_command.py:
- maintainers: russoz
$modules/dnf_versionlock.py:
maintainers: moreda
$modules/dnf_config_manager.py:
@@ -1415,6 +1425,8 @@ files:
ignore: matze
labels: zypper
maintainers: $team_suse
+ $plugin_utils/keys_filter.py:
+ maintainers: vbotka
$plugin_utils/unsafe.py:
maintainers: felixfontein
$tests/a_module.py:
@@ -1454,6 +1466,10 @@ files:
maintainers: baldwinSPC nurfet-becirevic t0mk teebes
docs/docsite/rst/guide_scaleway.rst:
maintainers: $team_scaleway
+ docs/docsite/rst/guide_deps.rst:
+ maintainers: russoz
+ docs/docsite/rst/guide_vardict.rst:
+ maintainers: russoz
docs/docsite/rst/test_guide.rst:
maintainers: felixfontein
#########################
@@ -1485,7 +1501,7 @@ macros:
team_ansible_core:
team_aix: MorrisA bcoca d-little flynn1973 gforster kairoaraujo marvin-sinister mator molekuul ramooncamacho wtcross
team_bsd: JoergFiedler MacLemon bcoca dch jasperla mekanix opoplawski overhacked tuxillo
- team_consul: sgargan apollo13
+ team_consul: sgargan apollo13 Ilgmi
team_cyberark_conjur: jvanderhoof ryanprior
team_e_spirit: MatrixCrawler getjack
team_flatpak: JayKayy oolongbrothers
diff --git a/ansible_collections/community/general/CHANGELOG.md b/ansible_collections/community/general/CHANGELOG.md
index 91922fb7a..7a752cb6a 100644
--- a/ansible_collections/community/general/CHANGELOG.md
+++ b/ansible_collections/community/general/CHANGELOG.md
@@ -2,42 +2,118 @@
**Topics**
-- <a href="#v9-0-1">v9\.0\.1</a>
+- <a href="#v9-1-0">v9\.1\.0</a>
- <a href="#release-summary">Release Summary</a>
- <a href="#minor-changes">Minor Changes</a>
+ - <a href="#deprecated-features">Deprecated Features</a>
- <a href="#bugfixes">Bugfixes</a>
-- <a href="#v9-0-0">v9\.0\.0</a>
+ - <a href="#known-issues">Known Issues</a>
+ - <a href="#new-plugins">New Plugins</a>
+ - <a href="#filter">Filter</a>
+ - <a href="#new-modules">New Modules</a>
+- <a href="#v9-0-1">v9\.0\.1</a>
- <a href="#release-summary-1">Release Summary</a>
- <a href="#minor-changes-1">Minor Changes</a>
+ - <a href="#bugfixes-1">Bugfixes</a>
+- <a href="#v9-0-0">v9\.0\.0</a>
+ - <a href="#release-summary-2">Release Summary</a>
+ - <a href="#minor-changes-2">Minor Changes</a>
- <a href="#breaking-changes--porting-guide">Breaking Changes / Porting Guide</a>
- - <a href="#deprecated-features">Deprecated Features</a>
+ - <a href="#deprecated-features-1">Deprecated Features</a>
- <a href="#removed-features-previously-deprecated">Removed Features \(previously deprecated\)</a>
- <a href="#security-fixes">Security Fixes</a>
- - <a href="#bugfixes-1">Bugfixes</a>
- - <a href="#new-plugins">New Plugins</a>
+ - <a href="#bugfixes-2">Bugfixes</a>
+ - <a href="#new-plugins-1">New Plugins</a>
- <a href="#become">Become</a>
- <a href="#callback">Callback</a>
- <a href="#connection">Connection</a>
- - <a href="#filter">Filter</a>
+ - <a href="#filter-1">Filter</a>
- <a href="#lookup">Lookup</a>
- <a href="#test">Test</a>
- - <a href="#new-modules">New Modules</a>
+ - <a href="#new-modules-1">New Modules</a>
This changelog describes changes after version 8\.0\.0\.
+<a id="v9-1-0"></a>
+## v9\.1\.0
+
+<a id="release-summary"></a>
+### Release Summary
+
+Regular feature and bugfix release\.
+
+<a id="minor-changes"></a>
+### Minor Changes
+
+* CmdRunner module util \- argument formats can be specified as plain functions without calling <code>cmd\_runner\_fmt\.as\_func\(\)</code> \([https\://github\.com/ansible\-collections/community\.general/pull/8479](https\://github\.com/ansible\-collections/community\.general/pull/8479)\)\.
+* ansible\_galaxy\_install \- add upgrade feature \([https\://github\.com/ansible\-collections/community\.general/pull/8431](https\://github\.com/ansible\-collections/community\.general/pull/8431)\, [https\://github\.com/ansible\-collections/community\.general/issues/8351](https\://github\.com/ansible\-collections/community\.general/issues/8351)\)\.
+* cargo \- add option <code>directory</code>\, which allows the source directory to be specified \([https\://github\.com/ansible\-collections/community\.general/pull/8480](https\://github\.com/ansible\-collections/community\.general/pull/8480)\)\.
+* cmd\_runner module utils \- add decorator <code>cmd\_runner\_fmt\.stack</code> \([https\://github\.com/ansible\-collections/community\.general/pull/8415](https\://github\.com/ansible\-collections/community\.general/pull/8415)\)\.
+* cmd\_runner\_fmt module utils \- simplify implementation of <code>cmd\_runner\_fmt\.as\_bool\_not\(\)</code> \([https\://github\.com/ansible\-collections/community\.general/pull/8512](https\://github\.com/ansible\-collections/community\.general/pull/8512)\)\.
+* ipa\_dnsrecord \- adds <code>SSHFP</code> record type for managing SSH fingerprints in FreeIPA DNS \([https\://github\.com/ansible\-collections/community\.general/pull/8404](https\://github\.com/ansible\-collections/community\.general/pull/8404)\)\.
+* keycloak\_client \- assign auth flow by name \([https\://github\.com/ansible\-collections/community\.general/pull/8428](https\://github\.com/ansible\-collections/community\.general/pull/8428)\)\.
+* openbsd\_pkg \- adds diff support to show changes in installed package list\. This does not yet work for check mode \([https\://github\.com/ansible\-collections/community\.general/pull/8402](https\://github\.com/ansible\-collections/community\.general/pull/8402)\)\.
+* proxmox \- allow specification of the API port when using proxmox\_\* \([https\://github\.com/ansible\-collections/community\.general/issues/8440](https\://github\.com/ansible\-collections/community\.general/issues/8440)\, [https\://github\.com/ansible\-collections/community\.general/pull/8441](https\://github\.com/ansible\-collections/community\.general/pull/8441)\)\.
+* proxmox\_vm\_info \- add <code>network</code> option to retrieve current network information \([https\://github\.com/ansible\-collections/community\.general/pull/8471](https\://github\.com/ansible\-collections/community\.general/pull/8471)\)\.
+* redfish\_command \- add <code>wait</code> and <code>wait\_timeout</code> options to allow a user to block a command until a service is accessible after performing the requested command \([https\://github\.com/ansible\-collections/community\.general/issues/8051](https\://github\.com/ansible\-collections/community\.general/issues/8051)\, [https\://github\.com/ansible\-collections/community\.general/pull/8434](https\://github\.com/ansible\-collections/community\.general/pull/8434)\)\.
+* redfish\_info \- add command <code>CheckAvailability</code> to check if a service is accessible \([https\://github\.com/ansible\-collections/community\.general/issues/8051](https\://github\.com/ansible\-collections/community\.general/issues/8051)\, [https\://github\.com/ansible\-collections/community\.general/pull/8434](https\://github\.com/ansible\-collections/community\.general/pull/8434)\)\.
+* redis\_info \- adds support for getting cluster info \([https\://github\.com/ansible\-collections/community\.general/pull/8464](https\://github\.com/ansible\-collections/community\.general/pull/8464)\)\.
+
+<a id="deprecated-features"></a>
+### Deprecated Features
+
+* CmdRunner module util \- setting the value of the <code>ignore\_none</code> parameter within a <code>CmdRunner</code> context is deprecated and that feature should be removed in community\.general 12\.0\.0 \([https\://github\.com/ansible\-collections/community\.general/pull/8479](https\://github\.com/ansible\-collections/community\.general/pull/8479)\)\.
+* git\_config \- the <code>list\_all</code> option has been deprecated and will be removed in community\.general 11\.0\.0\. Use the <code>community\.general\.git\_config\_info</code> module instead \([https\://github\.com/ansible\-collections/community\.general/pull/8453](https\://github\.com/ansible\-collections/community\.general/pull/8453)\)\.
+* git\_config \- using <code>state\=present</code> without providing <code>value</code> is deprecated and will be disallowed in community\.general 11\.0\.0\. Use the <code>community\.general\.git\_config\_info</code> module instead to read a value \([https\://github\.com/ansible\-collections/community\.general/pull/8453](https\://github\.com/ansible\-collections/community\.general/pull/8453)\)\.
+
+<a id="bugfixes"></a>
+### Bugfixes
+
+* git\_config \- fix behavior of <code>state\=absent</code> if <code>value</code> is present \([https\://github\.com/ansible\-collections/community\.general/issues/8436](https\://github\.com/ansible\-collections/community\.general/issues/8436)\, [https\://github\.com/ansible\-collections/community\.general/pull/8452](https\://github\.com/ansible\-collections/community\.general/pull/8452)\)\.
+* keycloak\_realm \- add normalizations for <code>attributes</code> and <code>protocol\_mappers</code> \([https\://github\.com/ansible\-collections/community\.general/pull/8496](https\://github\.com/ansible\-collections/community\.general/pull/8496)\)\.
+* launchd \- correctly report changed status in check mode \([https\://github\.com/ansible\-collections/community\.general/pull/8406](https\://github\.com/ansible\-collections/community\.general/pull/8406)\)\.
+* opennebula inventory plugin \- fix invalid reference to IP when inventory runs against NICs with no IPv4 address \([https\://github\.com/ansible\-collections/community\.general/pull/8489](https\://github\.com/ansible\-collections/community\.general/pull/8489)\)\.
+* opentelemetry callback \- do not save the JSON response when using the <code>ansible\.builtin\.uri</code> module \([https\://github\.com/ansible\-collections/community\.general/pull/8430](https\://github\.com/ansible\-collections/community\.general/pull/8430)\)\.
+* opentelemetry callback \- do not save the content response when using the <code>ansible\.builtin\.slurp</code> module \([https\://github\.com/ansible\-collections/community\.general/pull/8430](https\://github\.com/ansible\-collections/community\.general/pull/8430)\)\.
+* pacman \- do not fail if an empty list of packages has been provided and there is nothing to do \([https\://github\.com/ansible\-collections/community\.general/pull/8514](https\://github\.com/ansible\-collections/community\.general/pull/8514)\)\.
+
+<a id="known-issues"></a>
+### Known Issues
+
+* homectl \- the module does not work under Python 3\.13 or newer\, since it relies on the removed <code>crypt</code> standard library module \([https\://github\.com/ansible\-collections/community\.general/issues/4691](https\://github\.com/ansible\-collections/community\.general/issues/4691)\, [https\://github\.com/ansible\-collections/community\.general/pull/8497](https\://github\.com/ansible\-collections/community\.general/pull/8497)\)\.
+* udm\_user \- the module does not work under Python 3\.13 or newer\, since it relies on the removed <code>crypt</code> standard library module \([https\://github\.com/ansible\-collections/community\.general/issues/4690](https\://github\.com/ansible\-collections/community\.general/issues/4690)\, [https\://github\.com/ansible\-collections/community\.general/pull/8497](https\://github\.com/ansible\-collections/community\.general/pull/8497)\)\.
+
+<a id="new-plugins"></a>
+### New Plugins
+
+<a id="filter"></a>
+#### Filter
+
+* community\.general\.keep\_keys \- Keep specific keys from dictionaries in a list\.
+* community\.general\.remove\_keys \- Remove specific keys from dictionaries in a list\.
+* community\.general\.replace\_keys \- Replace specific keys in a list of dictionaries\.
+
+<a id="new-modules"></a>
+### New Modules
+
+* community\.general\.consul\_agent\_check \- Add\, modify\, and delete checks within a consul cluster\.
+* community\.general\.consul\_agent\_service \- Add\, modify and delete services within a consul cluster\.
+* community\.general\.django\_check \- Wrapper for C\(django\-admin check\)\.
+* community\.general\.django\_createcachetable \- Wrapper for C\(django\-admin createcachetable\)\.
+
<a id="v9-0-1"></a>
## v9\.0\.1
-<a id="release-summary"></a>
+<a id="release-summary-1"></a>
### Release Summary
Bugfix release for inclusion in Ansible 10\.0\.0rc1\.
-<a id="minor-changes"></a>
+<a id="minor-changes-1"></a>
### Minor Changes
* ansible\_galaxy\_install \- minor refactor in the module \([https\://github\.com/ansible\-collections/community\.general/pull/8413](https\://github\.com/ansible\-collections/community\.general/pull/8413)\)\.
-<a id="bugfixes"></a>
+<a id="bugfixes-1"></a>
### Bugfixes
* cpanm \- use new <code>VarDict</code> to prevent deprecation warning \([https\://github\.com/ansible\-collections/community\.general/issues/8410](https\://github\.com/ansible\-collections/community\.general/issues/8410)\, [https\://github\.com/ansible\-collections/community\.general/pull/8411](https\://github\.com/ansible\-collections/community\.general/pull/8411)\)\.
@@ -56,12 +132,12 @@ Bugfix release for inclusion in Ansible 10\.0\.0rc1\.
<a id="v9-0-0"></a>
## v9\.0\.0
-<a id="release-summary-1"></a>
+<a id="release-summary-2"></a>
### Release Summary
This is release 9\.0\.0 of <code>community\.general</code>\, released on 2024\-05\-20\.
-<a id="minor-changes-1"></a>
+<a id="minor-changes-2"></a>
### Minor Changes
* PythonRunner module utils \- specialisation of <code>CmdRunner</code> to execute Python scripts \([https\://github\.com/ansible\-collections/community\.general/pull/8289](https\://github\.com/ansible\-collections/community\.general/pull/8289)\)\.
@@ -190,7 +266,7 @@ This is release 9\.0\.0 of <code>community\.general</code>\, released on 2024\-0
* django\_manage \- the module will now fail if <code>virtualenv</code> is specified but no virtual environment exists at that location \([https\://github\.com/ansible\-collections/community\.general/pull/8198](https\://github\.com/ansible\-collections/community\.general/pull/8198)\)\.
* redfish\_command\, redfish\_config\, redfish\_info \- change the default for <code>timeout</code> from 10 to 60 \([https\://github\.com/ansible\-collections/community\.general/pull/8198](https\://github\.com/ansible\-collections/community\.general/pull/8198)\)\.
-<a id="deprecated-features"></a>
+<a id="deprecated-features-1"></a>
### Deprecated Features
* MH DependencyCtxMgr module\_utils \- deprecate <code>module\_utils\.mh\.mixin\.deps\.DependencyCtxMgr</code> in favour of <code>module\_utils\.deps</code> \([https\://github\.com/ansible\-collections/community\.general/pull/8280](https\://github\.com/ansible\-collections/community\.general/pull/8280)\)\.
@@ -231,7 +307,7 @@ This is release 9\.0\.0 of <code>community\.general</code>\, released on 2024\-0
* cobbler\, gitlab\_runners\, icinga2\, linode\, lxd\, nmap\, online\, opennebula\, proxmox\, scaleway\, stackpath\_compute\, virtualbox\, and xen\_orchestra inventory plugin \- make sure all data received from the remote servers is marked as unsafe\, so remote code execution by obtaining texts that can be evaluated as templates is not possible \([https\://www\.die\-welt\.net/2024/03/remote\-code\-execution\-in\-ansible\-dynamic\-inventory\-plugins/](https\://www\.die\-welt\.net/2024/03/remote\-code\-execution\-in\-ansible\-dynamic\-inventory\-plugins/)\, [https\://github\.com/ansible\-collections/community\.general/pull/8098](https\://github\.com/ansible\-collections/community\.general/pull/8098)\)\.
* keycloak\_identity\_provider \- the client secret was not correctly sanitized by the module\. The return values <code>proposed</code>\, <code>existing</code>\, and <code>end\_state</code>\, as well as the diff\, did contain the client secret unmasked \([https\://github\.com/ansible\-collections/community\.general/pull/8355](https\://github\.com/ansible\-collections/community\.general/pull/8355)\)\.
-<a id="bugfixes-1"></a>
+<a id="bugfixes-2"></a>
### Bugfixes
* aix\_filesystem \- fix <code>\_validate\_vg</code> not passing VG name to <code>lsvg\_cmd</code> \([https\://github\.com/ansible\-collections/community\.general/issues/8151](https\://github\.com/ansible\-collections/community\.general/issues/8151)\)\.
@@ -306,7 +382,7 @@ This is release 9\.0\.0 of <code>community\.general</code>\, released on 2024\-0
* to\_ini filter plugin \- disabling interpolation of <code>ConfigParser</code> to allow converting values with a <code>\%</code> sign \([https\://github\.com/ansible\-collections/community\.general/issues/8183](https\://github\.com/ansible\-collections/community\.general/issues/8183)\, [https\://github\.com/ansible\-collections/community\.general/pull/8185](https\://github\.com/ansible\-collections/community\.general/pull/8185)\)\.
* xml \- make module work with lxml 5\.1\.1\, which removed some internals that the module was relying on \([https\://github\.com/ansible\-collections/community\.general/pull/8169](https\://github\.com/ansible\-collections/community\.general/pull/8169)\)\.
-<a id="new-plugins"></a>
+<a id="new-plugins-1"></a>
### New Plugins
<a id="become"></a>
@@ -325,7 +401,7 @@ This is release 9\.0\.0 of <code>community\.general</code>\, released on 2024\-0
* community\.general\.incus \- Run tasks in Incus instances via the Incus CLI\.
-<a id="filter"></a>
+<a id="filter-1"></a>
#### Filter
* community\.general\.from\_ini \- Converts INI text input into a dictionary\.
@@ -346,7 +422,7 @@ This is release 9\.0\.0 of <code>community\.general</code>\, released on 2024\-0
* community\.general\.fqdn\_valid \- Validates fully\-qualified domain names against RFC 1123\.
-<a id="new-modules"></a>
+<a id="new-modules-1"></a>
### New Modules
* community\.general\.consul\_acl\_bootstrap \- Bootstrap ACLs in Consul\.
diff --git a/ansible_collections/community/general/CHANGELOG.rst b/ansible_collections/community/general/CHANGELOG.rst
index 384bee747..523acb975 100644
--- a/ansible_collections/community/general/CHANGELOG.rst
+++ b/ansible_collections/community/general/CHANGELOG.rst
@@ -6,6 +6,73 @@ Community General Release Notes
This changelog describes changes after version 8.0.0.
+v9.1.0
+======
+
+Release Summary
+---------------
+
+Regular feature and bugfix release.
+
+Minor Changes
+-------------
+
+- CmdRunner module util - argument formats can be specified as plain functions without calling ``cmd_runner_fmt.as_func()`` (https://github.com/ansible-collections/community.general/pull/8479).
+- ansible_galaxy_install - add upgrade feature (https://github.com/ansible-collections/community.general/pull/8431, https://github.com/ansible-collections/community.general/issues/8351).
+- cargo - add option ``directory``, which allows the source directory to be specified (https://github.com/ansible-collections/community.general/pull/8480).
+- cmd_runner module utils - add decorator ``cmd_runner_fmt.stack`` (https://github.com/ansible-collections/community.general/pull/8415).
+- cmd_runner_fmt module utils - simplify implementation of ``cmd_runner_fmt.as_bool_not()`` (https://github.com/ansible-collections/community.general/pull/8512).
+- ipa_dnsrecord - adds ``SSHFP`` record type for managing SSH fingerprints in FreeIPA DNS (https://github.com/ansible-collections/community.general/pull/8404).
+- keycloak_client - assign auth flow by name (https://github.com/ansible-collections/community.general/pull/8428).
+- openbsd_pkg - adds diff support to show changes in installed package list. This does not yet work for check mode (https://github.com/ansible-collections/community.general/pull/8402).
+- proxmox - allow specification of the API port when using proxmox_* (https://github.com/ansible-collections/community.general/issues/8440, https://github.com/ansible-collections/community.general/pull/8441).
+- proxmox_vm_info - add ``network`` option to retrieve current network information (https://github.com/ansible-collections/community.general/pull/8471).
+- redfish_command - add ``wait`` and ``wait_timeout`` options to allow a user to block a command until a service is accessible after performing the requested command (https://github.com/ansible-collections/community.general/issues/8051, https://github.com/ansible-collections/community.general/pull/8434).
+- redfish_info - add command ``CheckAvailability`` to check if a service is accessible (https://github.com/ansible-collections/community.general/issues/8051, https://github.com/ansible-collections/community.general/pull/8434).
+- redis_info - adds support for getting cluster info (https://github.com/ansible-collections/community.general/pull/8464).
+
+Deprecated Features
+-------------------
+
+- CmdRunner module util - setting the value of the ``ignore_none`` parameter within a ``CmdRunner`` context is deprecated and that feature should be removed in community.general 12.0.0 (https://github.com/ansible-collections/community.general/pull/8479).
+- git_config - the ``list_all`` option has been deprecated and will be removed in community.general 11.0.0. Use the ``community.general.git_config_info`` module instead (https://github.com/ansible-collections/community.general/pull/8453).
+- git_config - using ``state=present`` without providing ``value`` is deprecated and will be disallowed in community.general 11.0.0. Use the ``community.general.git_config_info`` module instead to read a value (https://github.com/ansible-collections/community.general/pull/8453).
+
+Bugfixes
+--------
+
+- git_config - fix behavior of ``state=absent`` if ``value`` is present (https://github.com/ansible-collections/community.general/issues/8436, https://github.com/ansible-collections/community.general/pull/8452).
+- keycloak_realm - add normalizations for ``attributes`` and ``protocol_mappers`` (https://github.com/ansible-collections/community.general/pull/8496).
+- launchd - correctly report changed status in check mode (https://github.com/ansible-collections/community.general/pull/8406).
+- opennebula inventory plugin - fix invalid reference to IP when inventory runs against NICs with no IPv4 address (https://github.com/ansible-collections/community.general/pull/8489).
+- opentelemetry callback - do not save the JSON response when using the ``ansible.builtin.uri`` module (https://github.com/ansible-collections/community.general/pull/8430).
+- opentelemetry callback - do not save the content response when using the ``ansible.builtin.slurp`` module (https://github.com/ansible-collections/community.general/pull/8430).
+- pacman - do not fail if an empty list of packages has been provided and there is nothing to do (https://github.com/ansible-collections/community.general/pull/8514).
+
+Known Issues
+------------
+
+- homectl - the module does not work under Python 3.13 or newer, since it relies on the removed ``crypt`` standard library module (https://github.com/ansible-collections/community.general/issues/4691, https://github.com/ansible-collections/community.general/pull/8497).
+- udm_user - the module does not work under Python 3.13 or newer, since it relies on the removed ``crypt`` standard library module (https://github.com/ansible-collections/community.general/issues/4690, https://github.com/ansible-collections/community.general/pull/8497).
+
+New Plugins
+-----------
+
+Filter
+~~~~~~
+
+- community.general.keep_keys - Keep specific keys from dictionaries in a list.
+- community.general.remove_keys - Remove specific keys from dictionaries in a list.
+- community.general.replace_keys - Replace specific keys in a list of dictionaries.
+
+New Modules
+-----------
+
+- community.general.consul_agent_check - Add, modify, and delete checks within a consul cluster.
+- community.general.consul_agent_service - Add, modify and delete services within a consul cluster.
+- community.general.django_check - Wrapper for C(django-admin check).
+- community.general.django_createcachetable - Wrapper for C(django-admin createcachetable).
+
v9.0.1
======
diff --git a/ansible_collections/community/general/FILES.json b/ansible_collections/community/general/FILES.json
index 87fdd5c9a..41f20623d 100644
--- a/ansible_collections/community/general/FILES.json
+++ b/ansible_collections/community/general/FILES.json
@@ -109,7 +109,7 @@
"name": ".azure-pipelines/azure-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48eb26e372a37363159b6a17ba403c830d01c2cb29c106bfa8e9748ad534ff50",
+ "chksum_sha256": "a79320cab46fce92c3205300c9f17c58bc8dacbb21868bebe83636d4a730e99d",
"format": 1
},
{
@@ -193,7 +193,7 @@
"name": ".github/BOTMETA.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95b932042b339875673fcbe218094b962576d3900511a041b9b7fb91a858e68c",
+ "chksum_sha256": "cc8b862bd68049694fb8b89ff204b657855e0a8f80ea78a643647d19ed4997f3",
"format": 1
},
{
@@ -312,7 +312,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0014fa74832def0e1d47b994b7aee9fbe393a30553ac051acc1f9a2ac886be6a",
+ "chksum_sha256": "7079dd86212c9c1ba677b86a9b240c210f0f9d591eba846bd2b871050032cbf0",
"format": 1
},
{
@@ -368,14 +368,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd2eef902441d9c71b84d997508e78803e648c534e75c8b12323e199eeca81d6",
+ "chksum_sha256": "1cf297e6880eb27f8fd1da74208b089d70647a972fb8357f613e58bf40334fcd",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-001_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "663a3b0429cd096b10b06038a561f01872a34bc0e611f535abc9e626a474b6a9",
+ "chksum_sha256": "3cdee8eb1544ce35baf81583ab7e7c722dbe81e5643c3013d2739f22919581c0",
"format": 1
},
{
@@ -389,14 +389,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd2eef902441d9c71b84d997508e78803e648c534e75c8b12323e199eeca81d6",
+ "chksum_sha256": "1cf297e6880eb27f8fd1da74208b089d70647a972fb8357f613e58bf40334fcd",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-002_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "852a8518e33c1d8a2144f1835f6a10b0c17fcc13850cf45475ce538a4312171e",
+ "chksum_sha256": "39da0ce92bf01050b04f3aafecad1870fba5b6970e7301a618f9445d885ddf94",
"format": 1
},
{
@@ -410,14 +410,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-003_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73003165935630df144177b3cbb78954f7eeaccc3031d694a1e1c2f8b365d99d",
+ "chksum_sha256": "2f7de68a5c297dc9bec3127737002cf214052069d709b77412df2c42eb03d3bb",
"format": 1
},
{
@@ -431,14 +431,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-004_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b04d243241c56ad51cdee8c34a6475af76482801a74a55a269a296122c3be44",
+ "chksum_sha256": "a260b433df2d9d689bf5cd61141e91a40f86677c944f8ad75e85da0c1fb02a31",
"format": 1
},
{
@@ -452,14 +452,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-005_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4541eb704de6d64c4c34c12cc22328e19016c66811e99af22b4be27053d9526",
+ "chksum_sha256": "2a198e5156b90d540962bf0adfffdb445294449e815cf2c0471efcafdfd3a996",
"format": 1
},
{
@@ -473,14 +473,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-006_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "205959e977ba30d49bdfd879f4b7bb2f50b50762361d89b2360cbb419a6af931",
+ "chksum_sha256": "577f0bd0c766630aa359f0453691007d9a9d3a0f4535b9b9b161ce407bff2d6d",
"format": 1
},
{
@@ -494,14 +494,14 @@
"name": "docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-007_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5517facd2643e52b06c5181df7768b096b2916ac411f7403484f0ad1b7d8ee1",
+ "chksum_sha256": "41b95615fb4eca9d5beab31ea436ca9875a2b2d109dc036919cd68388fc64b17",
"format": 1
},
{
@@ -515,119 +515,154 @@
"name": "docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-008_vars/list3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a833536f106aebd7a5f737743f96c791302260e72f9393be53bdda0a86a10c9a",
+ "chksum_sha256": "9b2d102869f86a54b1ad7c5b38bdd6515aa3b7e444747069dce98d32fa12f640",
+ "format": 1
+ },
+ {
+ "name": "docs/docsite/helper/lists_mergeby/example-009_vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/docsite/helper/lists_mergeby/example-009_vars/default-common.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1cf297e6880eb27f8fd1da74208b089d70647a972fb8357f613e58bf40334fcd",
+ "format": 1
+ },
+ {
+ "name": "docs/docsite/helper/lists_mergeby/example-009_vars/list3.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4523d397880b12fc1b8ec3c85403c30cf59e95fdfafc9b40e4e3279856f3fa5a",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/default-common.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd2eef902441d9c71b84d997508e78803e648c534e75c8b12323e199eeca81d6",
+ "chksum_sha256": "1cf297e6880eb27f8fd1da74208b089d70647a972fb8357f613e58bf40334fcd",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/default-recursive-true.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce747f67c1635a6dfd887c7ebf3ee02c2014b9eced5773b08d747a11fd916a95",
+ "chksum_sha256": "40233cee2ac468835816e1e05b898c6c2444c89dafeb81f5aac16a26dc417734",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-001.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec664dbed63f2d8f9d7173a44f6f53b8aac3917e4c152082157ae06b2b64b717",
+ "chksum_sha256": "b7f83ae4eda65288a5c7b1e448d3f379ac69f5c40c0bbb2b180d0ad71b33ac3b",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-002.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "174db79b357280e60b4b37b96e77708148894d4d81571fa10d65d9846bbcf365",
+ "chksum_sha256": "c5abd9609513167ba5a0df49223fb9269e288d3ecd597004a6ae066179c6d368",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-003.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fac7cdbb3fc0b3addddb5ffaa58fcbf241df07195b41ad011f2550df1281fc7",
+ "chksum_sha256": "211edd2c682dcee996f4211f8c832896401ef0dc76f38f6e91aef6c5789d7721",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-004.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7781daf8df6f6e6403fc8bd8ba40a6a5515e24b1ffa96f85b4d3cb2e23d926e",
+ "chksum_sha256": "05090716aaf46cb3ed48035d83482aec11ff0b3c30cc850f30fb5320844c8877",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-005.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c02a2a83fc72b27668c5ce96e0c3feb466ea89047f5fa8bc961260bce0aa97b5",
+ "chksum_sha256": "6dc6b4160e81b1d49936db28762428d182da87441fa66817146daa4b747998b2",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-006.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9eda979168ded0b39e43a4ae0e02c38cdccecc236604b61cbf80069869e762e3",
+ "chksum_sha256": "03adfa30300af062e28fb963bc2472cabf4102dce03e088470727ee35df488db",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-007.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78a002ab23ee4a16c60322ce988af490a5d900131fb621ada85541afee932fdd",
+ "chksum_sha256": "c5ab5b8f580cd9bca71c93f2e0581593e170357d4a677fafefa10023524f6bd5",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/example-008.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "11c87a5bd327951a21baaf28e47d11c0c07e8a12cdcc5326fcd7f8b0d5217d56",
+ "chksum_sha256": "ac56a41a9e1bcdef1c80c590484f9081ec7f095106df18777a56e506a3a26970",
+ "format": 1
+ },
+ {
+ "name": "docs/docsite/helper/lists_mergeby/example-009.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1cb469396a08823b54768d987f12bf94414e82f218c3923d9af1c421833c09e1",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/examples.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe278ca276168ebfc2167cf5ad2e2d4b3cae0d6cdd246326b11620db259e2eb3",
+ "chksum_sha256": "f22163a6554b36a7df5d626e0061f3c4b8d83d4fe7f6c64b3f3ff3373598bb92",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/examples_all.rst.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31f8a0a79c3aebf801b74e0cc7587ea7486fc221a7ab948701211f0178310ace",
+ "chksum_sha256": "55874c71532504a32b927c036756f8e32a8dc6695f34c4451e91defa6c62b316",
+ "format": 1
+ },
+ {
+ "name": "docs/docsite/helper/lists_mergeby/extra-vars.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3cc1cfbd48140a9368dc9583683da172197ce124e3e1e44a75bb50e524945729",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "27fd7341d931b081d0caa6d9dbeacee7cd788bb5418019fb70e5794293f15582",
+ "chksum_sha256": "1f30ab3058307c531437e39abb3aaa7b0258b8a1537adcb82844f9b51d5bc1fc",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/list3.out.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d228735e576466950b3a90b1de44b6a503de957521b3db41a492d742695d8d71",
+ "chksum_sha256": "e3a21ec55be646c12180396c2311c85d341ab5f75bf8a16ea09fe3c214b47da9",
"format": 1
},
{
"name": "docs/docsite/helper/lists_mergeby/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "825f6e9f15549cd02a8fa1cf5a6b6bdbc1dc39ff80ce59f5014af8c0358bee58",
+ "chksum_sha256": "840405c1b7fc7cd852404e9ee35337626f97cd6843457cc7ad5a4de50902a707",
"format": 1
},
{
@@ -683,7 +718,7 @@
"name": "docs/docsite/rst/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0a81486d2b415efa2e2f8576b3aadb98125e74b10b2113f13f6dcf1e652faed",
+ "chksum_sha256": "6b7a3ec73c58b87695919903c13805db6bac8c2b44d0a916f432651ac1e4ee58",
"format": 1
},
{
@@ -743,6 +778,13 @@
"format": 1
},
{
+ "name": "docs/docsite/rst/guide_deps.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "df07337b3596ae5246294cceecf12ca46287436ad2e381a7e7d25a84ed33e9bd",
+ "format": 1
+ },
+ {
"name": "docs/docsite/rst/guide_online.rst",
"ftype": "file",
"chksum_type": "sha256",
@@ -764,6 +806,13 @@
"format": 1
},
{
+ "name": "docs/docsite/rst/guide_vardict.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eabc2712be99330fba6016b38ef1410427b115631ff10bad98922c1c0dc1705a",
+ "format": 1
+ },
+ {
"name": "docs/docsite/rst/test_guide.rst",
"ftype": "file",
"chksum_type": "sha256",
@@ -781,7 +830,7 @@
"name": "docs/docsite/extra-docs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3f9cbc513d494da6c34cf6db49319f34645a8dd1ee79736360c0e48643e76af",
+ "chksum_sha256": "3099ca216ce7a424bef49f96819691f5c0411bf838ebeb814f3fd2c0ef8c8eeb",
"format": 1
},
{
@@ -802,7 +851,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bdd7473e27902ea009dac03f604dd957a93413ee0275b561aff956ff26caf391",
+ "chksum_sha256": "6934d562c2c37a3ffb229276c9bfbaf2f869a1d9e0538072b004885e09c0b13f",
"format": 1
},
{
@@ -1075,7 +1124,7 @@
"name": "plugins/callback/opentelemetry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6eb005c02c46afa1b07d7d7861b54496d30ced27113f10b156719b3f151b3014",
+ "chksum_sha256": "f53258ebf00c73a1ac7908a1e963b100673c586e1f68da40592caf0af78bdb10",
"format": 1
},
{
@@ -1278,7 +1327,7 @@
"name": "plugins/doc_fragments/django.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0836d2f746a8b62a1408455fdd54bcbd8f41342eb2f07a681b378b0efef5f3c8",
+ "chksum_sha256": "e30b30e859e4c2861828eee9c1b35b54c535ef2aa0ac77ba12cfcf1611ab9bb4",
"format": 1
},
{
@@ -1453,7 +1502,7 @@
"name": "plugins/doc_fragments/proxmox.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "daa7d15e8a1ffc6e4c539712099537ab9c0f667684d2bcf4ba89e196caf9e460",
+ "chksum_sha256": "b64edf0d5addde754b70fd42e65b304913c0ff2e45004853d04d0b2bd5adbbca",
"format": 1
},
{
@@ -1604,6 +1653,13 @@
"format": 1
},
{
+ "name": "plugins/filter/keep_keys.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f4474076e83cfe8850482fbe29663644d0ca4b15f179a0a2646b9b92bbe7b824",
+ "format": 1
+ },
+ {
"name": "plugins/filter/lists.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1628,7 +1684,7 @@
"name": "plugins/filter/lists_mergeby.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4163df17512c4388bf020417cdd3f9919db9c6f1c23b2a57852bd10523a0abbe",
+ "chksum_sha256": "6ab23ff362b7940d0ec340eda1ef9a0d3262dc6894a9139fa1357ffdbb2674da",
"format": 1
},
{
@@ -1653,6 +1709,20 @@
"format": 1
},
{
+ "name": "plugins/filter/remove_keys.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d8c3d19a4129c1007a675dbb97f9f593aa410c35887891dfeb556e68a5aec62a",
+ "format": 1
+ },
+ {
+ "name": "plugins/filter/replace_keys.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c5ddedca5a0fa34413b894a9e40991aa91ba6cb112269fe3e4bf311c86dc99bf",
+ "format": 1
+ },
+ {
"name": "plugins/filter/time.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1803,7 +1873,7 @@
"name": "plugins/inventory/opennebula.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "954bb22612557fbe3be3510d3ebc7e230164cb3dcb689345a287270b85234970",
+ "chksum_sha256": "e3fd2fdd5abb43a0690b6b97daa79d45bfa334268e9d1051f11a5806b87956e4",
"format": 1
},
{
@@ -2342,14 +2412,14 @@
"name": "plugins/module_utils/cmd_runner.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b9f5d0e0ed818b1bdc80e8a6a4a858ba23f92bcaf6d5b224073b4e4a5de1e4c",
+ "chksum_sha256": "d627300d236348a1df761fcb8624c5d55f72c33938a08250097c7e28259294d8",
"format": 1
},
{
"name": "plugins/module_utils/consul.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e572ae8233e2f4a72bbdaba9d39f0e0d631ff4ccb3684a32f853d0b7b6581bd",
+ "chksum_sha256": "3f55f60552f8187b7db9c4ce3bc915cf6e278594f21c0e30c5024af736aff172",
"format": 1
},
{
@@ -2391,7 +2461,7 @@
"name": "plugins/module_utils/django.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78e63b0a1b6304d42171f878bcb50bfd1ff96bfead0ba41220c0b43b6ad1f7cb",
+ "chksum_sha256": "46b9d866dcc4432d3c7bf67b4e31037d1fdd064bf421359a1c1660bd4331bc32",
"format": 1
},
{
@@ -2587,7 +2657,7 @@
"name": "plugins/module_utils/proxmox.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "756e2b829ba10cf0f2d177b8729b92007ef23edaf1a708c064b1cbdb9b96b02c",
+ "chksum_sha256": "235bc199d79987aaa2d49984ad2b8140fbb58f3fa536109e82d660381316d57b",
"format": 1
},
{
@@ -2615,7 +2685,7 @@
"name": "plugins/module_utils/redfish_utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce4679e15df51fc1a102f5f6f8ef1cb8f8153cfb5a896786c5531829d76ce0e2",
+ "chksum_sha256": "43fc29e5b41bc436a7c1d2face5334cc3a8a79861defbcece2dc5156b78c0081",
"format": 1
},
{
@@ -2811,7 +2881,7 @@
"name": "plugins/modules/ansible_galaxy_install.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d6e02e1a7306d048c97e5e9f2e5553fe8ece50ded61a39f014af2fecd51da2a",
+ "chksum_sha256": "a482de0224539003f97a7cc019a7ec499c7fb3b40a95964f421c47f19d397935",
"format": 1
},
{
@@ -2986,7 +3056,7 @@
"name": "plugins/modules/cargo.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f937d2d6b2eb1d54baaaddc8aad6e45ff219c1762ea07d73e028b1030b47601",
+ "chksum_sha256": "59179736f71cff627a2c85788a5cd93bc0885347bfeb5037bfd1d4921ba20382",
"format": 1
},
{
@@ -3137,17 +3207,31 @@
"format": 1
},
{
+ "name": "plugins/modules/consul_agent_check.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4b47d0ab359a40730c169d01873d0eb1023ade6c0c42529910cb047ec0fb4be1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/consul_agent_service.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dc493896df6b2b2754a68c557e7902214acfc9a34fcdaea25dd754e90a17c6e0",
+ "format": 1
+ },
+ {
"name": "plugins/modules/consul_auth_method.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "776c407945f239f40ea36e24f42606d20141316ffa7f36e95cb3f00efa4c6bcb",
+ "chksum_sha256": "0ee3b66b9b873472ad25479c8809b34ea9da645e942efd602b9f0ff7fe6ccafd",
"format": 1
},
{
"name": "plugins/modules/consul_binding_rule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bfc78b6b53d3ad705f099e2ed04eaff75ab0d1ef3d823b3de58d6881efe2178d",
+ "chksum_sha256": "78859326d12520ddec8e4f3b324f6e7b8150e8252a0babd4dc7932ae29dad096",
"format": 1
},
{
@@ -3161,14 +3245,14 @@
"name": "plugins/modules/consul_policy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05fcd6bd9085bb781d7f9bdc8a808f1e63f493d018299ce3dcb6a8c7f89ae729",
+ "chksum_sha256": "3757983feca43c6da4a0cf08fc6dbbf71f6095bf7e16cf6f3698d47fe171cb5f",
"format": 1
},
{
"name": "plugins/modules/consul_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f18b68eab0c232b18fefe5d66164dfa49e4e9c11396616200e23b0774c98c5ae",
+ "chksum_sha256": "33f6b5f02c216241fef4dd98371c08737c1cb8094f058f4a08f71828d4eeed0b",
"format": 1
},
{
@@ -3182,7 +3266,7 @@
"name": "plugins/modules/consul_token.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8572e2337ce15303534844ed997859fc6e26b3b5537ea80fbea280f1bfa17419",
+ "chksum_sha256": "34b3c9e7bed0b8566edb79ccdd46f49c466f7c261e0b69c1b1d748dcbbe6ab86",
"format": 1
},
{
@@ -3270,6 +3354,13 @@
"format": 1
},
{
+ "name": "plugins/modules/django_check.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e028e8dd895fef7c8002e310ec5ac92c183c4993fc8d061c80fa0de916c057c",
+ "format": 1
+ },
+ {
"name": "plugins/modules/django_command.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3277,6 +3368,13 @@
"format": 1
},
{
+ "name": "plugins/modules/django_createcachetable.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "66f18cc86d806a4447888afd48e3e7d008933f0ee93b109c0fea62b5b8e1ec4a",
+ "format": 1
+ },
+ {
"name": "plugins/modules/django_manage.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -3441,7 +3539,7 @@
"name": "plugins/modules/git_config.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3764f124422fcd3be0c60b955a6286da22e458fff5023c57ba73d0e90d3842a7",
+ "chksum_sha256": "92c164fb3a497d2d0887b0f239657c66d82f1ecd22cf2950a01c2f2b5b35e174",
"format": 1
},
{
@@ -3707,7 +3805,7 @@
"name": "plugins/modules/homectl.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5af3533ea90dda0925e64715ea23b8aea4418020c0fb66115fa0595d452d749",
+ "chksum_sha256": "c8dac7831a418cc0385c4616484aefea40eddc91eef77f85404e58b3a6e47633",
"format": 1
},
{
@@ -4022,7 +4120,7 @@
"name": "plugins/modules/ipa_dnsrecord.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8ae9aa977adb6ec60404eb8bf40adbaa7b1ce897e6707769d5e90230b40f16c",
+ "chksum_sha256": "2f3e8e8a7e64541c19873a1fb534afb8295f163d9c2204e6687031f68675ef31",
"format": 1
},
{
@@ -4351,7 +4449,7 @@
"name": "plugins/modules/keycloak_client.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7870103accbfbd0204f1b7adeee0e20b9a6b8cddad0edda0bd31ec252572f186",
+ "chksum_sha256": "1200158abd819a25429fc2ad8c563623c117c668e4740dc9ffa68b22b1660abc",
"format": 1
},
{
@@ -4372,7 +4470,7 @@
"name": "plugins/modules/keycloak_clientscope.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c712e8e6d6b9d27297f7fbed371d7338aea6c92c6fc940fea1cf9e8c4b897c71",
+ "chksum_sha256": "3bf4a3c511dca811693ec895233193c633ead77585e3f3710844e7f3f6cb3510",
"format": 1
},
{
@@ -4505,7 +4603,7 @@
"name": "plugins/modules/launchd.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "239bdd1ceebfc51da46babbede85732efd5aca73719214225bd505c202cc4915",
+ "chksum_sha256": "0008d09fc7fb13a3bd959dbbc7f0089a22a8d6c5720fdf0fb69acee361cddd62",
"format": 1
},
{
@@ -5233,7 +5331,7 @@
"name": "plugins/modules/openbsd_pkg.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa5f9a76a91826cfcf340da018cee24dec149b5d858e9161023cf94e5867ac09",
+ "chksum_sha256": "eea55d4a493ae779edcf4fa9d6d9d401c502b9cf0acc000c9c4c85fafe78215a",
"format": 1
},
{
@@ -5338,7 +5436,7 @@
"name": "plugins/modules/pacman.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e92ff37a394cf3ee414ca9e55e928fae7d2875992630b8e80e7fad1c5d1273f",
+ "chksum_sha256": "77db65d60288cc9d30ab7fa148a1f94f969d61d6f5c23a34bf76fda8306fd478",
"format": 1
},
{
@@ -5597,7 +5695,7 @@
"name": "plugins/modules/proxmox_kvm.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "030cd0ca64eb1eac59e15bd62ce58239e6466b01391a6abc314feff1c8a32614",
+ "chksum_sha256": "afdba49a542d59df6afba627eb4a3ca187f9033070193b1db599667427102cfc",
"format": 1
},
{
@@ -5674,7 +5772,7 @@
"name": "plugins/modules/proxmox_vm_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9bb1491ad5da16bd483f659726e5d99a29ac784a23f615244165ad4d9bde0120",
+ "chksum_sha256": "84b5e54194e9b6f67c49b01b5e20b795bd67f3cd82adcece514d8256827bd2d0",
"format": 1
},
{
@@ -5730,7 +5828,7 @@
"name": "plugins/modules/redfish_command.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a06abd918e13f440dcd7bd66a1eae9d77a414aa80dc231eba9ddad2c3fc16d7",
+ "chksum_sha256": "c3beab5266697dca3d00e13de6760bbcb8ca48dc39d1425ab1da361ba39eb051",
"format": 1
},
{
@@ -5744,7 +5842,7 @@
"name": "plugins/modules/redfish_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f14d5645eb692b829ab83dac5e38eac9ea9fc7bae38f87ffd532ad2e8d49de5",
+ "chksum_sha256": "7a423fb3fd03d467af475d4823658c2ff3cba2f2c2a72ac63be4bca9292e5466",
"format": 1
},
{
@@ -5786,7 +5884,7 @@
"name": "plugins/modules/redis_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c544a02b0415efa329c84abc4119e0039991bfadcbdae32d14d484c7443e47a",
+ "chksum_sha256": "ace8770d46241e343ba03a55a42f56d674acc9f88e4f12e06fe130c81bb1da41",
"format": 1
},
{
@@ -6444,7 +6542,7 @@
"name": "plugins/modules/udm_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "108d20d909305308b8e5e7a6bf5e323f154db4c473c02e47734e8fb0c8bc7b07",
+ "chksum_sha256": "b061e35e6e8ea1970710888fe431a17dd3739b40e59e6342846e50dbdd9154ef",
"format": 1
},
{
@@ -6798,6 +6896,13 @@
"format": 1
},
{
+ "name": "plugins/plugin_utils/keys_filter.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "950a97d3f8fc8d57b0495b08dc40be2ad47e4436e65c2ac1384a8b656c07c2b0",
+ "format": 1
+ },
+ {
"name": "plugins/plugin_utils/unsafe.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -7130,7 +7235,7 @@
"name": "tests/integration/targets/ansible_galaxy_install/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db8a8fd9d62a035862276fb8ce6bae558ccfe9ee6a03a498c561272236301307",
+ "chksum_sha256": "f6896e1d1f1b563728905f203d6623fcc9ced037e88132af50457c368d750fb8",
"format": 1
},
{
@@ -7662,7 +7767,7 @@
"name": "tests/integration/targets/cargo/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5f66835ea6bd23ef85f61c95211e57f257d3f2ecab06d669afe311ba64045b3",
+ "chksum_sha256": "9370d274dfb0bec0b81f998ccc03441f09cbb731d9647e1810043a5857ff5c52",
"format": 1
},
{
@@ -7673,6 +7778,13 @@
"format": 1
},
{
+ "name": "tests/integration/targets/cargo/tasks/test_directory.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3aed8a73a3b83b455c377054f1198be385d5cfb5e055368be7222a70fc0f40a7",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/cargo/tasks/test_general.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -8051,6 +8163,20 @@
"format": 1
},
{
+ "name": "tests/integration/targets/consul/tasks/consul_agent_check.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ac1e5b0cf2a5483ba484c99016b34975759a6a8060a503603146d7c9be552f75",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/consul/tasks/consul_agent_service.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4f7deacb2c709ea660887c954c8005acc6f8163958cc71e9a5f5cec547784d33",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/consul/tasks/consul_auth_method.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -8110,7 +8236,7 @@
"name": "tests/integration/targets/consul/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bafc431979010557c06259d8b5cccc03f49d1ec2097a26118dd3a2091fd4cc7",
+ "chksum_sha256": "76dec6dcfe653795428e6eaa8f0eb4c8eccd6d04d7c3896b99362c386ac93ffc",
"format": 1
},
{
@@ -9297,6 +9423,55 @@
"format": 1
},
{
+ "name": "tests/integration/targets/filter_keep_keys",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_keep_keys/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_keep_keys/tasks/keep_keys.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1ece426f8edfd07e80705b0925b97d70c0b020dc7ed6c2bd02dc7ed69834a85f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_keep_keys/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "928ea7662292fac8633465e8405edf98c5e71c0c9de8b81d6cf1b33e7aabfe4c",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_keep_keys/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_keep_keys/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "da6753430b6dea343d2a605d0a7af54e6ac66640bdca97159417c54c75696be0",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_keep_keys/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96646c5ab7118c53b8b722f6abe91a7ed4d3eed9be401faefb399d6a8f427c2e",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/filter_lists",
"ftype": "dir",
"chksum_type": null,
@@ -9465,6 +9640,111 @@
"format": 1
},
{
+ "name": "tests/integration/targets/filter_remove_keys",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_remove_keys/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_remove_keys/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3815b9d04a798cc01c090d0494ca27544bec4a4a08bd1995fa8ff586e1201c40",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_remove_keys/tasks/remove_keys.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "430024a06df2b362cd3d5d658102a8389ddacc780613dde834d95c01cd5cff21",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_remove_keys/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_remove_keys/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "19b18860ad324c02cfb996c048e2516e53e7d6b84f13059abeff602e9802120c",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_remove_keys/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96646c5ab7118c53b8b722f6abe91a7ed4d3eed9be401faefb399d6a8f427c2e",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/tasks/fn-test-replace_keys.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b62bfcb4f9b61bc818b033993b111eb88196dbd8ddf6fa0942f93badf3274338",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1efdb33269dba21292e7b4525611ca41360277f60aaed0d476d3115b6b683c74",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/tasks/replace_keys.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4f92e48603760a77ca302371847e004bec26b1599f863fe7719d6bcfa37d8e33",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "12fb80e1cc0fed908d3bb9ae1bb3b1e1bbebeb7f2e259476f16cf4a75173cbc6",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/filter_replace_keys/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96646c5ab7118c53b8b722f6abe91a7ed4d3eed9be401faefb399d6a8f427c2e",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/filter_time",
"ftype": "dir",
"chksum_type": null,
@@ -10028,7 +10308,7 @@
"name": "tests/integration/targets/git_config/tasks/unset_value.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e31339e0f76aecbacc8839d3755b7fd21fa1882a8189d5cc79890e45d65e559c",
+ "chksum_sha256": "a8836b380c222836aea5c2aee38187580c031cbb7aec8d70fa55588f7a162f75",
"format": 1
},
{
@@ -13010,7 +13290,7 @@
"name": "tests/integration/targets/keycloak_client/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10f83e1b6754b9dbc932e5bc5e6dfd85d028dffb2f85c5f9023f197211ea85bc",
+ "chksum_sha256": "dac1226601e47b2dc8127eca264d249e57054828048d590f6e9a260ef9b675ec",
"format": 1
},
{
@@ -23237,7 +23517,7 @@
"name": "tests/unit/plugins/module_utils/test_cmd_runner.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e44ed79ba57b534227340b6e97849f84d20811597aaed62aa697d09ab6243f6",
+ "chksum_sha256": "98b8cc91992ff261d7bddcafc08f106a0ff38a17d6f45240f3387c4c019eecff",
"format": 1
},
{
@@ -26538,6 +26818,20 @@
"format": 1
},
{
+ "name": "tests/unit/plugins/modules/test_django_check.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "71fb52b56b5957aa54aebeb786ff74f396902effb388a202a86627e0cf0256d5",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/test_django_check.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7f3e625f4f6aec1e810913ecd137e8340c8d3c1f97dc2dc81bffa0c1600eed1d",
+ "format": 1
+ },
+ {
"name": "tests/unit/plugins/modules/test_django_command.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -26552,6 +26846,20 @@
"format": 1
},
{
+ "name": "tests/unit/plugins/modules/test_django_createcachetable.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b12c8d85f2466e732a6f05e80485a6105d26fdd5c67477e4e46031c43f46086b",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/test_django_createcachetable.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4eaa1b63defefb1bf8e4be46b98ce23ac97c49a01b06225bf1da803b3ebcce63",
+ "format": 1
+ },
+ {
"name": "tests/unit/plugins/modules/test_dnf_config_manager.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -27654,7 +27962,7 @@
"name": "CHANGELOG.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2fc9c5d7b462624c77da1320ae1acaa13e7e4f765f0fe3393278146104281693",
+ "chksum_sha256": "d4edd4ccef5e7c4b176e19e06f7deb76de1ce252c72ad839644ee37f0a181476",
"format": 1
},
{
@@ -27668,7 +27976,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28d86e5061f86ee8a49776681a2f660ee38c29d057b9c23c6d2ab473c0f218a5",
+ "chksum_sha256": "781bf561f0bec0521238cbf4b7481f2887d01fef494ed6ff450fae4daa870616",
"format": 1
},
{
diff --git a/ansible_collections/community/general/MANIFEST.json b/ansible_collections/community/general/MANIFEST.json
index ffbae1e0e..26506ad50 100644
--- a/ansible_collections/community/general/MANIFEST.json
+++ b/ansible_collections/community/general/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "general",
- "version": "9.0.1",
+ "version": "9.1.0",
"authors": [
"Ansible (https://github.com/ansible)"
],
@@ -23,7 +23,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be2f78923401a69f1bb6105804257106a4092634b2fa6a02058fea1e14de1bff",
+ "chksum_sha256": "98bfdff0974e81d3a63af8d9b29886515483d7536197a6f78e3c706272aba8f8",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/general/changelogs/changelog.yaml b/ansible_collections/community/general/changelogs/changelog.yaml
index 6215a89ce..609989343 100644
--- a/ansible_collections/community/general/changelogs/changelog.yaml
+++ b/ansible_collections/community/general/changelogs/changelog.yaml
@@ -794,3 +794,111 @@ releases:
- 8413-galaxy-refactor.yml
- 9.0.1.yml
release_date: '2024-05-27'
+ 9.1.0:
+ changes:
+ bugfixes:
+ - git_config - fix behavior of ``state=absent`` if ``value`` is present (https://github.com/ansible-collections/community.general/issues/8436,
+ https://github.com/ansible-collections/community.general/pull/8452).
+      - keycloak_clientscope - add normalizations for ``attributes`` and ``protocol_mappers``
+        (https://github.com/ansible-collections/community.general/pull/8496).
+      - launchd - correctly report changed status in check mode (https://github.com/ansible-collections/community.general/pull/8406).
+ - opennebula inventory plugin - fix invalid reference to IP when inventory runs
+ against NICs with no IPv4 address (https://github.com/ansible-collections/community.general/pull/8489).
+ - opentelemetry callback - do not save the JSON response when using the ``ansible.builtin.uri``
+ module (https://github.com/ansible-collections/community.general/pull/8430).
+ - opentelemetry callback - do not save the content response when using the ``ansible.builtin.slurp``
+ module (https://github.com/ansible-collections/community.general/pull/8430).
+      - pacman - do not fail if an empty list of packages has been provided and there
+ is nothing to do (https://github.com/ansible-collections/community.general/pull/8514).
+ deprecated_features:
+ - CmdRunner module util - setting the value of the ``ignore_none`` parameter
+ within a ``CmdRunner`` context is deprecated and that feature should be removed
+ in community.general 12.0.0 (https://github.com/ansible-collections/community.general/pull/8479).
+ - git_config - the ``list_all`` option has been deprecated and will be removed
+ in community.general 11.0.0. Use the ``community.general.git_config_info``
+ module instead (https://github.com/ansible-collections/community.general/pull/8453).
+ - git_config - using ``state=present`` without providing ``value`` is deprecated
+ and will be disallowed in community.general 11.0.0. Use the ``community.general.git_config_info``
+ module instead to read a value (https://github.com/ansible-collections/community.general/pull/8453).
+ known_issues:
+ - homectl - the module does not work under Python 3.13 or newer, since it relies
+ on the removed ``crypt`` standard library module (https://github.com/ansible-collections/community.general/issues/4691,
+ https://github.com/ansible-collections/community.general/pull/8497).
+ - udm_user - the module does not work under Python 3.13 or newer, since it relies
+ on the removed ``crypt`` standard library module (https://github.com/ansible-collections/community.general/issues/4690,
+ https://github.com/ansible-collections/community.general/pull/8497).
+ minor_changes:
+ - CmdRunner module util - argument formats can be specified as plain functions
+ without calling ``cmd_runner_fmt.as_func()`` (https://github.com/ansible-collections/community.general/pull/8479).
+ - ansible_galaxy_install - add upgrade feature (https://github.com/ansible-collections/community.general/pull/8431,
+ https://github.com/ansible-collections/community.general/issues/8351).
+ - cargo - add option ``directory``, which allows source directory to be specified
+ (https://github.com/ansible-collections/community.general/pull/8480).
+ - cmd_runner module utils - add decorator ``cmd_runner_fmt.stack`` (https://github.com/ansible-collections/community.general/pull/8415).
+ - cmd_runner_fmt module utils - simplify implementation of ``cmd_runner_fmt.as_bool_not()``
+ (https://github.com/ansible-collections/community.general/pull/8512).
+ - ipa_dnsrecord - adds ``SSHFP`` record type for managing SSH fingerprints in
+ FreeIPA DNS (https://github.com/ansible-collections/community.general/pull/8404).
+ - keycloak_client - assign auth flow by name (https://github.com/ansible-collections/community.general/pull/8428).
+ - openbsd_pkg - adds diff support to show changes in installed package list.
+ This does not yet work for check mode (https://github.com/ansible-collections/community.general/pull/8402).
+ - proxmox - allow specification of the API port when using proxmox_* (https://github.com/ansible-collections/community.general/issues/8440,
+ https://github.com/ansible-collections/community.general/pull/8441).
+ - proxmox_vm_info - add ``network`` option to retrieve current network information
+ (https://github.com/ansible-collections/community.general/pull/8471).
+ - redfish_command - add ``wait`` and ``wait_timeout`` options to allow a user
+ to block a command until a service is accessible after performing the requested
+ command (https://github.com/ansible-collections/community.general/issues/8051,
+ https://github.com/ansible-collections/community.general/pull/8434).
+ - redfish_info - add command ``CheckAvailability`` to check if a service is
+ accessible (https://github.com/ansible-collections/community.general/issues/8051,
+ https://github.com/ansible-collections/community.general/pull/8434).
+ - redis_info - adds support for getting cluster info (https://github.com/ansible-collections/community.general/pull/8464).
+ release_summary: Regular feature and bugfix release.
+ fragments:
+ - 8051-Redfish-Wait-For-Service.yml
+ - 8402-add-diif-mode-openbsd-pkg.yml
+ - 8404-ipa_dnsrecord_sshfp.yml
+ - 8415-cmd-runner-stack.yml
+ - 8428-assign-auth-flow-by-name-keycloak-client.yaml
+ - 8430-fix-opentelemetry-when-using-logs-with-uri-or-slurp-tasks.yaml
+ - 8431-galaxy-upgrade.yml
+ - 8440-allow-api-port-specification.yaml
+ - 8452-git_config-absent.yml
+ - 8453-git_config-deprecate-read.yml
+ - 8464-redis-add-cluster-info.yml
+ - 8471-proxmox-vm-info-network.yml
+ - 8476-launchd-check-mode-changed.yaml
+ - 8479-cmdrunner-improvements.yml
+ - 8480-directory-feature-cargo.yml
+ - 8489-fix-opennebula-inventory-crash-when-nic-has-no-ip.yml
+ - 8496-keycloak_clientscope-add-normalizations.yaml
+ - 8497-crypt.yml
+ - 8512-as-bool-not.yml
+ - 8514-pacman-empty.yml
+ - 9.1.0.yml
+ modules:
+ - description: Add, modify, and delete checks within a consul cluster.
+ name: consul_agent_check
+ namespace: ''
+ - description: Add, modify and delete services within a consul cluster.
+ name: consul_agent_service
+ namespace: ''
+ - description: Wrapper for C(django-admin check).
+ name: django_check
+ namespace: ''
+ - description: Wrapper for C(django-admin createcachetable).
+ name: django_createcachetable
+ namespace: ''
+ plugins:
+ filter:
+ - description: Keep specific keys from dictionaries in a list.
+ name: keep_keys
+ namespace: null
+ - description: Remove specific keys from dictionaries in a list.
+ name: remove_keys
+ namespace: null
+ - description: Replace specific keys in a list of dictionaries.
+ name: replace_keys
+ namespace: null
+ release_date: '2024-06-17'
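Editor's note on the three new keys filters named in the plugins list above (keep_keys, remove_keys, replace_keys): all three operate on a list of dictionaries. The following minimal sketch is an assumption made for orientation only — the sample data, task names, and the use of a ``target`` parameter reflect this editor's reading of the new plugin files and are not taken verbatim from the collection's documentation.

    # Hedged sketch, assuming community.general >= 9.1.0 is installed and that
    # each filter accepts a 'target' parameter; verify against the filter docs.
    - hosts: localhost
      gather_facts: false
      vars:
        devices:
          - {name: foo, path: /foo, extra: true}
          - {name: bar, path: /bar, extra: false}
      tasks:
        - name: Keep only the name and path keys in every item
          ansible.builtin.debug:
            msg: "{{ devices | community.general.keep_keys(target=['name', 'path']) }}"

        - name: Remove the extra key from every item
          ansible.builtin.debug:
            msg: "{{ devices | community.general.remove_keys(target=['extra']) }}"

        - name: Rename path to location in every item
          ansible.builtin.debug:
            msg: "{{ devices | community.general.replace_keys(target=[{'before': 'path', 'after': 'location'}]) }}"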
diff --git a/ansible_collections/community/general/docs/docsite/extra-docs.yml b/ansible_collections/community/general/docs/docsite/extra-docs.yml
index 529573606..3bed9e35f 100644
--- a/ansible_collections/community/general/docs/docsite/extra-docs.yml
+++ b/ansible_collections/community/general/docs/docsite/extra-docs.yml
@@ -14,3 +14,7 @@ sections:
- guide_online
- guide_packet
- guide_scaleway
+ - title: Developer Guides
+ toctree:
+ - guide_deps
+ - guide_vardict
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-common.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-common.yml
index fd874e5c9..4431fe27d 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-common.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-common.yml
@@ -2,17 +2,11 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- - name: foo
- extra: true
- - name: bar
- extra: false
- - name: meh
- extra: true
+ - {name: foo, extra: true}
+ - {name: bar, extra: false}
+ - {name: meh, extra: true}
list2:
- - name: foo
- path: /foo
- - name: baz
- path: /baz
+ - {name: foo, path: /foo}
+ - {name: baz, path: /baz}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001.yml
index 0cf6a9b8a..c27b019e5 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001.yml
@@ -8,7 +8,7 @@
dir: example-001_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-001.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
index fd874e5c9..4431fe27d 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
@@ -2,17 +2,11 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- - name: foo
- extra: true
- - name: bar
- extra: false
- - name: meh
- extra: true
+ - {name: foo, extra: true}
+ - {name: bar, extra: false}
+ - {name: meh, extra: true}
list2:
- - name: foo
- path: /foo
- - name: baz
- path: /baz
+ - {name: foo, path: /foo}
+ - {name: baz, path: /baz}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/list3.yml
index 0604feccb..8bd8bc8f2 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/list3.yml
@@ -2,6 +2,5 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ list1|
+list3: "{{ list1 |
community.general.lists_mergeby(list2, 'name') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002.yml
index 5e6e0315d..e164db125 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002.yml
@@ -8,7 +8,7 @@
dir: example-002_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-002.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
index fd874e5c9..4431fe27d 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
@@ -2,17 +2,11 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- - name: foo
- extra: true
- - name: bar
- extra: false
- - name: meh
- extra: true
+ - {name: foo, extra: true}
+ - {name: bar, extra: false}
+ - {name: meh, extra: true}
list2:
- - name: foo
- path: /foo
- - name: baz
- path: /baz
+ - {name: foo, path: /foo}
+ - {name: baz, path: /baz}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/list3.yml
index 8ad752407..be6cfcbf3 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/list3.yml
@@ -2,6 +2,5 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003.yml
index 2f93ab8a2..cbc5e43a5 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003.yml
@@ -8,7 +8,7 @@
dir: example-003_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-003.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/list3.yml
index d5374eece..2eff5df41 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/list3.yml
@@ -2,7 +2,6 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true) }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004.yml
index 3ef067faf..68e77dea8 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004.yml
@@ -8,7 +8,7 @@
dir: example-004_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-004.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/list3.yml
index a054ea1e7..94c8ceed3 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/list3.yml
@@ -2,8 +2,7 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='keep') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005.yml
index 57e7a779d..b7b81de29 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005.yml
@@ -8,7 +8,7 @@
dir: example-005_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-005.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/list3.yml
index 3480bf658..f0d7751f2 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/list3.yml
@@ -2,8 +2,7 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='append') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006.yml
index 41fc88e49..1be3becbc 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006.yml
@@ -8,7 +8,7 @@
dir: example-006_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-006.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/list3.yml
index 97513b559..f555c8dcb 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/list3.yml
@@ -2,8 +2,7 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007.yml
index 3de715844..8a596ea68 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007.yml
@@ -8,7 +8,7 @@
dir: example-007_vars
- debug:
var: list3
- when: debug|d(false)|bool
+    when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-007.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/list3.yml
index cb51653b4..d8ad16cf4 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/list3.yml
@@ -2,8 +2,7 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='append_rp') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008.yml
index e33828bf9..6d5c03bc6 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008.yml
@@ -8,7 +8,7 @@
dir: example-008_vars
- debug:
var: list3
- when: debug|d(false)|bool
+ when: debug | d(false) | bool
- template:
src: list3.out.j2
dest: example-008.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
index 133c8f2ae..eb83ea82e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
@@ -2,14 +2,12 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
list1:
- name: myname01
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -18,7 +16,6 @@ list2:
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/list3.yml
index af7001fc4..b2051376e 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/list3.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/list3.yml
@@ -2,8 +2,7 @@
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
-
-list3: "{{ [list1, list2]|
+list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend_rp') }}"
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009.yml
new file mode 100644
index 000000000..beef5d356
--- /dev/null
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009.yml
@@ -0,0 +1,14 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: 9. Merge single list by common attribute 'name'
+ include_vars:
+ dir: example-009_vars
+- debug:
+ var: list3
+ when: debug | d(false) | bool
+- template:
+ src: list3.out.j2
+ dest: example-009.out
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/default-common.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/default-common.yml
new file mode 100644
index 000000000..4431fe27d
--- /dev/null
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/default-common.yml
@@ -0,0 +1,12 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+list1:
+ - {name: foo, extra: true}
+ - {name: bar, extra: false}
+ - {name: meh, extra: true}
+
+list2:
+ - {name: foo, path: /foo}
+ - {name: baz, path: /baz}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/list3.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/list3.yml
new file mode 100644
index 000000000..1708e3baf
--- /dev/null
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-009_vars/list3.yml
@@ -0,0 +1,6 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+list3: "{{ [list1 + list2, []] |
+ community.general.lists_mergeby('name') }}"
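For orientation, the merged list that this new example-009 should render into example-009.out (assuming, as in the earlier examples' output, that lists_mergeby returns the items sorted by the merge attribute) would look roughly like the following; this is an illustrative expectation, not the committed output file:

    list3:
      - {extra: false, name: bar}
      - {name: baz, path: /baz}
      - {extra: true, name: foo, path: /foo}
      - {extra: true, name: meh}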
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples.yml
index 83b985084..34ad2d155 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples.yml
@@ -4,51 +4,75 @@
# SPDX-License-Identifier: GPL-3.0-or-later
examples:
- - label: 'In the example below the lists are merged by the attribute ``name``:'
+ - title: Two lists
+ description: 'In the example below the lists are merged by the attribute ``name``:'
file: example-001_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-001.out
lang: 'yaml'
- - label: 'It is possible to use a list of lists as an input of the filter:'
+ - title: List of two lists
+ description: 'It is possible to use a list of lists as an input of the filter:'
file: example-002_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces the same result as in the previous example:'
+ - title:
+ description: 'This produces the same result as in the previous example:'
file: example-002.out
lang: 'yaml'
- - label: 'Example ``list_merge=replace`` (default):'
+ - title: Single list
+ description: 'It is possible to merge a single list:'
+ file: example-009_vars/list3.yml
+ lang: 'yaml+jinja'
+ - title:
+ description: 'This produces the same result as in the previous example:'
+ file: example-009.out
+ lang: 'yaml'
+ - title: list_merge=replace (default)
+ description: 'Example :ansopt:`community.general.lists_mergeby#filter:list_merge=replace` (default):'
file: example-003_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-003.out
lang: 'yaml'
- - label: 'Example ``list_merge=keep``:'
+ - title: list_merge=keep
+ description: 'Example :ansopt:`community.general.lists_mergeby#filter:list_merge=keep`:'
file: example-004_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-004.out
lang: 'yaml'
- - label: 'Example ``list_merge=append``:'
+ - title: list_merge=append
+ description: 'Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append`:'
file: example-005_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-005.out
lang: 'yaml'
- - label: 'Example ``list_merge=prepend``:'
+ - title: list_merge=prepend
+ description: 'Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend`:'
file: example-006_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-006.out
lang: 'yaml'
- - label: 'Example ``list_merge=append_rp``:'
+ - title: list_merge=append_rp
+ description: 'Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append_rp`:'
file: example-007_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-007.out
lang: 'yaml'
- - label: 'Example ``list_merge=prepend_rp``:'
+ - title: list_merge=prepend_rp
+ description: 'Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend_rp`:'
file: example-008_vars/list3.yml
lang: 'yaml+jinja'
- - label: 'This produces:'
+ - title:
+ description: 'This produces:'
file: example-008.out
lang: 'yaml'
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples_all.rst.j2 b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples_all.rst.j2
index 95a0fafdd..88098683b 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples_all.rst.j2
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/examples_all.rst.j2
@@ -4,10 +4,10 @@
SPDX-License-Identifier: GPL-3.0-or-later
{% for i in examples %}
-{{ i.label }}
+{{ i.description }}
.. code-block:: {{ i.lang }}
- {{ lookup('file', i.file)|indent(2) }}
+ {{ lookup('file', i.file) | split('\n') | reject('match', '^(#|---)') | join ('\n') | indent(2) }}
{% endfor %}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/extra-vars.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/extra-vars.yml
new file mode 100644
index 000000000..0482c7ff2
--- /dev/null
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/extra-vars.yml
@@ -0,0 +1,7 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+examples_one: true
+examples_all: true
+merging_lists_of_dictionaries: true
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2 b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2
index 71d0d5da6..ad74161dc 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2
@@ -6,57 +6,69 @@
Merging lists of dictionaries
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the ``lists_mergeby`` filter.
+If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the :ansplugin:`community.general.lists_mergeby <community.general.lists_mergeby#filter>` filter.
-.. note:: The output of the examples in this section use the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
+.. note:: The output of the examples in this section uses the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See the documentation for the :ansplugin:`community.general.yaml callback plugin <community.general.yaml#callback>`.
Let us use the lists below in the following examples:
.. code-block:: yaml
- {{ lookup('file', 'default-common.yml')|indent(2) }}
+ {{ lookup('file', 'default-common.yml') | split('\n') | reject('match', '^(#|---)') | join ('\n') | indent(2) }}
{% for i in examples[0:2] %}
-{{ i.label }}
+{% if i.title | d('', true) | length > 0 %}
+{{ i.title }}
+{{ "%s" % ('"' * i.title|length) }}
+{% endif %}
+{{ i.description }}
.. code-block:: {{ i.lang }}
- {{ lookup('file', i.file)|indent(2) }}
+ {{ lookup('file', i.file) | split('\n') | reject('match', '^(#|---)') | join ('\n') | indent(2) }}
{% endfor %}
.. versionadded:: 2.0.0
-{% for i in examples[2:4] %}
-{{ i.label }}
+{% for i in examples[2:6] %}
+{% if i.title | d('', true) | length > 0 %}
+{{ i.title }}
+{{ "%s" % ('"' * i.title|length) }}
+{% endif %}
+{{ i.description }}
.. code-block:: {{ i.lang }}
- {{ lookup('file', i.file)|indent(2) }}
+ {{ lookup('file', i.file) | split('\n') | reject('match', '^(#|---)') | join ('\n') | indent(2) }}
{% endfor %}
-The filter also accepts two optional parameters: ``recursive`` and ``list_merge``. These parameters are only supported when used with ansible-base 2.10 or ansible-core, but not with Ansible 2.9. This is available since community.general 4.4.0.
+The filter also accepts two optional parameters: :ansopt:`community.general.lists_mergeby#filter:recursive` and :ansopt:`community.general.lists_mergeby#filter:list_merge`. This is available since community.general 4.4.0.
**recursive**
- Is a boolean, default to ``False``. Should the ``community.general.lists_mergeby`` recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
+ Is a boolean, defaults to ``false``. Should the :ansplugin:`community.general.lists_mergeby#filter` filter recursively merge nested hashes. Note: It does not depend on the value of the ``hash_behaviour`` setting in ``ansible.cfg``.
**list_merge**
- Is a string, its possible values are ``replace`` (default), ``keep``, ``append``, ``prepend``, ``append_rp`` or ``prepend_rp``. It modifies the behaviour of ``community.general.lists_mergeby`` when the hashes to merge contain arrays/lists.
+ Is a string; its possible values are :ansval:`replace` (default), :ansval:`keep`, :ansval:`append`, :ansval:`prepend`, :ansval:`append_rp` or :ansval:`prepend_rp`. It modifies the behaviour of :ansplugin:`community.general.lists_mergeby#filter` when the hashes to merge contain arrays/lists.
-The examples below set ``recursive=true`` and display the differences among all six options of ``list_merge``. Functionality of the parameters is exactly the same as in the filter ``combine``. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
+The examples below set :ansopt:`community.general.lists_mergeby#filter:recursive=true` and display the differences among all six options of :ansopt:`community.general.lists_mergeby#filter:list_merge`. Functionality of the parameters is exactly the same as in the filter :ansplugin:`ansible.builtin.combine#filter`. See :ref:`Combining hashes/dictionaries <combine_filter>` to learn details about these options.
Let us use the lists below in the following examples
.. code-block:: yaml
- {{ lookup('file', 'default-recursive-true.yml')|indent(2) }}
+ {{ lookup('file', 'default-recursive-true.yml') | split('\n') | reject('match', '^(#|---)') | join ('\n') | indent(2) }}
-{% for i in examples[4:16] %}
-{{ i.label }}
+{% for i in examples[6:] %}
+{% if i.title | d('', true) | length > 0 %}
+{{ i.title }}
+{{ "%s" % ('"' * i.title|length) }}
+{% endif %}
+{{ i.description }}
.. code-block:: {{ i.lang }}
- {{ lookup('file', i.file)|indent(2) }}
+ {{ lookup('file', i.file) | split('\n') | reject('match', '^(#|---)') | join ('\n') | indent(2) }}
{% endfor %}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/list3.out.j2 b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/list3.out.j2
index b51f6b868..a30a5c4ab 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/list3.out.j2
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/list3.out.j2
@@ -4,4 +4,4 @@ GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://w
SPDX-License-Identifier: GPL-3.0-or-later
#}
list3:
-{{ list3|to_nice_yaml(indent=0) }}
+ {{ list3 | to_yaml(indent=2, sort_keys=false) | indent(2) }}
diff --git a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/playbook.yml b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/playbook.yml
index 793d23348..ab389fa12 100644
--- a/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/playbook.yml
+++ b/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/playbook.yml
@@ -5,7 +5,7 @@
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# 1) Run all examples and create example-XXX.out
-# shell> ansible-playbook playbook.yml -e examples=true
+# shell> ansible-playbook playbook.yml -e examples_one=true
#
# 2) Optionally, for testing, create examples_all.rst
# shell> ansible-playbook playbook.yml -e examples_all=true
@@ -45,18 +45,20 @@
tags: t007
- import_tasks: example-008.yml
tags: t008
- when: examples|d(false)|bool
+ - import_tasks: example-009.yml
+ tags: t009
+ when: examples_one | d(false) | bool
- block:
- include_vars: examples.yml
- template:
src: examples_all.rst.j2
dest: examples_all.rst
- when: examples_all|d(false)|bool
+ when: examples_all | d(false) | bool
- block:
- include_vars: examples.yml
- template:
src: filter_guide_abstract_informations_merging_lists_of_dictionaries.rst.j2
dest: filter_guide_abstract_informations_merging_lists_of_dictionaries.rst
- when: merging_lists_of_dictionaries|d(false)|bool
+ when: merging_lists_of_dictionaries | d(false) | bool
diff --git a/ansible_collections/community/general/docs/docsite/rst/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst b/ansible_collections/community/general/docs/docsite/rst/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst
index 06fa79d16..cafe04e5c 100644
--- a/ansible_collections/community/general/docs/docsite/rst/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst
+++ b/ansible_collections/community/general/docs/docsite/rst/filter_guide_abstract_informations_merging_lists_of_dictionaries.rst
@@ -6,33 +6,30 @@
Merging lists of dictionaries
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the :ansplugin:`community.general.lists_mergeby filter <community.general.lists_mergeby#filter>`.
+If you have two or more lists of dictionaries and want to combine them into a list of merged dictionaries, where the dictionaries are merged by an attribute, you can use the :ansplugin:`community.general.lists_mergeby <community.general.lists_mergeby#filter>` filter.
-.. note:: The output of the examples in this section use the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See :ref:`the documentation for the community.general.yaml callback plugin <ansible_collections.community.general.yaml_callback>`.
+.. note:: The output of the examples in this section uses the YAML callback plugin. Quoting: "Ansible output that can be quite a bit easier to read than the default JSON formatting." See the documentation for the :ansplugin:`community.general.yaml callback plugin <community.general.yaml#callback>`.
Let us use the lists below in the following examples:
.. code-block:: yaml
list1:
- - name: foo
- extra: true
- - name: bar
- extra: false
- - name: meh
- extra: true
+ - {name: foo, extra: true}
+ - {name: bar, extra: false}
+ - {name: meh, extra: true}
list2:
- - name: foo
- path: /foo
- - name: baz
- path: /baz
+ - {name: foo, path: /foo}
+ - {name: baz, path: /baz}
+Two lists
+"""""""""
In the example below the lists are merged by the attribute ``name``:
.. code-block:: yaml+jinja
- list3: "{{ list1|
+ list3: "{{ list1 |
community.general.lists_mergeby(list2, 'name') }}"
This produces:
@@ -40,24 +37,21 @@ This produces:
.. code-block:: yaml
list3:
- - extra: false
- name: bar
- - name: baz
- path: /baz
- - extra: true
- name: foo
- path: /foo
- - extra: true
- name: meh
+ - {name: bar, extra: false}
+ - {name: baz, path: /baz}
+ - {name: foo, extra: true, path: /foo}
+ - {name: meh, extra: true}
.. versionadded:: 2.0.0
+List of two lists
+"""""""""""""""""
It is possible to use a list of lists as an input of the filter:
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name') }}"
This produces the same result as in the previous example:
@@ -65,15 +59,29 @@ This produces the same result as in the previous example:
.. code-block:: yaml
list3:
- - extra: false
- name: bar
- - name: baz
- path: /baz
- - extra: true
- name: foo
- path: /foo
- - extra: true
- name: meh
+ - {name: bar, extra: false}
+ - {name: baz, path: /baz}
+ - {name: foo, extra: true, path: /foo}
+ - {name: meh, extra: true}
+
+Single list
+"""""""""""
+It is possible to merge a single list:
+
+.. code-block:: yaml+jinja
+
+ list3: "{{ [list1 + list2, []] |
+ community.general.lists_mergeby('name') }}"
+
+This produces the same result as in the previous example:
+
+.. code-block:: yaml
+
+ list3:
+ - {name: bar, extra: false}
+ - {name: baz, path: /baz}
+ - {name: foo, extra: true, path: /foo}
+ - {name: meh, extra: true}
The filter also accepts two optional parameters: :ansopt:`community.general.lists_mergeby#filter:recursive` and :ansopt:`community.general.lists_mergeby#filter:list_merge`. This is available since community.general 4.4.0.
@@ -95,8 +103,7 @@ Let us use the lists below in the following examples
param01:
x: default_value
y: default_value
- list:
- - default_value
+ list: [default_value]
- name: myname02
param01: [1, 1, 2, 3]
@@ -105,16 +112,17 @@ Let us use the lists below in the following examples
param01:
y: patch_value
z: patch_value
- list:
- - patch_value
+ list: [patch_value]
- name: myname02
- param01: [3, 4, 4, {key: value}]
+ param01: [3, 4, 4]
+list_merge=replace (default)
+""""""""""""""""""""""""""""
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=replace` (default):
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true) }}"
@@ -123,25 +131,22 @@ This produces:
.. code-block:: yaml
list3:
- - name: myname01
- param01:
- list:
- - patch_value
- x: default_value
- y: patch_value
- z: patch_value
- - name: myname02
- param01:
- - 3
- - 4
- - 4
- - key: value
+ - name: myname01
+ param01:
+ x: default_value
+ y: patch_value
+ list: [patch_value]
+ z: patch_value
+ - name: myname02
+ param01: [3, 4, 4]
+list_merge=keep
+"""""""""""""""
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=keep`:
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='keep') }}"
@@ -151,25 +156,22 @@ This produces:
.. code-block:: yaml
list3:
- - name: myname01
- param01:
- list:
- - default_value
- x: default_value
- y: patch_value
- z: patch_value
- - name: myname02
- param01:
- - 1
- - 1
- - 2
- - 3
+ - name: myname01
+ param01:
+ x: default_value
+ y: patch_value
+ list: [default_value]
+ z: patch_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+list_merge=append
+"""""""""""""""""
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append`:
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='append') }}"
@@ -179,30 +181,22 @@ This produces:
.. code-block:: yaml
list3:
- - name: myname01
- param01:
- list:
- - default_value
- - patch_value
- x: default_value
- y: patch_value
- z: patch_value
- - name: myname02
- param01:
- - 1
- - 1
- - 2
- - 3
- - 3
- - 4
- - 4
- - key: value
+ - name: myname01
+ param01:
+ x: default_value
+ y: patch_value
+ list: [default_value, patch_value]
+ z: patch_value
+ - name: myname02
+ param01: [1, 1, 2, 3, 3, 4, 4]
+list_merge=prepend
+""""""""""""""""""
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend`:
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend') }}"
@@ -212,30 +206,22 @@ This produces:
.. code-block:: yaml
list3:
- - name: myname01
- param01:
- list:
- - patch_value
- - default_value
- x: default_value
- y: patch_value
- z: patch_value
- - name: myname02
- param01:
- - 3
- - 4
- - 4
- - key: value
- - 1
- - 1
- - 2
- - 3
+ - name: myname01
+ param01:
+ x: default_value
+ y: patch_value
+ list: [patch_value, default_value]
+ z: patch_value
+ - name: myname02
+ param01: [3, 4, 4, 1, 1, 2, 3]
+list_merge=append_rp
+""""""""""""""""""""
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=append_rp`:
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='append_rp') }}"
@@ -245,29 +231,22 @@ This produces:
.. code-block:: yaml
list3:
- - name: myname01
- param01:
- list:
- - default_value
- - patch_value
- x: default_value
- y: patch_value
- z: patch_value
- - name: myname02
- param01:
- - 1
- - 1
- - 2
- - 3
- - 4
- - 4
- - key: value
+ - name: myname01
+ param01:
+ x: default_value
+ y: patch_value
+ list: [default_value, patch_value]
+ z: patch_value
+ - name: myname02
+ param01: [1, 1, 2, 3, 4, 4]
+list_merge=prepend_rp
+"""""""""""""""""""""
Example :ansopt:`community.general.lists_mergeby#filter:list_merge=prepend_rp`:
.. code-block:: yaml+jinja
- list3: "{{ [list1, list2]|
+ list3: "{{ [list1, list2] |
community.general.lists_mergeby('name',
recursive=true,
list_merge='prepend_rp') }}"
@@ -277,21 +256,12 @@ This produces:
.. code-block:: yaml
list3:
- - name: myname01
- param01:
- list:
- - patch_value
- - default_value
- x: default_value
- y: patch_value
- z: patch_value
- - name: myname02
- param01:
- - 3
- - 4
- - 4
- - key: value
- - 1
- - 1
- - 2
+ - name: myname01
+ param01:
+ x: default_value
+ y: patch_value
+ list: [patch_value, default_value]
+ z: patch_value
+ - name: myname02
+ param01: [3, 4, 4, 1, 1, 2]
diff --git a/ansible_collections/community/general/docs/docsite/rst/guide_deps.rst b/ansible_collections/community/general/docs/docsite/rst/guide_deps.rst
new file mode 100644
index 000000000..4c0c4687a
--- /dev/null
+++ b/ansible_collections/community/general/docs/docsite/rst/guide_deps.rst
@@ -0,0 +1,74 @@
+..
+ Copyright (c) Ansible Project
+ GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+ SPDX-License-Identifier: GPL-3.0-or-later
+
+.. _ansible_collections.community.general.docsite.guide_deps:
+
+``deps`` Guide
+==============
+
+
+Using ``deps``
+^^^^^^^^^^^^^^
+
+The ``ansible_collections.community.general.plugins.module_utils.deps`` module util simplifies
+the importing of code as described in :ref:`Importing and using shared code <shared_code>`.
+Please note that ``deps`` is meant to be used specifically with Ansible modules, and not with other types of plugins.
+
+The same example from the Developer Guide would become:
+
+.. code-block:: python
+
+ from ansible_collections.community.general.plugins.module_utils import deps
+
+ with deps.declare("foo"):
+ import foo
+
+Then in ``main()``, just after the argspec (or anywhere in the code, for that matter), do:
+
+.. code-block:: python
+
+ deps.validate(module) # assuming module is a valid AnsibleModule instance
+
+By default, ``deps`` will rely on ``ansible.module_utils.basic.missing_required_lib`` to generate
+a message about a failing import. That function accepts the parameters ``reason`` and ``url``,
+and so does ``deps``:
+
+.. code-block:: python
+
+ with deps.declare("foo", reason="foo is needed to properly bar", url="https://foo.bar.io"):
+ import foo
+
+If you would rather write a custom message instead of using ``missing_required_lib``, then do:
+
+.. code-block:: python
+
+ with deps.declare("foo", msg="Custom msg explaining why foo is needed"):
+ import foo
+
+``deps`` allows for multiple dependencies to be declared:
+
+.. code-block:: python
+
+ with deps.declare("foo"):
+ import foo
+
+ with deps.declare("bar"):
+ import bar
+
+ with deps.declare("doe"):
+ import doe
+
+By default, ``deps.validate()`` will check all the declared dependencies, but if so desired,
+they can be validated selectively by doing:
+
+.. code-block:: python
+
+ deps.validate(module, "foo") # only validates the "foo" dependency
+
+ deps.validate(module, "doe:bar") # only validates the "doe" and "bar" dependencies
+
+ deps.validate(module, "-doe:bar") # validates all dependencies except "doe" and "bar"
+
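+Putting it all together, a minimal module skeleton using ``deps`` could look like the sketch below.
+This is an illustration only: the library ``foo``, its import failure message, and the argument spec
+are hypothetical and not part of ``deps`` itself.
+
+.. code-block:: python
+
+   from ansible.module_utils.basic import AnsibleModule
+   from ansible_collections.community.general.plugins.module_utils import deps
+
+   with deps.declare("foo", reason="foo is needed to talk to the backend"):
+       import foo
+
+   module = AnsibleModule(argument_spec=dict(name=dict(type="str")))
+   deps.validate(module)  # fails the module if "foo" could not be imported
+   module.exit_json(changed=False, msg="foo imported successfully")
+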
+.. versionadded:: 6.1.0
diff --git a/ansible_collections/community/general/docs/docsite/rst/guide_vardict.rst b/ansible_collections/community/general/docs/docsite/rst/guide_vardict.rst
new file mode 100644
index 000000000..f65b09055
--- /dev/null
+++ b/ansible_collections/community/general/docs/docsite/rst/guide_vardict.rst
@@ -0,0 +1,176 @@
+..
+ Copyright (c) Ansible Project
+ GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+ SPDX-License-Identifier: GPL-3.0-or-later
+
+.. _ansible_collections.community.general.docsite.guide_vardict:
+
+VarDict Guide
+=============
+
+Introduction
+^^^^^^^^^^^^
+
+The ``ansible_collections.community.general.plugins.module_utils.vardict`` module util provides the
+``VarDict`` class to help manage the module variables. That class is a container for module variables,
+especially the ones for which the module must keep track of state changes, and the ones that should
+be published as return values.
+
+Each variable has extra behaviors controlled by associated metadata, simplifying the generation of
+output values from the module.
+
+Quickstart
+""""""""""
+
+The simplest way of using ``VarDict`` is:
+
+.. code-block:: python
+
+ from ansible_collections.community.general.plugins.module_utils.vardict import VarDict
+
+Then in ``main()``, or any other function called from there:
+
+.. code-block:: python
+
+ vars = VarDict()
+
+ # Next 3 statements are equivalent
+ vars.abc = 123
+ vars["abc"] = 123
+ vars.set("abc", 123)
+
+ vars.xyz = "bananas"
+ vars.ghi = False
+
+And by the time the module is about to exit:
+
+.. code-block:: python
+
+ results = vars.output()
+ module.exit_json(**results)
+
+That makes the return value of the module:
+
+.. code-block:: javascript
+
+ {
+ "abc": 123,
+ "xyz": "bananas",
+ "ghi": false
+ }
+
+Metadata
+""""""""
+
+The metadata values associated with each variable are:
+
+- ``output: bool`` - marks the variable for module output as a module return value.
+- ``fact: bool`` - marks the variable for module output as an Ansible fact.
+- ``verbosity: int`` - sets the minimum level of verbosity for which the variable will be included in the output.
+- ``change: bool`` - controls the detection of changes in the variable value.
+- ``initial_value: any`` - when using ``change`` and you need to forcefully set an initial value for the variable.
+- ``diff: bool`` - used along with ``change``, this generates an Ansible-style diff ``dict``.
+
+See the sections below for more details on how to use the metadata.
+
+
+Using VarDict
+^^^^^^^^^^^^^
+
+Basic Usage
+"""""""""""
+
+As shown above, variables can be accessed using the ``[]`` operator, as in a ``dict`` object,
+and also as an object attribute, such as ``vars.abc``. The form using the ``set()``
+method is special in the sense that you can use it to set metadata values:
+
+.. code-block:: python
+
+ vars.set("abc", 123, output=False)
+ vars.set("abc", 123, output=True, change=True)
+
+Another way to set metadata after the variables have been created is:
+
+.. code-block:: python
+
+ vars.set_meta("abc", output=False)
+ vars.set_meta("abc", output=True, change=True, diff=True)
+
+You can use either the operator or the attribute form to access the value of the variable. Other ways to
+access its value and its metadata are:
+
+.. code-block:: python
+
+ print("abc value = {0}".format(vars.var("abc")["value"])) # get the value
+ print("abc output? {0}".format(vars.get_meta("abc")["output"])) # get the metadata like this
+
+The names of methods, such as ``set``, ``get_meta``, and ``output``, amongst others, are reserved and
+cannot be used as variable names. If you try to use a reserved name, a ``ValueError`` exception
+is raised with the message "Name <var> is reserved".
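+
+For illustration, a minimal sketch (assuming the check is triggered when the variable is set):
+
+.. code-block:: python
+
+   vars = VarDict()
+   try:
+       vars.set("output", 123)  # "output" is a reserved name
+   except ValueError as exc:
+       print(exc)  # Name output is reserved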
+
+Generating output
+"""""""""""""""""
+
+By default, every variable created is enabled for output with the minimum verbosity set to zero; in
+other words, variables are always included in the output by default.
+
+You can control that when creating the variable for the first time or later in the code:
+
+.. code-block:: python
+
+ vars.set("internal", x + 4, output=False)
+ vars.set_meta("internal", output=False)
+
+You can also set the verbosity of a particular variable, for example:
+
+.. code-block:: python
+
+ vars.set("abc", x + 4)
+ vars.set("debug_x", x, verbosity=3)
+
+ results = vars.output(module._verbosity)
+ module.exit_json(**results)
+
+If the module was invoked with verbosity lower than 3, then the output will only contain
+the variable ``abc``. If running at higher verbosity, as in ``ansible-playbook -vvv``,
+then the output will also contain ``debug_x``.
+
+Generating facts is very similar to regular output, but variables are not marked as facts by default.
+
+.. code-block:: python
+
+ vars.set("modulefact", x + 4, fact=True)
+ vars.set("debugfact", x, fact=True, verbosity=3)
+
+ results = vars.output(module._verbosity)
+ results["ansible_facts"] = {"module_name": vars.facts(module._verbosity)}
+ module.exit_json(**results)
+
+Handling change
+"""""""""""""""
+
+You can use ``VarDict`` to determine whether variables have had their values changed.
+
+.. code-block:: python
+
+ vars.set("abc", 42, change=True)
+ vars.abc = 90
+
+ results = vars.output()
+ results["changed"] = vars.has_changed
+ module.exit_json(**results)
+
+If you track changes in variables, you may want to present the difference between their initial and final
+values. For that, use:
+
+.. code-block:: python
+
+ vars.set("abc", 42, change=True, diff=True)
+ vars.abc = 90
+
+ results = vars.output()
+ results["changed"] = vars.has_changed
+ results["diff"] = vars.diff()
+ module.exit_json(**results)
+
+.. versionadded:: 7.1.0
diff --git a/ansible_collections/community/general/meta/runtime.yml b/ansible_collections/community/general/meta/runtime.yml
index edeb53005..4f5007b4a 100644
--- a/ansible_collections/community/general/meta/runtime.yml
+++ b/ansible_collections/community/general/meta/runtime.yml
@@ -6,6 +6,8 @@
requires_ansible: '>=2.13.0'
action_groups:
consul:
+ - consul_agent_check
+ - consul_agent_service
- consul_auth_method
- consul_binding_rule
- consul_policy
diff --git a/ansible_collections/community/general/plugins/callback/opentelemetry.py b/ansible_collections/community/general/plugins/callback/opentelemetry.py
index 58cfa057b..c6e8a87c1 100644
--- a/ansible_collections/community/general/plugins/callback/opentelemetry.py
+++ b/ansible_collections/community/general/plugins/callback/opentelemetry.py
@@ -556,11 +556,19 @@ class CallbackModule(CallbackBase):
self.otel_exporter_otlp_traces_protocol = self.get_option('otel_exporter_otlp_traces_protocol')
- def dump_results(self, result):
+ def dump_results(self, task, result):
""" dump the results if disable_logs is not enabled """
if self.disable_logs:
return ""
- return self._dump_results(result._result)
+ # ansible.builtin.uri contains the response in the json field
+ save = dict(result._result)
+
+ if "json" in save and task.action in ("ansible.builtin.uri", "ansible.legacy.uri", "uri"):
+ save.pop("json")
+ # ansible.builtin.slurp contains the response in the content field
+ if "content" in save and task.action in ("ansible.builtin.slurp", "ansible.legacy.slurp", "slurp"):
+ save.pop("content")
+ return self._dump_results(save)
def v2_playbook_on_start(self, playbook):
self.ansible_playbook = basename(playbook._file_name)
@@ -611,7 +619,7 @@ class CallbackModule(CallbackBase):
self.tasks_data,
status,
result,
- self.dump_results(result)
+ self.dump_results(self.tasks_data[result._task._uuid], result)
)
def v2_runner_on_ok(self, result):
@@ -619,7 +627,7 @@ class CallbackModule(CallbackBase):
self.tasks_data,
'ok',
result,
- self.dump_results(result)
+ self.dump_results(self.tasks_data[result._task._uuid], result)
)
def v2_runner_on_skipped(self, result):
@@ -627,7 +635,7 @@ class CallbackModule(CallbackBase):
self.tasks_data,
'skipped',
result,
- self.dump_results(result)
+ self.dump_results(self.tasks_data[result._task._uuid], result)
)
def v2_playbook_on_include(self, included_file):
diff --git a/ansible_collections/community/general/plugins/doc_fragments/django.py b/ansible_collections/community/general/plugins/doc_fragments/django.py
index d92799937..f89ec9144 100644
--- a/ansible_collections/community/general/plugins/doc_fragments/django.py
+++ b/ansible_collections/community/general/plugins/doc_fragments/django.py
@@ -51,3 +51,12 @@ seealso:
Please make sure that you select the right version of Django in the version selector on that page.
link: https://docs.djangoproject.com/en/5.0/ref/django-admin/
'''
+
+ DATABASE = r'''
+options:
+ database:
+ description:
+ - Specify the database to be used.
+ type: str
+ default: default
+'''
diff --git a/ansible_collections/community/general/plugins/doc_fragments/proxmox.py b/ansible_collections/community/general/plugins/doc_fragments/proxmox.py
index cb533fefa..239dba06d 100644
--- a/ansible_collections/community/general/plugins/doc_fragments/proxmox.py
+++ b/ansible_collections/community/general/plugins/doc_fragments/proxmox.py
@@ -16,6 +16,13 @@ options:
- Specify the target host of the Proxmox VE cluster.
type: str
required: true
+ api_port:
+ description:
+ - Specify the target port of the Proxmox VE cluster.
+ - Uses the E(PROXMOX_PORT) environment variable if not specified.
+ type: int
+ required: false
+ version_added: 9.1.0
api_user:
description:
- Specify the user to authenticate with.
diff --git a/ansible_collections/community/general/plugins/filter/keep_keys.py b/ansible_collections/community/general/plugins/filter/keep_keys.py
new file mode 100644
index 000000000..dffccba35
--- /dev/null
+++ b/ansible_collections/community/general/plugins/filter/keep_keys.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
+# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ name: keep_keys
+ short_description: Keep specific keys from dictionaries in a list
+ version_added: "9.1.0"
+ author:
+ - Vladimir Botka (@vbotka)
+ - Felix Fontein (@felixfontein)
+ description: This filter keeps only specified keys from a provided list of dictionaries.
+ options:
+ _input:
+ description:
+ - A list of dictionaries.
+ - Top level keys must be strings.
+ type: list
+ elements: dictionary
+ required: true
+ target:
+ description:
+ - A single key or key pattern to keep, or a list of keys or key patterns to keep.
+ - If O(matching_parameter=regex) there must be exactly one pattern provided.
+ type: raw
+ required: true
+ matching_parameter:
+ description: Specify the matching option of target keys.
+ type: str
+ default: equal
+ choices:
+ equal: Matches keys of exactly one of the O(target) items.
+ starts_with: Matches keys that start with one of the O(target) items.
+ ends_with: Matches keys that end with one of the O(target) items.
+ regex:
+ - Matches keys that match the regular expression provided in O(target).
+ - In this case, O(target) must be a regex string or a list with a single regex string.
+'''
+
+EXAMPLES = '''
+ l:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+ # 1) By default match keys that equal any of the items in the target.
+ t: [k0_x0, k1_x1]
+ r: "{{ l | community.general.keep_keys(target=t) }}"
+
+ # 2) Match keys that start with any of the items in the target.
+ t: [k0, k1]
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='starts_with') }}"
+
+ # 3) Match keys that end with any of the items in the target.
+ t: [x0, x1]
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='ends_with') }}"
+
+ # 4) Match keys by the regex.
+ t: ['^.*[01]_x.*$']
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}"
+
+ # 5) Match keys by the regex.
+ t: '^.*[01]_x.*$'
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}"
+
+ # The results of above examples 1-5 are all the same.
+ r:
+ - {k0_x0: A0, k1_x1: B0}
+ - {k0_x0: A1, k1_x1: B1}
+
+ # 6) By default match keys that equal the target.
+ t: k0_x0
+ r: "{{ l | community.general.keep_keys(target=t) }}"
+
+ # 7) Match keys that start with the target.
+ t: k0
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='starts_with') }}"
+
+ # 8) Match keys that end with the target.
+ t: x0
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='ends_with') }}"
+
+ # 9) Match keys by the regex.
+ t: '^.*0_x.*$'
+ r: "{{ l | community.general.keep_keys(target=t, matching_parameter='regex') }}"
+
+ # The results of above examples 6-9 are all the same.
+ r:
+ - {k0_x0: A0}
+ - {k0_x0: A1}
+'''
+
+RETURN = '''
+ _value:
+ description: The list of dictionaries with selected keys.
+ type: list
+ elements: dictionary
+'''
+
+from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
+ _keys_filter_params,
+ _keys_filter_target_str)
+
+
+def keep_keys(data, target=None, matching_parameter='equal'):
+ """keep specific keys from dictionaries in a list"""
+
+ # test parameters
+ _keys_filter_params(data, matching_parameter)
+ # test and transform target
+ tt = _keys_filter_target_str(target, matching_parameter)
+
+ if matching_parameter == 'equal':
+ def keep_key(key):
+ return key in tt
+ elif matching_parameter == 'starts_with':
+ def keep_key(key):
+ return key.startswith(tt)
+ elif matching_parameter == 'ends_with':
+ def keep_key(key):
+ return key.endswith(tt)
+ elif matching_parameter == 'regex':
+ def keep_key(key):
+ return tt.match(key) is not None
+
+ return [dict((k, v) for k, v in d.items() if keep_key(k)) for d in data]
+
+
+class FilterModule(object):
+
+ def filters(self):
+ return {
+ 'keep_keys': keep_keys,
+ }
diff --git a/ansible_collections/community/general/plugins/filter/lists_mergeby.py b/ansible_collections/community/general/plugins/filter/lists_mergeby.py
index caf183492..0e47d5017 100644
--- a/ansible_collections/community/general/plugins/filter/lists_mergeby.py
+++ b/ansible_collections/community/general/plugins/filter/lists_mergeby.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2020-2022, Vladimir Botka <vbotka@gmail.com>
+# Copyright (c) 2020-2024, Vladimir Botka <vbotka@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
@@ -12,22 +12,32 @@ DOCUMENTATION = '''
version_added: 2.0.0
author: Vladimir Botka (@vbotka)
description:
- - Merge two or more lists by attribute O(index). Optional parameters O(recursive) and O(list_merge)
- control the merging of the lists in values. The function merge_hash from ansible.utils.vars
- is used. To learn details on how to use the parameters O(recursive) and O(list_merge) see
- Ansible User's Guide chapter "Using filters to manipulate data" section "Combining
- hashes/dictionaries".
+ - Merge two or more lists by attribute O(index). Optional
+ parameters O(recursive) and O(list_merge) control the merging of
+ the nested dictionaries and lists.
+ - The function C(merge_hash) from C(ansible.utils.vars) is used.
+ - To learn details on how to use the parameters O(recursive) and
+ O(list_merge) see Ansible User's Guide chapter "Using filters to
+ manipulate data" section R(Combining hashes/dictionaries, combine_filter) or the
+ filter P(ansible.builtin.combine#filter).
+
positional: another_list, index
options:
_input:
- description: A list of dictionaries.
+ description:
+ - A list of dictionaries, or a list of lists of dictionaries.
+ - The required type of the C(elements) is set to C(raw)
+ because all elements of O(_input) can be either dictionaries
+ or lists.
type: list
- elements: dictionary
+ elements: raw
required: true
another_list:
- description: Another list of dictionaries. This parameter can be specified multiple times.
+ description:
+ - Another list of dictionaries, or a list of lists of dictionaries.
+ - This parameter can be specified multiple times.
type: list
- elements: dictionary
+ elements: raw
index:
description:
- The dictionary key that must be present in every dictionary in every list that is used to
@@ -55,40 +65,134 @@ DOCUMENTATION = '''
'''
EXAMPLES = '''
-- name: Merge two lists
+# Some results below are manually formatted for better readability. The
+# dictionaries' keys will be sorted alphabetically in real output.
+
+- name: Example 1. Merge two lists. The results r1 and r2 are the same.
+ ansible.builtin.debug:
+ msg: |
+ r1: {{ r1 }}
+ r2: {{ r2 }}
+ vars:
+ list1:
+ - {index: a, value: 123}
+ - {index: b, value: 4}
+ list2:
+ - {index: a, foo: bar}
+ - {index: c, foo: baz}
+ r1: "{{ list1 | community.general.lists_mergeby(list2, 'index') }}"
+ r2: "{{ [list1, list2] | community.general.lists_mergeby('index') }}"
+
+# r1:
+# - {index: a, foo: bar, value: 123}
+# - {index: b, value: 4}
+# - {index: c, foo: baz}
+# r2:
+# - {index: a, foo: bar, value: 123}
+# - {index: b, value: 4}
+# - {index: c, foo: baz}
+
+- name: Example 2. Merge three lists
+ ansible.builtin.debug:
+ var: r
+ vars:
+ list1:
+ - {index: a, value: 123}
+ - {index: b, value: 4}
+ list2:
+ - {index: a, foo: bar}
+ - {index: c, foo: baz}
+ list3:
+ - {index: d, foo: qux}
+ r: "{{ [list1, list2, list3] | community.general.lists_mergeby('index') }}"
+
+# r:
+# - {index: a, foo: bar, value: 123}
+# - {index: b, value: 4}
+# - {index: c, foo: baz}
+# - {index: d, foo: qux}
+
+- name: Example 3. Merge a single list. The result is the same as in example 2.
+ ansible.builtin.debug:
+ var: r
+ vars:
+ list1:
+ - {index: a, value: 123}
+ - {index: b, value: 4}
+ - {index: a, foo: bar}
+ - {index: c, foo: baz}
+ - {index: d, foo: qux}
+ r: "{{ [list1, []] | community.general.lists_mergeby('index') }}"
+
+# r:
+# - {index: a, foo: bar, value: 123}
+# - {index: b, value: 4}
+# - {index: c, foo: baz}
+# - {index: d, foo: qux}
+
+- name: Example 4. Merge two lists. By default, replace nested lists.
+ ansible.builtin.debug:
+ var: r
+ vars:
+ list1:
+ - {index: a, foo: [X1, X2]}
+ - {index: b, foo: [X1, X2]}
+ list2:
+ - {index: a, foo: [Y1, Y2]}
+ - {index: b, foo: [Y1, Y2]}
+ r: "{{ [list1, list2] | community.general.lists_mergeby('index') }}"
+
+# r:
+# - {index: a, foo: [Y1, Y2]}
+# - {index: b, foo: [Y1, Y2]}
+
+- name: Example 5. Merge two lists. Append nested lists.
+ ansible.builtin.debug:
+ var: r
+ vars:
+ list1:
+ - {index: a, foo: [X1, X2]}
+ - {index: b, foo: [X1, X2]}
+ list2:
+ - {index: a, foo: [Y1, Y2]}
+ - {index: b, foo: [Y1, Y2]}
+ r: "{{ [list1, list2] | community.general.lists_mergeby('index', list_merge='append') }}"
+
+# r:
+# - {index: a, foo: [X1, X2, Y1, Y2]}
+# - {index: b, foo: [X1, X2, Y1, Y2]}
+
+- name: Example 6. Merge two lists. By default, do not merge nested dictionaries.
+ ansible.builtin.debug:
+ var: r
+ vars:
+ list1:
+ - {index: a, foo: {x: 1, y: 2}}
+ - {index: b, foo: [X1, X2]}
+ list2:
+ - {index: a, foo: {y: 3, z: 4}}
+ - {index: b, foo: [Y1, Y2]}
+ r: "{{ [list1, list2] | community.general.lists_mergeby('index') }}"
+
+# r:
+# - {index: a, foo: {y: 3, z: 4}}
+# - {index: b, foo: [Y1, Y2]}
+
+- name: Example 7. Merge two lists. Merge nested dictionaries too.
ansible.builtin.debug:
- msg: >-
- {{ list1 | community.general.lists_mergeby(
- list2,
- 'index',
- recursive=True,
- list_merge='append'
- ) }}"
+ var: r
vars:
list1:
- - index: a
- value: 123
- - index: b
- value: 42
+ - {index: a, foo: {x: 1, y: 2}}
+ - {index: b, foo: [X1, X2]}
list2:
- - index: a
- foo: bar
- - index: c
- foo: baz
- # Produces the following list of dictionaries:
- # {
- # "index": "a",
- # "foo": "bar",
- # "value": 123
- # },
- # {
- # "index": "b",
- # "value": 42
- # },
- # {
- # "index": "c",
- # "foo": "baz"
- # }
+ - {index: a, foo: {y: 3, z: 4}}
+ - {index: b, foo: [Y1, Y2]}
+ r: "{{ [list1, list2] | community.general.lists_mergeby('index', recursive=true) }}"
+
+# r:
+# - {index: a, foo: {x: 1, y: 3, z: 4}}
+# - {index: b, foo: [Y1, Y2]}
'''
RETURN = '''
@@ -108,13 +212,14 @@ from operator import itemgetter
def list_mergeby(x, y, index, recursive=False, list_merge='replace'):
- ''' Merge 2 lists by attribute 'index'. The function merge_hash from ansible.utils.vars is used.
- This function is used by the function lists_mergeby.
+ '''Merge 2 lists by attribute 'index'. The function 'merge_hash'
+ from ansible.utils.vars is used. This function is used by the
+ function lists_mergeby.
'''
d = defaultdict(dict)
- for l in (x, y):
- for elem in l:
+ for lst in (x, y):
+ for elem in lst:
if not isinstance(elem, Mapping):
msg = "Elements of list arguments for lists_mergeby must be dictionaries. %s is %s"
raise AnsibleFilterError(msg % (elem, type(elem)))
@@ -124,20 +229,9 @@ def list_mergeby(x, y, index, recursive=False, list_merge='replace'):
def lists_mergeby(*terms, **kwargs):
- ''' Merge 2 or more lists by attribute 'index'. Optional parameters 'recursive' and 'list_merge'
- control the merging of the lists in values. The function merge_hash from ansible.utils.vars
- is used. To learn details on how to use the parameters 'recursive' and 'list_merge' see
- Ansible User's Guide chapter "Using filters to manipulate data" section "Combining
- hashes/dictionaries".
-
- Example:
- - debug:
- msg: "{{ list1|
- community.general.lists_mergeby(list2,
- 'index',
- recursive=True,
- list_merge='append')|
- list }}"
+ '''Merge 2 or more lists by attribute 'index'. To learn details
+ on how to use the parameters 'recursive' and 'list_merge' see
+ the filter ansible.builtin.combine.
'''
recursive = kwargs.pop('recursive', False)
@@ -155,7 +249,7 @@ def lists_mergeby(*terms, **kwargs):
"must be lists. %s is %s")
raise AnsibleFilterError(msg % (sublist, type(sublist)))
if len(sublist) > 0:
- if all(isinstance(l, Sequence) for l in sublist):
+ if all(isinstance(lst, Sequence) for lst in sublist):
for item in sublist:
flat_list.append(item)
else:
diff --git a/ansible_collections/community/general/plugins/filter/remove_keys.py b/ansible_collections/community/general/plugins/filter/remove_keys.py
new file mode 100644
index 000000000..cabce1468
--- /dev/null
+++ b/ansible_collections/community/general/plugins/filter/remove_keys.py
@@ -0,0 +1,138 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
+# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ name: remove_keys
+ short_description: Remove specific keys from dictionaries in a list
+ version_added: "9.1.0"
+ author:
+ - Vladimir Botka (@vbotka)
+ - Felix Fontein (@felixfontein)
+ description: This filter removes only specified keys from a provided list of dictionaries.
+ options:
+ _input:
+ description:
+ - A list of dictionaries.
+ - Top level keys must be strings.
+ type: list
+ elements: dictionary
+ required: true
+ target:
+ description:
+ - A single key or key pattern to remove, or a list of keys or key patterns to remove.
+ - If O(matching_parameter=regex) there must be exactly one pattern provided.
+ type: raw
+ required: true
+ matching_parameter:
+ description: Specify the matching option of target keys.
+ type: str
+ default: equal
+ choices:
+ equal: Matches keys of exactly one of the O(target) items.
+ starts_with: Matches keys that start with one of the O(target) items.
+ ends_with: Matches keys that end with one of the O(target) items.
+ regex:
+ - Matches keys that match the regular expression provided in O(target).
+ - In this case, O(target) must be a regex string or a list with a single regex string.
+'''
+
+EXAMPLES = '''
+ l:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+ # 1) By default match keys that equal any of the items in the target.
+ t: [k0_x0, k1_x1]
+ r: "{{ l | community.general.remove_keys(target=t) }}"
+
+ # 2) Match keys that start with any of the items in the target.
+ t: [k0, k1]
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='starts_with') }}"
+
+ # 3) Match keys that end with any of the items in the target.
+ t: [x0, x1]
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='ends_with') }}"
+
+ # 4) Match keys by the regex.
+ t: ['^.*[01]_x.*$']
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}"
+
+ # 5) Match keys by the regex.
+ t: '^.*[01]_x.*$'
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}"
+
+ # The results of above examples 1-5 are all the same.
+ r:
+ - {k2_x2: [C0], k3_x3: foo}
+ - {k2_x2: [C1], k3_x3: bar}
+
+ # 6) By default match keys that equal the target.
+ t: k0_x0
+ r: "{{ l | community.general.remove_keys(target=t) }}"
+
+ # 7) Match keys that start with the target.
+ t: k0
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='starts_with') }}"
+
+ # 8) Match keys that end with the target.
+ t: x0
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='ends_with') }}"
+
+ # 9) Match keys by the regex.
+ t: '^.*0_x.*$'
+ r: "{{ l | community.general.remove_keys(target=t, matching_parameter='regex') }}"
+
+ # The results of above examples 6-9 are all the same.
+ r:
+ - {k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+'''
+
+RETURN = '''
+ _value:
+ description: The list of dictionaries with selected keys removed.
+ type: list
+ elements: dictionary
+'''
+
+from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
+ _keys_filter_params,
+ _keys_filter_target_str)
+
+
+def remove_keys(data, target=None, matching_parameter='equal'):
+ """remove specific keys from dictionaries in a list"""
+
+ # test parameters
+ _keys_filter_params(data, matching_parameter)
+ # test and transform target
+ tt = _keys_filter_target_str(target, matching_parameter)
+
+ if matching_parameter == 'equal':
+ def keep_key(key):
+ return key not in tt
+ elif matching_parameter == 'starts_with':
+ def keep_key(key):
+ return not key.startswith(tt)
+ elif matching_parameter == 'ends_with':
+ def keep_key(key):
+ return not key.endswith(tt)
+ elif matching_parameter == 'regex':
+ def keep_key(key):
+ return tt.match(key) is None
+
+ return [dict((k, v) for k, v in d.items() if keep_key(k)) for d in data]
+
+
+class FilterModule(object):
+
+ def filters(self):
+ return {
+ 'remove_keys': remove_keys,
+ }
diff --git a/ansible_collections/community/general/plugins/filter/replace_keys.py b/ansible_collections/community/general/plugins/filter/replace_keys.py
new file mode 100644
index 000000000..d3b12c05d
--- /dev/null
+++ b/ansible_collections/community/general/plugins/filter/replace_keys.py
@@ -0,0 +1,180 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
+# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+DOCUMENTATION = '''
+ name: replace_keys
+ short_description: Replace specific keys in a list of dictionaries
+ version_added: "9.1.0"
+ author:
+ - Vladimir Botka (@vbotka)
+ - Felix Fontein (@felixfontein)
+ description: This filter replaces specified keys in a provided list of dictionaries.
+ options:
+ _input:
+ description:
+ - A list of dictionaries.
+ - Top level keys must be strings.
+ type: list
+ elements: dictionary
+ required: true
+ target:
+ description:
+ - A list of dictionaries with attributes C(before) and C(after).
+ - The value of O(target[].after) replaces key matching O(target[].before).
+ type: list
+ elements: dictionary
+ required: true
+ suboptions:
+ before:
+ description:
+ - A key or key pattern to change.
+ - The interpretation of O(target[].before) depends on O(matching_parameter).
+ - For a key that matches multiple O(target[].before)s, the B(first) matching O(target[].after) will be used.
+ type: str
+ after:
+ description: The value that a matching key is changed to.
+ type: str
+ matching_parameter:
+ description: Specify the matching option of target keys.
+ type: str
+ default: equal
+ choices:
+ equal: Matches keys of exactly one of the O(target[].before) items.
+ starts_with: Matches keys that start with one of the O(target[].before) items.
+ ends_with: Matches keys that end with one of the O(target[].before) items.
+ regex: Matches keys that match one of the regular expressions provided in O(target[].before).
+'''
+
+EXAMPLES = '''
+ l:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+ # 1) By default, replace keys that are equal to any of the attributes before.
+ t:
+ - {before: k0_x0, after: a0}
+ - {before: k1_x1, after: a1}
+ r: "{{ l | community.general.replace_keys(target=t) }}"
+
+ # 2) Replace keys that start with any of the attributes before.
+ t:
+ - {before: k0, after: a0}
+ - {before: k1, after: a1}
+ r: "{{ l | community.general.replace_keys(target=t, matching_parameter='starts_with') }}"
+
+ # 3) Replace keys that end with any of the attributes before.
+ t:
+ - {before: x0, after: a0}
+ - {before: x1, after: a1}
+ r: "{{ l | community.general.replace_keys(target=t, matching_parameter='ends_with') }}"
+
+ # 4) Replace keys that match any regex of the attributes before.
+ t:
+ - {before: "^.*0_x.*$", after: a0}
+ - {before: "^.*1_x.*$", after: a1}
+ r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}"
+
+ # The results of above examples 1-4 are all the same.
+ r:
+ - {a0: A0, a1: B0, k2_x2: [C0], k3_x3: foo}
+ - {a0: A1, a1: B1, k2_x2: [C1], k3_x3: bar}
+
+ # 5) If more keys match the same attribute before, the last one will be used.
+ t:
+ - {before: "^.*_x.*$", after: X}
+ r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}"
+
+ # gives
+
+ r:
+ - X: foo
+ - X: bar
+
+ # 6) If there are items with equal attribute before, the first one will be used.
+ t:
+ - {before: "^.*_x.*$", after: X}
+ - {before: "^.*_x.*$", after: Y}
+ r: "{{ l | community.general.replace_keys(target=t, matching_parameter='regex') }}"
+
+ # gives
+
+ r:
+ - X: foo
+ - X: bar
+
+ # 7) If there are more matches for a key, the first one will be used.
+ l:
+ - {aaa1: A, bbb1: B, ccc1: C}
+ - {aaa2: D, bbb2: E, ccc2: F}
+ t:
+ - {before: a, after: X}
+ - {before: aa, after: Y}
+ r: "{{ l | community.general.replace_keys(target=t, matching_parameter='starts_with') }}"
+
+ # gives
+
+ r:
+ - {X: A, bbb1: B, ccc1: C}
+ - {X: D, bbb2: E, ccc2: F}
+'''
+
+RETURN = '''
+ _value:
+ description: The list of dictionaries with replaced keys.
+ type: list
+ elements: dictionary
+'''
+
+from ansible_collections.community.general.plugins.plugin_utils.keys_filter import (
+ _keys_filter_params,
+ _keys_filter_target_dict)
+
+
+def replace_keys(data, target=None, matching_parameter='equal'):
+ """replace specific keys in a list of dictionaries"""
+
+ # test parameters
+ _keys_filter_params(data, matching_parameter)
+ # test and transform target
+ tz = _keys_filter_target_dict(target, matching_parameter)
+
+ if matching_parameter == 'equal':
+ def replace_key(key):
+ for b, a in tz:
+ if key == b:
+ return a
+ return key
+ elif matching_parameter == 'starts_with':
+ def replace_key(key):
+ for b, a in tz:
+ if key.startswith(b):
+ return a
+ return key
+ elif matching_parameter == 'ends_with':
+ def replace_key(key):
+ for b, a in tz:
+ if key.endswith(b):
+ return a
+ return key
+ elif matching_parameter == 'regex':
+ def replace_key(key):
+ for b, a in tz:
+ if b.match(key):
+ return a
+ return key
+
+ return [dict((replace_key(k), v) for k, v in d.items()) for d in data]
+
+
+class FilterModule(object):
+
+ def filters(self):
+ return {
+ 'replace_keys': replace_keys,
+ }
diff --git a/ansible_collections/community/general/plugins/inventory/opennebula.py b/ansible_collections/community/general/plugins/inventory/opennebula.py
index b097307c3..bf81758ef 100644
--- a/ansible_collections/community/general/plugins/inventory/opennebula.py
+++ b/ansible_collections/community/general/plugins/inventory/opennebula.py
@@ -143,7 +143,8 @@ class InventoryModule(BaseInventoryPlugin, Constructable):
nic = [nic]
for net in nic:
- return net['IP']
+ if net.get('IP'):
+ return net['IP']
return False
diff --git a/ansible_collections/community/general/plugins/module_utils/cmd_runner.py b/ansible_collections/community/general/plugins/module_utils/cmd_runner.py
index 2bf2b32e8..da4f1b6fc 100644
--- a/ansible_collections/community/general/plugins/module_utils/cmd_runner.py
+++ b/ansible_collections/community/general/plugins/module_utils/cmd_runner.py
@@ -89,18 +89,31 @@ class FormatError(CmdRunnerException):
class _ArgFormat(object):
+ # DEPRECATION: set default value for ignore_none to True in community.general 12.0.0
def __init__(self, func, ignore_none=None, ignore_missing_value=False):
self.func = func
self.ignore_none = ignore_none
self.ignore_missing_value = ignore_missing_value
- def __call__(self, value, ctx_ignore_none):
+ # DEPRECATION: remove parameter ctx_ignore_none in community.general 12.0.0
+ def __call__(self, value, ctx_ignore_none=True):
+ # DEPRECATION: replace ctx_ignore_none with True in community.general 12.0.0
ignore_none = self.ignore_none if self.ignore_none is not None else ctx_ignore_none
if value is None and ignore_none:
return []
f = self.func
return [str(x) for x in f(value)]
+ def __str__(self):
+ return "<ArgFormat: func={0}, ignore_none={1}, ignore_missing_value={2}>".format(
+ self.func,
+ self.ignore_none,
+ self.ignore_missing_value,
+ )
+
+ def __repr__(self):
+ return str(self)
+
class _Format(object):
@staticmethod
@@ -114,7 +127,7 @@ class _Format(object):
@staticmethod
def as_bool_not(args):
- return _ArgFormat(lambda value: [] if value else _ensure_list(args), ignore_none=False)
+ return _Format.as_bool([], args, ignore_none=False)
@staticmethod
def as_optval(arg, ignore_none=None):
@@ -184,6 +197,19 @@ class _Format(object):
return func(**v)
return wrapper
+ @staticmethod
+ def stack(fmt):
+ @wraps(fmt)
+ def wrapper(*args, **kwargs):
+ new_func = fmt(ignore_none=True, *args, **kwargs)
+
+ def stacking(value):
+ stack = [new_func(v) for v in value if v]
+ stack = [x for args in stack for x in args]
+ return stack
+ return _ArgFormat(stacking, ignore_none=True)
+ return wrapper
+
class CmdRunner(object):
"""
@@ -204,7 +230,11 @@ class CmdRunner(object):
self.default_args_order = self._prepare_args_order(default_args_order)
if arg_formats is None:
arg_formats = {}
- self.arg_formats = dict(arg_formats)
+ self.arg_formats = {}
+ for fmt_name, fmt in arg_formats.items():
+ if not isinstance(fmt, _ArgFormat):
+ fmt = _Format.as_func(func=fmt, ignore_none=True)
+ self.arg_formats[fmt_name] = fmt
self.check_rc = check_rc
self.force_lang = force_lang
self.path_prefix = path_prefix
@@ -223,7 +253,16 @@ class CmdRunner(object):
def binary(self):
return self.command[0]
- def __call__(self, args_order=None, output_process=None, ignore_value_none=True, check_mode_skip=False, check_mode_return=None, **kwargs):
+ # remove parameter ignore_value_none in community.general 12.0.0
+ def __call__(self, args_order=None, output_process=None, ignore_value_none=None, check_mode_skip=False, check_mode_return=None, **kwargs):
+ if ignore_value_none is None:
+ ignore_value_none = True
+ else:
+ self.module.deprecate(
+ "Using ignore_value_none when creating the runner context is now deprecated, "
+ "and the parameter will be removed in community.general 12.0.0. ",
+ version="12.0.0", collection_name="community.general"
+ )
if output_process is None:
output_process = _process_as_is
if args_order is None:
@@ -235,7 +274,7 @@ class CmdRunner(object):
return _CmdRunnerContext(runner=self,
args_order=args_order,
output_process=output_process,
- ignore_value_none=ignore_value_none,
+ ignore_value_none=ignore_value_none, # DEPRECATION: remove in community.general 12.0.0
check_mode_skip=check_mode_skip,
check_mode_return=check_mode_return, **kwargs)
@@ -251,6 +290,7 @@ class _CmdRunnerContext(object):
self.runner = runner
self.args_order = tuple(args_order)
self.output_process = output_process
+ # DEPRECATION: parameter ignore_value_none at the context level is deprecated and will be removed in community.general 12.0.0
self.ignore_value_none = ignore_value_none
self.check_mode_skip = check_mode_skip
self.check_mode_return = check_mode_return
@@ -290,6 +330,7 @@ class _CmdRunnerContext(object):
value = named_args[arg_name]
elif not runner.arg_formats[arg_name].ignore_missing_value:
raise MissingArgumentValue(self.args_order, arg_name)
+ # DEPRECATION: remove parameter ctx_ignore_none in 12.0.0
self.cmd.extend(runner.arg_formats[arg_name](value, ctx_ignore_none=self.ignore_value_none))
except MissingArgumentValue:
raise
@@ -306,7 +347,7 @@ class _CmdRunnerContext(object):
@property
def run_info(self):
return dict(
- ignore_value_none=self.ignore_value_none,
+ ignore_value_none=self.ignore_value_none, # DEPRECATION: remove in community.general 12.0.0
check_rc=self.check_rc,
environ_update=self.environ_update,
args_order=self.args_order,
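The new stack() formatter above is easiest to understand in isolation. The following simplified sketch (outside the collection) shows the intended behavior: a per-value formatter is applied to every element of a list and the resulting argument groups are concatenated. The as_opt_val function here is a stand-in, not the real cmd_runner_fmt implementation.

def as_opt_val(arg):
    # Simplified stand-in for cmd_runner_fmt.as_opt_val.
    return lambda value: [arg, str(value)]

def stack(fmt):
    def wrapper(*args, **kwargs):
        new_func = fmt(*args, **kwargs)

        def stacking(value):
            # Format each element, skipping falsy ones, then flatten the groups.
            groups = [new_func(v) for v in value if v]
            return [x for group in groups for x in group]
        return stacking
    return wrapper

fmt = stack(as_opt_val)("--database")
print(fmt(["default", "other"]))   # ['--database', 'default', '--database', 'other']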
diff --git a/ansible_collections/community/general/plugins/module_utils/consul.py b/ansible_collections/community/general/plugins/module_utils/consul.py
index 68c1a130b..cd54a105f 100644
--- a/ansible_collections/community/general/plugins/module_utils/consul.py
+++ b/ansible_collections/community/general/plugins/module_utils/consul.py
@@ -10,6 +10,7 @@ __metaclass__ = type
import copy
import json
+import re
from ansible.module_utils.six.moves.urllib import error as urllib_error
from ansible.module_utils.six.moves.urllib.parse import urlencode
@@ -68,6 +69,25 @@ def camel_case_key(key):
return "".join(parts)
+def validate_check(check):
+ validate_duration_keys = ['Interval', 'Ttl', 'Timeout']
+ validate_tcp_regex = r"(?P<host>.*):(?P<port>(?:[0-9]+))$"
+ if check.get('Tcp') is not None:
+ match = re.match(validate_tcp_regex, check['Tcp'])
+ if not match:
+ raise Exception('tcp check must be in host:port format')
+ for duration in validate_duration_keys:
+ if duration in check and check[duration] is not None:
+ check[duration] = validate_duration(check[duration])
+
+
+def validate_duration(duration):
+ if duration:
+ if not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
+ duration = "{0}s".format(duration)
+ return duration
+
+
STATE_PARAMETER = "state"
STATE_PRESENT = "present"
STATE_ABSENT = "absent"
@@ -81,7 +101,7 @@ OPERATION_DELETE = "remove"
def _normalize_params(params, arg_spec):
final_params = {}
for k, v in params.items():
- if k not in arg_spec: # Alias
+ if k not in arg_spec or v is None: # Alias
continue
spec = arg_spec[k]
if (
@@ -105,9 +125,10 @@ class _ConsulModule:
"""
api_endpoint = None # type: str
- unique_identifier = None # type: str
+ unique_identifiers = None # type: list
result_key = None # type: str
create_only_fields = set()
+ operational_attributes = set()
params = {}
def __init__(self, module):
@@ -119,6 +140,8 @@ class _ConsulModule:
if k not in STATE_PARAMETER and k not in AUTH_ARGUMENTS_SPEC
}
+ self.operational_attributes.update({"CreateIndex", "CreateTime", "Hash", "ModifyIndex"})
+
def execute(self):
obj = self.read_object()
@@ -203,14 +226,24 @@ class _ConsulModule:
return False
def prepare_object(self, existing, obj):
- operational_attributes = {"CreateIndex", "CreateTime", "Hash", "ModifyIndex"}
existing = {
- k: v for k, v in existing.items() if k not in operational_attributes
+ k: v for k, v in existing.items() if k not in self.operational_attributes
}
for k, v in obj.items():
existing[k] = v
return existing
+ def id_from_obj(self, obj, camel_case=False):
+ def key_func(key):
+ return camel_case_key(key) if camel_case else key
+
+ if self.unique_identifiers:
+ for identifier in self.unique_identifiers:
+ identifier = key_func(identifier)
+ if identifier in obj:
+ return obj[identifier]
+ return None
+
def endpoint_url(self, operation, identifier=None):
if operation == OPERATION_CREATE:
return self.api_endpoint
@@ -219,7 +252,8 @@ class _ConsulModule:
raise RuntimeError("invalid arguments passed")
def read_object(self):
- url = self.endpoint_url(OPERATION_READ, self.params.get(self.unique_identifier))
+ identifier = self.id_from_obj(self.params)
+ url = self.endpoint_url(OPERATION_READ, identifier)
try:
return self.get(url)
except RequestError as e:
@@ -233,25 +267,28 @@ class _ConsulModule:
if self._module.check_mode:
return obj
else:
- return self.put(self.api_endpoint, data=self.prepare_object({}, obj))
+ url = self.endpoint_url(OPERATION_CREATE)
+ created_obj = self.put(url, data=self.prepare_object({}, obj))
+ if created_obj is None:
+ created_obj = self.read_object()
+ return created_obj
def update_object(self, existing, obj):
- url = self.endpoint_url(
- OPERATION_UPDATE, existing.get(camel_case_key(self.unique_identifier))
- )
merged_object = self.prepare_object(existing, obj)
if self._module.check_mode:
return merged_object
else:
- return self.put(url, data=merged_object)
+ url = self.endpoint_url(OPERATION_UPDATE, self.id_from_obj(existing, camel_case=True))
+ updated_obj = self.put(url, data=merged_object)
+ if updated_obj is None:
+ updated_obj = self.read_object()
+ return updated_obj
def delete_object(self, obj):
if self._module.check_mode:
return {}
else:
- url = self.endpoint_url(
- OPERATION_DELETE, obj.get(camel_case_key(self.unique_identifier))
- )
+ url = self.endpoint_url(OPERATION_DELETE, self.id_from_obj(obj, camel_case=True))
return self.delete(url)
def _request(self, method, url_parts, data=None, params=None):
@@ -309,7 +346,9 @@ class _ConsulModule:
if 400 <= status < 600:
raise RequestError(status, response_data)
- return json.loads(response_data)
+ if response_data:
+ return json.loads(response_data)
+ return None
def get(self, url_parts, **kwargs):
return self._request("GET", url_parts, **kwargs)
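The duration normalization added above is worth spelling out on its own. A standalone rendition (illustrative, detached from the module_utils) of validate_duration:

import re

def validate_duration(duration):
    # Append 's' when no Consul duration unit (ns/us/ms/s/m/h) is present.
    if duration and not re.search(r"\d+(?:ns|us|ms|s|m|h)", duration):
        duration = "{0}s".format(duration)
    return duration

print(validate_duration("10"))     # '10s'   - bare number, seconds assumed
print(validate_duration("500ms"))  # '500ms' - already a valid duration, left unchanged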
diff --git a/ansible_collections/community/general/plugins/module_utils/django.py b/ansible_collections/community/general/plugins/module_utils/django.py
index fbaf840db..5fb375c6f 100644
--- a/ansible_collections/community/general/plugins/module_utils/django.py
+++ b/ansible_collections/community/general/plugins/module_utils/django.py
@@ -7,6 +7,7 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
+from ansible.module_utils.common.dict_transformations import dict_merge
from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt
from ansible_collections.community.general.plugins.module_utils.python_runner import PythonRunner
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper
@@ -33,6 +34,18 @@ _django_std_arg_fmts = dict(
skip_checks=cmd_runner_fmt.as_bool("--skip-checks"),
)
+_django_database_args = dict(
+ database=dict(type="str", default="default"),
+)
+
+_args_menu = dict(
+ std=(django_std_args, _django_std_arg_fmts),
+ database=(_django_database_args, {"database": cmd_runner_fmt.as_opt_eq_val("--database")}),
+ noinput=({}, {"noinput": cmd_runner_fmt.as_fixed("--noinput")}),
+ dry_run=({}, {"dry_run": cmd_runner_fmt.as_bool("--dry-run")}),
+ check=({}, {"check": cmd_runner_fmt.as_bool("--check")}),
+)
+
class _DjangoRunner(PythonRunner):
def __init__(self, module, arg_formats=None, **kwargs):
@@ -55,15 +68,30 @@ class DjangoModuleHelper(ModuleHelper):
arg_formats = {}
django_admin_arg_order = ()
use_old_vardict = False
+ _django_args = []
+ _check_mode_arg = ""
def __init__(self):
- argument_spec = dict(django_std_args)
- argument_spec.update(self.module.get("argument_spec", {}))
- self.module["argument_spec"] = argument_spec
+ self.module["argument_spec"], self.arg_formats = self._build_args(self.module.get("argument_spec", {}),
+ self.arg_formats,
+ *(["std"] + self._django_args))
super(DjangoModuleHelper, self).__init__(self.module)
if self.django_admin_cmd is not None:
self.vars.command = self.django_admin_cmd
+ @staticmethod
+ def _build_args(arg_spec, arg_format, *names):
+ res_arg_spec = {}
+ res_arg_fmts = {}
+ for name in names:
+ args, fmts = _args_menu[name]
+ res_arg_spec = dict_merge(res_arg_spec, args)
+ res_arg_fmts = dict_merge(res_arg_fmts, fmts)
+ res_arg_spec = dict_merge(res_arg_spec, arg_spec)
+ res_arg_fmts = dict_merge(res_arg_fmts, arg_format)
+
+ return res_arg_spec, res_arg_fmts
+
def __run__(self):
runner = _DjangoRunner(self.module,
default_args_order=self.django_admin_arg_order,
@@ -71,7 +99,10 @@ class DjangoModuleHelper(ModuleHelper):
venv=self.vars.venv,
check_rc=True)
with runner() as ctx:
- results = ctx.run()
+ run_params = self.vars.as_dict()
+ if self._check_mode_arg:
+ run_params.update({self._check_mode_arg: self.check_mode})
+ results = ctx.run(**run_params)
self.vars.stdout = ctx.results_out
self.vars.stderr = ctx.results_err
self.vars.cmd = ctx.cmd
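The _build_args() helper above merges argument specs and formats from the named _args_menu entries, with the module's own entries applied last. A rough sketch of the same merge order, using plain dict updates in place of dict_merge and made-up menu entries:

def build_args(arg_spec, arg_format, menu, *names):
    res_spec, res_fmts = {}, {}
    for name in names:
        spec, fmts = menu[name]
        res_spec.update(spec)
        res_fmts.update(fmts)
    # Module-specific spec and formats are merged last, so they take precedence.
    res_spec.update(arg_spec)
    res_fmts.update(arg_format)
    return res_spec, res_fmts

# Hypothetical menu entries, loosely modeled on _args_menu above.
menu = {
    "std": ({"settings": {"type": "str"}}, {}),
    "database": ({"database": {"type": "str", "default": "default"}}, {}),
}
spec, fmts = build_args({"dry_run": {"type": "bool"}}, {}, menu, "std", "database")
print(sorted(spec))  # ['database', 'dry_run', 'settings']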
diff --git a/ansible_collections/community/general/plugins/module_utils/proxmox.py b/ansible_collections/community/general/plugins/module_utils/proxmox.py
index 5fd783d65..05bf1874b 100644
--- a/ansible_collections/community/general/plugins/module_utils/proxmox.py
+++ b/ansible_collections/community/general/plugins/module_utils/proxmox.py
@@ -29,6 +29,9 @@ def proxmox_auth_argument_spec():
required=True,
fallback=(env_fallback, ['PROXMOX_HOST'])
),
+ api_port=dict(type='int',
+ fallback=(env_fallback, ['PROXMOX_PORT'])
+ ),
api_user=dict(type='str',
required=True,
fallback=(env_fallback, ['PROXMOX_USER'])
@@ -82,6 +85,7 @@ class ProxmoxAnsible(object):
def _connect(self):
api_host = self.module.params['api_host']
+ api_port = self.module.params['api_port']
api_user = self.module.params['api_user']
api_password = self.module.params['api_password']
api_token_id = self.module.params['api_token_id']
@@ -89,6 +93,10 @@ class ProxmoxAnsible(object):
validate_certs = self.module.params['validate_certs']
auth_args = {'user': api_user}
+
+ if api_port:
+ auth_args['port'] = api_port
+
if api_password:
auth_args['password'] = api_password
else:
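The api_port handling above simply adds an optional key to the keyword arguments handed to proxmoxer. A minimal sketch of that selection logic (parameter names as in the argument spec; no real API call is made, and the values are illustrative):

def build_auth_args(params):
    auth_args = {"user": params["api_user"]}
    if params.get("api_port"):
        auth_args["port"] = params["api_port"]
    if params.get("api_password"):
        auth_args["password"] = params["api_password"]
    return auth_args

print(build_auth_args({"api_user": "root@pam", "api_port": 8006, "api_password": "secret"}))
# {'user': 'root@pam', 'port': 8006, 'password': 'secret'}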
diff --git a/ansible_collections/community/general/plugins/module_utils/redfish_utils.py b/ansible_collections/community/general/plugins/module_utils/redfish_utils.py
index 6935573d0..139628bd9 100644
--- a/ansible_collections/community/general/plugins/module_utils/redfish_utils.py
+++ b/ansible_collections/community/general/plugins/module_utils/redfish_utils.py
@@ -11,6 +11,7 @@ import os
import random
import string
import gzip
+import time
from io import BytesIO
from ansible.module_utils.urls import open_url
from ansible.module_utils.common.text.converters import to_native
@@ -132,11 +133,13 @@ class RedfishUtils(object):
return resp
# The following functions are to send GET/POST/PATCH/DELETE requests
- def get_request(self, uri, override_headers=None, allow_no_resp=False):
+ def get_request(self, uri, override_headers=None, allow_no_resp=False, timeout=None):
req_headers = dict(GET_HEADERS)
if override_headers:
req_headers.update(override_headers)
username, password, basic_auth = self._auth_params(req_headers)
+ if timeout is None:
+ timeout = self.timeout
try:
# Service root is an unauthenticated resource; remove credentials
# in case the caller will be using sessions later.
@@ -146,7 +149,7 @@ class RedfishUtils(object):
url_username=username, url_password=password,
force_basic_auth=basic_auth, validate_certs=False,
follow_redirects='all',
- use_proxy=True, timeout=self.timeout)
+ use_proxy=True, timeout=timeout)
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
try:
if headers.get('content-encoding') == 'gzip' and LooseVersion(ansible_version) < LooseVersion('2.14'):
@@ -624,6 +627,24 @@ class RedfishUtils(object):
allowable_values = default_values
return allowable_values
+ def check_service_availability(self):
+ """
+ Checks if the service is accessible.
+
+ :return: dict containing the status of the service
+ """
+
+ # Get the service root
+ # Override the timeout since the service root is expected to be readily
+ # available.
+ service_root = self.get_request(self.root_uri + self.service_root, timeout=10)
+ if service_root['ret'] is False:
+ # Failed, either due to a timeout or HTTP error; not available
+ return {'ret': True, 'available': False}
+
+ # Successfully accessed the service root; available
+ return {'ret': True, 'available': True}
+
def get_logs(self):
log_svcs_uri_list = []
list_of_logs = []
@@ -1083,11 +1104,12 @@ class RedfishUtils(object):
return self.manage_power(command, self.systems_uri,
'#ComputerSystem.Reset')
- def manage_manager_power(self, command):
+ def manage_manager_power(self, command, wait=False, wait_timeout=120):
return self.manage_power(command, self.manager_uri,
- '#Manager.Reset')
+ '#Manager.Reset', wait, wait_timeout)
- def manage_power(self, command, resource_uri, action_name):
+ def manage_power(self, command, resource_uri, action_name, wait=False,
+ wait_timeout=120):
key = "Actions"
reset_type_values = ['On', 'ForceOff', 'GracefulShutdown',
'GracefulRestart', 'ForceRestart', 'Nmi',
@@ -1147,6 +1169,30 @@ class RedfishUtils(object):
response = self.post_request(self.root_uri + action_uri, payload)
if response['ret'] is False:
return response
+
+ # If requested to wait for the service to be available again, block
+ # until it's ready
+ if wait:
+ elapsed_time = 0
+ start_time = time.time()
+ # Start with a large enough sleep. Some services will process new
+ # requests while in the middle of shutting down, thus breaking out
+ # early.
+ time.sleep(30)
+
+ # Periodically check for the service's availability.
+ while elapsed_time <= wait_timeout:
+ status = self.check_service_availability()
+ if status['available']:
+ # It's available; we're done
+ break
+ time.sleep(5)
+ elapsed_time = time.time() - start_time
+
+ if elapsed_time > wait_timeout:
+ # Exhausted the wait timer; error
+ return {'ret': False, 'changed': True,
+ 'msg': 'The service did not become available after %d seconds' % wait_timeout}
return {'ret': True, 'changed': True}
def manager_reset_to_defaults(self, command):
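The wait logic added to manage_power() above follows a common pattern: a generous initial sleep, then periodic availability probes until a timeout. A generic, self-contained version of that loop (check is any callable returning True once the service is back):

import time

def wait_for_service(check, wait_timeout=120, initial_sleep=30, poll_interval=5):
    start_time = time.time()
    # Some services keep answering while shutting down, so wait before probing.
    time.sleep(initial_sleep)
    elapsed_time = time.time() - start_time
    while elapsed_time <= wait_timeout:
        if check():
            return True
        time.sleep(poll_interval)
        elapsed_time = time.time() - start_time
    return False

# Example: wait_for_service(lambda: utils.check_service_availability()['available'])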
diff --git a/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py b/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py
index d382ed93a..b0f3aeb5d 100644
--- a/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py
+++ b/ansible_collections/community/general/plugins/modules/ansible_galaxy_install.py
@@ -32,6 +32,19 @@ attributes:
diff_mode:
support: none
options:
+ state:
+ description:
+ - >
+ If O(state=present) then the collection or role will be installed.
+ Note that the collections and roles are not updated with this option.
+ - >
+        Currently O(state=latest) is ignored unless O(type=collection), in which case it
+        ensures the collection is installed and updated to the latest available version.
+ - Please note that O(force=true) can be used to perform upgrade regardless of O(type).
+ type: str
+ choices: [ present, latest ]
+ default: present
+ version_added: 9.1.0
type:
description:
- The type of installation performed by C(ansible-galaxy).
@@ -69,7 +82,8 @@ options:
default: false
force:
description:
- - Force overwriting an existing role or collection.
+ - Force overwriting existing roles and/or collections.
+ - It can be used for upgrading, but the module output will always report C(changed=true).
- Using O(force=true) is mandatory when downgrading.
type: bool
default: false
@@ -188,6 +202,7 @@ class AnsibleGalaxyInstall(ModuleHelper):
output_params = ('type', 'name', 'dest', 'requirements_file', 'force', 'no_deps')
module = dict(
argument_spec=dict(
+ state=dict(type='str', choices=['present', 'latest'], default='present'),
type=dict(type='str', choices=('collection', 'role', 'both'), required=True),
name=dict(type='str'),
requirements_file=dict(type='path'),
@@ -206,6 +221,7 @@ class AnsibleGalaxyInstall(ModuleHelper):
command_args_formats = dict(
type=cmd_runner_fmt.as_func(lambda v: [] if v == 'both' else [v]),
galaxy_cmd=cmd_runner_fmt.as_list(),
+ upgrade=cmd_runner_fmt.as_bool("--upgrade"),
requirements_file=cmd_runner_fmt.as_opt_val('-r'),
dest=cmd_runner_fmt.as_opt_val('-p'),
force=cmd_runner_fmt.as_bool("--force"),
@@ -244,9 +260,7 @@ class AnsibleGalaxyInstall(ModuleHelper):
def __init_module__(self):
self.runner, self.ansible_version = self._get_ansible_galaxy_version()
if self.ansible_version < (2, 11):
- self.module.fail_json(
- msg="Support for Ansible 2.9 and ansible-base 2.10 has been removed."
- )
+ self.module.fail_json(msg="Support for Ansible 2.9 and ansible-base 2.10 has been removed.")
self.vars.set("new_collections", {}, change=True)
self.vars.set("new_roles", {}, change=True)
if self.vars.type != "collection":
@@ -299,8 +313,9 @@ class AnsibleGalaxyInstall(ModuleHelper):
elif match.group("role"):
self.vars.new_roles[match.group("role")] = match.group("rversion")
- with self.runner("type galaxy_cmd force no_deps dest requirements_file name", output_process=process) as ctx:
- ctx.run(galaxy_cmd="install")
+ upgrade = (self.vars.type == "collection" and self.vars.state == "latest")
+ with self.runner("type galaxy_cmd upgrade force no_deps dest requirements_file name", output_process=process) as ctx:
+ ctx.run(galaxy_cmd="install", upgrade=upgrade)
if self.verbosity > 2:
self.vars.set("run_info", ctx.run_info)
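The state handling above boils down to a single flag: C(--upgrade) is only passed when installing collections with state=latest. A tiny sketch of that decision (function name is illustrative):

def galaxy_upgrade_flag(install_type, state):
    upgrade = (install_type == "collection" and state == "latest")
    return ["--upgrade"] if upgrade else []

print(galaxy_upgrade_flag("collection", "latest"))  # ['--upgrade']
print(galaxy_upgrade_flag("role", "latest"))        # [] - state=latest is ignored for roles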
diff --git a/ansible_collections/community/general/plugins/modules/cargo.py b/ansible_collections/community/general/plugins/modules/cargo.py
index ba9c05ed7..2fc729da2 100644
--- a/ansible_collections/community/general/plugins/modules/cargo.py
+++ b/ansible_collections/community/general/plugins/modules/cargo.py
@@ -1,6 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Radek Sprta <mail@radeksprta.eu>
+# Copyright (c) 2024 Colin Nolan <cn580@alumni.york.ac.uk>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
@@ -65,6 +66,13 @@ options:
type: str
default: present
choices: [ "present", "absent", "latest" ]
+ directory:
+ description:
+ - Path to the source directory to install the Rust package from.
+ - This is only used when installing packages.
+ type: path
+ required: false
+ version_added: 9.1.0
requirements:
- cargo installed
"""
@@ -98,8 +106,14 @@ EXAMPLES = r"""
community.general.cargo:
name: ludusavi
state: latest
+
+- name: Install "ludusavi" Rust package from source directory
+ community.general.cargo:
+ name: ludusavi
+ directory: /path/to/ludusavi/source
"""
+import json
import os
import re
@@ -115,6 +129,7 @@ class Cargo(object):
self.state = kwargs["state"]
self.version = kwargs["version"]
self.locked = kwargs["locked"]
+ self.directory = kwargs["directory"]
@property
def path(self):
@@ -143,7 +158,7 @@ class Cargo(object):
data, dummy = self._exec(cmd, True, False, False)
- package_regex = re.compile(r"^([\w\-]+) v(.+):$")
+ package_regex = re.compile(r"^([\w\-]+) v(\S+).*:$")
installed = {}
for line in data.splitlines():
package_info = package_regex.match(line)
@@ -163,19 +178,53 @@ class Cargo(object):
if self.version:
cmd.append("--version")
cmd.append(self.version)
+ if self.directory:
+ cmd.append("--path")
+ cmd.append(self.directory)
return self._exec(cmd)
def is_outdated(self, name):
installed_version = self.get_installed().get(name)
+ latest_version = (
+ self.get_latest_published_version(name)
+ if not self.directory
+ else self.get_source_directory_version(name)
+ )
+ return installed_version != latest_version
+ def get_latest_published_version(self, name):
cmd = ["search", name, "--limit", "1"]
data, dummy = self._exec(cmd, True, False, False)
match = re.search(r'"(.+)"', data)
- if match:
- latest_version = match.group(1)
-
- return installed_version != latest_version
+ if not match:
+ self.module.fail_json(
+ msg="No published version for package %s found" % name
+ )
+ return match.group(1)
+
+ def get_source_directory_version(self, name):
+ cmd = [
+ "metadata",
+ "--format-version",
+ "1",
+ "--no-deps",
+ "--manifest-path",
+ os.path.join(self.directory, "Cargo.toml"),
+ ]
+ data, dummy = self._exec(cmd, True, False, False)
+ manifest = json.loads(data)
+
+ package = next(
+ (package for package in manifest["packages"] if package["name"] == name),
+ None,
+ )
+ if not package:
+ self.module.fail_json(
+ msg="Package %s not defined in source, found: %s"
+ % (name, [x["name"] for x in manifest["packages"]])
+ )
+ return package["version"]
def uninstall(self, packages=None):
cmd = ["uninstall"]
@@ -191,16 +240,21 @@ def main():
state=dict(default="present", choices=["present", "absent", "latest"]),
version=dict(default=None, type="str"),
locked=dict(default=False, type="bool"),
+ directory=dict(default=None, type="path"),
)
module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)
name = module.params["name"]
state = module.params["state"]
version = module.params["version"]
+ directory = module.params["directory"]
if not name:
module.fail_json(msg="Package name must be specified")
+ if directory is not None and not os.path.isdir(directory):
+ module.fail_json(msg="Source directory does not exist")
+
# Set LANG env since we parse stdout
module.run_command_environ_update = dict(
LANG="C", LC_ALL="C", LC_MESSAGES="C", LC_CTYPE="C"
diff --git a/ansible_collections/community/general/plugins/modules/consul_agent_check.py b/ansible_collections/community/general/plugins/modules/consul_agent_check.py
new file mode 100644
index 000000000..373926004
--- /dev/null
+++ b/ansible_collections/community/general/plugins/modules/consul_agent_check.py
@@ -0,0 +1,254 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Michael Ilg
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+module: consul_agent_check
+short_description: Add, modify, and delete checks within a consul cluster
+version_added: 9.1.0
+description:
+ - Allows the addition, modification and deletion of checks in a consul
+ cluster via the agent. For more details on using and configuring Checks,
+ see U(https://developer.hashicorp.com/consul/api-docs/agent/check).
+  - Currently, there is no complete way to retrieve the script, interval, or TTL
+    metadata for a registered check. Without this metadata it is not possible to
+    tell whether the data supplied with Ansible represents a change to a check. As a
+    result, this module does not attempt to determine changes and will always report
+    that a change occurred. An API method is planned to supply this metadata; at that
+    point change management will be added.
+author:
+ - Michael Ilg (@Ilgmi)
+extends_documentation_fragment:
+ - community.general.consul
+ - community.general.consul.actiongroup_consul
+ - community.general.consul.token
+ - community.general.attributes
+attributes:
+ check_mode:
+ support: full
+ details:
+ - The result is the object as it is defined in the module options and not the object structure of the consul API.
+ For a better overview of what the object structure looks like,
+ take a look at U(https://developer.hashicorp.com/consul/api-docs/agent/check#list-checks).
+ diff_mode:
+ support: partial
+ details:
+ - In check mode the diff will show the object as it is defined in the module options and not the object structure of the consul API.
+options:
+ state:
+ description:
+ - Whether the check should be present or absent.
+ choices: ['present', 'absent']
+ default: present
+ type: str
+ name:
+ description:
+ - Required name for the service check.
+ type: str
+ id:
+ description:
+      - Specifies a unique ID for this check on the node. This defaults to the O(name) parameter, but it may be necessary to provide
+        an ID for uniqueness. This value is returned in the response as C(CheckID).
+ type: str
+ interval:
+ description:
+ - The interval at which the service check will be run.
+ This is a number with a V(s) or V(m) suffix to signify the units of seconds or minutes, for example V(15s) or V(1m).
+ If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s).
+ - Required if one of the parameters O(args), O(http), or O(tcp) is specified.
+ type: str
+ notes:
+ description:
+ - Notes to attach to check when registering it.
+ type: str
+ args:
+ description:
+ - Specifies command arguments to run to update the status of the check.
+ - Requires O(interval) to be provided.
+ - Mutually exclusive with O(ttl), O(tcp) and O(http).
+ type: list
+ elements: str
+ ttl:
+ description:
+      - Checks can be registered with a TTL instead of O(args) and O(interval).
+        This means that the service will check in with the agent before the
+        TTL expires. If it does not, the check will be considered failed.
+        Required when registering a check and O(args) and O(interval) are missing.
+        Similar to the interval, this is a number with a V(s) or V(m) suffix to
+        signify the units of seconds or minutes, for example V(15s) or V(1m).
+        If no suffix is supplied, V(s) will be used by default, for example V(10) will be V(10s).
+ - Mutually exclusive with O(args), O(tcp) and O(http).
+ type: str
+ tcp:
+ description:
+ - Checks can be registered with a TCP port. This means that consul
+ will check if the connection attempt to that port is successful (that is, the port is currently accepting connections).
+ The format is V(host:port), for example V(localhost:80).
+ - Requires O(interval) to be provided.
+ - Mutually exclusive with O(args), O(ttl) and O(http).
+ type: str
+ version_added: '1.3.0'
+ http:
+ description:
+ - Checks can be registered with an HTTP endpoint. This means that consul
+ will check that the http endpoint returns a successful HTTP status.
+ - Requires O(interval) to be provided.
+ - Mutually exclusive with O(args), O(ttl) and O(tcp).
+ type: str
+ timeout:
+ description:
+ - A custom HTTP check timeout. The consul default is 10 seconds.
+ Similar to the interval this is a number with a V(s) or V(m) suffix to
+ signify the units of seconds or minutes, for example V(15s) or V(1m).
+ If no suffix is supplied V(s) will be used by default, for example V(10) will be V(10s).
+ type: str
+ service_id:
+ description:
+ - The ID for the service, must be unique per node. If O(state=absent),
+ defaults to the service name if supplied.
+ type: str
+'''
+
+EXAMPLES = '''
+- name: Register tcp check for service 'nginx'
+ community.general.consul_agent_check:
+ name: nginx_tcp_check
+ service_id: nginx
+ interval: 60s
+ tcp: localhost:80
+ notes: "Nginx Check"
+
+- name: Register http check for service 'nginx'
+ community.general.consul_agent_check:
+ name: nginx_http_check
+ service_id: nginx
+ interval: 60s
+ http: http://localhost:80/status
+ notes: "Nginx Check"
+
+- name: Remove check for service 'nginx'
+ community.general.consul_agent_check:
+ state: absent
+ id: nginx_http_check
+ service_id: "{{ nginx_service.ID }}"
+'''
+
+RETURN = """
+check:
+ description: The check as returned by the consul HTTP API.
+ returned: always
+ type: dict
+ sample:
+ CheckID: nginx_check
+ ServiceID: nginx
+ Interval: 30s
+ Type: http
+ Notes: Nginx Check
+operation:
+ description: The operation performed.
+ returned: changed
+ type: str
+ sample: update
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.community.general.plugins.module_utils.consul import (
+ AUTH_ARGUMENTS_SPEC,
+ OPERATION_CREATE,
+ OPERATION_UPDATE,
+ OPERATION_DELETE,
+ OPERATION_READ,
+ _ConsulModule,
+ validate_check,
+)
+
+_ARGUMENT_SPEC = {
+ "state": dict(default="present", choices=["present", "absent"]),
+ "name": dict(type='str'),
+ "id": dict(type='str'),
+ "interval": dict(type='str'),
+ "notes": dict(type='str'),
+ "args": dict(type='list', elements='str'),
+ "http": dict(type='str'),
+ "tcp": dict(type='str'),
+ "ttl": dict(type='str'),
+ "timeout": dict(type='str'),
+ "service_id": dict(type='str'),
+}
+
+_MUTUALLY_EXCLUSIVE = [
+ ('args', 'ttl', 'tcp', 'http'),
+]
+
+_REQUIRED_IF = [
+ ('state', 'present', ['name']),
+ ('state', 'absent', ('id', 'name'), True),
+]
+
+_REQUIRED_BY = {
+ 'args': 'interval',
+ 'http': 'interval',
+ 'tcp': 'interval',
+}
+
+_ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC)
+
+
+class ConsulAgentCheckModule(_ConsulModule):
+ api_endpoint = "agent/check"
+ result_key = "check"
+ unique_identifiers = ["id", "name"]
+ operational_attributes = {"Node", "CheckID", "Output", "ServiceName", "ServiceTags",
+ "Status", "Type", "ExposedPort", "Definition"}
+
+ def endpoint_url(self, operation, identifier=None):
+ if operation == OPERATION_READ:
+ return "agent/checks"
+ if operation in [OPERATION_CREATE, OPERATION_UPDATE]:
+ return "/".join([self.api_endpoint, "register"])
+ if operation == OPERATION_DELETE:
+ return "/".join([self.api_endpoint, "deregister", identifier])
+
+ return super(ConsulAgentCheckModule, self).endpoint_url(operation, identifier)
+
+ def read_object(self):
+ url = self.endpoint_url(OPERATION_READ)
+ checks = self.get(url)
+ identifier = self.id_from_obj(self.params)
+ if identifier in checks:
+ return checks[identifier]
+ return None
+
+ def prepare_object(self, existing, obj):
+ existing = super(ConsulAgentCheckModule, self).prepare_object(existing, obj)
+ validate_check(existing)
+ return existing
+
+ def delete_object(self, obj):
+ if not self._module.check_mode:
+ self.put(self.endpoint_url(OPERATION_DELETE, obj.get("CheckID")))
+ return {}
+
+
+def main():
+ module = AnsibleModule(
+ _ARGUMENT_SPEC,
+ mutually_exclusive=_MUTUALLY_EXCLUSIVE,
+ required_if=_REQUIRED_IF,
+ required_by=_REQUIRED_BY,
+ supports_check_mode=True,
+ )
+
+ consul_module = ConsulAgentCheckModule(module)
+ consul_module.execute()
+
+
+if __name__ == "__main__":
+ main()
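Because unique_identifiers lists both id and name, the module resolves the check to operate on by trying those keys in order. A simplified sketch of that lookup (detached from _ConsulModule; the check names are illustrative):

def id_from_obj(obj, unique_identifiers=("id", "name")):
    # The first identifier present in the object wins.
    for identifier in unique_identifiers:
        if identifier in obj:
            return obj[identifier]
    return None

print(id_from_obj({"name": "nginx_http_check"}))                         # nginx_http_check
print(id_from_obj({"id": "nginx_check_1", "name": "nginx_http_check"}))  # nginx_check_1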
diff --git a/ansible_collections/community/general/plugins/modules/consul_agent_service.py b/ansible_collections/community/general/plugins/modules/consul_agent_service.py
new file mode 100644
index 000000000..a8ef09897
--- /dev/null
+++ b/ansible_collections/community/general/plugins/modules/consul_agent_service.py
@@ -0,0 +1,289 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2024, Michael Ilg
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+module: consul_agent_service
+short_description: Add, modify and delete services within a consul cluster
+version_added: 9.1.0
+description:
+ - Allows the addition, modification and deletion of services in a consul
+ cluster via the agent.
+  - There are currently no plans to create services and checks in one module.
+    This is because the Consul API does not provide the checks for a service, and
+    the checks themselves do not match the module parameters.
+    Therefore, only a service without checks can be created with this module.
+author:
+ - Michael Ilg (@Ilgmi)
+extends_documentation_fragment:
+ - community.general.consul
+ - community.general.consul.actiongroup_consul
+ - community.general.consul.token
+ - community.general.attributes
+attributes:
+ check_mode:
+ support: full
+ diff_mode:
+ support: partial
+ details:
+ - In check mode the diff will miss operational attributes.
+options:
+ state:
+ description:
+ - Whether the service should be present or absent.
+ choices: ['present', 'absent']
+ default: present
+ type: str
+ name:
+ description:
+      - Unique name for the service on a node; must be unique per node.
+        Required when registering a service.
+ type: str
+ id:
+ description:
+ - Specifies a unique ID for this service. This must be unique per agent. This defaults to the O(name) parameter if not provided.
+ If O(state=absent), defaults to the service name if supplied.
+ type: str
+ tags:
+ description:
+ - Tags that will be attached to the service registration.
+ type: list
+ elements: str
+ address:
+ description:
+ - The address to advertise that the service will be listening on.
+ This value will be passed as the C(address) parameter to Consul's
+ C(/v1/agent/service/register) API method, so refer to the Consul API
+ documentation for further details.
+ type: str
+ meta:
+ description:
+      - Optional metadata used for filtering.
+        For keys, the characters C(A-Z), C(a-z), C(0-9), C(_), C(-) are allowed.
+        Characters that are not allowed are replaced with underscores.
+ type: dict
+ service_port:
+ description:
+ - The port on which the service is listening. Can optionally be supplied for
+ registration of a service, that is if O(name) or O(id) is set.
+ type: int
+ enable_tag_override:
+ description:
+ - Specifies to disable the anti-entropy feature for this service's tags.
+ If EnableTagOverride is set to true then external agents can update this service in the catalog and modify the tags.
+ type: bool
+    default: false
+ weights:
+ description:
+      - Specifies weights for the service.
+ type: dict
+ suboptions:
+ passing:
+ description:
+ - Weights for passing.
+ type: int
+ default: 1
+ warning:
+ description:
+ - Weights for warning.
+ type: int
+ default: 1
+ default: {"passing": 1, "warning": 1}
+'''
+
+EXAMPLES = '''
+- name: Register nginx service with the local consul agent
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: nginx
+ service_port: 80
+
+- name: Register nginx with a tcp check
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: nginx
+ service_port: 80
+
+- name: Register nginx with an http check
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: nginx
+ service_port: 80
+
+- name: Register external service nginx available at 10.1.5.23
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: nginx
+ service_port: 80
+ address: 10.1.5.23
+
+- name: Register nginx with some service tags
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: nginx
+ service_port: 80
+ tags:
+ - prod
+ - webservers
+
+- name: Register nginx with some service meta
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: nginx
+ service_port: 80
+ meta:
+ nginx_version: 1.25.3
+
+- name: Remove nginx service
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ service_id: nginx
+ state: absent
+
+- name: Register celery worker service
+ community.general.consul_agent_service:
+ host: consul1.example.com
+ token: some_management_acl
+ name: celery-worker
+ tags:
+ - prod
+ - worker
+'''
+
+RETURN = """
+service:
+ description: The service as returned by the consul HTTP API.
+ returned: always
+ type: dict
+ sample:
+ ID: nginx
+ Service: nginx
+ Address: localhost
+ Port: 80
+ Tags:
+ - http
+ Meta:
+ - nginx_version: 1.23.3
+ Datacenter: dc1
+ Weights:
+ Passing: 1
+ Warning: 1
+ ContentHash: 61a245cd985261ac
+ EnableTagOverride: false
+operation:
+ description: The operation performed.
+ returned: changed
+ type: str
+ sample: update
+"""
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.community.general.plugins.module_utils.consul import (
+ AUTH_ARGUMENTS_SPEC,
+ OPERATION_CREATE,
+ OPERATION_UPDATE,
+ OPERATION_DELETE,
+ _ConsulModule
+)
+
+_CHECK_MUTUALLY_EXCLUSIVE = [('args', 'ttl', 'tcp', 'http')]
+_CHECK_REQUIRED_BY = {
+ 'args': 'interval',
+ 'http': 'interval',
+ 'tcp': 'interval',
+}
+
+_ARGUMENT_SPEC = {
+ "state": dict(default="present", choices=["present", "absent"]),
+ "name": dict(type='str'),
+ "id": dict(type='str'),
+ "tags": dict(type='list', elements='str'),
+ "address": dict(type='str'),
+ "meta": dict(type='dict'),
+ "service_port": dict(type='int'),
+ "enable_tag_override": dict(type='bool', default=False),
+ "weights": dict(type='dict', options=dict(
+ passing=dict(type='int', default=1, no_log=False),
+ warning=dict(type='int', default=1)
+ ), default={"passing": 1, "warning": 1})
+}
+
+_REQUIRED_IF = [
+ ('state', 'present', ['name']),
+ ('state', 'absent', ('id', 'name'), True),
+]
+
+_ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC)
+
+
+class ConsulAgentServiceModule(_ConsulModule):
+ api_endpoint = "agent/service"
+ result_key = "service"
+ unique_identifiers = ["id", "name"]
+ operational_attributes = {"Service", "ContentHash", "Datacenter"}
+
+ def endpoint_url(self, operation, identifier=None):
+ if operation in [OPERATION_CREATE, OPERATION_UPDATE]:
+ return "/".join([self.api_endpoint, "register"])
+ if operation == OPERATION_DELETE:
+ return "/".join([self.api_endpoint, "deregister", identifier])
+
+ return super(ConsulAgentServiceModule, self).endpoint_url(operation, identifier)
+
+ def prepare_object(self, existing, obj):
+ existing = super(ConsulAgentServiceModule, self).prepare_object(existing, obj)
+ if "ServicePort" in existing:
+ existing["Port"] = existing.pop("ServicePort")
+
+ if "ID" not in existing:
+ existing["ID"] = existing["Name"]
+
+ return existing
+
+ def needs_update(self, api_obj, module_obj):
+ obj = {}
+ if "Service" in api_obj:
+ obj["Service"] = api_obj["Service"]
+ api_obj = self.prepare_object(api_obj, obj)
+
+ if "Name" in module_obj:
+ module_obj["Service"] = module_obj.pop("Name")
+ if "ServicePort" in module_obj:
+ module_obj["Port"] = module_obj.pop("ServicePort")
+
+ return super(ConsulAgentServiceModule, self).needs_update(api_obj, module_obj)
+
+ def delete_object(self, obj):
+ if not self._module.check_mode:
+ url = self.endpoint_url(OPERATION_DELETE, self.id_from_obj(obj, camel_case=True))
+ self.put(url)
+ return {}
+
+
+def main():
+ module = AnsibleModule(
+ _ARGUMENT_SPEC,
+ required_if=_REQUIRED_IF,
+ supports_check_mode=True,
+ )
+
+ consul_module = ConsulAgentServiceModule(module)
+ consul_module.execute()
+
+
+if __name__ == "__main__":
+ main()
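prepare_object() above adapts module-style fields to the agent API's register payload: ServicePort becomes Port, and a missing ID falls back to the service Name. The same mapping as a standalone sketch:

def prepare_service_payload(obj):
    payload = dict(obj)
    if "ServicePort" in payload:
        payload["Port"] = payload.pop("ServicePort")
    if "ID" not in payload:
        payload["ID"] = payload["Name"]
    return payload

print(prepare_service_payload({"Name": "nginx", "ServicePort": 80}))
# {'Name': 'nginx', 'Port': 80, 'ID': 'nginx'}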
diff --git a/ansible_collections/community/general/plugins/modules/consul_auth_method.py b/ansible_collections/community/general/plugins/modules/consul_auth_method.py
index afe549f6e..e28474c31 100644
--- a/ansible_collections/community/general/plugins/modules/consul_auth_method.py
+++ b/ansible_collections/community/general/plugins/modules/consul_auth_method.py
@@ -168,7 +168,7 @@ def normalize_ttl(ttl):
class ConsulAuthMethodModule(_ConsulModule):
api_endpoint = "acl/auth-method"
result_key = "auth_method"
- unique_identifier = "name"
+ unique_identifiers = ["name"]
def map_param(self, k, v, is_update):
if k == "config" and v:
diff --git a/ansible_collections/community/general/plugins/modules/consul_binding_rule.py b/ansible_collections/community/general/plugins/modules/consul_binding_rule.py
index 88496f867..6a2882cee 100644
--- a/ansible_collections/community/general/plugins/modules/consul_binding_rule.py
+++ b/ansible_collections/community/general/plugins/modules/consul_binding_rule.py
@@ -124,7 +124,7 @@ from ansible_collections.community.general.plugins.module_utils.consul import (
class ConsulBindingRuleModule(_ConsulModule):
api_endpoint = "acl/binding-rule"
result_key = "binding_rule"
- unique_identifier = "id"
+ unique_identifiers = ["id"]
def read_object(self):
url = "acl/binding-rules?authmethod={0}".format(self.params["auth_method"])
diff --git a/ansible_collections/community/general/plugins/modules/consul_policy.py b/ansible_collections/community/general/plugins/modules/consul_policy.py
index 2ed6021b0..36139ac09 100644
--- a/ansible_collections/community/general/plugins/modules/consul_policy.py
+++ b/ansible_collections/community/general/plugins/modules/consul_policy.py
@@ -145,7 +145,7 @@ _ARGUMENT_SPEC.update(AUTH_ARGUMENTS_SPEC)
class ConsulPolicyModule(_ConsulModule):
api_endpoint = "acl/policy"
result_key = "policy"
- unique_identifier = "id"
+ unique_identifiers = ["id"]
def endpoint_url(self, operation, identifier=None):
if operation == OPERATION_READ:
diff --git a/ansible_collections/community/general/plugins/modules/consul_role.py b/ansible_collections/community/general/plugins/modules/consul_role.py
index e07e2036f..d6c4e4dd9 100644
--- a/ansible_collections/community/general/plugins/modules/consul_role.py
+++ b/ansible_collections/community/general/plugins/modules/consul_role.py
@@ -212,7 +212,7 @@ from ansible_collections.community.general.plugins.module_utils.consul import (
class ConsulRoleModule(_ConsulModule):
api_endpoint = "acl/role"
result_key = "role"
- unique_identifier = "id"
+ unique_identifiers = ["id"]
def endpoint_url(self, operation, identifier=None):
if operation == OPERATION_READ:
diff --git a/ansible_collections/community/general/plugins/modules/consul_token.py b/ansible_collections/community/general/plugins/modules/consul_token.py
index 02bc544da..c8bc8bc27 100644
--- a/ansible_collections/community/general/plugins/modules/consul_token.py
+++ b/ansible_collections/community/general/plugins/modules/consul_token.py
@@ -235,13 +235,13 @@ def normalize_link_obj(api_obj, module_obj, key):
class ConsulTokenModule(_ConsulModule):
api_endpoint = "acl/token"
result_key = "token"
- unique_identifier = "accessor_id"
+ unique_identifiers = ["accessor_id"]
create_only_fields = {"expiration_ttl"}
def read_object(self):
# if `accessor_id` is not supplied we can only create objects and are not idempotent
- if not self.params.get(self.unique_identifier):
+ if not self.id_from_obj(self.params):
return None
return super(ConsulTokenModule, self).read_object()
diff --git a/ansible_collections/community/general/plugins/modules/django_check.py b/ansible_collections/community/general/plugins/modules/django_check.py
new file mode 100644
index 000000000..1553da7a3
--- /dev/null
+++ b/ansible_collections/community/general/plugins/modules/django_check.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+DOCUMENTATION = """
+module: django_check
+author:
+ - Alexei Znamensky (@russoz)
+short_description: Wrapper for C(django-admin check)
+version_added: 9.1.0
+description:
+ - This module is a wrapper for the execution of C(django-admin check).
+extends_documentation_fragment:
+ - community.general.attributes
+ - community.general.django
+options:
+ database:
+ description:
+ - Specify databases to run checks against.
+ - If not specified, Django will not run database tests.
+ type: list
+ elements: str
+ deploy:
+ description:
+ - Include additional checks relevant in a deployment setting.
+ type: bool
+ default: false
+ fail_level:
+ description:
+ - Message level that will trigger failure.
+ - Default is the Django default value. Check the documentation for the version being used.
+ type: str
+ choices: [CRITICAL, ERROR, WARNING, INFO, DEBUG]
+ tags:
+ description:
+ - Restrict checks to specific tags.
+ type: list
+ elements: str
+ apps:
+ description:
+ - Restrict checks to specific applications.
+ - Default is to check all applications.
+ type: list
+ elements: str
+notes:
+ - The outcome of the module is found in the common return values RV(ignore:stdout), RV(ignore:stderr), RV(ignore:rc).
+ - The module will fail if RV(ignore:rc) is not zero.
+attributes:
+ check_mode:
+ support: full
+ diff_mode:
+ support: none
+"""
+
+EXAMPLES = """
+- name: Check the entire project
+ community.general.django_check:
+ settings: myproject.settings
+
+- name: Check the project using specific databases
+ community.general.django_check:
+ database:
+ - somedb
+ - myotherdb
+ settings: fancysite.settings
+ pythonpath: /home/joedoe/project/fancysite
+ venv: /home/joedoe/project/fancysite/venv
+"""
+
+RETURN = """
+run_info:
+ description: Command-line execution information.
+ type: dict
+ returned: success and C(verbosity) >= 3
+"""
+
+from ansible_collections.community.general.plugins.module_utils.django import DjangoModuleHelper
+from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt
+
+
+class DjangoCheck(DjangoModuleHelper):
+ module = dict(
+ argument_spec=dict(
+ database=dict(type="list", elements="str"),
+ deploy=dict(type="bool", default=False),
+ fail_level=dict(type="str", choices=["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG"]),
+ tags=dict(type="list", elements="str"),
+ apps=dict(type="list", elements="str"),
+ ),
+ supports_check_mode=True,
+ )
+ arg_formats = dict(
+ database=cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_val)("--database"),
+ deploy=cmd_runner_fmt.as_bool("--deploy"),
+ fail_level=cmd_runner_fmt.as_opt_val("--fail-level"),
+ tags=cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_val)("--tag"),
+ apps=cmd_runner_fmt.as_list(),
+ )
+ django_admin_cmd = "check"
+ django_admin_arg_order = "database deploy fail_level tags apps"
+
+
+def main():
+ DjangoCheck.execute()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/general/plugins/modules/django_createcachetable.py b/ansible_collections/community/general/plugins/modules/django_createcachetable.py
new file mode 100644
index 000000000..b038e0358
--- /dev/null
+++ b/ansible_collections/community/general/plugins/modules/django_createcachetable.py
@@ -0,0 +1,67 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Alexei Znamensky <russoz@gmail.com>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+DOCUMENTATION = """
+module: django_createcachetable
+author:
+ - Alexei Znamensky (@russoz)
+short_description: Wrapper for C(django-admin createcachetable)
+version_added: 9.1.0
+description:
+ - This module is a wrapper for the execution of C(django-admin createcachetable).
+extends_documentation_fragment:
+ - community.general.attributes
+ - community.general.django
+ - community.general.django.database
+attributes:
+ check_mode:
+ support: full
+ diff_mode:
+ support: none
+"""
+
+EXAMPLES = """
+- name: Create cache table in the default database
+ community.general.django_createcachetable:
+ settings: myproject.settings
+
+- name: Create cache table in the other database
+ community.general.django_createcachetable:
+ database: myotherdb
+ settings: fancysite.settings
+ pythonpath: /home/joedoe/project/fancysite
+ venv: /home/joedoe/project/fancysite/venv
+"""
+
+RETURN = """
+run_info:
+ description: Command-line execution information.
+ type: dict
+ returned: success and O(verbosity) >= 3
+"""
+
+from ansible_collections.community.general.plugins.module_utils.django import DjangoModuleHelper
+
+
+class DjangoCreateCacheTable(DjangoModuleHelper):
+ module = dict(
+ supports_check_mode=True,
+ )
+ django_admin_cmd = "createcachetable"
+ django_admin_arg_order = "noinput database dry_run"
+ _django_args = ["noinput", "database", "dry_run"]
+ _check_mode_arg = "dry_run"
+
+
+def main():
+ DjangoCreateCacheTable.execute()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/general/plugins/modules/git_config.py b/ansible_collections/community/general/plugins/modules/git_config.py
index a8d2ebe97..95969c1b3 100644
--- a/ansible_collections/community/general/plugins/modules/git_config.py
+++ b/ansible_collections/community/general/plugins/modules/git_config.py
@@ -18,7 +18,7 @@ author:
- Matthew Gamble (@djmattyg007)
- Marius Gedminas (@mgedmin)
requirements: ['git']
-short_description: Read and write git configuration
+short_description: Update git configuration
description:
- The M(community.general.git_config) module changes git configuration by invoking C(git config).
This is needed if you do not want to use M(ansible.builtin.template) for the entire git
@@ -36,6 +36,8 @@ options:
list_all:
description:
- List all settings (optionally limited to a given O(scope)).
+ - This option is B(deprecated) and will be removed from community.general 11.0.0.
+ Please use M(community.general.git_config_info) instead.
type: bool
default: false
name:
@@ -74,6 +76,8 @@ options:
description:
- When specifying the name of a single setting, supply a value to
set that setting to the given value.
+ - From community.general 11.0.0 on, O(value) will be required if O(state=present).
+ To read values, use the M(community.general.git_config_info) module instead.
type: str
add_mode:
description:
@@ -143,29 +147,6 @@ EXAMPLES = '''
repo: /etc
scope: local
value: 'root@{{ ansible_fqdn }}'
-
-- name: Read individual values from git config
- community.general.git_config:
- name: alias.ci
- scope: global
-
-- name: Scope system is also assumed when reading values, unless list_all=true
- community.general.git_config:
- name: alias.diffc
-
-- name: Read all values from git config
- community.general.git_config:
- list_all: true
- scope: global
-
-- name: When list_all is yes and no scope is specified, you get configuration from all scopes
- community.general.git_config:
- list_all: true
-
-- name: Specify a repository to include local settings
- community.general.git_config:
- list_all: true
- repo: /path/to/repo.git
'''
RETURN = '''
@@ -193,7 +174,7 @@ from ansible.module_utils.basic import AnsibleModule
def main():
module = AnsibleModule(
argument_spec=dict(
- list_all=dict(required=False, type='bool', default=False),
+ list_all=dict(required=False, type='bool', default=False, removed_in_version='11.0.0', removed_from_collection='community.general'),
name=dict(type='str'),
repo=dict(type='path'),
file=dict(type='path'),
@@ -222,6 +203,14 @@ def main():
new_value = params['value'] or ''
add_mode = params['add_mode']
+ if not unset and not new_value and not params['list_all']:
+ module.deprecate(
+ 'If state=present, a value must be specified from community.general 11.0.0 on.'
+ ' To read a config value, use the community.general.git_config_info module instead.',
+ version='11.0.0',
+ collection_name='community.general',
+ )
+
scope = determine_scope(params)
cwd = determine_cwd(scope, params)
@@ -263,7 +252,7 @@ def main():
module.exit_json(changed=False, msg='', config_value=old_values[0] if old_values else '')
elif unset and not out:
module.exit_json(changed=False, msg='no setting to unset')
- elif new_value in old_values and (len(old_values) == 1 or add_mode == "add"):
+ elif new_value in old_values and (len(old_values) == 1 or add_mode == "add") and not unset:
module.exit_json(changed=False, msg="")
# Until this point, the git config was just read and in case no change is needed, the module has already exited.
diff --git a/ansible_collections/community/general/plugins/modules/homectl.py b/ansible_collections/community/general/plugins/modules/homectl.py
index ca4c19a87..7751651c8 100644
--- a/ansible_collections/community/general/plugins/modules/homectl.py
+++ b/ansible_collections/community/general/plugins/modules/homectl.py
@@ -17,6 +17,12 @@ short_description: Manage user accounts with systemd-homed
version_added: 4.4.0
description:
- Manages a user's home directory managed by systemd-homed.
+notes:
+ - This module does B(not) work with Python 3.13 or newer. It uses the deprecated L(crypt Python module,
+ https://docs.python.org/3.12/library/crypt.html) from the Python standard library, which was removed
+ from Python 3.13.
+requirements:
+ - Python 3.12 or earlier
extends_documentation_fragment:
- community.general.attributes
attributes:
@@ -263,12 +269,21 @@ data:
}
'''
-import crypt
import json
-from ansible.module_utils.basic import AnsibleModule
+import traceback
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.basic import jsonify
from ansible.module_utils.common.text.formatters import human_to_bytes
+try:
+ import crypt
+except ImportError:
+ HAS_CRYPT = False
+ CRYPT_IMPORT_ERROR = traceback.format_exc()
+else:
+ HAS_CRYPT = True
+ CRYPT_IMPORT_ERROR = None
+
class Homectl(object):
'''#TODO DOC STRINGS'''
@@ -591,6 +606,12 @@ def main():
]
)
+ if not HAS_CRYPT:
+ module.fail_json(
+ msg=missing_required_lib('crypt (part of Python 3.13 standard library)'),
+ exception=CRYPT_IMPORT_ERROR,
+ )
+
homectl = Homectl(module)
homectl.result['state'] = homectl.state
diff --git a/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py b/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py
index cb4ce03dd..59475a55b 100644
--- a/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py
+++ b/ansible_collections/community/general/plugins/modules/ipa_dnsrecord.py
@@ -35,13 +35,14 @@ options:
record_type:
description:
- The type of DNS record name.
- - Currently, 'A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV' and 'MX' are supported.
+ - Currently, 'A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV', 'MX' and 'SSHFP' are supported.
- "'A6', 'CNAME', 'DNAME' and 'TXT' are added in version 2.5."
- "'SRV' and 'MX' are added in version 2.8."
- "'NS' are added in comunity.general 8.2.0."
+      - "'SSHFP' is added in community.general 9.1.0."
required: false
default: 'A'
- choices: ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'MX', 'NS', 'PTR', 'SRV', 'TXT']
+ choices: ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'MX', 'NS', 'PTR', 'SRV', 'TXT', 'SSHFP']
type: str
record_value:
description:
@@ -57,6 +58,7 @@ options:
- In the case of 'TXT' record type, this will be a text.
- In the case of 'SRV' record type, this will be a service record.
- In the case of 'MX' record type, this will be a mail exchanger record.
+ - In the case of 'SSHFP' record type, this will be an SSH fingerprint record.
type: str
record_values:
description:
@@ -71,6 +73,7 @@ options:
- In the case of 'TXT' record type, this will be a text.
- In the case of 'SRV' record type, this will be a service record.
- In the case of 'MX' record type, this will be a mail exchanger record.
+ - In the case of 'SSHFP' record type, this will be an SSH fingerprint record.
type: list
elements: str
record_ttl:
@@ -175,6 +178,20 @@ EXAMPLES = r'''
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: ChangeMe!
+
+- name: Retrieve the current sshfp fingerprints
+ ansible.builtin.command: ssh-keyscan -D localhost
+ register: ssh_hostkeys
+
+- name: Update the SSHFP records in DNS
+ community.general.ipa_dnsrecord:
+ name: "{{ inventory_hostname }}"
+ zone_name: example.com
+ record_type: 'SSHFP'
+ record_values: "{{ ssh_hostkeys.stdout.split('\n') | map('split', 'SSHFP ') | map('last') | list }}"
+ ipa_host: ipa.example.com
+ ipa_user: admin
+ ipa_pass: ChangeMe!
'''
RETURN = r'''
@@ -228,6 +245,8 @@ class DNSRecordIPAClient(IPAClient):
item.update(srvrecord=value)
elif details['record_type'] == 'MX':
item.update(mxrecord=value)
+ elif details['record_type'] == 'SSHFP':
+ item.update(sshfprecord=value)
self._post_json(method='dnsrecord_add', name=zone_name, item=item)
@@ -266,6 +285,8 @@ def get_dnsrecord_dict(details=None):
module_dnsrecord.update(srvrecord=details['record_values'])
elif details['record_type'] == 'MX' and details['record_values']:
module_dnsrecord.update(mxrecord=details['record_values'])
+ elif details['record_type'] == 'SSHFP' and details['record_values']:
+ module_dnsrecord.update(sshfprecord=details['record_values'])
if details.get('record_ttl'):
module_dnsrecord.update(dnsttl=details['record_ttl'])
@@ -328,7 +349,7 @@ def ensure(module, client):
def main():
- record_types = ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV', 'MX']
+ record_types = ['A', 'AAAA', 'A6', 'CNAME', 'DNAME', 'NS', 'PTR', 'TXT', 'SRV', 'MX', 'SSHFP']
argument_spec = ipa_argument_spec()
argument_spec.update(
zone_name=dict(type='str', required=True),
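For reference, the SSHFP example above relies on ssh-keyscan -D printing DNS resource records of the form "host IN SSHFP <algorithm> <fp-type> <fingerprint>"; only the value part is passed to FreeIPA. A hedged sketch of that parsing in plain Python (the fingerprints below are invented):

    raw = (
        "localhost IN SSHFP 1 1 0123456789abcdef0123456789abcdef01234567\n"
        "localhost IN SSHFP 4 2 00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff"
    )
    # Keep only "<algorithm> <fp-type> <fingerprint>", as the playbook filter chain does.
    record_values = [line.split("SSHFP ", 1)[-1] for line in raw.splitlines()]
    print(record_values)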
diff --git a/ansible_collections/community/general/plugins/modules/keycloak_client.py b/ansible_collections/community/general/plugins/modules/keycloak_client.py
index 3628e5a51..efaa66e26 100644
--- a/ansible_collections/community/general/plugins/modules/keycloak_client.py
+++ b/ansible_collections/community/general/plugins/modules/keycloak_client.py
@@ -340,6 +340,42 @@ options:
description:
- Override realm authentication flow bindings.
type: dict
+ suboptions:
+ browser:
+ description:
+ - Flow ID of the browser authentication flow.
+ - O(authentication_flow_binding_overrides.browser)
+ and O(authentication_flow_binding_overrides.browser_name) are mutually exclusive.
+ type: str
+
+ browser_name:
+ description:
+ - Flow name of the browser authentication flow.
+ - O(authentication_flow_binding_overrides.browser)
+ and O(authentication_flow_binding_overrides.browser_name) are mutually exclusive.
+ aliases:
+ - browserName
+ type: str
+ version_added: 9.1.0
+
+ direct_grant:
+ description:
+ - Flow ID of the direct grant authentication flow.
+ - O(authentication_flow_binding_overrides.direct_grant)
+ and O(authentication_flow_binding_overrides.direct_grant_name) are mutually exclusive.
+ aliases:
+ - directGrant
+ type: str
+
+ direct_grant_name:
+ description:
+ - Flow name of the direct grant authentication flow.
+ - O(authentication_flow_binding_overrides.direct_grant)
+ and O(authentication_flow_binding_overrides.direct_grant_name) are mutually exclusive.
+ aliases:
+ - directGrantName
+ type: str
+ version_added: 9.1.0
aliases:
- authenticationFlowBindingOverrides
version_added: 3.4.0
@@ -781,6 +817,64 @@ def sanitize_cr(clientrep):
return normalise_cr(result)
+def get_authentication_flow_id(flow_name, realm, kc):
+ """ Get the authentication flow ID based on the flow name, realm, and Keycloak client.
+
+ Args:
+ flow_name (str): The name of the authentication flow.
+ realm (str): The name of the realm.
+ kc (KeycloakClient): The Keycloak client instance.
+
+ Returns:
+ str: The ID of the authentication flow.
+
+ Raises:
+ KeycloakAPIException: If the authentication flow with the given name is not found in the realm.
+ """
+ flow = kc.get_authentication_flow_by_alias(flow_name, realm)
+ if flow:
+ return flow["id"]
+ kc.module.fail_json(msg='Authentication flow %s not found in realm %s' % (flow_name, realm))
+
+
+def flow_binding_from_dict_to_model(newClientFlowBinding, realm, kc):
+ """ Convert a dictionary representing client flow bindings to a model representation.
+
+ Args:
+ newClientFlowBinding (dict): A dictionary containing client flow bindings.
+ realm (str): The name of the realm.
+ kc (KeycloakClient): An instance of the KeycloakClient class.
+
+ Returns:
+ dict: A dictionary representing the model flow bindings. The dictionary has two keys:
+ - "browser" (str or None): The ID of the browser authentication flow binding, or None if not provided.
+ - "direct_grant" (str or None): The ID of the direct grant authentication flow binding, or None if not provided.
+
+ Raises:
+ KeycloakAPIException: If the authentication flow with the given name is not found in the realm.
+
+ """
+
+ modelFlow = {
+ "browser": None,
+ "direct_grant": None
+ }
+
+ for k, v in newClientFlowBinding.items():
+ if not v:
+ continue
+ if k == "browser":
+ modelFlow["browser"] = v
+ elif k == "browser_name":
+ modelFlow["browser"] = get_authentication_flow_id(v, realm, kc)
+ elif k == "direct_grant":
+ modelFlow["direct_grant"] = v
+ elif k == "direct_grant_name":
+ modelFlow["direct_grant"] = get_authentication_flow_id(v, realm, kc)
+
+ return modelFlow
+
+
def main():
"""
Module execution
@@ -799,6 +893,13 @@ def main():
config=dict(type='dict'),
)
+ authentication_flow_spec = dict(
+ browser=dict(type='str'),
+ browser_name=dict(type='str', aliases=['browserName']),
+ direct_grant=dict(type='str', aliases=['directGrant']),
+ direct_grant_name=dict(type='str', aliases=['directGrantName']),
+ )
+
meta_args = dict(
state=dict(default='present', choices=['present', 'absent']),
realm=dict(type='str', default='master'),
@@ -838,7 +939,13 @@ def main():
use_template_scope=dict(type='bool', aliases=['useTemplateScope']),
use_template_mappers=dict(type='bool', aliases=['useTemplateMappers']),
always_display_in_console=dict(type='bool', aliases=['alwaysDisplayInConsole']),
- authentication_flow_binding_overrides=dict(type='dict', aliases=['authenticationFlowBindingOverrides']),
+ authentication_flow_binding_overrides=dict(
+ type='dict',
+ aliases=['authenticationFlowBindingOverrides'],
+ options=authentication_flow_spec,
+ required_one_of=[['browser', 'direct_grant', 'browser_name', 'direct_grant_name']],
+ mutually_exclusive=[['browser', 'browser_name'], ['direct_grant', 'direct_grant_name']],
+ ),
protocol_mappers=dict(type='list', elements='dict', options=protmapper_spec, aliases=['protocolMappers']),
authorization_settings=dict(type='dict', aliases=['authorizationSettings']),
default_client_scopes=dict(type='list', elements='str', aliases=['defaultClientScopes']),
@@ -900,6 +1007,8 @@ def main():
# they are not specified
if client_param == 'protocol_mappers':
new_param_value = [dict((k, v) for k, v in x.items() if x[k] is not None) for x in new_param_value]
+ elif client_param == 'authentication_flow_binding_overrides':
+ new_param_value = flow_binding_from_dict_to_model(new_param_value, realm, kc)
changeset[camel(client_param)] = new_param_value
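The new flow_binding_from_dict_to_model() accepts either flow IDs or flow names and resolves names to IDs through the Keycloak API. A standalone sketch of that conversion with the API lookup stubbed out (names and IDs are made up):

    def resolve_flow_id(flow_name, realm):
        # Stands in for kc.get_authentication_flow_by_alias(flow_name, realm).
        return {"browser flow": "11111111-aaaa"}[flow_name]

    def to_model(binding, realm):
        model = {"browser": None, "direct_grant": None}
        for key, value in binding.items():
            if not value:
                continue
            if key in ("browser", "direct_grant"):
                model[key] = value  # already a flow ID
            elif key == "browser_name":
                model["browser"] = resolve_flow_id(value, realm)
            elif key == "direct_grant_name":
                model["direct_grant"] = resolve_flow_id(value, realm)
        return model

    print(to_model({"browser_name": "browser flow", "direct_grant": "2222-bbbb"}, "master"))
    # {'browser': '11111111-aaaa', 'direct_grant': '2222-bbbb'}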
diff --git a/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py b/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py
index d24e0f1f2..b962b932c 100644
--- a/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py
+++ b/ansible_collections/community/general/plugins/modules/keycloak_clientscope.py
@@ -301,10 +301,37 @@ end_state:
'''
from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import KeycloakAPI, camel, \
- keycloak_argument_spec, get_token, KeycloakError
+ keycloak_argument_spec, get_token, KeycloakError, is_struct_included
from ansible.module_utils.basic import AnsibleModule
+def normalise_cr(clientscoperep, remove_ids=False):
+ """ Re-sorts any properties where the order so that diff's is minimised, and adds default values where appropriate so that the
+ the change detection is more effective.
+
+ :param clientscoperep: the clientscoperep dict to be sanitized
+ :param remove_ids: If set to true, the unique IDs of objects are removed so that the diff and change detection do
+ not trigger on IDs that are usually not known beforehand (e.g. for protocol_mappers)
+ :return: normalised clientscoperep dict
+ """
+ # Avoid the dict passed in to be modified
+ clientscoperep = clientscoperep.copy()
+
+ if 'attributes' in clientscoperep:
+ clientscoperep['attributes'] = list(sorted(clientscoperep['attributes']))
+
+ if 'protocolMappers' in clientscoperep:
+ clientscoperep['protocolMappers'] = sorted(clientscoperep['protocolMappers'], key=lambda x: (x.get('name'), x.get('protocol'), x.get('protocolMapper')))
+ for mapper in clientscoperep['protocolMappers']:
+ if remove_ids:
+ mapper.pop('id', None)
+
+ # Set to a default value.
+ mapper['consentRequired'] = mapper.get('consentRequired', False)
+
+ return clientscoperep
+
+
def sanitize_cr(clientscoperep):
""" Removes probably sensitive details from a clientscoperep representation.
@@ -317,7 +344,7 @@ def sanitize_cr(clientscoperep):
if 'attributes' in result:
if 'saml.signing.private.key' in result['attributes']:
result['attributes']['saml.signing.private.key'] = 'no_log'
- return result
+ return normalise_cr(result)
def main():
@@ -458,6 +485,13 @@ def main():
result['diff'] = dict(before=sanitize_cr(before_clientscope), after=sanitize_cr(desired_clientscope))
if module.check_mode:
+ # We can only compare the current clientscope with the proposed updates we have
+ before_norm = normalise_cr(before_clientscope, remove_ids=True)
+ desired_norm = normalise_cr(desired_clientscope, remove_ids=True)
+ if module._diff:
+ result['diff'] = dict(before=sanitize_cr(before_norm),
+ after=sanitize_cr(desired_norm))
+ result['changed'] = not is_struct_included(desired_norm, before_norm)
module.exit_json(**result)
# do the update
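The check-mode change compares normalised representations so that ordering differences in protocolMappers do not show up as changes. A rough, self-contained illustration of why the sort matters (not the module's exact code):

    def normalise(rep):
        rep = dict(rep)
        if "protocolMappers" in rep:
            rep["protocolMappers"] = sorted(
                rep["protocolMappers"],
                key=lambda m: (m.get("name"), m.get("protocol"), m.get("protocolMapper")),
            )
        return rep

    before = {"protocolMappers": [{"name": "b"}, {"name": "a"}]}
    desired = {"protocolMappers": [{"name": "a"}, {"name": "b"}]}
    print(before == desired)                        # False -> would report a spurious change
    print(normalise(before) == normalise(desired))  # True  -> no change reported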
diff --git a/ansible_collections/community/general/plugins/modules/launchd.py b/ansible_collections/community/general/plugins/modules/launchd.py
index e5942ea7c..a6427bdb2 100644
--- a/ansible_collections/community/general/plugins/modules/launchd.py
+++ b/ansible_collections/community/general/plugins/modules/launchd.py
@@ -514,7 +514,8 @@ def main():
result['status']['current_pid'] != result['status']['previous_pid']):
result['changed'] = True
if module.check_mode:
- result['changed'] = True
+ if result['status']['current_state'] != action:
+ result['changed'] = True
module.exit_json(**result)
diff --git a/ansible_collections/community/general/plugins/modules/openbsd_pkg.py b/ansible_collections/community/general/plugins/modules/openbsd_pkg.py
index c83113611..69ac7bff8 100644
--- a/ansible_collections/community/general/plugins/modules/openbsd_pkg.py
+++ b/ansible_collections/community/general/plugins/modules/openbsd_pkg.py
@@ -24,7 +24,10 @@ attributes:
check_mode:
support: full
diff_mode:
- support: none
+ support: partial
+ version_added: 9.1.0
+ details:
+ - Only works when check mode is not enabled.
options:
name:
description:
@@ -159,6 +162,20 @@ def execute_command(cmd, module):
return module.run_command(cmd_args, environ_update={'TERM': 'dumb'})
+def get_all_installed(module):
+ """
+ Get all installed packages. Used to support diff mode.
+ """
+ command = 'pkg_info -Iq'
+
+ rc, stdout, stderr = execute_command(command, module)
+
+ if stderr:
+ module.fail_json(msg="failed in get_all_installed(): %s" % stderr)
+
+ return stdout
+
+
# Function used to find out if a package is currently installed.
def get_package_state(names, pkg_spec, module):
info_cmd = 'pkg_info -Iq'
@@ -573,10 +590,13 @@ def main():
result['name'] = name
result['state'] = state
result['build'] = build
+ result['diff'] = {}
# The data structure used to keep track of package information.
pkg_spec = {}
+ new_package_list = original_package_list = get_all_installed(module)
+
if build is True:
if not os.path.isdir(ports_dir):
module.fail_json(msg="the ports source directory %s does not exist" % (ports_dir))
@@ -661,6 +681,10 @@ def main():
result['changed'] = combined_changed
+ if result['changed'] and not module.check_mode:
+ new_package_list = get_all_installed(module)
+ result['diff'] = dict(before=original_package_list, after=new_package_list)
+
module.exit_json(**result)
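Diff support in openbsd_pkg is a simple before/after snapshot of 'pkg_info -Iq'. A sketch of the resulting structure, with invented package lists:

    before = "curl-8.7.1\nrsync-3.2.7\n"
    after = "curl-8.7.1\nrsync-3.3.0\nwget-1.24.5\n"
    # In the module the second snapshot is only taken when something changed
    # and check mode is off; Ansible then renders this as a unified diff.
    result = {"changed": True, "diff": {"before": before, "after": after}}
    print(result)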
diff --git a/ansible_collections/community/general/plugins/modules/pacman.py b/ansible_collections/community/general/plugins/modules/pacman.py
index 7f67b9103..f13bde317 100644
--- a/ansible_collections/community/general/plugins/modules/pacman.py
+++ b/ansible_collections/community/general/plugins/modules/pacman.py
@@ -367,8 +367,9 @@ class Pacman(object):
self.install_packages(pkgs)
self.success()
- # This shouldn't happen...
- self.fail("This is a bug")
+ # This happens if an empty list has been provided for name
+ self.add_exit_infos(msg='Nothing to do')
+ self.success()
def install_packages(self, pkgs):
pkgs_to_install = []
diff --git a/ansible_collections/community/general/plugins/modules/proxmox_kvm.py b/ansible_collections/community/general/plugins/modules/proxmox_kvm.py
index 9fe805c7a..71cbb51fc 100644
--- a/ansible_collections/community/general/plugins/modules/proxmox_kvm.py
+++ b/ansible_collections/community/general/plugins/modules/proxmox_kvm.py
@@ -174,6 +174,7 @@ options:
- Allow to force stop VM.
- Can be used with states V(stopped), V(restarted), and V(absent).
- This option has no default unless O(proxmox_default_behavior) is set to V(compatibility); then the default is V(false).
+ - Requires parameter O(archive).
type: bool
format:
description:
diff --git a/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py b/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py
index 39d8307a4..e10b9dff6 100644
--- a/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py
+++ b/ansible_collections/community/general/plugins/modules/proxmox_vm_info.py
@@ -57,6 +57,13 @@ options:
- pending
default: none
version_added: 8.1.0
+ network:
+ description:
+ - Whether to retrieve the current network status.
+ - Requires an enabled and running qemu-guest-agent on QEMU VMs.
+ type: bool
+ default: false
+ version_added: 9.1.0
extends_documentation_fragment:
- community.general.proxmox.actiongroup_proxmox
- community.general.proxmox.documentation
@@ -172,7 +179,7 @@ class ProxmoxVmInfoAnsible(ProxmoxAnsible):
msg="Failed to retrieve VMs information from cluster resources: %s" % e
)
- def get_vms_from_nodes(self, cluster_machines, type, vmid=None, name=None, node=None, config=None):
+ def get_vms_from_nodes(self, cluster_machines, type, vmid=None, name=None, node=None, config=None, network=False):
# Leave in dict only machines that user wants to know about
filtered_vms = {
vm: info for vm, info in cluster_machines.items() if not (
@@ -201,17 +208,23 @@ class ProxmoxVmInfoAnsible(ProxmoxAnsible):
config_type = 0 if config == "pending" else 1
# GET /nodes/{node}/qemu/{vmid}/config current=[0/1]
desired_vm["config"] = call_vm_getter(this_vm_id).config().get(current=config_type)
+ if network:
+ if type == "qemu":
+ desired_vm["network"] = call_vm_getter(this_vm_id).agent("network-get-interfaces").get()['result']
+ elif type == "lxc":
+ desired_vm["network"] = call_vm_getter(this_vm_id).interfaces.get()
+
return filtered_vms
- def get_qemu_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None):
+ def get_qemu_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None, network=False):
try:
- return self.get_vms_from_nodes(cluster_machines, "qemu", vmid, name, node, config)
+ return self.get_vms_from_nodes(cluster_machines, "qemu", vmid, name, node, config, network)
except Exception as e:
self.module.fail_json(msg="Failed to retrieve QEMU VMs information: %s" % e)
- def get_lxc_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None):
+ def get_lxc_vms(self, cluster_machines, vmid=None, name=None, node=None, config=None, network=False):
try:
- return self.get_vms_from_nodes(cluster_machines, "lxc", vmid, name, node, config)
+ return self.get_vms_from_nodes(cluster_machines, "lxc", vmid, name, node, config, network)
except Exception as e:
self.module.fail_json(msg="Failed to retrieve LXC VMs information: %s" % e)
@@ -229,6 +242,7 @@ def main():
type="str", choices=["none", "current", "pending"],
default="none", required=False
),
+ network=dict(type="bool", default=False, required=False),
)
module_args.update(vm_info_args)
@@ -245,6 +259,7 @@ def main():
vmid = module.params["vmid"]
name = module.params["name"]
config = module.params["config"]
+ network = module.params["network"]
result = dict(changed=False)
@@ -256,12 +271,12 @@ def main():
vms = {}
if type == "lxc":
- vms = proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config)
+ vms = proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config, network)
elif type == "qemu":
- vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config)
+ vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config, network)
else:
- vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config)
- vms.update(proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config))
+ vms = proxmox.get_qemu_vms(cluster_machines, vmid, name, node, config, network)
+ vms.update(proxmox.get_lxc_vms(cluster_machines, vmid, name, node, config, network))
result["proxmox_vms"] = [info for vm, info in sorted(vms.items())]
module.exit_json(**result)
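With network=true the module pulls live interface data per VM: via the guest agent for QEMU and via the interfaces endpoint for LXC. A hedged sketch of that branch, with call_vm_getter standing in for the proxmoxer accessor used by the module:

    def fetch_network(call_vm_getter, vm_id, vm_type):
        vm = call_vm_getter(vm_id)
        if vm_type == "qemu":
            # Needs an enabled, running qemu-guest-agent inside the VM.
            return vm.agent("network-get-interfaces").get()["result"]
        if vm_type == "lxc":
            return vm.interfaces.get()
        raise ValueError("unsupported VM type: %s" % vm_type)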
diff --git a/ansible_collections/community/general/plugins/modules/redfish_command.py b/ansible_collections/community/general/plugins/modules/redfish_command.py
index d351e7c1d..0f7a64b81 100644
--- a/ansible_collections/community/general/plugins/modules/redfish_command.py
+++ b/ansible_collections/community/general/plugins/modules/redfish_command.py
@@ -288,6 +288,20 @@ options:
type: str
choices: [ ResetAll, PreserveNetworkAndUsers, PreserveNetwork ]
version_added: 8.6.0
+ wait:
+ required: false
+ description:
+ - Block until the service is ready again.
+ type: bool
+ default: false
+ version_added: 9.1.0
+ wait_timeout:
+ required: false
+ description:
+ - How long to block until the service is ready again before giving up.
+ type: int
+ default: 120
+ version_added: 9.1.0
author:
- "Jose Delarosa (@jose-delarosa)"
@@ -685,6 +699,16 @@ EXAMPLES = '''
username: "{{ username }}"
password: "{{ password }}"
+ - name: Restart manager power gracefully and wait for it to be available
+ community.general.redfish_command:
+ category: Manager
+ command: GracefulRestart
+ resource_id: BMC
+ baseuri: "{{ baseuri }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ wait: true
+
- name: Restart manager power gracefully
community.general.redfish_command:
category: Manager
@@ -841,7 +865,9 @@ def main():
),
strip_etag_quotes=dict(type='bool', default=False),
reset_to_defaults_mode=dict(choices=['ResetAll', 'PreserveNetworkAndUsers', 'PreserveNetwork']),
- bios_attributes=dict(type="dict")
+ bios_attributes=dict(type="dict"),
+ wait=dict(type='bool', default=False),
+ wait_timeout=dict(type='int', default=120),
),
required_together=[
('username', 'password'),
@@ -1016,7 +1042,7 @@ def main():
command = 'PowerGracefulRestart'
if command.startswith('Power'):
- result = rf_utils.manage_manager_power(command)
+ result = rf_utils.manage_manager_power(command, module.params['wait'], module.params['wait_timeout'])
elif command == 'ClearLogs':
result = rf_utils.clear_logs()
elif command == 'VirtualMediaInsert':
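The new wait/wait_timeout options are forwarded to manage_manager_power() in module_utils, which is not part of this diff; conceptually it amounts to polling the service until it answers again or the timeout expires. A hedged sketch of such a loop (check_ready is a placeholder callable):

    import time

    def wait_until_ready(check_ready, wait_timeout=120, interval=5):
        deadline = time.monotonic() + wait_timeout
        while time.monotonic() < deadline:
            if check_ready():  # e.g. a GET on the Redfish service root succeeds
                return True
            time.sleep(interval)
        return False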
diff --git a/ansible_collections/community/general/plugins/modules/redfish_info.py b/ansible_collections/community/general/plugins/modules/redfish_info.py
index 3b594b7a2..efcb34f01 100644
--- a/ansible_collections/community/general/plugins/modules/redfish_info.py
+++ b/ansible_collections/community/general/plugins/modules/redfish_info.py
@@ -359,6 +359,16 @@ EXAMPLES = '''
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
+
+ - name: Check the availability of the service with a timeout of 5 seconds
+ community.general.redfish_info:
+ category: Service
+ command: CheckAvailability
+ baseuri: "{{ baseuri }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ timeout: 5
+ register: result
'''
RETURN = '''
@@ -385,6 +395,7 @@ CATEGORY_COMMANDS_ALL = {
"GetUpdateStatus"],
"Manager": ["GetManagerNicInventory", "GetVirtualMedia", "GetLogs", "GetNetworkProtocols",
"GetHealthReport", "GetHostInterfaces", "GetManagerInventory", "GetServiceIdentification"],
+ "Service": ["CheckAvailability"],
}
CATEGORY_COMMANDS_DEFAULT = {
@@ -393,7 +404,8 @@ CATEGORY_COMMANDS_DEFAULT = {
"Accounts": "ListUsers",
"Update": "GetFirmwareInventory",
"Sessions": "GetSessions",
- "Manager": "GetManagerNicInventory"
+ "Manager": "GetManagerNicInventory",
+ "Service": "CheckAvailability",
}
@@ -473,7 +485,13 @@ def main():
module.fail_json(msg="Invalid Category: %s" % category)
# Organize by Categories / Commands
- if category == "Systems":
+ if category == "Service":
+ # service-level commands are always available
+ for command in command_list:
+ if command == "CheckAvailability":
+ result["service"] = rf_utils.check_service_availability()
+
+ elif category == "Systems":
# execute only if we find a Systems resource
resource = rf_utils._find_systems_resource()
if resource['ret'] is False:
diff --git a/ansible_collections/community/general/plugins/modules/redis_info.py b/ansible_collections/community/general/plugins/modules/redis_info.py
index f352d53d7..c75abcf21 100644
--- a/ansible_collections/community/general/plugins/modules/redis_info.py
+++ b/ansible_collections/community/general/plugins/modules/redis_info.py
@@ -30,6 +30,11 @@ options:
version_added: 7.5.0
ca_certs:
version_added: 7.5.0
+ cluster:
+ default: false
+ description: Get information about the cluster status as RV(cluster).
+ type: bool
+ version_added: 9.1.0
seealso:
- module: community.general.redis
author: "Pavlo Bashynskyi (@levonet)"
@@ -43,6 +48,15 @@ EXAMPLES = r'''
- name: Print server information
ansible.builtin.debug:
var: result.info
+
+- name: Get server cluster information
+ community.general.redis_info:
+ cluster: true
+ register: result
+
+- name: Print server cluster information
+ ansible.builtin.debug:
+ var: result.cluster_info
'''
RETURN = r'''
@@ -178,6 +192,25 @@ info:
"used_memory_scripts_human": "0B",
"used_memory_startup": 791264
}
+cluster:
+ description: The default set of cluster information sections, see U(https://redis.io/commands/cluster-info).
+ returned: success if O(cluster=true)
+ version_added: 9.1.0
+ type: dict
+ sample: {
+ "cluster_state": ok,
+ "cluster_slots_assigned": 16384,
+ "cluster_slots_ok": 16384,
+ "cluster_slots_pfail": 0,
+ "cluster_slots_fail": 0,
+ "cluster_known_nodes": 6,
+ "cluster_size": 3,
+ "cluster_current_epoch": 6,
+ "cluster_my_epoch": 2,
+ "cluster_stats_messages_sent": 1483972,
+ "cluster_stats_messages_received": 1483968,
+ "total_cluster_links_buffer_limit_exceeded": 0
+ }
'''
import traceback
@@ -202,14 +235,19 @@ def redis_client(**client_params):
# Module execution.
def main():
+ module_args = dict(
+ cluster=dict(type='bool', default=False),
+ )
+ module_args.update(redis_auth_argument_spec(tls_default=False))
module = AnsibleModule(
- argument_spec=redis_auth_argument_spec(tls_default=False),
+ argument_spec=module_args,
supports_check_mode=True,
)
fail_imports(module, module.params['tls'])
redis_params = redis_auth_params(module)
+ cluster = module.params['cluster']
# Connect and check
client = redis_client(**redis_params)
@@ -219,7 +257,13 @@ def main():
module.fail_json(msg="unable to connect to database: %s" % to_native(e), exception=traceback.format_exc())
info = client.info()
- module.exit_json(changed=False, info=info)
+
+ result = dict(changed=False, info=info)
+
+ if cluster:
+ result['cluster_info'] = client.execute_command('CLUSTER INFO')
+
+ module.exit_json(**result)
if __name__ == '__main__':
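Outside the module, the same data can be fetched with redis-py directly; with cluster=true the module simply issues CLUSTER INFO through the existing client. A hedged sketch (host and port are placeholders, the redis Python package and a reachable server are assumed):

    import redis

    client = redis.Redis(host="127.0.0.1", port=6379)
    info = client.info()                                   # dict of INFO fields
    cluster_info = client.execute_command("CLUSTER INFO")  # CLUSTER INFO reply
    print(info.get("redis_version"), cluster_info)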
diff --git a/ansible_collections/community/general/plugins/modules/udm_user.py b/ansible_collections/community/general/plugins/modules/udm_user.py
index dcbf0ec85..5a2e09049 100644
--- a/ansible_collections/community/general/plugins/modules/udm_user.py
+++ b/ansible_collections/community/general/plugins/modules/udm_user.py
@@ -20,6 +20,12 @@ description:
- "This module allows to manage posix users on a univention corporate
server (UCS).
It uses the python API of the UCS to create a new object or edit it."
+notes:
+ - This module does B(not) work with Python 3.13 or newer. It uses the deprecated L(crypt Python module,
+ https://docs.python.org/3.12/library/crypt.html) from the Python standard library, which was removed
+ from Python 3.13.
+requirements:
+ - Python 3.12 or earlier
extends_documentation_fragment:
- community.general.attributes
attributes:
@@ -324,10 +330,10 @@ EXAMPLES = '''
RETURN = '''# '''
-import crypt
from datetime import date, timedelta
+import traceback
-from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible_collections.community.general.plugins.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
@@ -335,6 +341,15 @@ from ansible_collections.community.general.plugins.module_utils.univention_umc i
base_dn,
)
+try:
+ import crypt
+except ImportError:
+ HAS_CRYPT = False
+ CRYPT_IMPORT_ERROR = traceback.format_exc()
+else:
+ HAS_CRYPT = True
+ CRYPT_IMPORT_ERROR = None
+
def main():
expiry = date.strftime(date.today() + timedelta(days=365), "%Y-%m-%d")
@@ -451,6 +466,13 @@ def main():
('state', 'present', ['firstname', 'lastname', 'password'])
])
)
+
+ if not HAS_CRYPT:
+ module.fail_json(
+ msg=missing_required_lib('crypt (part of the standard library up to Python 3.12)'),
+ exception=CRYPT_IMPORT_ERROR,
+ )
+
username = module.params['username']
position = module.params['position']
ou = module.params['ou']
diff --git a/ansible_collections/community/general/plugins/plugin_utils/keys_filter.py b/ansible_collections/community/general/plugins/plugin_utils/keys_filter.py
new file mode 100644
index 000000000..94234a15d
--- /dev/null
+++ b/ansible_collections/community/general/plugins/plugin_utils/keys_filter.py
@@ -0,0 +1,141 @@
+# Copyright (c) 2024 Vladimir Botka <vbotka@gmail.com>
+# Copyright (c) 2024 Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import re
+
+from ansible.errors import AnsibleFilterError
+from ansible.module_utils.six import string_types
+from ansible.module_utils.common._collections_compat import Mapping, Sequence
+
+
+def _keys_filter_params(data, matching_parameter):
+ """test parameters:
+ * data must be a list of dictionaries. All keys must be strings.
+ * matching_parameter is one of the allowed values.
+ """
+
+ mp = matching_parameter
+ ml = ['equal', 'starts_with', 'ends_with', 'regex']
+
+ if not isinstance(data, Sequence):
+ msg = "First argument must be a list. %s is %s"
+ raise AnsibleFilterError(msg % (data, type(data)))
+
+ for elem in data:
+ if not isinstance(elem, Mapping):
+ msg = "The data items must be dictionaries. %s is %s"
+ raise AnsibleFilterError(msg % (elem, type(elem)))
+
+ for elem in data:
+ if not all(isinstance(item, string_types) for item in elem.keys()):
+ msg = "Top level keys must be strings. keys: %s"
+ raise AnsibleFilterError(msg % elem.keys())
+
+ if mp not in ml:
+ msg = "The matching_parameter must be one of %s. matching_parameter=%s"
+ raise AnsibleFilterError(msg % (ml, mp))
+
+ return
+
+
+def _keys_filter_target_str(target, matching_parameter):
+ """
+ Test:
+ * target is a non-empty string or list.
+ * If target is a list, all items are strings.
+ * If matching_parameter=regex, target is a string or a list with a single string.
+ Convert target and return:
+ * tuple of unique target items, or
+ * tuple with single item, or
+ * compiled regex if matching_parameter=regex.
+ """
+
+ if not isinstance(target, Sequence):
+ msg = "The target must be a string or a list. target is %s."
+ raise AnsibleFilterError(msg % type(target))
+
+ if len(target) == 0:
+ msg = "The target can't be empty."
+ raise AnsibleFilterError(msg)
+
+ if isinstance(target, list):
+ for elem in target:
+ if not isinstance(elem, string_types):
+ msg = "The target items must be strings. %s is %s"
+ raise AnsibleFilterError(msg % (elem, type(elem)))
+
+ if matching_parameter == 'regex':
+ if isinstance(target, string_types):
+ r = target
+ else:
+ if len(target) > 1:
+ msg = "Single item is required in the target list if matching_parameter=regex."
+ raise AnsibleFilterError(msg)
+ else:
+ r = target[0]
+ try:
+ tt = re.compile(r)
+ except re.error:
+ msg = "The target must be a valid regex if matching_parameter=regex. target is %s"
+ raise AnsibleFilterError(msg % r)
+ elif isinstance(target, string_types):
+ tt = (target, )
+ else:
+ tt = tuple(set(target))
+
+ return tt
+
+
+def _keys_filter_target_dict(target, matching_parameter):
+ """
+ Test:
+ * target is a list of dictionaries with attributes 'after' and 'before'.
+ * Attributes 'before' must be valid regex if matching_parameter=regex.
+ * Otherwise, the attributes 'before' must be strings.
+ Convert target and return:
+ * iterator that aggregates attributes 'before' and 'after', or
+ * iterator that aggregates compiled regex of attributes 'before' and 'after' if matching_parameter=regex.
+ """
+
+ if not isinstance(target, list):
+ msg = "The target must be a list. target is %s."
+ raise AnsibleFilterError(msg % (target, type(target)))
+
+ if len(target) == 0:
+ msg = "The target can't be empty."
+ raise AnsibleFilterError(msg)
+
+ for elem in target:
+ if not isinstance(elem, Mapping):
+ msg = "The target items must be dictionaries. %s is %s"
+ raise AnsibleFilterError(msg % (elem, type(elem)))
+ if not all(k in elem for k in ('before', 'after')):
+ msg = "All dictionaries in target must include attributes: after, before."
+ raise AnsibleFilterError(msg)
+ if not isinstance(elem['before'], string_types):
+ msg = "The attributes before must be strings. %s is %s"
+ raise AnsibleFilterError(msg % (elem['before'], type(elem['before'])))
+ if not isinstance(elem['after'], string_types):
+ msg = "The attributes after must be strings. %s is %s"
+ raise AnsibleFilterError(msg % (elem['after'], type(elem['after'])))
+
+ before = [d['before'] for d in target]
+ after = [d['after'] for d in target]
+
+ if matching_parameter == 'regex':
+ try:
+ tr = map(re.compile, before)
+ tz = list(zip(tr, after))
+ except re.error:
+ msg = ("The attributes before must be valid regex if matching_parameter=regex."
+ " Not all items are valid regex in: %s")
+ raise AnsibleFilterError(msg % before)
+ else:
+ tz = list(zip(before, after))
+
+ return tz
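The target normalisation in keys_filter.py reduces the three accepted shapes (single string, list of strings, regex) to something uniform for the filters to iterate over. A self-contained illustration of the shapes it produces, without the Ansible error handling:

    import re

    def normalise_target(target, matching_parameter):
        if matching_parameter == "regex":
            pattern = target if isinstance(target, str) else target[0]
            return re.compile(pattern)       # single compiled pattern
        if isinstance(target, str):
            return (target,)                 # one-element tuple
        return tuple(set(target))            # unique items, order not preserved

    print(normalise_target("k0_x0", "equal"))
    print(normalise_target(["k0", "k1", "k0"], "starts_with"))
    print(normalise_target("^.*0_x.*$", "regex"))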
diff --git a/ansible_collections/community/general/tests/integration/targets/ansible_galaxy_install/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/ansible_galaxy_install/tasks/main.yml
index 1ecd9980d..5c4af6d16 100644
--- a/ansible_collections/community/general/tests/integration/targets/ansible_galaxy_install/tasks/main.yml
+++ b/ansible_collections/community/general/tests/integration/targets/ansible_galaxy_install/tasks/main.yml
@@ -4,10 +4,16 @@
# SPDX-License-Identifier: GPL-3.0-or-later
###################################################
+- name: Make directory install_c
+ ansible.builtin.file:
+ path: "{{ remote_tmp_dir }}/install_c"
+ state: directory
+
- name: Install collection netbox.netbox
community.general.ansible_galaxy_install:
type: collection
name: netbox.netbox
+ dest: "{{ remote_tmp_dir }}/install_c"
register: install_c0
- name: Assert collection netbox.netbox was installed
@@ -20,6 +26,7 @@
community.general.ansible_galaxy_install:
type: collection
name: netbox.netbox
+ dest: "{{ remote_tmp_dir }}/install_c"
register: install_c1
- name: Assert collection was not installed
@@ -28,10 +35,16 @@
- install_c1 is not changed
###################################################
+- name: Make directory install_r
+ ansible.builtin.file:
+ path: "{{ remote_tmp_dir }}/install_r"
+ state: directory
+
- name: Install role ansistrano.deploy
community.general.ansible_galaxy_install:
type: role
name: ansistrano.deploy
+ dest: "{{ remote_tmp_dir }}/install_r"
register: install_r0
- name: Assert role ansistrano.deploy was installed
@@ -44,6 +57,7 @@
community.general.ansible_galaxy_install:
type: role
name: ansistrano.deploy
+ dest: "{{ remote_tmp_dir }}/install_r"
register: install_r1
- name: Assert role was not installed
@@ -86,3 +100,44 @@
assert:
that:
- install_rq1 is not changed
+
+###################################################
+- name: Make directory upgrade_c
+ ansible.builtin.file:
+ path: "{{ remote_tmp_dir }}/upgrade_c"
+ state: directory
+
+- name: Install collection netbox.netbox 3.17.0
+ community.general.ansible_galaxy_install:
+ type: collection
+ name: netbox.netbox:3.17.0
+ dest: "{{ remote_tmp_dir }}/upgrade_c"
+ register: upgrade_c0
+
+- name: Assert collection netbox.netbox was installed
+ assert:
+ that:
+ - upgrade_c0 is changed
+ - '"netbox.netbox" in upgrade_c0.new_collections'
+
+- name: Upgrade collection netbox.netbox
+ community.general.ansible_galaxy_install:
+ state: latest
+ type: collection
+ name: netbox.netbox
+ dest: "{{ remote_tmp_dir }}/upgrade_c"
+ register: upgrade_c1
+
+- name: Upgrade collection netbox.netbox (again)
+ community.general.ansible_galaxy_install:
+ state: latest
+ type: collection
+ name: netbox.netbox
+ dest: "{{ remote_tmp_dir }}/upgrade_c"
+ register: upgrade_c2
+
+- name: Assert collection was upgraded and the second upgrade made no change
+ assert:
+ that:
+ - upgrade_c1 is changed
+ - upgrade_c2 is not changed
diff --git a/ansible_collections/community/general/tests/integration/targets/cargo/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/cargo/tasks/main.yml
index 29f27c3fd..89f13960a 100644
--- a/ansible_collections/community/general/tests/integration/targets/cargo/tasks/main.yml
+++ b/ansible_collections/community/general/tests/integration/targets/cargo/tasks/main.yml
@@ -16,6 +16,7 @@
- block:
- import_tasks: test_general.yml
- import_tasks: test_version.yml
+ - import_tasks: test_directory.yml
environment: "{{ cargo_environment }}"
when: has_cargo | default(false)
- import_tasks: test_rustup_cargo.yml
diff --git a/ansible_collections/community/general/tests/integration/targets/cargo/tasks/test_directory.yml b/ansible_collections/community/general/tests/integration/targets/cargo/tasks/test_directory.yml
new file mode 100644
index 000000000..f4275ede6
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/cargo/tasks/test_directory.yml
@@ -0,0 +1,122 @@
+---
+# Copyright (c) 2024 Colin Nolan <cn580@alumni.york.ac.uk>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Create temp directory
+ tempfile:
+ state: directory
+ register: temp_directory
+
+- name: Test block
+ vars:
+ manifest_path: "{{ temp_directory.path }}/Cargo.toml"
+ package_name: hello-world-directory-test
+ block:
+ - name: Initialize package
+ ansible.builtin.command:
+ cmd: "cargo init --name {{ package_name }}"
+ args:
+ chdir: "{{ temp_directory.path }}"
+
+ - name: Set package version (1.0.0)
+ ansible.builtin.lineinfile:
+ path: "{{ manifest_path }}"
+ regexp: '^version = ".*"$'
+ line: 'version = "1.0.0"'
+
+ - name: Ensure package is uninstalled
+ community.general.cargo:
+ name: "{{ package_name }}"
+ state: absent
+ directory: "{{ temp_directory.path }}"
+ register: uninstall_absent
+
+ - name: Install package
+ community.general.cargo:
+ name: "{{ package_name }}"
+ directory: "{{ temp_directory.path }}"
+ register: install_absent
+
+ - name: Change package version (1.0.1)
+ ansible.builtin.lineinfile:
+ path: "{{ manifest_path }}"
+ regexp: '^version = ".*"$'
+ line: 'version = "1.0.1"'
+
+ - name: Install package again (present)
+ community.general.cargo:
+ name: "{{ package_name }}"
+ state: present
+ directory: "{{ temp_directory.path }}"
+ register: install_present_state
+
+ - name: Install package again (latest)
+ community.general.cargo:
+ name: "{{ package_name }}"
+ state: latest
+ directory: "{{ temp_directory.path }}"
+ register: install_latest_state
+
+ - name: Change package version (2.0.0)
+ ansible.builtin.lineinfile:
+ path: "{{ manifest_path }}"
+ regexp: '^version = ".*"$'
+ line: 'version = "2.0.0"'
+
+ - name: Install package with given version (matched)
+ community.general.cargo:
+ name: "{{ package_name }}"
+ version: "2.0.0"
+ directory: "{{ temp_directory.path }}"
+ register: install_given_version_matched
+
+ - name: Install package with given version (unmatched)
+ community.general.cargo:
+ name: "{{ package_name }}"
+ version: "2.0.1"
+ directory: "{{ temp_directory.path }}"
+ register: install_given_version_unmatched
+ ignore_errors: true
+
+ - name: Uninstall package
+ community.general.cargo:
+ name: "{{ package_name }}"
+ state: absent
+ directory: "{{ temp_directory.path }}"
+ register: uninstall_present
+
+ - name: Install non-existent package
+ community.general.cargo:
+ name: "{{ package_name }}-non-existant"
+ state: present
+ directory: "{{ temp_directory.path }}"
+ register: install_non_existant
+ ignore_errors: true
+
+ - name: Install non-existent source directory
+ community.general.cargo:
+ name: "{{ package_name }}"
+ state: present
+ directory: "{{ temp_directory.path }}/non-existant"
+ register: install_non_existant_source
+ ignore_errors: true
+
+ always:
+ - name: Remove temp directory
+ file:
+ path: "{{ temp_directory.path }}"
+ state: absent
+
+- name: Check assertions
+ assert:
+ that:
+ - uninstall_absent is not changed
+ - install_absent is changed
+ - install_present_state is not changed
+ - install_latest_state is changed
+ - install_given_version_matched is changed
+ - install_given_version_unmatched is failed
+ - uninstall_present is changed
+ - install_non_existant is failed
+ - install_non_existant_source is failed
diff --git a/ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_check.yml b/ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_check.yml
new file mode 100644
index 000000000..e1229c794
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_check.yml
@@ -0,0 +1,114 @@
+---
+# Copyright (c) 2024, Michael Ilg (@Ilgmi)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Create a service
+ community.general.consul_agent_service:
+ name: nginx
+ service_port: 80
+ address: localhost
+ tags:
+ - http
+ meta:
+ nginx_version: 1.25.3
+ register: result
+
+- set_fact:
+ nginx_service: "{{result.service}}"
+
+- assert:
+ that:
+ - result is changed
+ - result.service.ID is defined
+
+- name: Add a check for service
+ community.general.consul_agent_check:
+ name: nginx_check
+ id: nginx_check
+ interval: 30s
+ http: http://localhost:80/morestatus
+ notes: "Nginx Check"
+ service_id: "{{ nginx_service.ID }}"
+ register: result
+
+- assert:
+ that:
+ - result is changed
+ - result.check is defined
+ - result.check.CheckID == 'nginx_check'
+ - result.check.ServiceID == 'nginx'
+ - result.check.Interval == '30s'
+ - result.check.Type == 'http'
+ - result.check.Notes == 'Nginx Check'
+
+- set_fact:
+ nginx_service_check: "{{ result.check }}"
+
+- name: Update check for service
+ community.general.consul_agent_check:
+ name: "{{ nginx_service_check.Name }}"
+ id: "{{ nginx_service_check.CheckID }}"
+ interval: 60s
+ http: http://localhost:80/morestatus
+ notes: "New Nginx Check"
+ service_id: "{{ nginx_service.ID }}"
+ register: result
+
+- assert:
+ that:
+ - result is changed
+ - result.check is defined
+ - result.check.CheckID == 'nginx_check'
+ - result.check.ServiceID == 'nginx'
+ - result.check.Interval == '1m0s'
+ - result.check.Type == 'http'
+ - result.check.Notes == 'New Nginx Check'
+
+- name: Remove check
+ community.general.consul_agent_check:
+ id: "{{ nginx_service_check.Name }}"
+ state: absent
+ service_id: "{{ nginx_service.ID }}"
+ register: result
+
+- assert:
+ that:
+ - result is changed
+ - result is not failed
+ - result.operation == 'remove'
+
+- name: Add a check
+ community.general.consul_agent_check:
+ name: check
+ id: check
+ interval: 30s
+ tcp: localhost:80
+ notes: "check"
+ register: result
+
+- assert:
+ that:
+ - result is changed
+ - result.check is defined
+
+- name: Update a check
+ community.general.consul_agent_check:
+ name: check
+ id: check
+ interval: 60s
+ tcp: localhost:80
+ notes: "check"
+ register: result
+
+- assert:
+ that:
+ - result is changed
+ - result.check is defined
+ - result.check.Interval == '1m0s'
+
+- name: Remove check
+ community.general.consul_agent_check:
+ id: check
+ state: absent
+ register: result \ No newline at end of file
diff --git a/ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_service.yml b/ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_service.yml
new file mode 100644
index 000000000..95270f74b
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/consul/tasks/consul_agent_service.yml
@@ -0,0 +1,89 @@
+---
+# Copyright (c) 2024, Michael Ilg (@Ilgmi)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Create a service
+ community.general.consul_agent_service:
+ name: nginx
+ service_port: 80
+ address: localhost
+ tags:
+ - http
+ meta:
+ nginx_version: 1.25.3
+ register: result
+
+- set_fact:
+ nginx_service: "{{result.service}}"
+
+- assert:
+ that:
+ - result is changed
+ - result.service.ID is defined
+ - result.service.Service == 'nginx'
+ - result.service.Address == 'localhost'
+ - result.service.Port == 80
+ - result.service.Tags[0] == 'http'
+ - result.service.Meta.nginx_version is defined
+ - result.service.Meta.nginx_version == '1.25.3'
+ - result.service.ContentHash is defined
+
+- name: Update service
+ community.general.consul_agent_service:
+ id: "{{ nginx_service.ID }}"
+ name: "{{ nginx_service.Service }}"
+ service_port: 8080
+ address: 127.0.0.1
+ tags:
+ - http
+ - new_tag
+ meta:
+ nginx_version: 1.0.0
+ nginx: 1.25.3
+ register: result
+- assert:
+ that:
+ - result is changed
+ - result.service.ID is defined
+ - result.service.Service == 'nginx'
+ - result.service.Address == '127.0.0.1'
+ - result.service.Port == 8080
+ - result.service.Tags[0] == 'http'
+ - result.service.Tags[1] == 'new_tag'
+ - result.service.Meta.nginx_version is defined
+ - result.service.Meta.nginx_version == '1.0.0'
+ - result.service.Meta.nginx is defined
+ - result.service.Meta.nginx == '1.25.3'
+ - result.service.ContentHash is defined
+
+- name: Update service not changed when updating again without changes
+ community.general.consul_agent_service:
+ id: "{{ nginx_service.ID }}"
+ name: "{{ nginx_service.Service }}"
+ service_port: 8080
+ address: 127.0.0.1
+ tags:
+ - http
+ - new_tag
+ meta:
+ nginx_version: 1.0.0
+ nginx: 1.25.3
+ register: result
+
+- assert:
+ that:
+ - result is not changed
+ - result.operation is not defined
+
+- name: Remove service
+ community.general.consul_agent_service:
+ id: "{{ nginx_service.ID }}"
+ state: absent
+ register: result
+
+- assert:
+ that:
+ - result is changed
+ - result is not failed
+ - result.operation == 'remove' \ No newline at end of file
diff --git a/ansible_collections/community/general/tests/integration/targets/consul/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/consul/tasks/main.yml
index 6fef2b998..0ac58fc40 100644
--- a/ansible_collections/community/general/tests/integration/targets/consul/tasks/main.yml
+++ b/ansible_collections/community/general/tests/integration/targets/consul/tasks/main.yml
@@ -97,6 +97,8 @@
- import_tasks: consul_token.yml
- import_tasks: consul_auth_method.yml
- import_tasks: consul_binding_rule.yml
+ - import_tasks: consul_agent_service.yml
+ - import_tasks: consul_agent_check.yml
module_defaults:
group/community.general.consul:
token: "{{ consul_management_token }}"
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/aliases b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/aliases
new file mode 100644
index 000000000..12d1d6617
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/aliases
@@ -0,0 +1,5 @@
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+azp/posix/2
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/keep_keys.yml b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/keep_keys.yml
new file mode 100644
index 000000000..94825c9d6
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/keep_keys.yml
@@ -0,0 +1,79 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Debug ansible_version
+ ansible.builtin.debug:
+ var: ansible_version
+ when: not quiet_test | d(true) | bool
+ tags: ansible_version
+
+- name: Test keep keys equal (default)
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ vars:
+ rr: "{{ list1 | community.general.keep_keys(target=tt) }}"
+ tt: [k0_x0, k1_x1]
+ tags: equal_default
+
+- name: Test keep keys regex string
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ vars:
+ rr: "{{ list1 | community.general.keep_keys(target=tt, matching_parameter=mp) }}"
+ mp: regex
+ tt: '^.*[01]_x.*$'
+ tags: regex_string
+
+- name: Test keep keys targets1
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ loop: "{{ targets1 }}"
+ loop_control:
+ label: "{{ item.mp }}: {{ item.tt }}"
+ vars:
+ rr: "{{ list1 | community.general.keep_keys(target=item.tt, matching_parameter=item.mp) }}"
+ tags: targets1
+
+- name: Test keep keys targets2
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result2) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ loop: "{{ targets2 }}"
+ loop_control:
+ label: "{{ item.mp }}: {{ item.tt }}"
+ vars:
+ rr: "{{ list2 | community.general.keep_keys(target=item.tt, matching_parameter=item.mp) }}"
+ tags: targets2
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/main.yml
new file mode 100644
index 000000000..23457d1e1
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/tasks/main.yml
@@ -0,0 +1,7 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Test keep_keys
+ import_tasks: keep_keys.yml
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/vars/main.yml b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/vars/main.yml
new file mode 100644
index 000000000..b25325253
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_keep_keys/vars/main.yml
@@ -0,0 +1,33 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+targets1:
+ - {mp: equal, tt: [k0_x0, k1_x1]}
+ - {mp: starts_with, tt: [k0, k1]}
+ - {mp: ends_with, tt: [x0, x1]}
+ - {mp: regex, tt: ['^.*[01]_x.*$']}
+ - {mp: regex, tt: '^.*[01]_x.*$'}
+
+list1:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+result1:
+ - {k0_x0: A0, k1_x1: B0}
+ - {k0_x0: A1, k1_x1: B1}
+
+targets2:
+ - {mp: equal, tt: k0_x0}
+ - {mp: starts_with, tt: k0}
+ - {mp: ends_with, tt: x0}
+ - {mp: regex, tt: '^.*0_x.*$'}
+
+list2:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+result2:
+ - {k0_x0: A0}
+ - {k0_x0: A1}
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/aliases b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/aliases
new file mode 100644
index 000000000..12d1d6617
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/aliases
@@ -0,0 +1,5 @@
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+azp/posix/2
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/main.yml
new file mode 100644
index 000000000..d4215d8c5
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/main.yml
@@ -0,0 +1,7 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Test remove_keys
+ import_tasks: remove_keys.yml
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/remove_keys.yml b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/remove_keys.yml
new file mode 100644
index 000000000..121cd88cf
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/tasks/remove_keys.yml
@@ -0,0 +1,79 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Debug ansible_version
+ ansible.builtin.debug:
+ var: ansible_version
+ when: not quiet_test | d(true) | bool
+ tags: ansible_version
+
+- name: Test remove keys equal (default)
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ vars:
+ rr: "{{ list1 | community.general.remove_keys(target=tt) }}"
+ tt: [k0_x0, k1_x1]
+ tags: equal_default
+
+- name: Test remove keys regex string
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ vars:
+ rr: "{{ list1 | community.general.remove_keys(target=tt, matching_parameter=mp) }}"
+ mp: regex
+ tt: '^.*[01]_x.*$'
+ tags: regex_string
+
+- name: Test remove keys targets1
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ loop: "{{ targets1 }}"
+ loop_control:
+ label: "{{ item.mp }}: {{ item.tt }}"
+ vars:
+ rr: "{{ list1 | community.general.remove_keys(target=item.tt, matching_parameter=item.mp) }}"
+ tags: targets1
+
+- name: Test remove keys targets2
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result2) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ loop: "{{ targets2 }}"
+ loop_control:
+ label: "{{ item.mp }}: {{ item.tt }}"
+ vars:
+ rr: "{{ list2 | community.general.remove_keys(target=item.tt, matching_parameter=item.mp) }}"
+ tags: targets2
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/vars/main.yml b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/vars/main.yml
new file mode 100644
index 000000000..a52d09a34
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_remove_keys/vars/main.yml
@@ -0,0 +1,33 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+targets1:
+ - {mp: equal, tt: [k0_x0, k1_x1]}
+ - {mp: starts_with, tt: [k0, k1]}
+ - {mp: ends_with, tt: [x0, x1]}
+ - {mp: regex, tt: ['^.*[01]_x.*$']}
+ - {mp: regex, tt: '^.*[01]_x.*$'}
+
+list1:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+result1:
+ - {k2_x2: [C0], k3_x3: foo}
+ - {k2_x2: [C1], k3_x3: bar}
+
+targets2:
+ - {mp: equal, tt: k0_x0}
+ - {mp: starts_with, tt: k0}
+ - {mp: ends_with, tt: x0}
+ - {mp: regex, tt: '^.*0_x.*$'}
+
+list2:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+result2:
+ - {k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k1_x1: B1, k2_x2: [C1], k3_x3: bar}
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/aliases b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/aliases
new file mode 100644
index 000000000..12d1d6617
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/aliases
@@ -0,0 +1,5 @@
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+azp/posix/2
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/fn-test-replace_keys.yml b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/fn-test-replace_keys.yml
new file mode 100644
index 000000000..e324376a5
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/fn-test-replace_keys.yml
@@ -0,0 +1,21 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Test replace keys
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(item.result) | length) == 0
+ success_msg: |
+ [OK] {{ item.label }}
+ result:
+ {{ rr | to_nice_yaml(indent=2) | indent(2) }}
+ fail_msg: |
+ [ERR] {{ item.label }}
+ result:
+ {{ rr | to_nice_yaml(indent=2) | indent(2) }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ vars:
+ rr: "{{ item.data |
+ community.general.replace_keys(target=item.target, matching_parameter=item.match) }}"
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/main.yml
new file mode 100644
index 000000000..35addaf94
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/main.yml
@@ -0,0 +1,7 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Test replace_keys
+ import_tasks: replace_keys.yml
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/replace_keys.yml b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/replace_keys.yml
new file mode 100644
index 000000000..a57921b81
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/tasks/replace_keys.yml
@@ -0,0 +1,56 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Debug ansible_version
+ ansible.builtin.debug:
+ var: ansible_version
+ when: not quiet_test | d(true) | bool
+ tags: ansible_version
+
+- name: Test replace keys equal (default)
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ vars:
+ rr: "{{ list1 | community.general.replace_keys(target=tt) }}"
+ tt:
+ - {before: k0_x0, after: a0}
+ - {before: k1_x1, after: a1}
+ tags: equal_default
+
+- name: Test replace keys targets1
+ ansible.builtin.assert:
+ that:
+ - (rr | difference(result1) | length) == 0
+ success_msg: |
+ [OK] result:
+ {{ rr | to_yaml }}
+ fail_msg: |
+ [ERR] result:
+ {{ rr | to_yaml }}
+ quiet: "{{ quiet_test | d(true) | bool }}"
+ loop: "{{ targets1 | dict2items }}"
+ loop_control:
+ label: "{{ item.key }}"
+ vars:
+ rr: "{{ list1 | community.general.replace_keys(target=item.value, matching_parameter=item.key) }}"
+ tags: targets1
+
+- name: Test replace keys targets2
+ include_tasks:
+ file: fn-test-replace_keys.yml
+ apply:
+ tags: targets2
+ loop: "{{ targets2 }}"
+ loop_control:
+ label: "{{ item.label }}"
+ tags: targets2
diff --git a/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/vars/main.yml b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/vars/main.yml
new file mode 100644
index 000000000..167e08396
--- /dev/null
+++ b/ansible_collections/community/general/tests/integration/targets/filter_replace_keys/vars/main.yml
@@ -0,0 +1,58 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - {k0_x0: A0, k1_x1: B0, k2_x2: [C0], k3_x3: foo}
+ - {k0_x0: A1, k1_x1: B1, k2_x2: [C1], k3_x3: bar}
+
+result1:
+ - {a0: A0, a1: B0, k2_x2: [C0], k3_x3: foo}
+ - {a0: A1, a1: B1, k2_x2: [C1], k3_x3: bar}
+
+targets1:
+ equal:
+ - {before: k0_x0, after: a0}
+ - {before: k1_x1, after: a1}
+ starts_with:
+ - {before: k0, after: a0}
+ - {before: k1, after: a1}
+ ends_with:
+ - {before: x0, after: a0}
+ - {before: x1, after: a1}
+ regex:
+ - {before: "^.*0_x.*$", after: a0}
+ - {before: "^.*1_x.*$", after: a1}
+
+list2:
+ - {aaa1: A, bbb1: B, ccc1: C}
+ - {aaa2: D, bbb2: E, ccc2: F}
+
+targets2:
+ - label: If more keys match the same attribute before, the last one will be used.
+ match: regex
+ target:
+ - {before: "^.*_x.*$", after: X}
+ data: "{{ list1 }}"
+ result:
+ - X: foo
+ - X: bar
+ - label: If there are items with equal attribute before, the first one will be used.
+ match: regex
+ target:
+ - {before: "^.*_x.*$", after: X}
+ - {before: "^.*_x.*$", after: Y}
+ data: "{{ list1 }}"
+ result:
+ - X: foo
+ - X: bar
+ - label: If there are more matches for a key, the first one will be used.
+ match: starts_with
+ target:
+ - {before: a, after: X}
+ - {before: aa, after: Y}
+ data: "{{ list2 }}"
+ result:
+ - {X: A, bbb1: B, ccc1: C}
+ - {X: D, bbb2: E, ccc2: F}
diff --git a/ansible_collections/community/general/tests/integration/targets/git_config/tasks/unset_value.yml b/ansible_collections/community/general/tests/integration/targets/git_config/tasks/unset_value.yml
index dfa535a2d..5f8c52c96 100644
--- a/ansible_collections/community/general/tests/integration/targets/git_config/tasks/unset_value.yml
+++ b/ansible_collections/community/general/tests/integration/targets/git_config/tasks/unset_value.yml
@@ -25,4 +25,28 @@
- unset_result.diff.before == option_value + "\n"
- unset_result.diff.after == "\n"
- get_result.config_value == ''
+
+- import_tasks: setup_value.yml
+
+- name: unsetting value with value specified
+ git_config:
+ name: "{{ option_name }}"
+ scope: "{{ option_scope }}"
+ value: "{{ option_value }}"
+ state: absent
+ register: unset_result
+
+- name: getting value
+ git_config:
+ name: "{{ option_name }}"
+ scope: "{{ option_scope }}"
+ register: get_result
+
+- name: assert unset changed and deleted value
+ assert:
+ that:
+ - unset_result is changed
+ - unset_result.diff.before == option_value + "\n"
+ - unset_result.diff.after == "\n"
+ - get_result.config_value == ''
...
diff --git a/ansible_collections/community/general/tests/integration/targets/keycloak_client/tasks/main.yml b/ansible_collections/community/general/tests/integration/targets/keycloak_client/tasks/main.yml
index 5e7c7fae3..e1a7d2ebf 100644
--- a/ansible_collections/community/general/tests/integration/targets/keycloak_client/tasks/main.yml
+++ b/ansible_collections/community/general/tests/integration/targets/keycloak_client/tasks/main.yml
@@ -103,3 +103,131 @@
assert:
that:
- check_client_when_present_and_changed is changed
+
+- name: Desire client with flow binding overrides
+ community.general.keycloak_client:
+ auth_keycloak_url: "{{ url }}"
+ auth_realm: "{{ admin_realm }}"
+ auth_username: "{{ admin_user }}"
+ auth_password: "{{ admin_password }}"
+ realm: "{{ realm }}"
+ client_id: "{{ client_id }}"
+ state: present
+ redirect_uris: '{{redirect_uris1}}'
+ attributes: '{{client_attributes1}}'
+ protocol_mappers: '{{protocol_mappers1}}'
+ authentication_flow_binding_overrides:
+ browser_name: browser
+ direct_grant_name: direct grant
+ register: desire_client_with_flow_binding_overrides
+
+- name: Assert flows are set
+ assert:
+ that:
+ - desire_client_with_flow_binding_overrides is changed
+ - "'authenticationFlowBindingOverrides' in desire_client_with_flow_binding_overrides.end_state"
+ - desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.browser | length > 0
+ - desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.direct_grant | length > 0
+
+- name: Backup flow UUIDs
+ set_fact:
+ flow_browser_uuid: "{{ desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.browser }}"
+ flow_direct_grant_uuid: "{{ desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.direct_grant }}"
+
+- name: Desire client with flow binding overrides remove direct_grant_name
+ community.general.keycloak_client:
+ auth_keycloak_url: "{{ url }}"
+ auth_realm: "{{ admin_realm }}"
+ auth_username: "{{ admin_user }}"
+ auth_password: "{{ admin_password }}"
+ realm: "{{ realm }}"
+ client_id: "{{ client_id }}"
+ state: present
+ redirect_uris: '{{redirect_uris1}}'
+ attributes: '{{client_attributes1}}'
+ protocol_mappers: '{{protocol_mappers1}}'
+ authentication_flow_binding_overrides:
+ browser_name: browser
+ register: desire_client_with_flow_binding_overrides
+
+- name: Assert flows are updated
+ assert:
+ that:
+ - desire_client_with_flow_binding_overrides is changed
+ - "'authenticationFlowBindingOverrides' in desire_client_with_flow_binding_overrides.end_state"
+ - desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.browser | length > 0
+ - "'direct_grant' not in desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides"
+
+- name: Desire client with flow binding overrides remove browser add direct_grant
+ community.general.keycloak_client:
+ auth_keycloak_url: "{{ url }}"
+ auth_realm: "{{ admin_realm }}"
+ auth_username: "{{ admin_user }}"
+ auth_password: "{{ admin_password }}"
+ realm: "{{ realm }}"
+ client_id: "{{ client_id }}"
+ state: present
+ redirect_uris: '{{redirect_uris1}}'
+ attributes: '{{client_attributes1}}'
+ protocol_mappers: '{{protocol_mappers1}}'
+ authentication_flow_binding_overrides:
+ direct_grant_name: direct grant
+ register: desire_client_with_flow_binding_overrides
+
+- name: Assert flows are updated
+ assert:
+ that:
+ - desire_client_with_flow_binding_overrides is changed
+ - "'authenticationFlowBindingOverrides' in desire_client_with_flow_binding_overrides.end_state"
+ - "'browser' not in desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides"
+ - desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.direct_grant | length > 0
+
+- name: Desire client with flow binding overrides with UUIDs
+ community.general.keycloak_client:
+ auth_keycloak_url: "{{ url }}"
+ auth_realm: "{{ admin_realm }}"
+ auth_username: "{{ admin_user }}"
+ auth_password: "{{ admin_password }}"
+ realm: "{{ realm }}"
+ client_id: "{{ client_id }}"
+ state: present
+ redirect_uris: '{{redirect_uris1}}'
+ attributes: '{{client_attributes1}}'
+ protocol_mappers: '{{protocol_mappers1}}'
+ authentication_flow_binding_overrides:
+ browser: "{{ flow_browser_uuid }}"
+ direct_grant: "{{ flow_direct_grant_uuid }}"
+ register: desire_client_with_flow_binding_overrides
+
+- name: Assert flows are updated
+ assert:
+ that:
+ - desire_client_with_flow_binding_overrides is changed
+ - "'authenticationFlowBindingOverrides' in desire_client_with_flow_binding_overrides.end_state"
+ - desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.browser == flow_browser_uuid
+ - desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides.direct_grant == flow_direct_grant_uuid
+
+- name: Unset flow binding overrides
+ community.general.keycloak_client:
+ auth_keycloak_url: "{{ url }}"
+ auth_realm: "{{ admin_realm }}"
+ auth_username: "{{ admin_user }}"
+ auth_password: "{{ admin_password }}"
+ realm: "{{ realm }}"
+ client_id: "{{ client_id }}"
+ state: present
+ redirect_uris: '{{redirect_uris1}}'
+ attributes: '{{client_attributes1}}'
+ protocol_mappers: '{{protocol_mappers1}}'
+ authentication_flow_binding_overrides:
+ browser: "{{ None }}"
+ direct_grant: null
+ register: desire_client_with_flow_binding_overrides
+
+- name: Assert flows are removed
+ assert:
+ that:
+ - desire_client_with_flow_binding_overrides is changed
+ - "'authenticationFlowBindingOverrides' in desire_client_with_flow_binding_overrides.end_state"
+ - "'browser' not in desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides"
+ - "'direct_grant' not in desire_client_with_flow_binding_overrides.end_state.authenticationFlowBindingOverrides" \ No newline at end of file
diff --git a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py
index 6816afb34..fcdffe7d2 100644
--- a/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py
+++ b/ansible_collections/community/general/tests/unit/plugins/module_utils/test_cmd_runner.py
@@ -47,6 +47,9 @@ TC_FORMATS = dict(
simple_fixed_false=(partial(cmd_runner_fmt.as_fixed, ["--always-here", "--forever"]), False, ["--always-here", "--forever"], None),
simple_fixed_none=(partial(cmd_runner_fmt.as_fixed, ["--always-here", "--forever"]), None, ["--always-here", "--forever"], None),
simple_fixed_str=(partial(cmd_runner_fmt.as_fixed, ["--always-here", "--forever"]), "something", ["--always-here", "--forever"], None),
+ stack_optval__str=(partial(cmd_runner_fmt.stack(cmd_runner_fmt.as_optval), "-t"), ["potatoes", "bananas"], ["-tpotatoes", "-tbananas"], None),
+ stack_opt_val__str=(partial(cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_val), "-t"), ["potatoes", "bananas"], ["-t", "potatoes", "-t", "bananas"], None),
+ stack_opt_eq_val__int=(partial(cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_eq_val), "--answer"), [42, 17], ["--answer=42", "--answer=17"], None),
)
if tuple(version_info) >= (3, 1):
from collections import OrderedDict
@@ -67,7 +70,7 @@ TC_FORMATS_IDS = sorted(TC_FORMATS.keys())
def test_arg_format(func, value, expected, exception):
fmt_func = func()
try:
- actual = fmt_func(value, ctx_ignore_none=True)
+ actual = fmt_func(value)
print("formatted string = {0}".format(actual))
assert actual == expected, "actual = {0}".format(actual)
except Exception as e:
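
The three new ``stack_*`` cases above exercise ``cmd_runner_fmt.stack()``, which wraps another formatter so a list of values is rendered once per element. The following minimal sketch only restates what those cases assert; the import path for ``cmd_runner_fmt`` is the one commonly used in this collection and is assumed here.

```python
# Sketch based on the new test cases only; not an authoritative API reference.
from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt

as_optval = cmd_runner_fmt.stack(cmd_runner_fmt.as_optval)("-t")
print(as_optval(["potatoes", "bananas"]))      # ['-tpotatoes', '-tbananas']

as_opt_val = cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_val)("-t")
print(as_opt_val(["potatoes", "bananas"]))     # ['-t', 'potatoes', '-t', 'bananas']

as_opt_eq_val = cmd_runner_fmt.stack(cmd_runner_fmt.as_opt_eq_val)("--answer")
print(as_opt_eq_val([42, 17]))                 # ['--answer=42', '--answer=17']
```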
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.py
new file mode 100644
index 000000000..8aec71900
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.py
@@ -0,0 +1,13 @@
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.community.general.plugins.modules import django_check
+from .helper import Helper
+
+
+Helper.from_module(django_check, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.yaml
new file mode 100644
index 000000000..6156aaa2c
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_check.yaml
@@ -0,0 +1,27 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: success
+ input:
+ settings: whatever.settings
+ run_command_calls:
+ - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: "whatever\n"
+ err: ""
+- id: multiple_databases
+ input:
+ settings: whatever.settings
+ database:
+ - abc
+ - def
+ run_command_calls:
+ - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, --database, abc, --database, def]
+ environ: *env-def
+ rc: 0
+ out: "whatever\n"
+ err: ""
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.py b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.py
new file mode 100644
index 000000000..5a4b89c0c
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.py
@@ -0,0 +1,13 @@
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.community.general.plugins.modules import django_createcachetable
+from .helper import Helper
+
+
+Helper.from_module(django_createcachetable, __name__)
diff --git a/ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.yaml b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.yaml
new file mode 100644
index 000000000..1808b163f
--- /dev/null
+++ b/ansible_collections/community/general/tests/unit/plugins/modules/test_django_createcachetable.yaml
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Alexei Znamensky (russoz@gmail.com)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+---
+- id: command_success
+ input:
+ settings: whatever.settings
+ run_command_calls:
+ - command: [/testbin/python, -m, django, createcachetable, --no-color, --settings=whatever.settings, --noinput, --database=default]
+ environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true}
+ rc: 0
+ out: "whatever\n"
+ err: ""
diff --git a/ansible_collections/community/hrobot/.github/patchback.yml b/ansible_collections/community/hrobot/.github/patchback.yml
new file mode 100644
index 000000000..5ee7812ed
--- /dev/null
+++ b/ansible_collections/community/hrobot/.github/patchback.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+backport_branch_prefix: patchback/backports/
+backport_label_prefix: backport-
+target_branch_prefix: stable-
+...
diff --git a/ansible_collections/community/hrobot/.github/workflows/ansible-test.yml b/ansible_collections/community/hrobot/.github/workflows/ansible-test.yml
index fe133e857..62151bf25 100644
--- a/ansible_collections/community/hrobot/.github/workflows/ansible-test.yml
+++ b/ansible_collections/community/hrobot/.github/workflows/ansible-test.yml
@@ -37,6 +37,7 @@ jobs:
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-version: ${{ matrix.ansible }}
+ codecov-token: ${{ secrets.CODECOV_TOKEN }}
testing-type: sanity
pre-test-cmd: >-
git clone --depth=1 --single-branch --branch stable-1 https://github.com/ansible-collections/community.library_inventory_filtering.git ../../community/library_inventory_filtering_v1
@@ -62,6 +63,7 @@ jobs:
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-version: ${{ matrix.ansible }}
+ codecov-token: ${{ secrets.CODECOV_TOKEN }}
testing-type: units
test-deps: >-
git+https://github.com/ansible-collections/community.internal_test_tools.git,main
diff --git a/ansible_collections/community/hrobot/CHANGELOG.md b/ansible_collections/community/hrobot/CHANGELOG.md
index d995f8d6d..3dd6208b0 100644
--- a/ansible_collections/community/hrobot/CHANGELOG.md
+++ b/ansible_collections/community/hrobot/CHANGELOG.md
@@ -2,86 +2,102 @@
**Topics**
-- <a href="#v2-0-0">v2\.0\.0</a>
+- <a href="#v2-0-1">v2\.0\.1</a>
- <a href="#release-summary">Release Summary</a>
+ - <a href="#bugfixes">Bugfixes</a>
+- <a href="#v2-0-0">v2\.0\.0</a>
+ - <a href="#release-summary-1">Release Summary</a>
- <a href="#major-changes">Major Changes</a>
- <a href="#minor-changes">Minor Changes</a>
- <a href="#breaking-changes--porting-guide">Breaking Changes / Porting Guide</a>
- <a href="#removed-features-previously-deprecated">Removed Features \(previously deprecated\)</a>
- <a href="#v1-9-2">v1\.9\.2</a>
- - <a href="#release-summary-1">Release Summary</a>
- - <a href="#bugfixes">Bugfixes</a>
-- <a href="#v1-9-1">v1\.9\.1</a>
- <a href="#release-summary-2">Release Summary</a>
+ - <a href="#bugfixes-1">Bugfixes</a>
+- <a href="#v1-9-1">v1\.9\.1</a>
+ - <a href="#release-summary-3">Release Summary</a>
- <a href="#security-fixes">Security Fixes</a>
- <a href="#v1-9-0">v1\.9\.0</a>
- - <a href="#release-summary-3">Release Summary</a>
+ - <a href="#release-summary-4">Release Summary</a>
- <a href="#minor-changes-1">Minor Changes</a>
- <a href="#deprecated-features">Deprecated Features</a>
- <a href="#v1-8-2">v1\.8\.2</a>
- - <a href="#release-summary-4">Release Summary</a>
- - <a href="#bugfixes-1">Bugfixes</a>
-- <a href="#v1-8-1">v1\.8\.1</a>
- <a href="#release-summary-5">Release Summary</a>
+ - <a href="#bugfixes-2">Bugfixes</a>
+- <a href="#v1-8-1">v1\.8\.1</a>
+ - <a href="#release-summary-6">Release Summary</a>
- <a href="#known-issues">Known Issues</a>
- <a href="#v1-8-0">v1\.8\.0</a>
- - <a href="#release-summary-6">Release Summary</a>
+ - <a href="#release-summary-7">Release Summary</a>
- <a href="#major-changes-1">Major Changes</a>
- <a href="#minor-changes-2">Minor Changes</a>
- <a href="#v1-7-0">v1\.7\.0</a>
- - <a href="#release-summary-7">Release Summary</a>
+ - <a href="#release-summary-8">Release Summary</a>
- <a href="#new-modules">New Modules</a>
- <a href="#v1-6-0">v1\.6\.0</a>
- - <a href="#release-summary-8">Release Summary</a>
+ - <a href="#release-summary-9">Release Summary</a>
- <a href="#minor-changes-3">Minor Changes</a>
- <a href="#v1-5-2">v1\.5\.2</a>
- - <a href="#release-summary-9">Release Summary</a>
+ - <a href="#release-summary-10">Release Summary</a>
- <a href="#minor-changes-4">Minor Changes</a>
- <a href="#v1-5-1">v1\.5\.1</a>
- - <a href="#release-summary-10">Release Summary</a>
-- <a href="#v1-5-0">v1\.5\.0</a>
- <a href="#release-summary-11">Release Summary</a>
+- <a href="#v1-5-0">v1\.5\.0</a>
+ - <a href="#release-summary-12">Release Summary</a>
- <a href="#minor-changes-5">Minor Changes</a>
- <a href="#v1-4-0">v1\.4\.0</a>
- - <a href="#release-summary-12">Release Summary</a>
+ - <a href="#release-summary-13">Release Summary</a>
- <a href="#minor-changes-6">Minor Changes</a>
- <a href="#v1-3-1">v1\.3\.1</a>
- - <a href="#release-summary-13">Release Summary</a>
- - <a href="#bugfixes-2">Bugfixes</a>
-- <a href="#v1-3-0">v1\.3\.0</a>
- <a href="#release-summary-14">Release Summary</a>
- - <a href="#minor-changes-7">Minor Changes</a>
- <a href="#bugfixes-3">Bugfixes</a>
-- <a href="#v1-2-3">v1\.2\.3</a>
+- <a href="#v1-3-0">v1\.3\.0</a>
- <a href="#release-summary-15">Release Summary</a>
-- <a href="#v1-2-2">v1\.2\.2</a>
- - <a href="#release-summary-16">Release Summary</a>
+ - <a href="#minor-changes-7">Minor Changes</a>
- <a href="#bugfixes-4">Bugfixes</a>
-- <a href="#v1-2-1">v1\.2\.1</a>
+- <a href="#v1-2-3">v1\.2\.3</a>
+ - <a href="#release-summary-16">Release Summary</a>
+- <a href="#v1-2-2">v1\.2\.2</a>
- <a href="#release-summary-17">Release Summary</a>
+ - <a href="#bugfixes-5">Bugfixes</a>
+- <a href="#v1-2-1">v1\.2\.1</a>
+ - <a href="#release-summary-18">Release Summary</a>
- <a href="#minor-changes-8">Minor Changes</a>
- <a href="#v1-2-0">v1\.2\.0</a>
- - <a href="#release-summary-18">Release Summary</a>
+ - <a href="#release-summary-19">Release Summary</a>
- <a href="#minor-changes-9">Minor Changes</a>
- <a href="#new-modules-1">New Modules</a>
- <a href="#v1-1-1">v1\.1\.1</a>
- - <a href="#release-summary-19">Release Summary</a>
- - <a href="#bugfixes-5">Bugfixes</a>
-- <a href="#v1-1-0">v1\.1\.0</a>
- <a href="#release-summary-20">Release Summary</a>
+ - <a href="#bugfixes-6">Bugfixes</a>
+- <a href="#v1-1-0">v1\.1\.0</a>
+ - <a href="#release-summary-21">Release Summary</a>
- <a href="#new-plugins">New Plugins</a>
- <a href="#inventory">Inventory</a>
- <a href="#v1-0-0">v1\.0\.0</a>
- - <a href="#release-summary-21">Release Summary</a>
+ - <a href="#release-summary-22">Release Summary</a>
- <a href="#breaking-changes--porting-guide-1">Breaking Changes / Porting Guide</a>
+<a id="v2-0-1"></a>
+## v2\.0\.1
+
+<a id="release-summary"></a>
+### Release Summary
+
+Bugfix release\.
+
+<a id="bugfixes"></a>
+### Bugfixes
+
+* boot \- use PHP array form encoding when sending multiple <code>authorized\_key</code> \([https\://github\.com/ansible\-collections/community\.hrobot/issues/112](https\://github\.com/ansible\-collections/community\.hrobot/issues/112)\, [https\://github\.com/ansible\-collections/community\.hrobot/pull/113](https\://github\.com/ansible\-collections/community\.hrobot/pull/113)\)\.
+
<a id="v2-0-0"></a>
## v2\.0\.0
-<a id="release-summary"></a>
+<a id="release-summary-1"></a>
### Release Summary
-\.\.\.
+New major release 2\.0\.0\.
<a id="major-changes"></a>
### Major Changes
@@ -101,17 +117,17 @@
<a id="removed-features-previously-deprecated"></a>
### Removed Features \(previously deprecated\)
-* The collection no longer supports Ansible\, ansible\-base\, and ansible\-core releases that are currently End of Life at the time of the 2\.0\.0 release\. This means that Ansible 2\.9\, ansible\-base 2\.10\, ansible\-core 2\.11\, ansible\-core 2\.12\, ansible\-core 2\.13\, and ansible\-core 2\.14 are no longer supported\. The collection might still work with these versions\, but it can stop working at any moment without advance notice\, and this will not be considered a bug \([https\://github\.com/ansible\-collections/community\.hrobot/pull/101](https\://github\.com/ansible\-collections/community\.hrobot/pull/101)\)\.
+* The collection no longer supports Ansible\, ansible\-base\, and ansible\-core releases that are currently End of Life at the time of the 2\.0\.0 release\. This means that Ansible 2\.9\, ansible\-base 2\.10\, ansible\-core 2\.11\, ansible\-core 2\.12\, and ansible\-core 2\.13 are no longer supported\. The collection might still work with these versions\, but it can stop working at any moment without advance notice\, and this will not be considered a bug \([https\://github\.com/ansible\-collections/community\.hrobot/pull/101](https\://github\.com/ansible\-collections/community\.hrobot/pull/101)\)\.
<a id="v1-9-2"></a>
## v1\.9\.2
-<a id="release-summary-1"></a>
+<a id="release-summary-2"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes"></a>
+<a id="bugfixes-1"></a>
### Bugfixes
* inventory plugins \- add unsafe wrapper to avoid marking strings that do not contain <code>\{</code> or <code>\}</code> as unsafe\, to work around a bug in AWX \([https\://github\.com/ansible\-collections/community\.hrobot/pull/102](https\://github\.com/ansible\-collections/community\.hrobot/pull/102)\)\.
@@ -119,7 +135,7 @@ Bugfix release\.
<a id="v1-9-1"></a>
## v1\.9\.1
-<a id="release-summary-2"></a>
+<a id="release-summary-3"></a>
### Release Summary
Bugfix release\.
@@ -132,7 +148,7 @@ Bugfix release\.
<a id="v1-9-0"></a>
## v1\.9\.0
-<a id="release-summary-3"></a>
+<a id="release-summary-4"></a>
### Release Summary
Feature and maintenance release\.
@@ -150,12 +166,12 @@ Feature and maintenance release\.
<a id="v1-8-2"></a>
## v1\.8\.2
-<a id="release-summary-4"></a>
+<a id="release-summary-5"></a>
### Release Summary
Maintenance release with updated documentation\.
-<a id="bugfixes-1"></a>
+<a id="bugfixes-2"></a>
### Bugfixes
* Show more information \(if available\) from error messages \([https\://github\.com/ansible\-collections/community\.hrobot/pull/89](https\://github\.com/ansible\-collections/community\.hrobot/pull/89)\)\.
@@ -163,7 +179,7 @@ Maintenance release with updated documentation\.
<a id="v1-8-1"></a>
## v1\.8\.1
-<a id="release-summary-5"></a>
+<a id="release-summary-6"></a>
### Release Summary
Maintenance release with updated documentation\.
@@ -184,7 +200,7 @@ for the rendered HTML version of the documentation of the latest release\.
<a id="v1-8-0"></a>
## v1\.8\.0
-<a id="release-summary-6"></a>
+<a id="release-summary-7"></a>
### Release Summary
Feature release for the Hetzner firewall changes\.
@@ -203,7 +219,7 @@ Feature release for the Hetzner firewall changes\.
<a id="v1-7-0"></a>
## v1\.7\.0
-<a id="release-summary-7"></a>
+<a id="release-summary-8"></a>
### Release Summary
Feature release\.
@@ -216,7 +232,7 @@ Feature release\.
<a id="v1-6-0"></a>
## v1\.6\.0
-<a id="release-summary-8"></a>
+<a id="release-summary-9"></a>
### Release Summary
Feature release with improved documentation\.
@@ -229,7 +245,7 @@ Feature release with improved documentation\.
<a id="v1-5-2"></a>
## v1\.5\.2
-<a id="release-summary-9"></a>
+<a id="release-summary-10"></a>
### Release Summary
Maintenance release with a documentation improvement\.
@@ -242,7 +258,7 @@ Maintenance release with a documentation improvement\.
<a id="v1-5-1"></a>
## v1\.5\.1
-<a id="release-summary-10"></a>
+<a id="release-summary-11"></a>
### Release Summary
Maintenance release with small documentation fixes\.
@@ -250,7 +266,7 @@ Maintenance release with small documentation fixes\.
<a id="v1-5-0"></a>
## v1\.5\.0
-<a id="release-summary-11"></a>
+<a id="release-summary-12"></a>
### Release Summary
Maintenance release changing the way licenses are declared\. No functional changes\.
@@ -263,7 +279,7 @@ Maintenance release changing the way licenses are declared\. No functional chang
<a id="v1-4-0"></a>
## v1\.4\.0
-<a id="release-summary-12"></a>
+<a id="release-summary-13"></a>
### Release Summary
Feature release\.
@@ -276,12 +292,12 @@ Feature release\.
<a id="v1-3-1"></a>
## v1\.3\.1
-<a id="release-summary-13"></a>
+<a id="release-summary-14"></a>
### Release Summary
Maintenance release\.
-<a id="bugfixes-2"></a>
+<a id="bugfixes-3"></a>
### Bugfixes
* Include <code>simplified\_bsd\.txt</code> license file for the <code>robot</code> and <code>failover</code> module utils\.
@@ -289,7 +305,7 @@ Maintenance release\.
<a id="v1-3-0"></a>
## v1\.3\.0
-<a id="release-summary-14"></a>
+<a id="release-summary-15"></a>
### Release Summary
Feature and bugfix release\.
@@ -299,7 +315,7 @@ Feature and bugfix release\.
* Prepare collection for inclusion in an Execution Environment by declaring its dependencies \([https\://github\.com/ansible\-collections/community\.hrobot/pull/45](https\://github\.com/ansible\-collections/community\.hrobot/pull/45)\)\.
-<a id="bugfixes-3"></a>
+<a id="bugfixes-4"></a>
### Bugfixes
* robot inventory plugin \- do not crash if a server neither has name or primary IP set\. Instead\, fall back to using the server\'s number as the name\. This can happen if unnamed rack reservations show up in your server list \([https\://github\.com/ansible\-collections/community\.hrobot/issues/40](https\://github\.com/ansible\-collections/community\.hrobot/issues/40)\, [https\://github\.com/ansible\-collections/community\.hrobot/pull/47](https\://github\.com/ansible\-collections/community\.hrobot/pull/47)\)\.
@@ -307,7 +323,7 @@ Feature and bugfix release\.
<a id="v1-2-3"></a>
## v1\.2\.3
-<a id="release-summary-15"></a>
+<a id="release-summary-16"></a>
### Release Summary
Docs update release\.
@@ -315,12 +331,12 @@ Docs update release\.
<a id="v1-2-2"></a>
## v1\.2\.2
-<a id="release-summary-16"></a>
+<a id="release-summary-17"></a>
### Release Summary
Bugfix release\.
-<a id="bugfixes-4"></a>
+<a id="bugfixes-5"></a>
### Bugfixes
* boot \- fix incorrect handling of SSH authorized keys \([https\://github\.com/ansible\-collections/community\.hrobot/issues/32](https\://github\.com/ansible\-collections/community\.hrobot/issues/32)\, [https\://github\.com/ansible\-collections/community\.hrobot/pull/33](https\://github\.com/ansible\-collections/community\.hrobot/pull/33)\)\.
@@ -328,7 +344,7 @@ Bugfix release\.
<a id="v1-2-1"></a>
## v1\.2\.1
-<a id="release-summary-17"></a>
+<a id="release-summary-18"></a>
### Release Summary
Maintenance release\.
@@ -341,7 +357,7 @@ Maintenance release\.
<a id="v1-2-0"></a>
## v1\.2\.0
-<a id="release-summary-18"></a>
+<a id="release-summary-19"></a>
### Release Summary
Feature release with multiple new modules\.
@@ -368,12 +384,12 @@ Feature release with multiple new modules\.
<a id="v1-1-1"></a>
## v1\.1\.1
-<a id="release-summary-19"></a>
+<a id="release-summary-20"></a>
### Release Summary
Bugfix release which reduces the number of HTTPS queries for the modules and plugins\.
-<a id="bugfixes-5"></a>
+<a id="bugfixes-6"></a>
### Bugfixes
* robot \- force HTTP basic authentication to reduce number of HTTPS requests \([https\://github\.com/ansible\-collections/community\.hrobot/pull/9](https\://github\.com/ansible\-collections/community\.hrobot/pull/9)\)\.
@@ -381,7 +397,7 @@ Bugfix release which reduces the number of HTTPS queries for the modules and plu
<a id="v1-1-0"></a>
## v1\.1\.0
-<a id="release-summary-20"></a>
+<a id="release-summary-21"></a>
### Release Summary
Release with a new inventory plugin\.
@@ -397,7 +413,7 @@ Release with a new inventory plugin\.
<a id="v1-0-0"></a>
## v1\.0\.0
-<a id="release-summary-21"></a>
+<a id="release-summary-22"></a>
### Release Summary
The <code>community\.hrobot</code> continues the work on the Hetzner Robot modules from their state in <code>community\.general</code> 1\.2\.0\. The changes listed here are thus relative to the modules <code>community\.general\.hetzner\_\*</code>\.
diff --git a/ansible_collections/community/hrobot/CHANGELOG.rst b/ansible_collections/community/hrobot/CHANGELOG.rst
index 18407d29f..d1ea2cb36 100644
--- a/ansible_collections/community/hrobot/CHANGELOG.rst
+++ b/ansible_collections/community/hrobot/CHANGELOG.rst
@@ -4,13 +4,26 @@ Community Hetzner Robot Collection Release Notes
.. contents:: Topics
+v2.0.1
+======
+
+Release Summary
+---------------
+
+Bugfix release.
+
+Bugfixes
+--------
+
+- boot - use PHP array form encoding when sending multiple ``authorized_key`` (https://github.com/ansible-collections/community.hrobot/issues/112, https://github.com/ansible-collections/community.hrobot/pull/113).
+
v2.0.0
======
Release Summary
---------------
-...
+New major release 2.0.0.
Major Changes
-------------
@@ -30,7 +43,7 @@ Breaking Changes / Porting Guide
Removed Features (previously deprecated)
----------------------------------------
-- The collection no longer supports Ansible, ansible-base, and ansible-core releases that are currently End of Life at the time of the 2.0.0 release. This means that Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core 2.12, ansible-core 2.13, and ansible-core 2.14 are no longer supported. The collection might still work with these versions, but it can stop working at any moment without advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.hrobot/pull/101).
+- The collection no longer supports Ansible, ansible-base, and ansible-core releases that are currently End of Life at the time of the 2.0.0 release. This means that Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core 2.12, and ansible-core 2.13 are no longer supported. The collection might still work with these versions, but it can stop working at any moment without advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.hrobot/pull/101).
v1.9.2
======
diff --git a/ansible_collections/community/hrobot/FILES.json b/ansible_collections/community/hrobot/FILES.json
index 1f4f1fc3f..09dec60ac 100644
--- a/ansible_collections/community/hrobot/FILES.json
+++ b/ansible_collections/community/hrobot/FILES.json
@@ -25,7 +25,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "597f0f97a3486451bdf364c20f5b4b899486c0350422811b76fc57922ee50f6a",
+ "chksum_sha256": "85db68b8cc1783c04a9ef87bdafab9b83d16a2612572ad2bdfdbcdf432ddfe56",
"format": 1
},
{
@@ -78,6 +78,13 @@
"format": 1
},
{
+ "name": ".github/patchback.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6a23e48e2562604318540e6ddcac75213ad2c367258d76fc75914e9b939d380e",
+ "format": 1
+ },
+ {
"name": ".reuse",
"ftype": "dir",
"chksum_type": null,
@@ -137,7 +144,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44809fb2e5dc28dd9948e07f4987e0785b1835f56d82129d2feabd0ac6acd699",
+ "chksum_sha256": "64c7e3e389a931ddebeb7eb9278b37a6ebd88a0f9d798bc288759b818816889a",
"format": 1
},
{
@@ -151,7 +158,7 @@
"name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7b8fa9406e8d12b8783432c8dcbec5e940b502649eb8d4c1bbc18bcac0c7ede",
+ "chksum_sha256": "d8c47805aa4e71f2eef1541193ba774adbcfefa618767ea031e5ea35a69041fd",
"format": 1
},
{
@@ -277,7 +284,7 @@
"name": "plugins/modules/boot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e602be89c58a047b8b0874a462e950340f1ae5c0066876efc3c6826f9fd1464",
+ "chksum_sha256": "f6b543c5f7adcdbdca015e31c3de5fa9ec7cd33a02df683f9b618e1fa75a7a1e",
"format": 1
},
{
@@ -718,7 +725,7 @@
"name": "tests/unit/plugins/modules/test_boot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c33b5e7c7a0f2c498bbef9143ad11a78f45d72c18bdb185f541c04de5732d540",
+ "chksum_sha256": "94b751bb13debb65839d90a55ae38e6e4907b9d10795e887d8ca35d417e4e21a",
"format": 1
},
{
@@ -799,6 +806,20 @@
"format": 1
},
{
+ "name": "tests/unit/plugins/plugin_utils",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/plugin_utils/test_unsafe.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5a4c6a1d48a9c95c74b14ebf8b9f5e00e7d081b9840bfe60ebc1c7e2df401db2",
+ "format": 1
+ },
+ {
"name": "tests/unit/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
@@ -823,7 +844,7 @@
"name": "CHANGELOG.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86fd0f752604f3fa70164f00acf0ad9e5d1567c6499701e555eeeedd59c6fbc7",
+ "chksum_sha256": "d3fba45513628cb08c98535d084a23208f1950aabfe2c95041fe4ba86c7ebe7f",
"format": 1
},
{
@@ -837,7 +858,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8ca25b706ccb628b73cd623cc1720c34df09756bc716aa07827c161648207a1",
+ "chksum_sha256": "9c71f9a0c226a55d3c9c7152a5da8755038630125f12609416643b93c766dddb",
"format": 1
},
{
@@ -858,7 +879,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4a16caa53d97e1924daaabaf372ab085aba6fc95af3cca830d1eda04f22ae41d",
+ "chksum_sha256": "533f1aab35a3017269e2eef3f539da137896de77925eabeafae979130ef21a43",
"format": 1
},
{
diff --git a/ansible_collections/community/hrobot/MANIFEST.json b/ansible_collections/community/hrobot/MANIFEST.json
index 65cba09e0..a54f673dd 100644
--- a/ansible_collections/community/hrobot/MANIFEST.json
+++ b/ansible_collections/community/hrobot/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "hrobot",
- "version": "2.0.0",
+ "version": "2.0.1",
"authors": [
"Felix Fontein (github.com/felixfontein)"
],
@@ -32,7 +32,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "47a01d4394191e0a36c408e3add9b1a0434bfcf4fd39a7d11ba1d7b4cd2f39b8",
+ "chksum_sha256": "ca5f7a213b869f2de3357bd0fc89eec1733f27e3506b7fe77a4bc5b9f247b424",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/hrobot/README.md b/ansible_collections/community/hrobot/README.md
index 8ef8be19b..d20aee3d3 100644
--- a/ansible_collections/community/hrobot/README.md
+++ b/ansible_collections/community/hrobot/README.md
@@ -5,7 +5,9 @@ SPDX-License-Identifier: GPL-3.0-or-later
-->
# Community Hetzner Robot Collection
-[![CI](https://github.com/ansible-collections/community.hrobot/workflows/CI/badge.svg?event=push)](https://github.com/ansible-collections/community.hrobot/actions) [![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.hrobot)](https://codecov.io/gh/ansible-collections/community.hrobot)
+[![CI](https://github.com/ansible-collections/community.hrobot/workflows/CI/badge.svg?event=push)](https://github.com/ansible-collections/community.hrobot/actions)
+[![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.hrobot)](https://codecov.io/gh/ansible-collections/community.hrobot)
+[![REUSE status](https://api.reuse.software/badge/github.com/ansible-collections/community.hrobot)](https://api.reuse.software/info/github.com/ansible-collections/community.hrobot)
This repository contains the `community.hrobot` Ansible Collection. The collection includes modules to work with [Hetzner's Robot](https://docs.hetzner.com/robot/).
diff --git a/ansible_collections/community/hrobot/changelogs/changelog.yaml b/ansible_collections/community/hrobot/changelogs/changelog.yaml
index 1862813f3..9fcb1256a 100644
--- a/ansible_collections/community/hrobot/changelogs/changelog.yaml
+++ b/ansible_collections/community/hrobot/changelogs/changelog.yaml
@@ -308,16 +308,24 @@ releases:
minor_changes:
- robot inventory plugin - add ``filter`` option which allows to include and
exclude hosts based on Jinja2 conditions (https://github.com/ansible-collections/community.hrobot/pull/101).
- release_summary: '...
-
- '
+ release_summary: New major release 2.0.0.
removed_features:
- The collection no longer supports Ansible, ansible-base, and ansible-core
releases that are currently End of Life at the time of the 2.0.0 release.
This means that Ansible 2.9, ansible-base 2.10, ansible-core 2.11, ansible-core
- 2.12, ansible-core 2.13, and ansible-core 2.14 are no longer supported. The
- collection might still work with these versions, but it can stop working at
- any moment without advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.hrobot/pull/101).
+ 2.12, and ansible-core 2.13 are no longer supported. The collection might
+ still work with these versions, but it can stop working at any moment without
+ advance notice, and this will not be considered a bug (https://github.com/ansible-collections/community.hrobot/pull/101).
fragments:
- 2.0.0.yml
release_date: '2024-05-11'
+ 2.0.1:
+ changes:
+ bugfixes:
+ - boot - use PHP array form encoding when sending multiple ``authorized_key``
+ (https://github.com/ansible-collections/community.hrobot/issues/112, https://github.com/ansible-collections/community.hrobot/pull/113).
+ release_summary: Bugfix release.
+ fragments:
+ - 2.0.1.yml
+ - fix-authorized_key-php-array-form-encoding.yml
+ release_date: '2024-06-17'
diff --git a/ansible_collections/community/hrobot/changelogs/config.yaml b/ansible_collections/community/hrobot/changelogs/config.yaml
index ed2ab2a71..3323f697c 100644
--- a/ansible_collections/community/hrobot/changelogs/config.yaml
+++ b/ansible_collections/community/hrobot/changelogs/config.yaml
@@ -36,3 +36,4 @@ sections:
title: Community Hetzner Robot Collection
trivial_section_name: trivial
use_fqcn: true
+add_plugin_period: true
diff --git a/ansible_collections/community/hrobot/plugins/modules/boot.py b/ansible_collections/community/hrobot/plugins/modules/boot.py
index bcf6f3c47..1ba8f36e8 100644
--- a/ansible_collections/community/hrobot/plugins/modules/boot.py
+++ b/ansible_collections/community/hrobot/plugins/modules/boot.py
@@ -284,13 +284,13 @@ BOOT_CONFIGURATION_DATA = [
('rescue', 'rescue', {
'os': ('os', 'os'),
'arch': ('arch', 'arch'),
- 'authorized_keys': ('authorized_key', 'authorized_key'),
+ 'authorized_keys': ('authorized_key', 'authorized_key[]'),
}),
('install_linux', 'linux', {
'dist': ('dist', 'dist'),
'arch': ('arch', 'arch'),
'lang': ('lang', 'lang'),
- 'authorized_keys': ('authorized_key', 'authorized_key'),
+ 'authorized_keys': ('authorized_key', 'authorized_key[]'),
}),
('install_vnc', 'vnc', {
'dist': ('dist', 'dist'),
@@ -404,7 +404,7 @@ def main():
if should is None:
continue
# unfold the return object for the idempotence check to work correctly
- has = existing.get(data_key)
+ has = existing.get(result_key)
if has and option_key == 'authorized_keys':
has = [x['key']['fingerprint'] for x in has]
if isinstance(has, list):
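
The ``authorized_key`` to ``authorized_key[]`` rename above is what the 2.0.1 changelog calls PHP array form encoding: PHP-based endpoints only collect repeated form fields into an array when the field name carries the ``[]`` suffix. A small illustration of the resulting wire format, using the fingerprints from the updated test below and plain ``urllib.parse`` rather than the collection's own request code:

```python
# Illustration only: shows the form body implied by repeated 'authorized_key[]'
# keys; community.hrobot builds its actual requests elsewhere.
from urllib.parse import urlencode, unquote

keys = [
    "e4:47:42:71:81:62:bf:06:1c:23:fa:f3:8f:7b:6f:d0",
    "aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99",
]
body = urlencode([("authorized_key[]", k) for k in keys])
print(unquote(body))
# authorized_key[]=e4:47:...:d0&authorized_key[]=aa:bb:...:99
```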
diff --git a/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_boot.py b/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_boot.py
index 7117afb21..87842fbaf 100644
--- a/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_boot.py
+++ b/ansible_collections/community/hrobot/tests/unit/plugins/modules/test_boot.py
@@ -473,9 +473,11 @@ class TestHetznerBoot(BaseTestModule):
.expect_form_value('dist', 'Debian 11 base')
.expect_form_value('arch', '32')
.expect_form_value('lang', 'fr')
- .expect_form_present('authorized_key')
- # .expect_form_value('authorized_key', 'e4:47:42:71:81:62:bf:06:1c:23:fa:f3:8f:7b:6f:d0')
- # .expect_form_value('authorized_key', 'aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99')
+ .expect_form_present('authorized_key[]')
+ .expect_form_values('authorized_key[]', [
+ 'e4:47:42:71:81:62:bf:06:1c:23:fa:f3:8f:7b:6f:d0',
+ 'aa:bb:cc:dd:ee:ff:00:11:22:33:44:55:66:77:88:99',
+ ])
.result_json({
'linux': create_linux_active(dist='Debian 11 base', lang='fr', arch=32, authorized_key=[
{
diff --git a/ansible_collections/community/hrobot/tests/unit/plugins/plugin_utils/test_unsafe.py b/ansible_collections/community/hrobot/tests/unit/plugins/plugin_utils/test_unsafe.py
new file mode 100644
index 000000000..f33318a71
--- /dev/null
+++ b/ansible_collections/community/hrobot/tests/unit/plugins/plugin_utils/test_unsafe.py
@@ -0,0 +1,133 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Make coding more python3-ish
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+
+import pytest
+
+from ansible.utils.unsafe_proxy import AnsibleUnsafe
+
+from ansible_collections.community.hrobot.plugins.plugin_utils.unsafe import (
+ make_unsafe,
+)
+
+
+TEST_MAKE_UNSAFE = [
+ (
+ u'text',
+ [],
+ [
+ (),
+ ],
+ ),
+ (
+ u'{{text}}',
+ [
+ (),
+ ],
+ [],
+ ),
+ (
+ b'text',
+ [],
+ [
+ (),
+ ],
+ ),
+ (
+ b'{{text}}',
+ [
+ (),
+ ],
+ [],
+ ),
+ (
+ {
+ 'skey': 'value',
+ 'ukey': '{{value}}',
+ 1: [
+ 'value',
+ '{{value}}',
+ {
+ 1.0: '{{value}}',
+ 2.0: 'value',
+ },
+ ],
+ },
+ [
+ ('ukey', ),
+ (1, 1),
+ (1, 2, 1.0),
+ ],
+ [
+ ('skey', ),
+ (1, 0),
+ (1, 2, 2.0),
+ ],
+ ),
+ (
+ ['value', '{{value}}'],
+ [
+ (1, ),
+ ],
+ [
+ (0, ),
+ ],
+ ),
+]
+
+
+@pytest.mark.parametrize("value, check_unsafe_paths, check_safe_paths", TEST_MAKE_UNSAFE)
+def test_make_unsafe(value, check_unsafe_paths, check_safe_paths):
+ unsafe_value = make_unsafe(value)
+ assert unsafe_value == value
+ for check_path in check_unsafe_paths:
+ obj = unsafe_value
+ for elt in check_path:
+ obj = obj[elt]
+ assert isinstance(obj, AnsibleUnsafe)
+ for check_path in check_safe_paths:
+ obj = unsafe_value
+ for elt in check_path:
+ obj = obj[elt]
+ assert not isinstance(obj, AnsibleUnsafe)
+
+
+def test_make_unsafe_dict_key():
+ value = {
+ b'test': 1,
+ u'test': 2,
+ }
+ unsafe_value = make_unsafe(value)
+ assert unsafe_value == value
+ for obj in unsafe_value:
+ assert not isinstance(obj, AnsibleUnsafe)
+
+ value = {
+ b'{{test}}': 1,
+ u'{{test}}': 2,
+ }
+ unsafe_value = make_unsafe(value)
+ assert unsafe_value == value
+ for obj in unsafe_value:
+ assert isinstance(obj, AnsibleUnsafe)
+
+
+def test_make_unsafe_set():
+ value = set([b'test', u'test'])
+ unsafe_value = make_unsafe(value)
+ assert unsafe_value == value
+ for obj in unsafe_value:
+ assert not isinstance(obj, AnsibleUnsafe)
+
+ value = set([b'{{test}}', u'{{test}}'])
+ unsafe_value = make_unsafe(value)
+ assert unsafe_value == value
+ for obj in unsafe_value:
+ assert isinstance(obj, AnsibleUnsafe)
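
The new unit test above pins down the behaviour of the collection's ``make_unsafe`` helper: only strings containing ``{`` or ``}`` (template-looking values) are wrapped as ``AnsibleUnsafe``, and containers are walked recursively. A condensed restatement of what the parametrised cases assert, illustrative only:

```python
# Condensed restatement of the assertions above; not additional test coverage.
from ansible.utils.unsafe_proxy import AnsibleUnsafe
from ansible_collections.community.hrobot.plugins.plugin_utils.unsafe import make_unsafe

assert not isinstance(make_unsafe("text"), AnsibleUnsafe)      # no delimiters -> untouched
assert isinstance(make_unsafe("{{text}}"), AnsibleUnsafe)      # template-like -> wrapped
wrapped = make_unsafe({"skey": "value", "ukey": "{{value}}"})
assert not isinstance(wrapped["skey"], AnsibleUnsafe)
assert isinstance(wrapped["ukey"], AnsibleUnsafe)
```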
diff --git a/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml b/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml
index edff3f6c4..1ba74581c 100644
--- a/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml
+++ b/ansible_collections/community/network/.azure-pipelines/azure-pipelines.yml
@@ -64,14 +64,14 @@ stages:
- test: 5
- test: extra
- - stage: Sanity_2_16
- displayName: Sanity 2.16
+ - stage: Sanity_2_17
+ displayName: Sanity 2.17
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Test {0}
- testFormat: 2.16/sanity/{0}
+ testFormat: 2.17/sanity/{0}
targets:
- test: 1
- test: 2
@@ -80,14 +80,14 @@ stages:
- test: 5
- test: extra
- - stage: Sanity_2_15
- displayName: Sanity 2.15
+ - stage: Sanity_2_16
+ displayName: Sanity 2.16
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Test {0}
- testFormat: 2.15/sanity/{0}
+ testFormat: 2.16/sanity/{0}
targets:
- test: 1
- test: 2
@@ -96,35 +96,21 @@ stages:
- test: 5
- test: extra
- - stage: Sanity_2_14
- displayName: Sanity 2.14
- dependsOn: []
- jobs:
- - template: templates/matrix.yml
- parameters:
- nameFormat: Test {0}
- testFormat: 2.14/sanity/{0}
- targets:
- - test: 1
- - test: 2
- - test: 3
- - test: 4
- - test: 5
-
- - stage: Sanity_2_13
- displayName: Sanity 2.13
+ - stage: Sanity_2_15
+ displayName: Sanity 2.15
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Test {0}
- testFormat: 2.13/sanity/{0}
+ testFormat: 2.15/sanity/{0}
targets:
- test: 1
- test: 2
- test: 3
- test: 4
- test: 5
+ - test: extra
### Units
- stage: Units_devel
@@ -138,63 +124,60 @@ stages:
targets:
- test: '3.10'
- - stage: Units_2_16
- displayName: Units 2.16
+ - stage: Units_2_17
+ displayName: Units 2.17
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
- testFormat: 2.16/units/{0}/1
+ testFormat: 2.17/units/{0}/1
targets:
- test: '3.10'
- - stage: Units_2_15
- displayName: Units 2.15
+ - stage: Units_2_16
+ displayName: Units 2.16
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
- testFormat: 2.15/units/{0}/1
+ testFormat: 2.16/units/{0}/1
targets:
- test: '3.10'
- - stage: Units_2_14
- displayName: Units 2.14
+ - stage: Units_2_15
+ displayName: Units 2.15
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
- testFormat: 2.14/units/{0}/1
+ testFormat: 2.15/units/{0}/1
targets:
- test: '3.10'
- - stage: Units_2_13
- displayName: Units 2.13
+### Cloud
+ - stage: Cloud_devel
+ displayName: Cloud devel
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
- testFormat: 2.13/units/{0}/1
+ testFormat: devel/cloud/{0}/1
targets:
- - test: 3.8
- - test: '3.10'
+ - test: "3.10"
-### Cloud
- - stage: Cloud_devel
- displayName: Cloud devel
+ - stage: Cloud_2_17
+ displayName: Cloud 2.17
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
- testFormat: devel/cloud/{0}/1
+ testFormat: 2.17/cloud/{0}/1
targets:
- - test: 3.8
- - test: 3.9
- test: "3.10"
- stage: Cloud_2_16
@@ -206,8 +189,6 @@ stages:
nameFormat: Python {0}
testFormat: 2.16/cloud/{0}/1
targets:
- - test: 3.8
- - test: 3.9
- test: "3.10"
- stage: Cloud_2_15
@@ -219,32 +200,6 @@ stages:
nameFormat: Python {0}
testFormat: 2.15/cloud/{0}/1
targets:
- - test: 3.8
- - test: 3.9
- - test: "3.10"
-
- - stage: Cloud_2_14
- displayName: Cloud 2.14
- dependsOn: []
- jobs:
- - template: templates/matrix.yml
- parameters:
- nameFormat: Python {0}
- testFormat: 2.14/cloud/{0}/1
- targets:
- - test: 3.8
- - test: 3.9
- - test: "3.10"
-
- - stage: Cloud_2_13
- displayName: Cloud 2.13
- dependsOn: []
- jobs:
- - template: templates/matrix.yml
- parameters:
- nameFormat: Python {0}
- testFormat: 2.13/cloud/{0}/1
- targets:
- test: 3.9
- test: "3.10"
@@ -253,19 +208,16 @@ stages:
condition: succeededOrFailed()
dependsOn:
- Sanity_devel
+ - Sanity_2_17
- Sanity_2_16
- Sanity_2_15
- - Sanity_2_14
- - Sanity_2_13
- Units_devel
+ - Units_2_17
- Units_2_16
- Units_2_15
- - Units_2_14
- - Units_2_13
- Cloud_devel
+ - Cloud_2_17
- Cloud_2_16
- Cloud_2_15
- - Cloud_2_14
- - Cloud_2_13
jobs:
- template: templates/coverage.yml
diff --git a/ansible_collections/community/network/CHANGELOG.rst b/ansible_collections/community/network/CHANGELOG.rst
index 6805773cf..a868cace0 100644
--- a/ansible_collections/community/network/CHANGELOG.rst
+++ b/ansible_collections/community/network/CHANGELOG.rst
@@ -6,6 +6,20 @@ Community Network Release Notes
This changelog describes changes after version 4.0.0.
+v5.0.3
+======
+
+Release Summary
+---------------
+
+This is a patch release of the ``community.network`` collection.
+This changelog contains changes made since the previous release.
+
+Bugfixes
+--------
+
+- exos - Add error handling of ``Permission denied`` errors (https://github.com/ansible-collections/community.network/pull/571).
+
v5.0.2
======
diff --git a/ansible_collections/community/network/FILES.json b/ansible_collections/community/network/FILES.json
index 48d500a30..de45f37db 100644
--- a/ansible_collections/community/network/FILES.json
+++ b/ansible_collections/community/network/FILES.json
@@ -109,7 +109,7 @@
"name": ".azure-pipelines/azure-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2112d8c5ddaca809f1d13953864b03b713590d7cb7805f0c16f62277a009484",
+ "chksum_sha256": "c9869ee28f0952e3c9461770c30f4a6e15db2f6a2933ac8d058950b1547eadd4",
"format": 1
},
{
@@ -172,7 +172,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e2550ba9ca90e0d106b52fc04c136140485803b62db3f9e4fb12371b26c914b",
+ "chksum_sha256": "5f7fd05e3edfcbb231b556f6d3ad0e2831ea25668366a13569fcffb285cc607d",
"format": 1
},
{
@@ -3987,7 +3987,7 @@
"name": "plugins/terminal/exos.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf743fa5c18ba7055eb87fad3c2948305479d9715adfb0e06ac1b4f45fa09f3f",
+ "chksum_sha256": "b2ffb423befb1c0341f9217687b3ef9ec48c5c3fd9539e5d7ec3935a791007af",
"format": 1
},
{
@@ -8362,7 +8362,14 @@
"name": "tests/sanity/ignore-2.17.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c18bd4cb71e089a317436248584e1482a76baf50ed173c32ce45915118b575b",
+ "chksum_sha256": "98377bb2ecb77dedfa8c99ba5f870e935dda583cdae0f9578850a730bebee9cf",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.18.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "98377bb2ecb77dedfa8c99ba5f870e935dda583cdae0f9578850a730bebee9cf",
"format": 1
},
{
@@ -11701,7 +11708,7 @@
"name": "tests/utils/shippable/shippable.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5e1a79c1fc0598e35051b035c11ca862be55778c7b5cd79bdf0b3127a991208",
+ "chksum_sha256": "a522bc30e09d45274138a7ae30acfb23507f02d68cfa44aac8afc4419e4ea832",
"format": 1
},
{
@@ -11750,7 +11757,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f75120a830b09fe02ee6342d7e5e2f9ce03992ddfa862bdb1538f7b08d9014a0",
+ "chksum_sha256": "1cf3de4a3c421e2ad4ec0334566bd424f4d40c94b7c4296a32a9adad332c828b",
"format": 1
},
{
@@ -11778,7 +11785,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5db1e1a06abb9ba8c8c63f78cfc20827eedb55817e3fb58dd41ee48c5373e903",
+ "chksum_sha256": "67bed77efdd430b14dd24ca26da27646dc25d76979f5a6025ed8af7e56467736",
"format": 1
},
{
diff --git a/ansible_collections/community/network/MANIFEST.json b/ansible_collections/community/network/MANIFEST.json
index 82b457808..5a934c5dd 100644
--- a/ansible_collections/community/network/MANIFEST.json
+++ b/ansible_collections/community/network/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "network",
- "version": "5.0.2",
+ "version": "5.0.3",
"authors": [
"Ansible (https://github.com/ansible)"
],
@@ -26,7 +26,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e1c51f77033732d00f7c6000f269e730bdde56f0ff50e22c7be1f81f3d59cfc1",
+ "chksum_sha256": "6d5b9f8a9bb56811ea84c47aa7f8a70d59ecc434486c83635f4bed458c7fe3ff",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/network/README.md b/ansible_collections/community/network/README.md
index 5e94a32cf..382f0050d 100644
--- a/ansible_collections/community/network/README.md
+++ b/ansible_collections/community/network/README.md
@@ -63,7 +63,7 @@ Click the `Content` button to see the list of content included in this collectio
This collection is shipped with the `ansible` package. So if you have it installed, no more action is required.
-If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole `ansible` package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/community/network) manually with the `ansible-galaxy` command-line tool:
+If you have a minimal installation (only Ansible Core installed) or you want to use the latest version of the collection along with the whole `ansible` package, you need to install the collection from [Ansible Galaxy](https://galaxy.ansible.com/ui/repo/published/community/network/) manually with the `ansible-galaxy` command-line tool:
ansible-galaxy collection install community.network
@@ -83,7 +83,7 @@ Note that if you install the collection manually, it will not be upgraded automa
ansible-galaxy collection install community.network --upgrade
```
-You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/community/network):
+You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax where `X.Y.Z` can be any [available version](https://galaxy.ansible.com/ui/repo/published/community/network/):
```bash
ansible-galaxy collection install community.network:==X.Y.Z
diff --git a/ansible_collections/community/network/changelogs/changelog.yaml b/ansible_collections/community/network/changelogs/changelog.yaml
index 36d66e2a3..f1045ba5a 100644
--- a/ansible_collections/community/network/changelogs/changelog.yaml
+++ b/ansible_collections/community/network/changelogs/changelog.yaml
@@ -116,3 +116,14 @@ releases:
fragments:
- 5.0.2.yml
release_date: '2023-10-25'
+ 5.0.3:
+ changes:
+ bugfixes:
+ - exos - Add error handling of ``Permission denied`` errors (https://github.com/ansible-collections/community.network/pull/571).
+ release_summary: 'This is a patch release of the ``community.network`` collection.
+
+ This changelog contains changes made since the previous release.'
+ fragments:
+ - 5.0.3.yml
+ - 571-permission-denied-error-handling.yml
+ release_date: '2024-06-13'
diff --git a/ansible_collections/community/network/plugins/terminal/exos.py b/ansible_collections/community/network/plugins/terminal/exos.py
index 6836cdb8f..a2f39415b 100644
--- a/ansible_collections/community/network/plugins/terminal/exos.py
+++ b/ansible_collections/community/network/plugins/terminal/exos.py
@@ -45,6 +45,7 @@ class TerminalModule(TerminalBase):
re.compile(br"[%\S] ?Error: ?[\s]+", re.I),
re.compile(br"[%\S] ?Informational: ?[\s]+", re.I),
re.compile(br"%% Invalid .* at '\^' marker.", re.I),
+ re.compile(br"Permission denied", re.I),
]
def on_open_shell(self):
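
The exos.py hunk above adds ``Permission denied`` to the terminal plugin's stderr patterns, which is the 5.0.3 bugfix noted in the changelog: denied commands are now reported as failures instead of being treated as successful output. Below is a minimal, hypothetical sketch (not the collection's actual plugin code) of how a stderr-pattern list like this is typically checked against device output; the pattern list and helper names here are illustrative only.

```python
# Minimal sketch, assuming the usual "list of compiled stderr regexes" pattern
# used by network terminal plugins. Not the community.network implementation.
import re

# Hypothetical pattern list mirroring a few terminal_stderr_re entries from the
# diff; the "Permission denied" entry is the one added in 5.0.3.
TERMINAL_STDERR_RE = [
    re.compile(br"[%\S] ?Error: ?[\s]+", re.I),
    re.compile(br"%% Invalid .* at '\^' marker.", re.I),
    re.compile(br"Permission denied", re.I),
]


def output_indicates_error(output: bytes) -> bool:
    """Return True if any known error pattern matches the CLI response."""
    return any(pattern.search(output) for pattern in TERMINAL_STDERR_RE)


# Example: a response denying a command is now flagged as an error,
# while ordinary command output is not.
print(output_indicates_error(b"Error: Permission denied for this command."))  # True
print(output_indicates_error(b"Slot-1 Stack.1 # show vlan"))                  # False
```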
diff --git a/ansible_collections/community/network/tests/sanity/ignore-2.17.txt b/ansible_collections/community/network/tests/sanity/ignore-2.17.txt
index f19cd8d79..597bcf203 100644
--- a/ansible_collections/community/network/tests/sanity/ignore-2.17.txt
+++ b/ansible_collections/community/network/tests/sanity/ignore-2.17.txt
@@ -382,38 +382,30 @@ plugins/modules/cnos_backup.py validate-modules:doc-missing-type
plugins/modules/cnos_backup.py validate-modules:doc-required-mismatch
plugins/modules/cnos_backup.py validate-modules:nonexistent-parameter-documented
plugins/modules/cnos_backup.py validate-modules:undocumented-parameter
-plugins/modules/cnos_backup.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_banner.py validate-modules:doc-missing-type
plugins/modules/cnos_bgp.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_bgp.py validate-modules:doc-missing-type
plugins/modules/cnos_bgp.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_bgp.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_command.py validate-modules:doc-missing-type
plugins/modules/cnos_command.py validate-modules:parameter-list-no-elements
plugins/modules/cnos_command.py validate-modules:parameter-type-not-in-doc
plugins/modules/cnos_conditional_command.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_conditional_command.py validate-modules:doc-missing-type
plugins/modules/cnos_conditional_command.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_conditional_command.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_conditional_template.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_conditional_template.py validate-modules:doc-missing-type
plugins/modules/cnos_conditional_template.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_conditional_template.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_config.py validate-modules:doc-missing-type
plugins/modules/cnos_config.py validate-modules:parameter-list-no-elements
plugins/modules/cnos_config.py validate-modules:parameter-type-not-in-doc
-plugins/modules/cnos_config.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_factory.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_factory.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_factory.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_facts.py validate-modules:nonexistent-parameter-documented
plugins/modules/cnos_facts.py validate-modules:parameter-list-no-elements
plugins/modules/cnos_facts.py validate-modules:parameter-type-not-in-doc
-plugins/modules/cnos_facts.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_image.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_image.py validate-modules:doc-missing-type
plugins/modules/cnos_image.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_image.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_interface.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_interface.py validate-modules:doc-elements-mismatch
plugins/modules/cnos_interface.py validate-modules:doc-missing-type
@@ -452,19 +444,15 @@ plugins/modules/cnos_logging.py validate-modules:parameter-type-not-in-doc
plugins/modules/cnos_logging.py validate-modules:undocumented-parameter
plugins/modules/cnos_reload.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_reload.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_reload.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_rollback.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_rollback.py validate-modules:doc-missing-type
plugins/modules/cnos_rollback.py validate-modules:doc-required-mismatch
plugins/modules/cnos_rollback.py validate-modules:nonexistent-parameter-documented
plugins/modules/cnos_rollback.py validate-modules:undocumented-parameter
-plugins/modules/cnos_rollback.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_save.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_save.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_save.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_showrun.py validate-modules:doc-required-mismatch
plugins/modules/cnos_showrun.py validate-modules:nonexistent-parameter-documented
-plugins/modules/cnos_showrun.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_static_route.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_static_route.py validate-modules:doc-elements-mismatch
plugins/modules/cnos_static_route.py validate-modules:doc-missing-type
@@ -478,7 +466,6 @@ plugins/modules/cnos_system.py validate-modules:parameter-type-not-in-doc
plugins/modules/cnos_template.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_template.py validate-modules:doc-missing-type
plugins/modules/cnos_template.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_template.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_user.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_user.py validate-modules:doc-elements-mismatch
plugins/modules/cnos_user.py validate-modules:doc-missing-type
@@ -489,7 +476,6 @@ plugins/modules/cnos_user.py validate-modules:undocumented-parameter
plugins/modules/cnos_vlag.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_vlag.py validate-modules:doc-missing-type
plugins/modules/cnos_vlag.py validate-modules:doc-required-mismatch
-plugins/modules/cnos_vlag.py yamllint:unparsable-with-libyaml
plugins/modules/cnos_vlan.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/cnos_vlan.py validate-modules:doc-elements-mismatch
plugins/modules/cnos_vlan.py validate-modules:doc-missing-type
@@ -536,7 +522,6 @@ plugins/modules/enos_config.py validate-modules:parameter-type-not-in-doc
plugins/modules/enos_facts.py validate-modules:nonexistent-parameter-documented
plugins/modules/enos_facts.py validate-modules:parameter-list-no-elements
plugins/modules/enos_facts.py validate-modules:parameter-type-not-in-doc
-plugins/modules/enos_facts.py yamllint:unparsable-with-libyaml
plugins/modules/eric_eccli_command.py validate-modules:parameter-list-no-elements
plugins/modules/exos_command.py validate-modules:doc-missing-type
plugins/modules/exos_command.py validate-modules:parameter-list-no-elements
@@ -720,7 +705,6 @@ plugins/modules/ordnance_config.py validate-modules:invalid-ansiblemodule-schema
plugins/modules/ordnance_config.py validate-modules:parameter-list-no-elements
plugins/modules/ordnance_config.py validate-modules:parameter-type-not-in-doc
plugins/modules/ordnance_config.py validate-modules:undocumented-parameter
-plugins/modules/ordnance_config.py yamllint:unparsable-with-libyaml
plugins/modules/ordnance_facts.py validate-modules:doc-default-does-not-match-spec
plugins/modules/ordnance_facts.py validate-modules:doc-missing-type
plugins/modules/ordnance_facts.py validate-modules:doc-required-mismatch
@@ -728,7 +712,6 @@ plugins/modules/ordnance_facts.py validate-modules:invalid-ansiblemodule-schema
plugins/modules/ordnance_facts.py validate-modules:parameter-list-no-elements
plugins/modules/ordnance_facts.py validate-modules:parameter-type-not-in-doc
plugins/modules/ordnance_facts.py validate-modules:undocumented-parameter
-plugins/modules/ordnance_facts.py yamllint:unparsable-with-libyaml
plugins/modules/vdirect_commit.py validate-modules:doc-missing-type
plugins/modules/vdirect_commit.py validate-modules:parameter-list-no-elements
plugins/modules/vdirect_commit.py validate-modules:parameter-type-not-in-doc
@@ -839,7 +822,9 @@ plugins/cliconf/weos4.py pylint:unused-import
plugins/lookup/avi.py pylint:unused-import
plugins/module_utils/network/aos/aos.py pylint:unused-import
plugins/module_utils/network/avi/ansible_utils.py pylint:unused-import
+plugins/module_utils/network/avi/ansible_utils.py pylint:unidiomatic-typecheck
plugins/module_utils/network/avi/avi.py pylint:unused-import
+plugins/module_utils/network/avi/avi_api.py pylint:unidiomatic-typecheck
plugins/module_utils/network/cloudengine/ce.py pylint:unused-import
plugins/module_utils/network/cnos/cnos.py pylint:unused-import
plugins/module_utils/network/eric_eccli/eric_eccli.py pylint:unused-import
@@ -851,6 +836,8 @@ plugins/module_utils/network/exos/facts/legacy/base.py pylint:unused-import
plugins/module_utils/network/exos/facts/lldp_global/lldp_global.py pylint:unused-import
plugins/module_utils/network/exos/facts/lldp_interfaces/lldp_interfaces.py pylint:unused-import
plugins/module_utils/network/exos/facts/vlans/vlans.py pylint:unused-import
+plugins/module_utils/network/ftd/common.py pylint:unidiomatic-typecheck
+plugins/module_utils/network/ftd/configuration.py pylint:use-yield-from
plugins/module_utils/network/icx/icx.py pylint:unused-import
plugins/module_utils/network/ironware/ironware.py pylint:unused-import
plugins/module_utils/network/netscaler/netscaler.py pylint:unused-import
@@ -923,6 +910,9 @@ plugins/terminal/netvisor.py pylint:unused-import
plugins/terminal/weos4.py pylint:unused-import
tests/sanity/extra/botmeta.py pylint:unused-import
tests/unit/compat/builtins.py pylint:unused-import
+tests/unit/compat/mock.py pylint:use-yield-from
+tests/unit/mock/loader.py pylint:unidiomatic-typecheck
+tests/unit/plugins/httpapi/test_ftd.py pylint:unidiomatic-typecheck
tests/unit/plugins/modules/test_apconos_command.py pylint:unused-import
tests/unit/plugins/modules/test_icx_logging.py pylint:unused-import
tests/unit/plugins/modules/test_icx_system.py pylint:unused-import
diff --git a/ansible_collections/community/network/tests/sanity/ignore-2.18.txt b/ansible_collections/community/network/tests/sanity/ignore-2.18.txt
new file mode 100644
index 000000000..597bcf203
--- /dev/null
+++ b/ansible_collections/community/network/tests/sanity/ignore-2.18.txt
@@ -0,0 +1,920 @@
+plugins/action/aireos.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/aruba.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/ce.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/ce_template.py action-plugin-docs # undocumented action plugin to fix, existed before sanity test was added
+plugins/action/cnos.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/enos.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/exos.py action-plugin-docs # undocumented action plugin to fix
+plugins/action/ironware.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/slxos.py action-plugin-docs # undocumented action plugin to fix
+plugins/action/sros.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`
+plugins/action/voss.py action-plugin-docs # undocumented action plugin to fix
+plugins/module_utils/network/edgeswitch/edgeswitch_interface.py pylint:duplicate-string-formatting-argument
+plugins/modules/a10_server.py validate-modules:parameter-list-no-elements
+plugins/modules/a10_server.py validate-modules:parameter-type-not-in-doc
+plugins/modules/a10_server_axapi3.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/a10_server_axapi3.py validate-modules:parameter-list-no-elements
+plugins/modules/a10_server_axapi3.py validate-modules:parameter-type-not-in-doc
+plugins/modules/a10_service_group.py validate-modules:parameter-list-no-elements
+plugins/modules/a10_service_group.py validate-modules:parameter-type-not-in-doc
+plugins/modules/a10_virtual_server.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/a10_virtual_server.py validate-modules:doc-required-mismatch
+plugins/modules/a10_virtual_server.py validate-modules:parameter-list-no-elements
+plugins/modules/a10_virtual_server.py validate-modules:parameter-type-not-in-doc
+plugins/modules/aireos_command.py validate-modules:collection-deprecated-version
+plugins/modules/aireos_command.py validate-modules:doc-missing-type
+plugins/modules/aireos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/aireos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/aireos_config.py validate-modules:collection-deprecated-version
+plugins/modules/aireos_config.py validate-modules:doc-missing-type
+plugins/modules/aireos_config.py validate-modules:parameter-list-no-elements
+plugins/modules/aireos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/apconos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/aruba_command.py validate-modules:collection-deprecated-version
+plugins/modules/aruba_command.py validate-modules:doc-missing-type
+plugins/modules/aruba_command.py validate-modules:parameter-list-no-elements
+plugins/modules/aruba_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/aruba_config.py validate-modules:collection-deprecated-version
+plugins/modules/aruba_config.py validate-modules:doc-missing-type
+plugins/modules/aruba_config.py validate-modules:parameter-list-no-elements
+plugins/modules/aruba_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_actiongroupconfig.py validate-modules:doc-missing-type
+plugins/modules/avi_actiongroupconfig.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_alertconfig.py validate-modules:doc-missing-type
+plugins/modules/avi_alertconfig.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_alertemailconfig.py validate-modules:doc-missing-type
+plugins/modules/avi_alertemailconfig.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_alertscriptconfig.py validate-modules:doc-missing-type
+plugins/modules/avi_alertscriptconfig.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_alertsyslogconfig.py validate-modules:doc-missing-type
+plugins/modules/avi_alertsyslogconfig.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_alertsyslogconfig.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_analyticsprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_analyticsprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_analyticsprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_api_session.py validate-modules:doc-missing-type
+plugins/modules/avi_api_session.py validate-modules:doc-required-mismatch
+plugins/modules/avi_api_session.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_api_version.py validate-modules:doc-missing-type
+plugins/modules/avi_api_version.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_applicationpersistenceprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_applicationpersistenceprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_applicationprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_applicationprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_authprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_authprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_autoscalelaunchconfig.py validate-modules:doc-missing-type
+plugins/modules/avi_autoscalelaunchconfig.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_backup.py validate-modules:doc-missing-type
+plugins/modules/avi_backup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_backupconfiguration.py validate-modules:doc-missing-type
+plugins/modules/avi_backupconfiguration.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_certificatemanagementprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_certificatemanagementprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_certificatemanagementprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_cloud.py validate-modules:doc-missing-type
+plugins/modules/avi_cloud.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_cloud.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_cloudconnectoruser.py validate-modules:doc-missing-type
+plugins/modules/avi_cloudconnectoruser.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_cloudproperties.py validate-modules:doc-missing-type
+plugins/modules/avi_cloudproperties.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_cloudproperties.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_cluster.py validate-modules:doc-missing-type
+plugins/modules/avi_cluster.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_cluster.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_clusterclouddetails.py validate-modules:doc-missing-type
+plugins/modules/avi_clusterclouddetails.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_controllerproperties.py validate-modules:doc-missing-type
+plugins/modules/avi_controllerproperties.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_controllerproperties.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_customipamdnsprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_customipamdnsprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_customipamdnsprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_dnspolicy.py validate-modules:doc-missing-type
+plugins/modules/avi_dnspolicy.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_dnspolicy.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_errorpagebody.py validate-modules:doc-missing-type
+plugins/modules/avi_errorpagebody.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_errorpageprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_errorpageprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_errorpageprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_gslb.py validate-modules:doc-missing-type
+plugins/modules/avi_gslb.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_gslb.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_gslbgeodbprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_gslbgeodbprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_gslbgeodbprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_gslbservice.py validate-modules:doc-missing-type
+plugins/modules/avi_gslbservice.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_gslbservice.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_gslbservice_patch_member.py pylint:use-a-generator
+plugins/modules/avi_gslbservice_patch_member.py validate-modules:doc-missing-type
+plugins/modules/avi_gslbservice_patch_member.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_hardwaresecuritymodulegroup.py validate-modules:doc-missing-type
+plugins/modules/avi_hardwaresecuritymodulegroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_healthmonitor.py validate-modules:doc-missing-type
+plugins/modules/avi_healthmonitor.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_httppolicyset.py validate-modules:doc-missing-type
+plugins/modules/avi_httppolicyset.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_ipaddrgroup.py validate-modules:doc-missing-type
+plugins/modules/avi_ipaddrgroup.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_ipaddrgroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_ipamdnsproviderprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_ipamdnsproviderprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_l4policyset.py validate-modules:doc-missing-type
+plugins/modules/avi_l4policyset.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_microservicegroup.py validate-modules:doc-missing-type
+plugins/modules/avi_microservicegroup.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_microservicegroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_network.py validate-modules:doc-missing-type
+plugins/modules/avi_network.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_network.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_networkprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_networkprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_networksecuritypolicy.py validate-modules:doc-missing-type
+plugins/modules/avi_networksecuritypolicy.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_networksecuritypolicy.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_pkiprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_pkiprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_pkiprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_pool.py validate-modules:doc-missing-type
+plugins/modules/avi_pool.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_pool.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_poolgroup.py validate-modules:doc-missing-type
+plugins/modules/avi_poolgroup.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_poolgroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_poolgroupdeploymentpolicy.py validate-modules:doc-missing-type
+plugins/modules/avi_poolgroupdeploymentpolicy.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_poolgroupdeploymentpolicy.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_prioritylabels.py validate-modules:doc-missing-type
+plugins/modules/avi_prioritylabels.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_prioritylabels.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_role.py validate-modules:doc-missing-type
+plugins/modules/avi_role.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_role.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_scheduler.py validate-modules:doc-missing-type
+plugins/modules/avi_scheduler.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_seproperties.py validate-modules:doc-missing-type
+plugins/modules/avi_seproperties.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_serverautoscalepolicy.py validate-modules:doc-missing-type
+plugins/modules/avi_serverautoscalepolicy.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_serverautoscalepolicy.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_serviceengine.py validate-modules:doc-missing-type
+plugins/modules/avi_serviceengine.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_serviceengine.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_serviceenginegroup.py validate-modules:doc-missing-type
+plugins/modules/avi_serviceenginegroup.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_serviceenginegroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_snmptrapprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_snmptrapprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_snmptrapprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_sslkeyandcertificate.py validate-modules:doc-missing-type
+plugins/modules/avi_sslkeyandcertificate.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_sslkeyandcertificate.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_sslprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_sslprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_sslprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_stringgroup.py validate-modules:doc-missing-type
+plugins/modules/avi_stringgroup.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_stringgroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_systemconfiguration.py validate-modules:doc-missing-type
+plugins/modules/avi_systemconfiguration.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_systemconfiguration.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_tenant.py validate-modules:doc-missing-type
+plugins/modules/avi_tenant.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_trafficcloneprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_trafficcloneprofile.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_trafficcloneprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_user.py validate-modules:doc-missing-type
+plugins/modules/avi_user.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_user.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_useraccount.py validate-modules:doc-missing-type
+plugins/modules/avi_useraccount.py validate-modules:doc-required-mismatch
+plugins/modules/avi_useraccount.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_useraccountprofile.py validate-modules:doc-missing-type
+plugins/modules/avi_useraccountprofile.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_virtualservice.py validate-modules:doc-missing-type
+plugins/modules/avi_virtualservice.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_virtualservice.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_vrfcontext.py validate-modules:doc-missing-type
+plugins/modules/avi_vrfcontext.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_vrfcontext.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_vsdatascriptset.py validate-modules:doc-missing-type
+plugins/modules/avi_vsdatascriptset.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_vsdatascriptset.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_vsvip.py validate-modules:doc-missing-type
+plugins/modules/avi_vsvip.py validate-modules:parameter-list-no-elements
+plugins/modules/avi_vsvip.py validate-modules:parameter-type-not-in-doc
+plugins/modules/avi_webhook.py validate-modules:doc-missing-type
+plugins/modules/avi_webhook.py validate-modules:parameter-type-not-in-doc
+plugins/modules/bcf_switch.py validate-modules:doc-missing-type
+plugins/modules/bcf_switch.py validate-modules:parameter-type-not-in-doc
+plugins/modules/bigmon_chain.py validate-modules:doc-missing-type
+plugins/modules/bigmon_chain.py validate-modules:parameter-type-not-in-doc
+plugins/modules/bigmon_policy.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/bigmon_policy.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/bigmon_policy.py validate-modules:doc-missing-type
+plugins/modules/bigmon_policy.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_aaa_server_host.py validate-modules:doc-missing-type
+plugins/modules/ce_aaa_server_host.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_acl.py validate-modules:doc-missing-type
+plugins/modules/ce_acl.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_acl_advance.py validate-modules:doc-missing-type
+plugins/modules/ce_acl_advance.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_acl_interface.py validate-modules:doc-missing-type
+plugins/modules/ce_acl_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bfd_global.py validate-modules:doc-missing-type
+plugins/modules/ce_bfd_global.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bfd_session.py validate-modules:doc-missing-type
+plugins/modules/ce_bfd_session.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bfd_view.py validate-modules:doc-missing-type
+plugins/modules/ce_bfd_view.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bgp.py validate-modules:doc-missing-type
+plugins/modules/ce_bgp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bgp_af.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_bgp_af.py validate-modules:doc-missing-type
+plugins/modules/ce_bgp_af.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bgp_neighbor.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_bgp_neighbor.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_bgp_neighbor.py validate-modules:doc-missing-type
+plugins/modules/ce_bgp_neighbor.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bgp_neighbor.py validate-modules:undocumented-parameter
+plugins/modules/ce_bgp_neighbor_af.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_bgp_neighbor_af.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_bgp_neighbor_af.py validate-modules:doc-missing-type
+plugins/modules/ce_bgp_neighbor_af.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_bgp_neighbor_af.py validate-modules:undocumented-parameter
+plugins/modules/ce_command.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_command.py validate-modules:doc-missing-type
+plugins/modules/ce_command.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_config.py validate-modules:doc-missing-type
+plugins/modules/ce_config.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_config.py validate-modules:undocumented-parameter
+plugins/modules/ce_dldp.py validate-modules:nonexistent-parameter-documented
+plugins/modules/ce_dldp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_dldp.py validate-modules:undocumented-parameter
+plugins/modules/ce_dldp_interface.py validate-modules:doc-missing-type
+plugins/modules/ce_dldp_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_eth_trunk.py validate-modules:doc-missing-type
+plugins/modules/ce_eth_trunk.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_eth_trunk.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_evpn_bd_vni.py validate-modules:doc-missing-type
+plugins/modules/ce_evpn_bd_vni.py validate-modules:doc-required-mismatch
+plugins/modules/ce_evpn_bd_vni.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_evpn_bd_vni.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_evpn_bgp.py validate-modules:doc-missing-type
+plugins/modules/ce_evpn_bgp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_evpn_bgp_rr.py validate-modules:doc-missing-type
+plugins/modules/ce_evpn_bgp_rr.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_evpn_global.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_file_copy.py validate-modules:doc-missing-type
+plugins/modules/ce_info_center_debug.py validate-modules:doc-missing-type
+plugins/modules/ce_info_center_debug.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_info_center_global.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_info_center_global.py validate-modules:doc-missing-type
+plugins/modules/ce_info_center_global.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_info_center_log.py validate-modules:doc-missing-type
+plugins/modules/ce_info_center_log.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_info_center_trap.py validate-modules:doc-missing-type
+plugins/modules/ce_info_center_trap.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_interface.py validate-modules:doc-missing-type
+plugins/modules/ce_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_interface_ospf.py validate-modules:doc-missing-type
+plugins/modules/ce_interface_ospf.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_ip_interface.py validate-modules:doc-missing-type
+plugins/modules/ce_ip_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_is_is_view.py validate-modules:doc-required-mismatch
+plugins/modules/ce_link_status.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_mlag_config.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_mlag_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_mlag_interface.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_mlag_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_mtu.py validate-modules:doc-missing-type
+plugins/modules/ce_mtu.py validate-modules:doc-required-mismatch
+plugins/modules/ce_mtu.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_netconf.py validate-modules:doc-missing-type
+plugins/modules/ce_netstream_aging.py validate-modules:doc-missing-type
+plugins/modules/ce_netstream_aging.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_netstream_export.py validate-modules:doc-missing-type
+plugins/modules/ce_netstream_export.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_netstream_global.py validate-modules:doc-missing-type
+plugins/modules/ce_netstream_global.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_netstream_template.py validate-modules:doc-missing-type
+plugins/modules/ce_netstream_template.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_ntp.py validate-modules:doc-missing-type
+plugins/modules/ce_ntp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_ntp_auth.py validate-modules:doc-missing-type
+plugins/modules/ce_ntp_auth.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_ospf.py validate-modules:doc-missing-type
+plugins/modules/ce_ospf.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_ospf_vrf.py validate-modules:doc-missing-type
+plugins/modules/ce_ospf_vrf.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_rollback.py validate-modules:doc-missing-type
+plugins/modules/ce_rollback.py validate-modules:doc-required-mismatch
+plugins/modules/ce_rollback.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_sflow.py validate-modules:doc-missing-type
+plugins/modules/ce_sflow.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_sflow.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_community.py validate-modules:doc-missing-type
+plugins/modules/ce_snmp_community.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_contact.py validate-modules:doc-missing-type
+plugins/modules/ce_snmp_contact.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_location.py validate-modules:doc-missing-type
+plugins/modules/ce_snmp_location.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_target_host.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_snmp_target_host.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_snmp_target_host.py validate-modules:doc-missing-type
+plugins/modules/ce_snmp_target_host.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_target_host.py validate-modules:undocumented-parameter
+plugins/modules/ce_snmp_traps.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_snmp_traps.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_snmp_traps.py validate-modules:doc-missing-type
+plugins/modules/ce_snmp_traps.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_traps.py validate-modules:undocumented-parameter
+plugins/modules/ce_snmp_user.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ce_snmp_user.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_snmp_user.py validate-modules:doc-missing-type
+plugins/modules/ce_snmp_user.py validate-modules:mutually_exclusive-unknown
+plugins/modules/ce_snmp_user.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_snmp_user.py validate-modules:undocumented-parameter
+plugins/modules/ce_startup.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_startup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_static_route.py validate-modules:doc-missing-type
+plugins/modules/ce_static_route.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_static_route_bfd.py validate-modules:doc-required-mismatch
+plugins/modules/ce_static_route_bfd.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_stp.py validate-modules:doc-missing-type
+plugins/modules/ce_stp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_switchport.py validate-modules:doc-missing-type
+plugins/modules/ce_switchport.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vlan.py validate-modules:doc-missing-type
+plugins/modules/ce_vlan.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vrf.py validate-modules:doc-missing-type
+plugins/modules/ce_vrf.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vrf_af.py validate-modules:doc-missing-type
+plugins/modules/ce_vrf_af.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vrf_interface.py validate-modules:doc-missing-type
+plugins/modules/ce_vrf_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vrrp.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ce_vrrp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vxlan_arp.py validate-modules:doc-missing-type
+plugins/modules/ce_vxlan_arp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vxlan_gateway.py validate-modules:doc-missing-type
+plugins/modules/ce_vxlan_gateway.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vxlan_global.py validate-modules:doc-missing-type
+plugins/modules/ce_vxlan_global.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vxlan_tunnel.py validate-modules:doc-missing-type
+plugins/modules/ce_vxlan_tunnel.py validate-modules:parameter-list-no-elements
+plugins/modules/ce_vxlan_tunnel.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ce_vxlan_vap.py validate-modules:doc-missing-type
+plugins/modules/ce_vxlan_vap.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cv_server_provision.py validate-modules:doc-missing-type
+plugins/modules/cv_server_provision.py validate-modules:doc-required-mismatch
+plugins/modules/cv_server_provision.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/cnos_backup.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_backup.py validate-modules:doc-missing-type
+plugins/modules/cnos_backup.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_backup.py validate-modules:nonexistent-parameter-documented
+plugins/modules/cnos_backup.py validate-modules:undocumented-parameter
+plugins/modules/cnos_banner.py validate-modules:doc-missing-type
+plugins/modules/cnos_bgp.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_bgp.py validate-modules:doc-missing-type
+plugins/modules/cnos_bgp.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_command.py validate-modules:doc-missing-type
+plugins/modules/cnos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_conditional_command.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_conditional_command.py validate-modules:doc-missing-type
+plugins/modules/cnos_conditional_command.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_conditional_template.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_conditional_template.py validate-modules:doc-missing-type
+plugins/modules/cnos_conditional_template.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_config.py validate-modules:doc-missing-type
+plugins/modules/cnos_config.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_factory.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_factory.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_facts.py validate-modules:nonexistent-parameter-documented
+plugins/modules/cnos_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_image.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_image.py validate-modules:doc-missing-type
+plugins/modules/cnos_image.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_interface.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_interface.py validate-modules:doc-missing-type
+plugins/modules/cnos_interface.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_interface.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_interface.py validate-modules:undocumented-parameter
+plugins/modules/cnos_l2_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_l2_interface.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_l2_interface.py validate-modules:doc-missing-type
+plugins/modules/cnos_l2_interface.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_l2_interface.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_l2_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_l2_interface.py validate-modules:undocumented-parameter
+plugins/modules/cnos_l3_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_l3_interface.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_l3_interface.py validate-modules:doc-missing-type
+plugins/modules/cnos_l3_interface.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_l3_interface.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_l3_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_l3_interface.py validate-modules:undocumented-parameter
+plugins/modules/cnos_linkagg.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_linkagg.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_linkagg.py validate-modules:doc-missing-type
+plugins/modules/cnos_linkagg.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_linkagg.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_linkagg.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_linkagg.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_linkagg.py validate-modules:undocumented-parameter
+plugins/modules/cnos_lldp.py validate-modules:doc-missing-type
+plugins/modules/cnos_logging.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_logging.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_logging.py validate-modules:doc-missing-type
+plugins/modules/cnos_logging.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_logging.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_logging.py validate-modules:undocumented-parameter
+plugins/modules/cnos_reload.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_reload.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_rollback.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_rollback.py validate-modules:doc-missing-type
+plugins/modules/cnos_rollback.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_rollback.py validate-modules:nonexistent-parameter-documented
+plugins/modules/cnos_rollback.py validate-modules:undocumented-parameter
+plugins/modules/cnos_save.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_save.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_showrun.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_showrun.py validate-modules:nonexistent-parameter-documented
+plugins/modules/cnos_static_route.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_static_route.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_static_route.py validate-modules:doc-missing-type
+plugins/modules/cnos_static_route.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_static_route.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_static_route.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_static_route.py validate-modules:undocumented-parameter
+plugins/modules/cnos_system.py validate-modules:doc-missing-type
+plugins/modules/cnos_system.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_system.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_template.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_template.py validate-modules:doc-missing-type
+plugins/modules/cnos_template.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_user.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_user.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_user.py validate-modules:doc-missing-type
+plugins/modules/cnos_user.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_user.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_user.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_user.py validate-modules:undocumented-parameter
+plugins/modules/cnos_vlag.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_vlag.py validate-modules:doc-missing-type
+plugins/modules/cnos_vlag.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_vlan.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_vlan.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_vlan.py validate-modules:doc-missing-type
+plugins/modules/cnos_vlan.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_vlan.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_vlan.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_vlan.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_vlan.py validate-modules:undocumented-parameter
+plugins/modules/cnos_vrf.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/cnos_vrf.py validate-modules:doc-elements-mismatch
+plugins/modules/cnos_vrf.py validate-modules:doc-missing-type
+plugins/modules/cnos_vrf.py validate-modules:doc-required-mismatch
+plugins/modules/cnos_vrf.py validate-modules:missing-suboption-docs
+plugins/modules/cnos_vrf.py validate-modules:parameter-list-no-elements
+plugins/modules/cnos_vrf.py validate-modules:parameter-type-not-in-doc
+plugins/modules/cnos_vrf.py validate-modules:undocumented-parameter
+plugins/modules/nclu.py validate-modules:parameter-list-no-elements
+plugins/modules/nclu.py validate-modules:parameter-type-not-in-doc
+plugins/modules/edgeos_command.py validate-modules:doc-missing-type
+plugins/modules/edgeos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/edgeos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/edgeos_config.py validate-modules:doc-missing-type
+plugins/modules/edgeos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/edgeos_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/edgeos_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/edgeswitch_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/edgeswitch_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/edgeswitch_vlan.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/edgeswitch_vlan.py validate-modules:doc-elements-mismatch
+plugins/modules/edgeswitch_vlan.py validate-modules:doc-missing-type
+plugins/modules/edgeswitch_vlan.py validate-modules:doc-required-mismatch
+plugins/modules/edgeswitch_vlan.py validate-modules:missing-suboption-docs
+plugins/modules/edgeswitch_vlan.py validate-modules:parameter-list-no-elements
+plugins/modules/edgeswitch_vlan.py validate-modules:parameter-type-not-in-doc
+plugins/modules/edgeswitch_vlan.py validate-modules:undocumented-parameter
+plugins/modules/enos_command.py validate-modules:doc-missing-type
+plugins/modules/enos_command.py validate-modules:nonexistent-parameter-documented
+plugins/modules/enos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/enos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/enos_config.py validate-modules:doc-missing-type
+plugins/modules/enos_config.py validate-modules:nonexistent-parameter-documented
+plugins/modules/enos_config.py validate-modules:parameter-list-no-elements
+plugins/modules/enos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/enos_facts.py validate-modules:nonexistent-parameter-documented
+plugins/modules/enos_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/enos_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/eric_eccli_command.py validate-modules:parameter-list-no-elements
+plugins/modules/exos_command.py validate-modules:doc-missing-type
+plugins/modules/exos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/exos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/exos_config.py validate-modules:doc-missing-type
+plugins/modules/exos_config.py validate-modules:parameter-list-no-elements
+plugins/modules/exos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/exos_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/exos_l2_interfaces.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_command.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_config.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_interface.py validate-modules:doc-required-mismatch
+plugins/modules/icx_l3_interface.py validate-modules:doc-required-mismatch
+plugins/modules/icx_linkagg.py validate-modules:doc-elements-mismatch
+plugins/modules/icx_linkagg.py validate-modules:doc-required-mismatch
+plugins/modules/icx_linkagg.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_lldp.py validate-modules:doc-elements-mismatch
+plugins/modules/icx_lldp.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_logging.py validate-modules:doc-elements-mismatch
+plugins/modules/icx_logging.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_static_route.py validate-modules:doc-required-mismatch
+plugins/modules/icx_system.py validate-modules:doc-elements-mismatch
+plugins/modules/icx_system.py validate-modules:parameter-list-no-elements
+plugins/modules/icx_user.py validate-modules:doc-required-mismatch
+plugins/modules/icx_vlan.py validate-modules:doc-elements-mismatch
+plugins/modules/icx_vlan.py validate-modules:doc-required-mismatch
+plugins/modules/icx_vlan.py validate-modules:parameter-list-no-elements
+plugins/modules/dladm_etherstub.py validate-modules:doc-missing-type
+plugins/modules/dladm_iptun.py validate-modules:doc-missing-type
+plugins/modules/dladm_iptun.py validate-modules:parameter-type-not-in-doc
+plugins/modules/dladm_linkprop.py validate-modules:doc-missing-type
+plugins/modules/dladm_linkprop.py validate-modules:no-default-for-required-parameter
+plugins/modules/dladm_linkprop.py validate-modules:parameter-type-not-in-doc
+plugins/modules/dladm_vlan.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/dladm_vlan.py validate-modules:doc-missing-type
+plugins/modules/dladm_vlan.py validate-modules:doc-required-mismatch
+plugins/modules/dladm_vlan.py validate-modules:parameter-type-not-in-doc
+plugins/modules/dladm_vnic.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/flowadm.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/flowadm.py validate-modules:doc-missing-type
+plugins/modules/ipadm_addr.py validate-modules:doc-missing-type
+plugins/modules/ipadm_addr.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ipadm_addrprop.py validate-modules:doc-missing-type
+plugins/modules/ipadm_addrprop.py validate-modules:no-default-for-required-parameter
+plugins/modules/ipadm_if.py validate-modules:doc-missing-type
+plugins/modules/ipadm_ifprop.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ipadm_ifprop.py validate-modules:doc-missing-type
+plugins/modules/ipadm_ifprop.py validate-modules:no-default-for-required-parameter
+plugins/modules/ipadm_prop.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/ipadm_prop.py validate-modules:doc-missing-type
+plugins/modules/ig_config.py validate-modules:doc-missing-type
+plugins/modules/ig_config.py validate-modules:doc-required-mismatch
+plugins/modules/ig_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ig_config.py validate-modules:return-syntax-error
+plugins/modules/ig_unit_information.py validate-modules:doc-required-mismatch
+plugins/modules/ig_unit_information.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ironware_command.py validate-modules:doc-missing-type
+plugins/modules/ironware_command.py validate-modules:nonexistent-parameter-documented
+plugins/modules/ironware_command.py validate-modules:parameter-list-no-elements
+plugins/modules/ironware_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ironware_config.py validate-modules:doc-missing-type
+plugins/modules/ironware_config.py validate-modules:nonexistent-parameter-documented
+plugins/modules/ironware_config.py validate-modules:parameter-list-no-elements
+plugins/modules/ironware_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ironware_facts.py validate-modules:nonexistent-parameter-documented
+plugins/modules/ironware_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/ironware_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/iap_start_workflow.py validate-modules:doc-required-mismatch
+plugins/modules/iap_token.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netact_cm_command.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/netact_cm_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_cs_action.py pylint:condition-evals-to-constant # Should be fixed
+plugins/modules/netscaler_cs_action.py validate-modules:nonexistent-parameter-documented
+plugins/modules/netscaler_cs_action.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_cs_policy.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_cs_vserver.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/netscaler_cs_vserver.py validate-modules:nonexistent-parameter-documented
+plugins/modules/netscaler_cs_vserver.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_cs_vserver.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_cs_vserver.py validate-modules:undocumented-parameter
+plugins/modules/netscaler_gslb_service.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_gslb_service.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_gslb_site.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_gslb_vserver.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_gslb_vserver.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_gslb_vserver.py validate-modules:undocumented-parameter
+plugins/modules/netscaler_lb_monitor.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/netscaler_lb_monitor.py validate-modules:nonexistent-parameter-documented
+plugins/modules/netscaler_lb_monitor.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_lb_monitor.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_lb_vserver.py validate-modules:nonexistent-parameter-documented
+plugins/modules/netscaler_lb_vserver.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_lb_vserver.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_nitro_request.py pylint:ansible-bad-function
+plugins/modules/netscaler_nitro_request.py validate-modules:doc-missing-type
+plugins/modules/netscaler_nitro_request.py validate-modules:doc-required-mismatch
+plugins/modules/netscaler_nitro_request.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_nitro_request.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_save_config.py validate-modules:doc-missing-type
+plugins/modules/netscaler_save_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_server.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/netscaler_server.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_service.py validate-modules:nonexistent-parameter-documented
+plugins/modules/netscaler_service.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_service.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_servicegroup.py validate-modules:parameter-list-no-elements
+plugins/modules/netscaler_servicegroup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/netscaler_ssl_certkey.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_access_list.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_access_list.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_access_list_ip.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_access_list_ip.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_admin_service.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_admin_session_timeout.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_admin_syslog.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_connection_stats_settings.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_cpu_class.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_cpu_class.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_cpu_mgmt_class.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_dhcp_filter.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_dscp_map.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_dscp_map.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_dscp_map_pri_map.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_fabric_local.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_fabric_local.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_igmp_snooping.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_igmp_snooping.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_ipv6security_raguard.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_ipv6security_raguard_port.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_ipv6security_raguard_vlan.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_log_audit_exception.py validate-modules:doc-required-mismatch
+plugins/modules/pn_log_audit_exception.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_port_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_port_config.py validate-modules:required_one_of-unknown
+plugins/modules/pn_port_cos_bw.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_port_cos_rate_setting.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_prefix_list.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_prefix_list_network.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_role.py validate-modules:doc-required-mismatch
+plugins/modules/pn_role.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_snmp_community.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_snmp_community.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_snmp_trap_sink.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_snmp_vacm.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_stp.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_stp_port.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_switch_setup.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_user.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vflow_table_profile.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_bgp.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_bgp.py validate-modules:parameter-type-not-in-doc
+plugins/modules/pn_vrouter_bgp_network.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_interface_ip.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_loopback_interface.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_ospf.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_ospf6.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_packet_relay.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vrouter_pim_config.py validate-modules:doc-required-mismatch
+plugins/modules/pn_vrouter_pim_config.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/pn_vtep.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/nos_command.py validate-modules:doc-missing-type
+plugins/modules/nos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/nos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/nos_config.py validate-modules:doc-missing-type
+plugins/modules/nos_config.py validate-modules:parameter-list-no-elements
+plugins/modules/nos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/nos_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/nos_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/nuage_vspk.py validate-modules:doc-required-mismatch
+plugins/modules/nuage_vspk.py validate-modules:missing-suboption-docs
+plugins/modules/nuage_vspk.py validate-modules:parameter-list-no-elements
+plugins/modules/nuage_vspk.py validate-modules:parameter-type-not-in-doc
+plugins/modules/nuage_vspk.py validate-modules:undocumented-parameter
+plugins/modules/opx_cps.py validate-modules:doc-required-mismatch
+plugins/modules/opx_cps.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ordnance_config.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ordnance_config.py validate-modules:doc-missing-type
+plugins/modules/ordnance_config.py validate-modules:doc-required-mismatch
+plugins/modules/ordnance_config.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/ordnance_config.py validate-modules:parameter-list-no-elements
+plugins/modules/ordnance_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ordnance_config.py validate-modules:undocumented-parameter
+plugins/modules/ordnance_facts.py validate-modules:doc-default-does-not-match-spec
+plugins/modules/ordnance_facts.py validate-modules:doc-missing-type
+plugins/modules/ordnance_facts.py validate-modules:doc-required-mismatch
+plugins/modules/ordnance_facts.py validate-modules:invalid-ansiblemodule-schema
+plugins/modules/ordnance_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/ordnance_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/ordnance_facts.py validate-modules:undocumented-parameter
+plugins/modules/vdirect_commit.py validate-modules:doc-missing-type
+plugins/modules/vdirect_commit.py validate-modules:parameter-list-no-elements
+plugins/modules/vdirect_commit.py validate-modules:parameter-type-not-in-doc
+plugins/modules/vdirect_file.py validate-modules:doc-missing-type
+plugins/modules/vdirect_file.py validate-modules:parameter-type-not-in-doc
+plugins/modules/vdirect_runnable.py validate-modules:doc-missing-type
+plugins/modules/vdirect_runnable.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_command.py validate-modules:doc-missing-type
+plugins/modules/slxos_command.py validate-modules:parameter-list-no-elements
+plugins/modules/slxos_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_config.py validate-modules:doc-missing-type
+plugins/modules/slxos_config.py validate-modules:parameter-list-no-elements
+plugins/modules/slxos_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/slxos_facts.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/slxos_interface.py validate-modules:doc-elements-mismatch
+plugins/modules/slxos_interface.py validate-modules:doc-missing-type
+plugins/modules/slxos_interface.py validate-modules:doc-required-mismatch
+plugins/modules/slxos_interface.py validate-modules:missing-suboption-docs
+plugins/modules/slxos_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_interface.py validate-modules:undocumented-parameter
+plugins/modules/slxos_l2_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/slxos_l2_interface.py validate-modules:doc-elements-mismatch
+plugins/modules/slxos_l2_interface.py validate-modules:doc-missing-type
+plugins/modules/slxos_l2_interface.py validate-modules:doc-required-mismatch
+plugins/modules/slxos_l2_interface.py validate-modules:missing-suboption-docs
+plugins/modules/slxos_l2_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_l2_interface.py validate-modules:undocumented-parameter
+plugins/modules/slxos_l3_interface.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/slxos_l3_interface.py validate-modules:doc-elements-mismatch
+plugins/modules/slxos_l3_interface.py validate-modules:doc-missing-type
+plugins/modules/slxos_l3_interface.py validate-modules:doc-required-mismatch
+plugins/modules/slxos_l3_interface.py validate-modules:missing-suboption-docs
+plugins/modules/slxos_l3_interface.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_l3_interface.py validate-modules:undocumented-parameter
+plugins/modules/slxos_linkagg.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/slxos_linkagg.py validate-modules:doc-elements-mismatch
+plugins/modules/slxos_linkagg.py validate-modules:doc-missing-type
+plugins/modules/slxos_linkagg.py validate-modules:doc-required-mismatch
+plugins/modules/slxos_linkagg.py validate-modules:missing-suboption-docs
+plugins/modules/slxos_linkagg.py validate-modules:parameter-list-no-elements
+plugins/modules/slxos_linkagg.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_linkagg.py validate-modules:undocumented-parameter
+plugins/modules/slxos_lldp.py validate-modules:doc-missing-type
+plugins/modules/slxos_vlan.py validate-modules:doc-choices-do-not-match-spec
+plugins/modules/slxos_vlan.py validate-modules:doc-elements-mismatch
+plugins/modules/slxos_vlan.py validate-modules:doc-missing-type
+plugins/modules/slxos_vlan.py validate-modules:doc-required-mismatch
+plugins/modules/slxos_vlan.py validate-modules:missing-suboption-docs
+plugins/modules/slxos_vlan.py validate-modules:parameter-list-no-elements
+plugins/modules/slxos_vlan.py validate-modules:parameter-type-not-in-doc
+plugins/modules/slxos_vlan.py validate-modules:undocumented-parameter
+plugins/modules/sros_command.py validate-modules:collection-deprecated-version
+plugins/modules/sros_command.py validate-modules:doc-missing-type
+plugins/modules/sros_command.py validate-modules:parameter-list-no-elements
+plugins/modules/sros_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/sros_config.py validate-modules:collection-deprecated-version
+plugins/modules/sros_config.py validate-modules:doc-missing-type
+plugins/modules/sros_config.py validate-modules:nonexistent-parameter-documented
+plugins/modules/sros_config.py validate-modules:parameter-list-no-elements
+plugins/modules/sros_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/sros_rollback.py validate-modules:collection-deprecated-version
+plugins/modules/sros_rollback.py validate-modules:doc-missing-type
+plugins/modules/sros_rollback.py validate-modules:parameter-type-not-in-doc
+plugins/modules/voss_command.py validate-modules:doc-missing-type
+plugins/modules/voss_command.py validate-modules:parameter-list-no-elements
+plugins/modules/voss_command.py validate-modules:parameter-type-not-in-doc
+plugins/modules/voss_config.py validate-modules:doc-missing-type
+plugins/modules/voss_config.py validate-modules:parameter-list-no-elements
+plugins/modules/voss_config.py validate-modules:parameter-type-not-in-doc
+plugins/modules/voss_facts.py validate-modules:parameter-list-no-elements
+plugins/modules/voss_facts.py validate-modules:parameter-type-not-in-doc
+tests/unit/plugins/modules/test_edgeswitch_facts.py pylint:use-maxsplit-arg
+tests/unit/plugins/modules/test_edgeswitch_vlan.py pylint:use-maxsplit-arg
+tests/unit/plugins/modules/test_icx_ping.py pylint:use-maxsplit-arg
+tests/unit/plugins/modules/test_ironware_config.py pylint:arguments-renamed
+tests/unit/plugins/modules/test_ironware_facts.py pylint:use-maxsplit-arg
+tests/unit/plugins/modules/test_netscaler_cs_action.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_netscaler_cs_vserver.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_netscaler_lb_vserver.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_netscaler_server.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_netscaler_service.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_netscaler_servicegroup.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_netscaler_ssl_certkey.py pylint:invalid-class-object
+tests/unit/plugins/modules/test_nos_facts.py pylint:use-maxsplit-arg
+tests/unit/plugins/modules/test_slxos_facts.py pylint:use-maxsplit-arg
+tests/unit/plugins/modules/test_voss_facts.py pylint:use-maxsplit-arg
+plugins/action/aireos.py pylint:unused-import
+plugins/action/aruba.py pylint:unused-import
+plugins/action/ce.py pylint:unused-import
+plugins/action/cnos.py pylint:unused-import
+plugins/action/enos.py pylint:unused-import
+plugins/action/ironware.py pylint:unused-import
+plugins/action/sros.py pylint:unused-import
+plugins/cliconf/aireos.py pylint:unused-import
+plugins/cliconf/apconos.py pylint:unused-import
+plugins/cliconf/aruba.py pylint:unused-import
+plugins/cliconf/cnos.py pylint:unused-import
+plugins/cliconf/edgeswitch.py pylint:unused-import
+plugins/cliconf/eric_eccli.py pylint:unused-import
+plugins/cliconf/exos.py pylint:unused-import
+plugins/cliconf/icx.py pylint:unused-import
+plugins/cliconf/ironware.py pylint:unused-import
+plugins/cliconf/slxos.py pylint:unused-import
+plugins/cliconf/voss.py pylint:unused-import
+plugins/cliconf/weos4.py pylint:unused-import
+plugins/lookup/avi.py pylint:unused-import
+plugins/module_utils/network/aos/aos.py pylint:unused-import
+plugins/module_utils/network/avi/ansible_utils.py pylint:unused-import
+plugins/module_utils/network/avi/ansible_utils.py pylint:unidiomatic-typecheck
+plugins/module_utils/network/avi/avi.py pylint:unused-import
+plugins/module_utils/network/avi/avi_api.py pylint:unidiomatic-typecheck
+plugins/module_utils/network/cloudengine/ce.py pylint:unused-import
+plugins/module_utils/network/cnos/cnos.py pylint:unused-import
+plugins/module_utils/network/eric_eccli/eric_eccli.py pylint:unused-import
+plugins/module_utils/network/exos/config/lldp_interfaces/lldp_interfaces.py pylint:unused-import
+plugins/module_utils/network/exos/exos.py pylint:unused-import
+plugins/module_utils/network/exos/facts/facts.py pylint:unused-import
+plugins/module_utils/network/exos/facts/l2_interfaces/l2_interfaces.py pylint:unused-import
+plugins/module_utils/network/exos/facts/legacy/base.py pylint:unused-import
+plugins/module_utils/network/exos/facts/lldp_global/lldp_global.py pylint:unused-import
+plugins/module_utils/network/exos/facts/lldp_interfaces/lldp_interfaces.py pylint:unused-import
+plugins/module_utils/network/exos/facts/vlans/vlans.py pylint:unused-import
+plugins/module_utils/network/ftd/common.py pylint:unidiomatic-typecheck
+plugins/module_utils/network/ftd/configuration.py pylint:use-yield-from
+plugins/module_utils/network/icx/icx.py pylint:unused-import
+plugins/module_utils/network/ironware/ironware.py pylint:unused-import
+plugins/module_utils/network/netscaler/netscaler.py pylint:unused-import
+plugins/module_utils/network/slxos/slxos.py pylint:unused-import
+plugins/module_utils/network/sros/sros.py pylint:unused-import
+plugins/module_utils/version.py pylint:unused-import
+plugins/modules/avi_api_version.py pylint:unused-import
+plugins/modules/avi_gslbservice_patch_member.py pylint:unused-import
+plugins/modules/avi_user.py pylint:unused-import
+plugins/modules/avi_useraccount.py pylint:unused-import
+plugins/modules/ce_acl_interface.py pylint:unused-import
+plugins/modules/ce_bfd_global.py pylint:unused-import
+plugins/modules/ce_bfd_session.py pylint:unused-import
+plugins/modules/ce_bfd_view.py pylint:unused-import
+plugins/modules/ce_config.py pylint:unused-import
+plugins/modules/ce_file_copy.py pylint:unused-import
+plugins/modules/ce_interface.py pylint:unused-import
+plugins/modules/ce_lldp.py pylint:unused-import
+plugins/modules/ce_lldp_interface.py pylint:unused-import
+plugins/modules/ce_mdn_interface.py pylint:unused-import
+plugins/modules/ce_rollback.py pylint:unused-import
+plugins/modules/ce_sflow.py pylint:unused-import
+plugins/modules/ce_switchport.py pylint:unused-import
+plugins/modules/cnos_backup.py pylint:unused-import
+plugins/modules/cnos_banner.py pylint:unused-import
+plugins/modules/cnos_bgp.py pylint:unused-import
+plugins/modules/cnos_command.py pylint:unused-import
+plugins/modules/cnos_conditional_command.py pylint:unused-import
+plugins/modules/cnos_conditional_template.py pylint:unused-import
+plugins/modules/cnos_factory.py pylint:unused-import
+plugins/modules/cnos_image.py pylint:unused-import
+plugins/modules/cnos_interface.py pylint:unused-import
+plugins/modules/cnos_l2_interface.py pylint:unused-import
+plugins/modules/cnos_lldp.py pylint:unused-import
+plugins/modules/cnos_logging.py pylint:unused-import
+plugins/modules/cnos_reload.py pylint:unused-import
+plugins/modules/cnos_rollback.py pylint:unused-import
+plugins/modules/cnos_save.py pylint:unused-import
+plugins/modules/cnos_showrun.py pylint:unused-import
+plugins/modules/cnos_static_route.py pylint:unused-import
+plugins/modules/cnos_system.py pylint:unused-import
+plugins/modules/cnos_template.py pylint:unused-import
+plugins/modules/cnos_user.py pylint:unused-import
+plugins/modules/cnos_vlag.py pylint:unused-import
+plugins/modules/cnos_vlan.py pylint:unused-import
+plugins/modules/cnos_vrf.py pylint:unused-import
+plugins/modules/enos_command.py pylint:unused-import
+plugins/modules/eric_eccli_command.py pylint:unused-import
+plugins/modules/icx_banner.py pylint:unused-import
+plugins/modules/icx_command.py pylint:unused-import
+plugins/modules/icx_config.py pylint:unused-import
+plugins/modules/icx_facts.py pylint:unused-import
+plugins/modules/icx_interface.py pylint:unused-import
+plugins/modules/icx_linkagg.py pylint:unused-import
+plugins/modules/icx_logging.py pylint:unused-import
+plugins/modules/icx_ping.py pylint:unused-import
+plugins/modules/icx_static_route.py pylint:unused-import
+plugins/modules/icx_system.py pylint:unused-import
+plugins/modules/icx_user.py pylint:unused-import
+plugins/modules/icx_vlan.py pylint:unused-import
+plugins/modules/ordnance_config.py pylint:unused-import
+plugins/modules/slxos_l3_interface.py pylint:unused-import
+plugins/modules/sros_rollback.py pylint:unused-import
+plugins/netconf/ce.py pylint:unused-import
+plugins/terminal/aireos.py pylint:unused-import
+plugins/terminal/apconos.py pylint:unused-import
+plugins/terminal/aruba.py pylint:unused-import
+plugins/terminal/eric_eccli.py pylint:unused-import
+plugins/terminal/netvisor.py pylint:unused-import
+plugins/terminal/weos4.py pylint:unused-import
+tests/sanity/extra/botmeta.py pylint:unused-import
+tests/unit/compat/builtins.py pylint:unused-import
+tests/unit/compat/mock.py pylint:use-yield-from
+tests/unit/mock/loader.py pylint:unidiomatic-typecheck
+tests/unit/plugins/httpapi/test_ftd.py pylint:unidiomatic-typecheck
+tests/unit/plugins/modules/test_apconos_command.py pylint:unused-import
+tests/unit/plugins/modules/test_icx_logging.py pylint:unused-import
+tests/unit/plugins/modules/test_icx_system.py pylint:unused-import
+tests/unit/plugins/modules/test_pn_log_audit_exception.py pylint:unused-import
+tests/unit/plugins/modules/test_pn_vtep.py pylint:unused-import
diff --git a/ansible_collections/community/network/tests/utils/shippable/shippable.sh b/ansible_collections/community/network/tests/utils/shippable/shippable.sh
index ad0bf940d..afe46650a 100755
--- a/ansible_collections/community/network/tests/utils/shippable/shippable.sh
+++ b/ansible_collections/community/network/tests/utils/shippable/shippable.sh
@@ -62,6 +62,7 @@ else
retry pip install "https://github.com/ansible/ansible/archive/stable-${ansible_version}.tar.gz" --disable-pip-version-check
fi
+# shellcheck disable=SC2153
if [ "${SHIPPABLE_BUILD_ID:-}" ]; then
export ANSIBLE_COLLECTIONS_PATHS="${HOME}/.ansible"
SHIPPABLE_RESULT_DIR="$(pwd)/shippable"
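(The shellcheck directive added above silences SC2153, shellcheck's "possible misspelling" warning: SHIPPABLE_BUILD_ID is an externally provided CI variable that is never assigned in the script, so shellcheck would otherwise suspect it of being a typo for a similarly named variable that is assigned, such as SHIPPABLE_RESULT_DIR.)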
diff --git a/ansible_collections/community/routeros/.github/workflows/ansible-test.yml b/ansible_collections/community/routeros/.github/workflows/ansible-test.yml
index f743ae51a..fa6241963 100644
--- a/ansible_collections/community/routeros/.github/workflows/ansible-test.yml
+++ b/ansible_collections/community/routeros/.github/workflows/ansible-test.yml
@@ -49,7 +49,9 @@ jobs:
- name: Perform sanity testing
uses: felixfontein/ansible-test-gh-action@main
with:
+ ansible-core-github-repository-slug: ${{ contains(fromJson('["stable-2.9", "stable-2.10", "stable-2.11"]'), matrix.ansible) && 'ansible-community/eol-ansible' || 'ansible/ansible' }}
ansible-core-version: ${{ matrix.ansible }}
+ codecov-token: ${{ secrets.CODECOV_TOKEN }}
testing-type: sanity
# NOTE: we're installing with git to work around Galaxy being a huge PITA (https://github.com/ansible/galaxy/issues/2429)
pre-test-cmd: |-
@@ -89,7 +91,9 @@ jobs:
Ansible version ${{ matrix.ansible }}
uses: felixfontein/ansible-test-gh-action@main
with:
+ ansible-core-github-repository-slug: ${{ contains(fromJson('["stable-2.9", "stable-2.10", "stable-2.11"]'), matrix.ansible) && 'ansible-community/eol-ansible' || 'ansible/ansible' }}
ansible-core-version: ${{ matrix.ansible }}
+ codecov-token: ${{ secrets.CODECOV_TOKEN }}
testing-type: units
# NOTE: we're installing with git to work around Galaxy being a huge PITA (https://github.com/ansible/galaxy/issues/2429)
pre-test-cmd: |-
@@ -156,7 +160,9 @@ jobs:
under Python ${{ matrix.python }}
uses: felixfontein/ansible-test-gh-action@main
with:
+ ansible-core-github-repository-slug: ${{ contains(fromJson('["stable-2.9", "stable-2.10", "stable-2.11"]'), matrix.ansible) && 'ansible-community/eol-ansible' || 'ansible/ansible' }}
ansible-core-version: ${{ matrix.ansible }}
+ codecov-token: ${{ secrets.CODECOV_TOKEN }}
integration-continue-on-error: 'false'
integration-diff: 'false'
integration-retry-on-error: 'true'
diff --git a/ansible_collections/community/routeros/.github/workflows/ee.yml b/ansible_collections/community/routeros/.github/workflows/ee.yml
index 523efaf58..67735c394 100644
--- a/ansible_collections/community/routeros/.github/workflows/ee.yml
+++ b/ansible_collections/community/routeros/.github/workflows/ee.yml
@@ -70,14 +70,6 @@ jobs:
package_system: python39 python39-pip python39-wheel python39-cryptography
base_image: docker.io/redhat/ubi8:latest
pre_base: '"#"'
- - name: ansible-core 2.12 @ CentOS Stream 8
- ansible_core: https://github.com/ansible/ansible/archive/stable-2.12.tar.gz
- ansible_runner: ansible-runner
- other_deps: |2
- python_interpreter:
- package_system: python39 python39-pip python39-wheel python39-cryptography
- base_image: quay.io/centos/centos:stream8
- pre_base: '"#"'
runs-on: ubuntu-latest
steps:
- name: Check out code
diff --git a/ansible_collections/community/routeros/CHANGELOG.md b/ansible_collections/community/routeros/CHANGELOG.md
index adb4f7420..a17b8fcb8 100644
--- a/ansible_collections/community/routeros/CHANGELOG.md
+++ b/ansible_collections/community/routeros/CHANGELOG.md
@@ -2,109 +2,112 @@
**Topics**
-- <a href="#v2-15-0">v2\.15\.0</a>
+- <a href="#v2-16-0">v2\.16\.0</a>
- <a href="#release-summary">Release Summary</a>
- <a href="#minor-changes">Minor Changes</a>
-- <a href="#v2-14-0">v2\.14\.0</a>
+- <a href="#v2-15-0">v2\.15\.0</a>
- <a href="#release-summary-1">Release Summary</a>
- <a href="#minor-changes-1">Minor Changes</a>
-- <a href="#v2-13-0">v2\.13\.0</a>
+- <a href="#v2-14-0">v2\.14\.0</a>
- <a href="#release-summary-2">Release Summary</a>
- <a href="#minor-changes-2">Minor Changes</a>
- - <a href="#bugfixes">Bugfixes</a>
-- <a href="#v2-12-0">v2\.12\.0</a>
+- <a href="#v2-13-0">v2\.13\.0</a>
- <a href="#release-summary-3">Release Summary</a>
- <a href="#minor-changes-3">Minor Changes</a>
-- <a href="#v2-11-0">v2\.11\.0</a>
+ - <a href="#bugfixes">Bugfixes</a>
+- <a href="#v2-12-0">v2\.12\.0</a>
- <a href="#release-summary-4">Release Summary</a>
- <a href="#minor-changes-4">Minor Changes</a>
-- <a href="#v2-10-0">v2\.10\.0</a>
+- <a href="#v2-11-0">v2\.11\.0</a>
- <a href="#release-summary-5">Release Summary</a>
- <a href="#minor-changes-5">Minor Changes</a>
- - <a href="#bugfixes-1">Bugfixes</a>
-- <a href="#v2-9-0">v2\.9\.0</a>
+- <a href="#v2-10-0">v2\.10\.0</a>
- <a href="#release-summary-6">Release Summary</a>
- <a href="#minor-changes-6">Minor Changes</a>
+ - <a href="#bugfixes-1">Bugfixes</a>
+- <a href="#v2-9-0">v2\.9\.0</a>
+ - <a href="#release-summary-7">Release Summary</a>
+ - <a href="#minor-changes-7">Minor Changes</a>
- <a href="#bugfixes-2">Bugfixes</a>
- <a href="#v2-8-3">v2\.8\.3</a>
- - <a href="#release-summary-7">Release Summary</a>
+ - <a href="#release-summary-8">Release Summary</a>
- <a href="#known-issues">Known Issues</a>
- <a href="#v2-8-2">v2\.8\.2</a>
- - <a href="#release-summary-8">Release Summary</a>
+ - <a href="#release-summary-9">Release Summary</a>
- <a href="#bugfixes-3">Bugfixes</a>
- <a href="#v2-8-1">v2\.8\.1</a>
- - <a href="#release-summary-9">Release Summary</a>
+ - <a href="#release-summary-10">Release Summary</a>
- <a href="#bugfixes-4">Bugfixes</a>
- <a href="#v2-8-0">v2\.8\.0</a>
- - <a href="#release-summary-10">Release Summary</a>
- - <a href="#minor-changes-7">Minor Changes</a>
- - <a href="#bugfixes-5">Bugfixes</a>
-- <a href="#v2-7-0">v2\.7\.0</a>
- <a href="#release-summary-11">Release Summary</a>
- <a href="#minor-changes-8">Minor Changes</a>
- - <a href="#bugfixes-6">Bugfixes</a>
-- <a href="#v2-6-0">v2\.6\.0</a>
+ - <a href="#bugfixes-5">Bugfixes</a>
+- <a href="#v2-7-0">v2\.7\.0</a>
- <a href="#release-summary-12">Release Summary</a>
- <a href="#minor-changes-9">Minor Changes</a>
- - <a href="#bugfixes-7">Bugfixes</a>
-- <a href="#v2-5-0">v2\.5\.0</a>
+ - <a href="#bugfixes-6">Bugfixes</a>
+- <a href="#v2-6-0">v2\.6\.0</a>
- <a href="#release-summary-13">Release Summary</a>
- <a href="#minor-changes-10">Minor Changes</a>
- - <a href="#bugfixes-8">Bugfixes</a>
-- <a href="#v2-4-0">v2\.4\.0</a>
+ - <a href="#bugfixes-7">Bugfixes</a>
+- <a href="#v2-5-0">v2\.5\.0</a>
- <a href="#release-summary-14">Release Summary</a>
- <a href="#minor-changes-11">Minor Changes</a>
+ - <a href="#bugfixes-8">Bugfixes</a>
+- <a href="#v2-4-0">v2\.4\.0</a>
+ - <a href="#release-summary-15">Release Summary</a>
+ - <a href="#minor-changes-12">Minor Changes</a>
- <a href="#bugfixes-9">Bugfixes</a>
- <a href="#known-issues-1">Known Issues</a>
- <a href="#v2-3-1">v2\.3\.1</a>
- - <a href="#release-summary-15">Release Summary</a>
+ - <a href="#release-summary-16">Release Summary</a>
- <a href="#known-issues-2">Known Issues</a>
- <a href="#v2-3-0">v2\.3\.0</a>
- - <a href="#release-summary-16">Release Summary</a>
- - <a href="#minor-changes-12">Minor Changes</a>
+ - <a href="#release-summary-17">Release Summary</a>
+ - <a href="#minor-changes-13">Minor Changes</a>
- <a href="#bugfixes-10">Bugfixes</a>
- <a href="#v2-2-1">v2\.2\.1</a>
- - <a href="#release-summary-17">Release Summary</a>
+ - <a href="#release-summary-18">Release Summary</a>
- <a href="#bugfixes-11">Bugfixes</a>
- <a href="#v2-2-0">v2\.2\.0</a>
- - <a href="#release-summary-18">Release Summary</a>
- - <a href="#minor-changes-13">Minor Changes</a>
+ - <a href="#release-summary-19">Release Summary</a>
+ - <a href="#minor-changes-14">Minor Changes</a>
- <a href="#bugfixes-12">Bugfixes</a>
- <a href="#new-modules">New Modules</a>
- <a href="#v2-1-0">v2\.1\.0</a>
- - <a href="#release-summary-19">Release Summary</a>
- - <a href="#minor-changes-14">Minor Changes</a>
+ - <a href="#release-summary-20">Release Summary</a>
+ - <a href="#minor-changes-15">Minor Changes</a>
- <a href="#bugfixes-13">Bugfixes</a>
- <a href="#new-modules-1">New Modules</a>
- <a href="#v2-0-0">v2\.0\.0</a>
- - <a href="#release-summary-20">Release Summary</a>
- - <a href="#minor-changes-15">Minor Changes</a>
+ - <a href="#release-summary-21">Release Summary</a>
+ - <a href="#minor-changes-16">Minor Changes</a>
- <a href="#breaking-changes--porting-guide">Breaking Changes / Porting Guide</a>
- <a href="#bugfixes-14">Bugfixes</a>
- <a href="#new-plugins">New Plugins</a>
- <a href="#filter">Filter</a>
- <a href="#v1-2-0">v1\.2\.0</a>
- - <a href="#release-summary-21">Release Summary</a>
- - <a href="#minor-changes-16">Minor Changes</a>
- - <a href="#bugfixes-15">Bugfixes</a>
-- <a href="#v1-1-0">v1\.1\.0</a>
- <a href="#release-summary-22">Release Summary</a>
- <a href="#minor-changes-17">Minor Changes</a>
-- <a href="#v1-0-1">v1\.0\.1</a>
+ - <a href="#bugfixes-15">Bugfixes</a>
+- <a href="#v1-1-0">v1\.1\.0</a>
- <a href="#release-summary-23">Release Summary</a>
+ - <a href="#minor-changes-18">Minor Changes</a>
+- <a href="#v1-0-1">v1\.0\.1</a>
+ - <a href="#release-summary-24">Release Summary</a>
- <a href="#bugfixes-16">Bugfixes</a>
- <a href="#v1-0-0">v1\.0\.0</a>
- - <a href="#release-summary-24">Release Summary</a>
+ - <a href="#release-summary-25">Release Summary</a>
- <a href="#bugfixes-17">Bugfixes</a>
- <a href="#v0-1-1">v0\.1\.1</a>
- - <a href="#release-summary-25">Release Summary</a>
+ - <a href="#release-summary-26">Release Summary</a>
- <a href="#bugfixes-18">Bugfixes</a>
- <a href="#v0-1-0">v0\.1\.0</a>
- - <a href="#release-summary-26">Release Summary</a>
- - <a href="#minor-changes-18">Minor Changes</a>
+ - <a href="#release-summary-27">Release Summary</a>
+ - <a href="#minor-changes-19">Minor Changes</a>
-<a id="v2-15-0"></a>
-## v2\.15\.0
+<a id="v2-16-0"></a>
+## v2\.16\.0
<a id="release-summary"></a>
### Release Summary
@@ -114,6 +117,20 @@ Feature release\.
<a id="minor-changes"></a>
### Minor Changes
+* api\_info\, api\_modify \- add missing path <code>/ppp secret</code> \([https\://github\.com/ansible\-collections/community\.routeros/pull/286](https\://github\.com/ansible\-collections/community\.routeros/pull/286)\)\.
+* api\_info\, api\_modify \- minor changes <code>/interface ethernet</code> path fields \([https\://github\.com/ansible\-collections/community\.routeros/pull/288](https\://github\.com/ansible\-collections/community\.routeros/pull/288)\)\.
+
+<a id="v2-15-0"></a>
+## v2\.15\.0
+
+<a id="release-summary-1"></a>
+### Release Summary
+
+Feature release\.
+
+<a id="minor-changes-1"></a>
+### Minor Changes
+
* api\_info\, api\_modify \- Add RouterOS 7\.x support to <code>/mpls ldp</code> path \([https\://github\.com/ansible\-collections/community\.routeros/pull/271](https\://github\.com/ansible\-collections/community\.routeros/pull/271)\)\.
* api\_info\, api\_modify \- add <code>/ip route rule</code> path for RouterOS 6\.x \([https\://github\.com/ansible\-collections/community\.routeros/pull/278](https\://github\.com/ansible\-collections/community\.routeros/pull/278)\)\.
* api\_info\, api\_modify \- add <code>/routing filter</code> path for RouterOS 6\.x \([https\://github\.com/ansible\-collections/community\.routeros/pull/279](https\://github\.com/ansible\-collections/community\.routeros/pull/279)\)\.
@@ -128,12 +145,12 @@ Feature release\.
<a id="v2-14-0"></a>
## v2\.14\.0
-<a id="release-summary-1"></a>
+<a id="release-summary-2"></a>
### Release Summary
Feature release\.
-<a id="minor-changes-1"></a>
+<a id="minor-changes-2"></a>
### Minor Changes
* api\_info\, api\_modify \- add read\-only fields <code>installed\-version</code>\, <code>latest\-version</code> and <code>status</code> in <code>system package update</code> \([https\://github\.com/ansible\-collections/community\.routeros/pull/263](https\://github\.com/ansible\-collections/community\.routeros/pull/263)\)\.
@@ -143,12 +160,12 @@ Feature release\.
<a id="v2-13-0"></a>
## v2\.13\.0
-<a id="release-summary-2"></a>
+<a id="release-summary-3"></a>
### Release Summary
Bugfix and feature release\.
-<a id="minor-changes-2"></a>
+<a id="minor-changes-3"></a>
### Minor Changes
* api\_info\, api\_modify \- make path <code>user group</code> modifiable and add <code>comment</code> attribute \([https\://github\.com/ansible\-collections/community\.routeros/issues/256](https\://github\.com/ansible\-collections/community\.routeros/issues/256)\, [https\://github\.com/ansible\-collections/community\.routeros/pull/257](https\://github\.com/ansible\-collections/community\.routeros/pull/257)\)\.
@@ -162,12 +179,12 @@ Bugfix and feature release\.
<a id="v2-12-0"></a>
## v2\.12\.0
-<a id="release-summary-3"></a>
+<a id="release-summary-4"></a>
### Release Summary
Feature release\.
-<a id="minor-changes-3"></a>
+<a id="minor-changes-4"></a>
### Minor Changes
* api\_info\, api\_modify \- add <code>interface ovpn\-client</code> path \([https\://github\.com/ansible\-collections/community\.routeros/issues/242](https\://github\.com/ansible\-collections/community\.routeros/issues/242)\, [https\://github\.com/ansible\-collections/community\.routeros/pull/244](https\://github\.com/ansible\-collections/community\.routeros/pull/244)\)\.
@@ -181,12 +198,12 @@ Feature release\.
<a id="v2-11-0"></a>
## v2\.11\.0
-<a id="release-summary-4"></a>
+<a id="release-summary-5"></a>
### Release Summary
Feature and bugfix release\.
-<a id="minor-changes-4"></a>
+<a id="minor-changes-5"></a>
### Minor Changes
* api\_info\, api\_modify \- add missing DoH parameters <code>doh\-max\-concurrent\-queries</code>\, <code>doh\-max\-server\-connections</code>\, and <code>doh\-timeout</code> to the <code>ip dns</code> path \([https\://github\.com/ansible\-collections/community\.routeros/issues/230](https\://github\.com/ansible\-collections/community\.routeros/issues/230)\, [https\://github\.com/ansible\-collections/community\.routeros/pull/235](https\://github\.com/ansible\-collections/community\.routeros/pull/235)\)
@@ -201,12 +218,12 @@ Feature and bugfix release\.
<a id="v2-10-0"></a>
## v2\.10\.0
-<a id="release-summary-5"></a>
+<a id="release-summary-6"></a>
### Release Summary
Bugfix and feature release\.
-<a id="minor-changes-5"></a>
+<a id="minor-changes-6"></a>
### Minor Changes
* api\_info \- add new <code>include\_read\_only</code> option to select behavior for read\-only values\. By default these are not returned \([https\://github\.com/ansible\-collections/community\.routeros/pull/213](https\://github\.com/ansible\-collections/community\.routeros/pull/213)\)\.
@@ -238,12 +255,12 @@ Bugfix and feature release\.
<a id="v2-9-0"></a>
## v2\.9\.0
-<a id="release-summary-6"></a>
+<a id="release-summary-7"></a>
### Release Summary
Bugfix and feature release\.
-<a id="minor-changes-6"></a>
+<a id="minor-changes-7"></a>
### Minor Changes
* api\_info\, api\_modify \- add path <code>caps\-man channel</code> and enable path <code>caps\-man manager interface</code> \([https\://github\.com/ansible\-collections/community\.routeros/issues/193](https\://github\.com/ansible\-collections/community\.routeros/issues/193)\, [https\://github\.com/ansible\-collections/community\.routeros/pull/194](https\://github\.com/ansible\-collections/community\.routeros/pull/194)\)\.
@@ -257,7 +274,7 @@ Bugfix and feature release\.
<a id="v2-8-3"></a>
## v2\.8\.3
-<a id="release-summary-7"></a>
+<a id="release-summary-8"></a>
### Release Summary
Maintenance release with updated documentation\.
@@ -278,7 +295,7 @@ for the rendered HTML version of the documentation of the latest release\.
<a id="v2-8-2"></a>
## v2\.8\.2
-<a id="release-summary-8"></a>
+<a id="release-summary-9"></a>
### Release Summary
Bugfix release\.
@@ -291,7 +308,7 @@ Bugfix release\.
<a id="v2-8-1"></a>
## v2\.8\.1
-<a id="release-summary-9"></a>
+<a id="release-summary-10"></a>
### Release Summary
Bugfix release\.
@@ -304,12 +321,12 @@ Bugfix release\.
<a id="v2-8-0"></a>
## v2\.8\.0
-<a id="release-summary-10"></a>
+<a id="release-summary-11"></a>
### Release Summary
Bugfix and feature release\.
-<a id="minor-changes-7"></a>
+<a id="minor-changes-8"></a>
### Minor Changes
* api\_modify \- adapt data for API paths <code>ip dhcp\-server network</code> \([https\://github\.com/ansible\-collections/community\.routeros/pull/156](https\://github\.com/ansible\-collections/community\.routeros/pull/156)\)\.
@@ -329,12 +346,12 @@ Bugfix and feature release\.
<a id="v2-7-0"></a>
## v2\.7\.0
-<a id="release-summary-11"></a>
+<a id="release-summary-12"></a>
### Release Summary
Bugfix and feature release\.
-<a id="minor-changes-8"></a>
+<a id="minor-changes-9"></a>
### Minor Changes
* api\_modify\, api\_info \- support API paths <code>ip arp</code>\, <code>ip firewall raw</code>\, <code>ipv6 firewall raw</code> \([https\://github\.com/ansible\-collections/community\.routeros/pull/144](https\://github\.com/ansible\-collections/community\.routeros/pull/144)\)\.
@@ -347,12 +364,12 @@ Bugfix and feature release\.
<a id="v2-6-0"></a>
## v2\.6\.0
-<a id="release-summary-12"></a>
+<a id="release-summary-13"></a>
### Release Summary
Regular bugfix and feature release\.
-<a id="minor-changes-9"></a>
+<a id="minor-changes-10"></a>
### Minor Changes
* api\_modify\, api\_info \- add field <code>regexp</code> to <code>ip dns static</code> \([https\://github\.com/ansible\-collections/community\.routeros/issues/141](https\://github\.com/ansible\-collections/community\.routeros/issues/141)\)\.
@@ -367,12 +384,12 @@ Regular bugfix and feature release\.
<a id="v2-5-0"></a>
## v2\.5\.0
-<a id="release-summary-13"></a>
+<a id="release-summary-14"></a>
### Release Summary
Feature and bugfix release\.
-<a id="minor-changes-10"></a>
+<a id="minor-changes-11"></a>
### Minor Changes
* api\_info\, api\_modify \- support API paths <code>interface ethernet poe</code>\, <code>interface gre6</code>\, <code>interface vrrp</code> and also support all previously missing fields of entries in <code>ip dhcp\-server</code> \([https\://github\.com/ansible\-collections/community\.routeros/pull/137](https\://github\.com/ansible\-collections/community\.routeros/pull/137)\)\.
@@ -385,12 +402,12 @@ Feature and bugfix release\.
<a id="v2-4-0"></a>
## v2\.4\.0
-<a id="release-summary-14"></a>
+<a id="release-summary-15"></a>
### Release Summary
Feature release improving the <code>api\*</code> modules\.
-<a id="minor-changes-11"></a>
+<a id="minor-changes-12"></a>
### Minor Changes
* api\* modules \- Add new option <code>force\_no\_cert</code> to connect with ADH ciphers \([https\://github\.com/ansible\-collections/community\.routeros/pull/124](https\://github\.com/ansible\-collections/community\.routeros/pull/124)\)\.
@@ -429,7 +446,7 @@ Feature release improving the <code>api\*</code> modules\.
<a id="v2-3-1"></a>
## v2\.3\.1
-<a id="release-summary-15"></a>
+<a id="release-summary-16"></a>
### Release Summary
Maintenance release with improved documentation\.
@@ -442,12 +459,12 @@ Maintenance release with improved documentation\.
<a id="v2-3-0"></a>
## v2\.3\.0
-<a id="release-summary-16"></a>
+<a id="release-summary-17"></a>
### Release Summary
Feature and bugfix release\.
-<a id="minor-changes-12"></a>
+<a id="minor-changes-13"></a>
### Minor Changes
* The collection repository conforms to the [REUSE specification](https\://reuse\.software/spec/) except for the changelog fragments \([https\://github\.com/ansible\-collections/community\.routeros/pull/108](https\://github\.com/ansible\-collections/community\.routeros/pull/108)\)\.
@@ -462,7 +479,7 @@ Feature and bugfix release\.
<a id="v2-2-1"></a>
## v2\.2\.1
-<a id="release-summary-17"></a>
+<a id="release-summary-18"></a>
### Release Summary
Bugfix release\.
@@ -476,12 +493,12 @@ Bugfix release\.
<a id="v2-2-0"></a>
## v2\.2\.0
-<a id="release-summary-18"></a>
+<a id="release-summary-19"></a>
### Release Summary
New feature release\.
-<a id="minor-changes-13"></a>
+<a id="minor-changes-14"></a>
### Minor Changes
* All software licenses are now in the <code>LICENSES/</code> directory of the collection root\. Moreover\, <code>SPDX\-License\-Identifier\:</code> is used to declare the applicable license for every file that is not automatically generated \([https\://github\.com/ansible\-collections/community\.routeros/pull/101](https\://github\.com/ansible\-collections/community\.routeros/pull/101)\)\.
@@ -494,18 +511,18 @@ New feature release\.
<a id="new-modules"></a>
### New Modules
-* api\_info \- Retrieve information from API
-* api\_modify \- Modify data at paths with API
+* community\.routeros\.api\_info \- Retrieve information from API
+* community\.routeros\.api\_modify \- Modify data at paths with API
<a id="v2-1-0"></a>
## v2\.1\.0
-<a id="release-summary-19"></a>
+<a id="release-summary-20"></a>
### Release Summary
Feature and bugfix release with new modules\.
-<a id="minor-changes-14"></a>
+<a id="minor-changes-15"></a>
### Minor Changes
* Added a <code>community\.routeros\.api</code> module defaults group\. Use with <code>group/community\.routeros\.api</code> to provide options for all API\-based modules \([https\://github\.com/ansible\-collections/community\.routeros/pull/89](https\://github\.com/ansible\-collections/community\.routeros/pull/89)\)\.
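As a usage illustration of that module defaults group, a minimal playbook sketch (host names, credentials, and the queried path are placeholders, not taken from this changelog entry):

  - hosts: routers
    gather_facts: false
    module_defaults:
      # All API-based modules in the play inherit these connection options.
      group/community.routeros.api:
        hostname: "{{ ansible_host }}"
        username: admin
        password: "{{ routeros_password }}"
    tasks:
      - name: Query a path through the shared defaults
        community.routeros.api_info:
          path: ip address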
@@ -523,18 +540,18 @@ Feature and bugfix release with new modules\.
<a id="new-modules-1"></a>
### New Modules
-* api\_facts \- Collect facts from remote devices running MikroTik RouterOS using the API
-* api\_find\_and\_modify \- Find and modify information using the API
+* community\.routeros\.api\_facts \- Collect facts from remote devices running MikroTik RouterOS using the API
+* community\.routeros\.api\_find\_and\_modify \- Find and modify information using the API
<a id="v2-0-0"></a>
## v2\.0\.0
-<a id="release-summary-20"></a>
+<a id="release-summary-21"></a>
### Release Summary
A new major release with breaking changes in the behavior of <code>community\.routeros\.api</code> and <code>community\.routeros\.command</code>\.
-<a id="minor-changes-15"></a>
+<a id="minor-changes-16"></a>
### Minor Changes
* api \- make validation of <code>WHERE</code> for <code>query</code> more strict \([https\://github\.com/ansible\-collections/community\.routeros/pull/53](https\://github\.com/ansible\-collections/community\.routeros/pull/53)\)\.
@@ -561,21 +578,21 @@ A new major release with breaking changes in the behavior of <code>community\.ro
<a id="filter"></a>
#### Filter
-* join \- Join a list of arguments to a command
-* list\_to\_dict \- Convert a list of arguments to a list of dictionary
-* quote\_argument \- Quote an argument
-* quote\_argument\_value \- Quote an argument value
-* split \- Split a command into arguments
+* community\.routeros\.join \- Join a list of arguments to a command
+* community\.routeros\.list\_to\_dict \- Convert a list of arguments to a list of dictionary
+* community\.routeros\.quote\_argument \- Quote an argument
+* community\.routeros\.quote\_argument\_value \- Quote an argument value
+* community\.routeros\.split \- Split a command into arguments
<a id="v1-2-0"></a>
## v1\.2\.0
-<a id="release-summary-21"></a>
+<a id="release-summary-22"></a>
### Release Summary
Bugfix and feature release\.
-<a id="minor-changes-16"></a>
+<a id="minor-changes-17"></a>
### Minor Changes
* Avoid internal ansible\-core module\_utils in favor of equivalent public API available since at least Ansible 2\.9 \([https\://github\.com/ansible\-collections/community\.routeros/pull/38](https\://github\.com/ansible\-collections/community\.routeros/pull/38)\)\.
@@ -591,12 +608,12 @@ Bugfix and feature release\.
<a id="v1-1-0"></a>
## v1\.1\.0
-<a id="release-summary-22"></a>
+<a id="release-summary-23"></a>
### Release Summary
This release allow dashes in usernames for SSH\-based modules\.
-<a id="minor-changes-17"></a>
+<a id="minor-changes-18"></a>
### Minor Changes
* command \- added support for a dash \(<code>\-</code>\) in username \([https\://github\.com/ansible\-collections/community\.routeros/pull/18](https\://github\.com/ansible\-collections/community\.routeros/pull/18)\)\.
@@ -605,7 +622,7 @@ This release allow dashes in usernames for SSH\-based modules\.
<a id="v1-0-1"></a>
## v1\.0\.1
-<a id="release-summary-23"></a>
+<a id="release-summary-24"></a>
### Release Summary
Maintenance release with a bugfix for <code>api</code>\.
@@ -618,7 +635,7 @@ Maintenance release with a bugfix for <code>api</code>\.
<a id="v1-0-0"></a>
## v1\.0\.0
-<a id="release-summary-24"></a>
+<a id="release-summary-25"></a>
### Release Summary
This is the first production \(non\-prerelease\) release of <code>community\.routeros</code>\.
@@ -631,7 +648,7 @@ This is the first production \(non\-prerelease\) release of <code>community\.rou
<a id="v0-1-1"></a>
## v0\.1\.1
-<a id="release-summary-25"></a>
+<a id="release-summary-26"></a>
### Release Summary
Small improvements and bugfixes over the initial release\.
@@ -644,12 +661,12 @@ Small improvements and bugfixes over the initial release\.
<a id="v0-1-0"></a>
## v0\.1\.0
-<a id="release-summary-26"></a>
+<a id="release-summary-27"></a>
### Release Summary
The <code>community\.routeros</code> continues the work on the Ansible RouterOS modules from their state in <code>community\.network</code> 1\.2\.0\. The changes listed here are thus relative to the modules <code>community\.network\.routeros\_\*</code>\.
-<a id="minor-changes-18"></a>
+<a id="minor-changes-19"></a>
### Minor Changes
* facts \- now also collecting data about BGP and OSPF \([https\://github\.com/ansible\-collections/community\.network/pull/101](https\://github\.com/ansible\-collections/community\.network/pull/101)\)\.
diff --git a/ansible_collections/community/routeros/CHANGELOG.rst b/ansible_collections/community/routeros/CHANGELOG.rst
index 1b3a76077..1a735af72 100644
--- a/ansible_collections/community/routeros/CHANGELOG.rst
+++ b/ansible_collections/community/routeros/CHANGELOG.rst
@@ -4,6 +4,20 @@ Community RouterOS Release Notes
.. contents:: Topics
+v2.16.0
+=======
+
+Release Summary
+---------------
+
+Feature release.
+
+Minor Changes
+-------------
+
+- api_info, api_modify - add missing path ``/ppp secret`` (https://github.com/ansible-collections/community.routeros/pull/286).
+- api_info, api_modify - minor changes ``/interface ethernet`` path fields (https://github.com/ansible-collections/community.routeros/pull/288).
+
v2.15.0
=======
@@ -396,8 +410,8 @@ Bugfixes
New Modules
-----------
-- api_info - Retrieve information from API
-- api_modify - Modify data at paths with API
+- community.routeros.api_info - Retrieve information from API
+- community.routeros.api_modify - Modify data at paths with API
v2.1.0
======
@@ -425,8 +439,8 @@ Bugfixes
New Modules
-----------
-- api_facts - Collect facts from remote devices running MikroTik RouterOS using the API
-- api_find_and_modify - Find and modify information using the API
+- community.routeros.api_facts - Collect facts from remote devices running MikroTik RouterOS using the API
+- community.routeros.api_find_and_modify - Find and modify information using the API
v2.0.0
======
@@ -463,11 +477,11 @@ New Plugins
Filter
~~~~~~
-- join - Join a list of arguments to a command
-- list_to_dict - Convert a list of arguments to a list of dictionary
-- quote_argument - Quote an argument
-- quote_argument_value - Quote an argument value
-- split - Split a command into arguments
+- community.routeros.join - Join a list of arguments to a command
+- community.routeros.list_to_dict - Convert a list of arguments to a list of dictionary
+- community.routeros.quote_argument - Quote an argument
+- community.routeros.quote_argument_value - Quote an argument value
+- community.routeros.split - Split a command into arguments
v1.2.0
======
diff --git a/ansible_collections/community/routeros/FILES.json b/ansible_collections/community/routeros/FILES.json
index 483d87e4f..0ca97eafc 100644
--- a/ansible_collections/community/routeros/FILES.json
+++ b/ansible_collections/community/routeros/FILES.json
@@ -25,7 +25,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf90bb7d4687afa28b6d64eed2a4ef8761badbd5759319daff0621394fe8a1fe",
+ "chksum_sha256": "58be3451b2513cc47d2604e6d743c617cfacc8c4a1c438b7771d92ca68092736",
"format": 1
},
{
@@ -46,7 +46,7 @@
"name": ".github/workflows/ee.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99536bd592be0fadd2e2c9acce48152aa671b04ba69636097bffd65035c473d6",
+ "chksum_sha256": "f1d97d8c8bdfc87fdcf56527e57a3516637362a1e15bb76cb000a15d93712953",
"format": 1
},
{
@@ -151,7 +151,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5780b9cd9fd37dcad0b122ae762afbe426e800c7fda9496e50163c628d77a823",
+ "chksum_sha256": "47b733d8817c1ed863d03ef0da8cc81790186ef231cb87440f46be1aca54de27",
"format": 1
},
{
@@ -165,7 +165,7 @@
"name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3cb54c894797529118cca5be83152602e8db6353062d795b6a9aaeecfc188870",
+ "chksum_sha256": "d8830903d5505b4360fc0c0cfd5e22dbd4e8622e3bddb142e7f116f51528e077",
"format": 1
},
{
@@ -361,7 +361,7 @@
"name": "plugins/module_utils/_api_data.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b2dd685aa6a3fd4d481f12ced0ed0284a65d4773844c4c9a30ea3b4cbd38297",
+ "chksum_sha256": "653685a731169bb6d2598b8b458fedbaf4e28bfc5a3bc1ae32bde2ca8ca198ec",
"format": 1
},
{
@@ -431,14 +431,14 @@
"name": "plugins/modules/api_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f86b949f2a5f6da6df2061711adf35ff727c034c7c9b7dc4e3bbb50fe80f0dd8",
+ "chksum_sha256": "fe639cf4de746ab0ac66d586a6961f304ce7b525345f630e3c96b433e07f36d5",
"format": 1
},
{
"name": "plugins/modules/api_modify.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ec95cb3af816b5ee9b3e54ead9258c177f559038d57de7f253f4d9b6f179784",
+ "chksum_sha256": "df615eb928e479aeb090938c1332fc8a96fb1dfc0f3900eec827515075d1ce48",
"format": 1
},
{
@@ -1278,7 +1278,7 @@
"name": "CHANGELOG.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3add5cc457f4a9b36baea353affd9965843342f141333acdce5af15d84d7ae2b",
+ "chksum_sha256": "09650110723cae9b5ddfe03f49a7fce8941b96d386f4a5f5386ba965f0991b67",
"format": 1
},
{
@@ -1292,7 +1292,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed20bebf70ccd5e1e94da8f399771dddf40744a2c2da0283fe210fc6c47d77ca",
+ "chksum_sha256": "112f6303c14f159a0c8552647b7c3ff3b031ed36c7d5d607fdfe095c8431ceaa",
"format": 1
},
{
@@ -1313,7 +1313,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5ae35bdf559206e7f48123c0928ef6b5d9bc29f8a6b6eef90aa731e81aaa516",
+ "chksum_sha256": "aeb12aade87adf256a4a8ff1fc128a66346f1b44dde60de4a764fc4dd63339d2",
"format": 1
},
{
diff --git a/ansible_collections/community/routeros/MANIFEST.json b/ansible_collections/community/routeros/MANIFEST.json
index ddec534d7..1e0c3475a 100644
--- a/ansible_collections/community/routeros/MANIFEST.json
+++ b/ansible_collections/community/routeros/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "routeros",
- "version": "2.15.0",
+ "version": "2.16.0",
"authors": [
"Egor Zaitsev (github.com/heuels)",
"Nikolay Dachev (github.com/NikolayDachev)",
@@ -31,7 +31,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a36cd24eef5f055d20b8836410e23e222aa2aa857c025faadc9e10767b95fec6",
+ "chksum_sha256": "f2db917057798ddb8c2142983e1e2ac0d786099bd5d9c01ffd480014518dd27a",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/routeros/README.md b/ansible_collections/community/routeros/README.md
index e617a8f89..f34a74921 100644
--- a/ansible_collections/community/routeros/README.md
+++ b/ansible_collections/community/routeros/README.md
@@ -5,7 +5,9 @@ SPDX-License-Identifier: GPL-3.0-or-later
-->
# Community RouterOS Collection
-[![CI](https://github.com/ansible-collections/community.routeros/workflows/CI/badge.svg?event=push)](https://github.com/ansible-collections/community.routeros/actions) [![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.routeros)](https://codecov.io/gh/ansible-collections/community.routeros)
+[![CI](https://github.com/ansible-collections/community.routeros/workflows/CI/badge.svg?event=push)](https://github.com/ansible-collections/community.routeros/actions)
+[![Codecov](https://img.shields.io/codecov/c/github/ansible-collections/community.routeros)](https://codecov.io/gh/ansible-collections/community.routeros)
+[![REUSE status](https://api.reuse.software/badge/github.com/ansible-collections/community.routeros)](https://api.reuse.software/info/github.com/ansible-collections/community.routeros)
Provides modules for [Ansible](https://www.ansible.com/community) to manage [MikroTik RouterOS](http://www.mikrotik-routeros.net/routeros.aspx) instances.
diff --git a/ansible_collections/community/routeros/changelogs/changelog.yaml b/ansible_collections/community/routeros/changelogs/changelog.yaml
index 62320dd0e..71044fbfd 100644
--- a/ansible_collections/community/routeros/changelogs/changelog.yaml
+++ b/ansible_collections/community/routeros/changelogs/changelog.yaml
@@ -371,6 +371,17 @@ releases:
- 278-add_ip_route_rule_path.yml
- 279-add_routing_filter_path.yml
release_date: '2024-04-20'
+ 2.16.0:
+ changes:
+ minor_changes:
+ - api_info, api_modify - add missing path ``/ppp secret`` (https://github.com/ansible-collections/community.routeros/pull/286).
+ - api_info, api_modify - minor changes to ``/interface ethernet`` path fields (https://github.com/ansible-collections/community.routeros/pull/288).
+ release_summary: Feature release.
+ fragments:
+ - 2.16.0.yml
+ - 286-add_ppp_secret_path.yml
+ - 288-interface_ethernet_values.yml
+ release_date: '2024-06-16'
2.2.0:
changes:
bugfixes:
diff --git a/ansible_collections/community/routeros/changelogs/config.yaml b/ansible_collections/community/routeros/changelogs/config.yaml
index 86412bafc..bb3c08b24 100644
--- a/ansible_collections/community/routeros/changelogs/config.yaml
+++ b/ansible_collections/community/routeros/changelogs/config.yaml
@@ -35,3 +35,6 @@ sections:
- - known_issues
- Known Issues
title: Community RouterOS
+trivial_section_name: trivial
+use_fqcn: true
+add_plugin_period: true
diff --git a/ansible_collections/community/routeros/plugins/module_utils/_api_data.py b/ansible_collections/community/routeros/plugins/module_utils/_api_data.py
index fdf5cba58..0883a2f55 100644
--- a/ansible_collections/community/routeros/plugins/module_utils/_api_data.py
+++ b/ansible_collections/community/routeros/plugins/module_utils/_api_data.py
@@ -327,9 +327,9 @@ PATHS = {
'combo-mode': KeyInfo(can_disable=True),
'comment': KeyInfo(can_disable=True, remove_value=''),
'disabled': KeyInfo(default=False),
- 'fec-mode': KeyInfo(can_disable=True),
+ 'fec-mode': KeyInfo(can_disable=True, remove_value='auto'),
'full-duplex': KeyInfo(default=True),
- 'l2mtu': KeyInfo(default=1598),
+ 'l2mtu': KeyInfo(),
'loop-protect': KeyInfo(default='default'),
'loop-protect-disable-time': KeyInfo(default='5m'),
'loop-protect-send-interval': KeyInfo(default='5s'),
@@ -338,8 +338,8 @@ PATHS = {
'mtu': KeyInfo(default=1500),
'name': KeyInfo(),
'orig-mac-address': KeyInfo(),
- 'poe-out': KeyInfo(can_disable=True),
- 'poe-priority': KeyInfo(can_disable=True),
+ 'poe-out': KeyInfo(can_disable=True, remove_value='auto-on'),
+ 'poe-priority': KeyInfo(can_disable=True, remove_value=10),
'poe-voltage': KeyInfo(can_disable=True),
'power-cycle-interval': KeyInfo(),
'power-cycle-ping-address': KeyInfo(can_disable=True),
@@ -347,7 +347,7 @@ PATHS = {
'power-cycle-ping-timeout': KeyInfo(can_disable=True),
'rx-flow-control': KeyInfo(default='off'),
'sfp-rate-select': KeyInfo(default='high'),
- 'sfp-shutdown-temperature': KeyInfo(default='95C'),
+ 'sfp-shutdown-temperature': KeyInfo(default=95),
'speed': KeyInfo(),
'tx-flow-control': KeyInfo(default='off'),
},
@@ -4600,6 +4600,27 @@ PATHS = {
},
),
),
+ ('ppp', 'secret'): APIData(
+ unversioned=VersionedAPIData(
+ fully_understood=True,
+ primary_keys=('name', ),
+ fields={
+ 'caller-id': KeyInfo(default=''),
+ 'disabled': KeyInfo(default=False),
+ 'ipv6-routes': KeyInfo(default=''),
+ 'limit-bytes-in': KeyInfo(default=0),
+ 'limit-bytes-out': KeyInfo(default=0),
+ 'local-address': KeyInfo(can_disable=True),
+ 'name': KeyInfo(required=True),
+ 'password': KeyInfo(),
+ 'profile': KeyInfo(default='default'),
+ 'remote-address': KeyInfo(can_disable=True),
+ 'remote-ipv6-prefix': KeyInfo(can_disable=True),
+ 'routes': KeyInfo(can_disable=True),
+ 'service': KeyInfo(default='any'),
+ },
+ ),
+ ),
('routing', 'bgp', 'aggregate'): APIData(
unversioned=VersionedAPIData(
primary_keys=('prefix',),
diff --git a/ansible_collections/community/routeros/plugins/modules/api_info.py b/ansible_collections/community/routeros/plugins/modules/api_info.py
index a3e800ee3..1d5dea56f 100644
--- a/ansible_collections/community/routeros/plugins/modules/api_info.py
+++ b/ansible_collections/community/routeros/plugins/modules/api_info.py
@@ -186,6 +186,7 @@ options:
- port remote-access
- ppp aaa
- ppp profile
+ - ppp secret
- queue interface
- queue simple
- queue tree
diff --git a/ansible_collections/community/routeros/plugins/modules/api_modify.py b/ansible_collections/community/routeros/plugins/modules/api_modify.py
index cd5b3a93d..73a08eff0 100644
--- a/ansible_collections/community/routeros/plugins/modules/api_modify.py
+++ b/ansible_collections/community/routeros/plugins/modules/api_modify.py
@@ -195,6 +195,7 @@ options:
- port remote-access
- ppp aaa
- ppp profile
+ - ppp secret
- queue interface
- queue simple
- queue tree
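
For orientation, a minimal usage sketch of the newly documented ``ppp secret`` path with ``community.routeros.api_modify`` (not taken from the collection's own examples; the address, credentials and secret values are placeholders, and the entry fields mirror the new ``('ppp', 'secret')`` definition added above)::

    # Hedged sketch: ensure a PPP secret exists on the device.
    # hostname/username/password identify the RouterOS API endpoint;
    # the data list describes the desired /ppp secret entries.
    - name: Configure /ppp secret via the RouterOS API
      community.routeros.api_modify:
        hostname: 192.0.2.1
        username: admin
        password: "{{ routeros_api_password }}"
        path: ppp secret
        data:
          - name: user1
            password: "{{ ppp_user1_password }}"
            profile: default
            service: any

Depending on the module's options for handling absent entries, existing secrets not listed under ``data`` may be kept or removed; check the module documentation for the exact reconciliation behaviour.
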
diff --git a/ansible_collections/community/zabbix/.github/workflows/agent.yml b/ansible_collections/community/zabbix/.github/workflows/agent.yml
index 55b7e1b72..e081e6721 100644
--- a/ansible_collections/community/zabbix/.github/workflows/agent.yml
+++ b/ansible_collections/community/zabbix/.github/workflows/agent.yml
@@ -22,6 +22,7 @@ jobs:
container:
- rockylinux9
- rockylinux8
+ - ubuntu2404
- ubuntu2204
- ubuntu2004
- ubuntu1804
@@ -40,6 +41,8 @@ jobs:
exclude:
- container: debian12
version: v62
+ - container: ubuntu2404
+ version: v62
steps:
- name: Check out code
diff --git a/ansible_collections/community/zabbix/.github/workflows/javagateway.yml b/ansible_collections/community/zabbix/.github/workflows/javagateway.yml
index 22bc5ede1..96ca2a8b7 100644
--- a/ansible_collections/community/zabbix/.github/workflows/javagateway.yml
+++ b/ansible_collections/community/zabbix/.github/workflows/javagateway.yml
@@ -23,6 +23,7 @@ jobs:
- rockylinux9
- rockylinux8
- centos7
+ - ubuntu2404
- ubuntu2204
- ubuntu2004
- ubuntu1804
@@ -40,6 +41,8 @@ jobs:
exclude:
- container: debian12
version: v62
+ - container: ubuntu2404
+ version: v62
collection_role:
- zabbix_javagateway
steps:
diff --git a/ansible_collections/community/zabbix/.github/workflows/proxy.yml b/ansible_collections/community/zabbix/.github/workflows/proxy.yml
index ad5f61b34..5618a0633 100644
--- a/ansible_collections/community/zabbix/.github/workflows/proxy.yml
+++ b/ansible_collections/community/zabbix/.github/workflows/proxy.yml
@@ -23,6 +23,7 @@ jobs:
- rockylinux9
- rockylinux8
- centos7
+ - ubuntu2404
- ubuntu2204
- ubuntu2004
- ubuntu1804
@@ -39,12 +40,13 @@ jobs:
- v64
- v62
- v60
- include:
- - interpreter: python3
- - interpreter: python
- container: centos7
exclude:
- - container: debian12
+ - container:
+ name: debian12
+ base_image: debian:12
+ os_family: debian
+ version: v62
+ - container: ubuntu2404
version: v62
steps:
- name: Check out code
@@ -78,10 +80,9 @@ jobs:
- name: Run role tests
run: >-
- MY_MOLECULE_CONTAINER=${{ matrix.container }}
- MY_MOLECULE_IMAGE=${{ matrix.container }}
+ MY_MOLECULE_CONTAINER=${{ matrix.container.name }}
+ MY_MOLECULE_IMAGE=${{ matrix.container.base_image }}
+ MY_MOLECULE_OS_FAMILY=${{ matrix.container.os_family }}
MY_MOLECULE_VERSION=${{ matrix.version }}
MY_MOLECULE_DATABASE=${{ matrix.database }}
- MY_MOLECULE_INTERPRETER=${{ matrix.interpreter }}
- MY_MOLECULE_DOCKER_COMMAND=${{ matrix.command }}
molecule test -s ${{ matrix.collection_role }}
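
Since the run step above now reads ``matrix.container.name``, ``matrix.container.base_image`` and ``matrix.container.os_family``, the proxy workflow's container matrix entries are presumably mappings rather than plain strings. A sketch of one entry, inferred only from the exclude entry visible in this hunk::

    # Assumed shape of a container matrix entry after this change
    # (only the debian12 values are visible in the diff itself):
    container:
      - name: debian12
        base_image: debian:12
        os_family: debian
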
diff --git a/ansible_collections/community/zabbix/.github/workflows/server.yml b/ansible_collections/community/zabbix/.github/workflows/server.yml
index 33f5cd5fd..9ca8f3761 100644
--- a/ansible_collections/community/zabbix/.github/workflows/server.yml
+++ b/ansible_collections/community/zabbix/.github/workflows/server.yml
@@ -22,6 +22,7 @@ jobs:
container:
- rockylinux9
- rockylinux8
+ - ubuntu2404
- ubuntu2204
- ubuntu2004
- ubuntu1804
@@ -37,13 +38,13 @@ jobs:
- v64
- v62
- v60
- include:
- - interpreter: python3
exclude:
- container: debian12
version: v62
- container: debian10
version: v62
+ - container: ubuntu2404
+ version: v62
- container: ubuntu1804
version: v62
- container: ubuntu1804
@@ -86,6 +87,5 @@ jobs:
MY_MOLECULE_IMAGE=${{ matrix.container }}
MY_MOLECULE_VERSION=${{ matrix.version }}
MY_MOLECULE_DATABASE=${{ matrix.database }}
- MY_MOLECULE_INTERPRETER=${{ matrix.interpreter }}
MY_MOLECULE_DOCKER_COMMAND=${{ matrix.command }}
molecule test -s ${{ matrix.collection_role }}
diff --git a/ansible_collections/community/zabbix/.github/workflows/web.yml b/ansible_collections/community/zabbix/.github/workflows/web.yml
index 0541a2457..ec60c540d 100644
--- a/ansible_collections/community/zabbix/.github/workflows/web.yml
+++ b/ansible_collections/community/zabbix/.github/workflows/web.yml
@@ -22,6 +22,7 @@ jobs:
container:
- rockylinux9
- rockylinux8
+ - ubuntu2404
- ubuntu2204
- ubuntu2004
- ubuntu1804
@@ -35,6 +36,7 @@ jobs:
- pgsql
web_server:
- nginx
+ - apache
version:
- v64
- v62
@@ -44,6 +46,8 @@ jobs:
exclude:
- container: debian10
version: v62
+ - container: ubuntu2404
+ version: v62
- container: ubuntu1804
version: v62
- container: debian10
diff --git a/ansible_collections/community/zabbix/CHANGELOG.rst b/ansible_collections/community/zabbix/CHANGELOG.rst
index 4675d1a5a..e108bccf4 100644
--- a/ansible_collections/community/zabbix/CHANGELOG.rst
+++ b/ansible_collections/community/zabbix/CHANGELOG.rst
@@ -4,6 +4,38 @@ community.zabbix Release Notes
.. contents:: Topics
+v2.5.1
+======
+
+Bugfixes
+--------
+
+- zabbix_agent - Fix reading existing psk
+- zabbix_agent - Fix role when zabbix_agent_listenip is undefined
+
+v2.5.0
+======
+
+Minor Changes
+-------------
+
+- agent role - Standardized all configuration variables on the `zabbix_agent` prefix rather than `zabbix_agent2`. Support for `zabbix_agent2` variables will be removed in 3.0.0
+- agent role - Standardized templating of the agent.conf file
+- all roles - Added support for Ubuntu 24.04 (Noble Numbat)
+- zabbix_discoveryrule module added
+- zabbix_host_events_update module added
+- zabbix_item - add support for setting master items by name
+- zabbix_item module added
+- zabbix_itemprototype - add support for setting master items by name
+- zabbix_itemprototype module added
+- zabbix_trigger module added
+- zabbix_triggerprototype module added
+
+Bugfixes
+--------
+
+- zabbix_web - make the FPM socket group-writable so the web server can properly forward requests to the FPM process
+
v2.4.0
======
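
As a rough illustration of the new ``zabbix_item`` module announced in the 2.5.0 notes above (the parameter names are assumptions based on the collection's conventions and the Zabbix item API, not copied from the module documentation; the target host is assumed to be reachable over the collection's httpapi connection)::

    # Hedged sketch: create a simple Zabbix agent item on an existing host.
    # 'params' is assumed to carry Zabbix API item properties.
    - name: Create an agent.ping item
      community.zabbix.zabbix_item:
        name: Agent availability
        host_name: example_host
        params:
          type: zabbix_agent
          key: agent.ping
          value_type: numeric_unsigned
        state: present
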
diff --git a/ansible_collections/community/zabbix/FILES.json b/ansible_collections/community/zabbix/FILES.json
index 694f37957..260909068 100644
--- a/ansible_collections/community/zabbix/FILES.json
+++ b/ansible_collections/community/zabbix/FILES.json
@@ -46,14 +46,14 @@
"name": ".github/workflows/agent.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab09e77c1cfb09d0ead5281bcfcd6cfb54d6eaa8380affbd53ec47c38c577855",
+ "chksum_sha256": "1a3f73038e09c3ca8fd53f6d8efaa24e465a1a08178521b22755ba7056394069",
"format": 1
},
{
"name": ".github/workflows/javagateway.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fece5724408d5a2170a22b1fdedf601451248c723f84066b7d1076dff06dbec",
+ "chksum_sha256": "dc04855a3440f2cf55603398b8d8706fa83d3e7932b9f38f166d2a08a7c98b58",
"format": 1
},
{
@@ -67,7 +67,7 @@
"name": ".github/workflows/proxy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f21f654a95b514e188c7cc090b21685521112a90610fef14bc487301141b17af",
+ "chksum_sha256": "6c52fb341cd0a40742fd5439bfe8e9712be2c6f8e010bbb6823e68600deeabee",
"format": 1
},
{
@@ -81,14 +81,14 @@
"name": ".github/workflows/server.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1925557e00452541d0105523033dba1edd9e3ee8ca95b3373c006c1f96973380",
+ "chksum_sha256": "390206c1dc52ff10f3b070a6e3c7622a3739c3c20bddd1eff81452d10b7f0c12",
"format": 1
},
{
"name": ".github/workflows/web.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8665c7b983f6b76cba7da881ab0cd450439217a5bd519ae7f30933a3ded554a",
+ "chksum_sha256": "941a823245cc7a3a02b6ca32253be2ebe01cf508f528e30db8adc186da94cbce",
"format": 1
},
{
@@ -116,14 +116,14 @@
"name": "changelogs/.plugin-cache.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ddbed2431f771d0b08ec1059fcfc03df63768d78c59488ffadfddae737f52028",
+ "chksum_sha256": "1cfb6d46aa54a0ee195c96b60658034de0b050f738aa69088189fa7050795117",
"format": 1
},
{
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25653a4be548f83eb8db9059d833a8298cc278f4ff812ff3b79abaa9fcb63f1e",
+ "chksum_sha256": "91af3d023ce10a9f731f26bbf291cff608e2d77e8ba8a6fd8536740eb7334207",
"format": 1
},
{
@@ -158,35 +158,35 @@
"name": "docs/ZABBIX_AGENT_ROLE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d256f34122a50d20bf5d074df98558de74774e591bb2bc309e858ab0f2c7a7b8",
+ "chksum_sha256": "78d04c48530cd39414bee760f67411ba56a003f0b367155248a0bf883a54d923",
"format": 1
},
{
"name": "docs/ZABBIX_JAVAGATEWAY_ROLE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "168aae7236576e4baf77c7b056833d5c90527c99365d7c9487f05f7af4d7281a",
+ "chksum_sha256": "73778abc24b8eb27b6243c4d04bd38dc8030f8092bfde3297c15221ba565eec6",
"format": 1
},
{
"name": "docs/ZABBIX_PROXY_ROLE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fb7298372379197055d9221660fa37ec078d7b2c5aaa4545ed09a943daa7baf",
+ "chksum_sha256": "830cf0d2a76423c1146ab807bee53a80bb9f5839ee4954b1cc804ae20207321c",
"format": 1
},
{
"name": "docs/ZABBIX_SERVER_ROLE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7b83a2f01c4ef1388cba62476dbbe0596c483b345f4a79aabc2cd107cf16b61",
+ "chksum_sha256": "dd08d146c7fbbdef236be08abb9c035456e7d5f635a5748200fb7cbdb965f734",
"format": 1
},
{
"name": "docs/ZABBIX_WEB_ROLE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c52def2249d4416ac228eb03d6b57322578e2fb5c118967fb1e11634ff66f64f",
+ "chksum_sha256": "836bbcc7e6ca298e93435670eefa94582bb44bfc7abf6a42e76ee814d41b88cb",
"format": 1
},
{
@@ -319,7 +319,7 @@
"name": "molecule/zabbix_agent_tests/common/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ce9330c02c7d06dca39d7b31e1683ffd552bb341f35603fbb42659e3915a0ac",
+ "chksum_sha256": "7a4976cef41847257588e2004f9f8373cdbba1e59063335f5d4fcc531e964e4d",
"format": 1
},
{
@@ -375,7 +375,7 @@
"name": "molecule/zabbix_agent_tests/molecule/agent2/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54630476f85b1859cf8f37f43b64631c0b6b87b1d1a151ae9f0ab8baaa7ce7ad",
+ "chksum_sha256": "40a1bfa5c04054e27b104891af91a489467b3cbc1b8c0c59f206a75a96154a74",
"format": 1
},
{
@@ -575,6 +575,20 @@
"format": 1
},
{
+ "name": "molecule/zabbix_proxy/Dockerfile.debian.j2",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f98266549b4f1b7f99ae7054a70f01178b67f2490e7a32b5545fa0a162a5ee81",
+ "format": 1
+ },
+ {
+ "name": "molecule/zabbix_proxy/Dockerfile.redhat.j2",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a1b4ae72a87e1e9e345c160fd67ef58426b17818be0c241e71f70456c57fd608",
+ "format": 1
+ },
+ {
"name": "molecule/zabbix_proxy/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -592,14 +606,14 @@
"name": "molecule/zabbix_proxy/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4800b75f49e7496f80ace71d84f8f07dc7b9335c8f158bc4155c44a8d8492cc1",
+ "chksum_sha256": "842bcdb7a60fc380948cb88038df9bf01b519339f43307ed294504acaad49f24",
"format": 1
},
{
"name": "molecule/zabbix_proxy/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4495bcd19fac7b44c8d02c80133e37f554c1f66244a10c4a88e85c11ce97b814",
+ "chksum_sha256": "8fd3ba0ba2765ed0f99a4ebc6e67382df921b9b39e7f2eb2b8ff644a3c6cd3d0",
"format": 1
},
{
@@ -641,7 +655,7 @@
"name": "molecule/zabbix_server/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c0b54e3f4f40288f5a772263d1f56a44aebb39e70aad6ec542546b40d7653a9",
+ "chksum_sha256": "f3d361e2f88015820269ebb93c8ff98e0a399a67adf30d3ab38ff375a148fae1",
"format": 1
},
{
@@ -697,7 +711,7 @@
"name": "molecule/zabbix_web/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7628246084a0692187d2408ee30afc478999d24c1e08d6fc0609f59ed828b23e",
+ "chksum_sha256": "63917b4d79946207e2ad7207270b94a11e25b058c7eafb040934f1b3560affc8",
"format": 1
},
{
@@ -711,7 +725,7 @@
"name": "molecule/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26a9987c8545a84ddf26c2774fc1bf16c0dc68fa790dde49b46ce17a1720700e",
+ "chksum_sha256": "ad376f71f1e8d1bcff190d8588e99e6840df2e7296451c4eda7ce7b6663c13a2",
"format": 1
},
{
@@ -862,6 +876,13 @@
"format": 1
},
{
+ "name": "plugins/modules/zabbix_discoveryrule.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bc1a58588a502206fdfc5e4aa1858e276c4e3e6605a5493a314ce81f4b217be1",
+ "format": 1
+ },
+ {
"name": "plugins/modules/zabbix_globalmacro.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -904,6 +925,13 @@
"format": 1
},
{
+ "name": "plugins/modules/zabbix_host_events_update.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c83f644554957ef349547a69fa1c7e31b3a03ee5fc5813e94ca1ac16de580a4b",
+ "format": 1
+ },
+ {
"name": "plugins/modules/zabbix_host_info.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -925,6 +953,20 @@
"format": 1
},
{
+ "name": "plugins/modules/zabbix_item.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db8b030def44167e1055ec383657a9f36b3fadc94eaf1868b506affa4e019f51",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/zabbix_itemprototype.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "875437704fa98e70323feaf57e93a67b043f10e99d93eb1adfd4444b53260ed8",
+ "format": 1
+ },
+ {
"name": "plugins/modules/zabbix_maintenance.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -970,7 +1012,7 @@
"name": "plugins/modules/zabbix_script.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3fd732f7f929a0dbe91dbaa4ce1d2c7fcd86b0c5199ee9ca5d9db8e3f471b613",
+ "chksum_sha256": "f3f5e363e8f868bc48cbb55a1783bceb6d023994090c7e03aeff87032cca3a76",
"format": 1
},
{
@@ -1023,6 +1065,20 @@
"format": 1
},
{
+ "name": "plugins/modules/zabbix_trigger.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d754c83db1214459d67ebefcab4143303dc057e87a9bf98ca3e9022ddee68eba",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/zabbix_triggerprototype.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "01a3cf27999c442150706f08c9b6b220a726672605e454cdbfce3bba620f7baf",
+ "format": 1
+ },
+ {
"name": "plugins/modules/zabbix_user.py",
"ftype": "file",
"chksum_type": "sha256",
@@ -1082,7 +1138,7 @@
"name": "roles/zabbix_agent/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d256f34122a50d20bf5d074df98558de74774e591bb2bc309e858ab0f2c7a7b8",
+ "chksum_sha256": "78d04c48530cd39414bee760f67411ba56a003f0b367155248a0bf883a54d923",
"format": 1
},
{
@@ -1096,7 +1152,7 @@
"name": "roles/zabbix_agent/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7465f741a1390bc62b1f30f3daf28f271ad94942c2f727fabfe576277fd4c1f3",
+ "chksum_sha256": "0e723f2914d13b84e8ca7411a5d7679a5a051c213a157862ba81456ba7103ae7",
"format": 1
},
{
@@ -1243,7 +1299,7 @@
"name": "roles/zabbix_agent/tasks/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "97c991dcccf9ab92bad90d36caf6e2d3bdd2b9eb9461aded42696b3e76c7b1ad",
+ "chksum_sha256": "90e5f8ceaae327a890c2c4a77b6d3a194442d27327344cf7f07d7487826a8913",
"format": 1
},
{
@@ -1257,7 +1313,7 @@
"name": "roles/zabbix_agent/tasks/Linux.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4cbe916769cde150073a71e1ac8b5e9a3a531ba6a17782944e4e758433877ff8",
+ "chksum_sha256": "f547026740b272e5dc1b65993e088c876c6bb31d90c7a872af82f62f74ab6de2",
"format": 1
},
{
@@ -1271,21 +1327,21 @@
"name": "roles/zabbix_agent/tasks/Windows.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "709a0466886c5bf5ed5f560b987cf68d268fff906a6dd26b667c25654d37fe81",
+ "chksum_sha256": "d224fef134ce06d2723ea4f565e72f34dab2c890d2152374080ec67c23223bbd",
"format": 1
},
{
"name": "roles/zabbix_agent/tasks/Windows_conf.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba0da2675127cf58cfa66ba4e977f15f2f856654fb7583826865276fa10c6262",
+ "chksum_sha256": "626952830d71ad912b6007a647ce3c1f58129f26dee4cccb0f8d96b8338aedd6",
"format": 1
},
{
"name": "roles/zabbix_agent/tasks/api.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8201b0dc1b00dd86383f272dd81f06e792f8bab1951aee62c03a46791ca5b4bb",
+ "chksum_sha256": "bf5750468e57278a4174ff2accecd2aac7431678cdc32b9f8822ab9f592e6eb5",
"format": 1
},
{
@@ -1299,77 +1355,35 @@
"name": "roles/zabbix_agent/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7ceb0be05f5fb90c9d007ae64a43ca66fa855c484410df95c46e41853a44f5bd",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/tasks/remove.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a61614e0f51ef087ef82eb2319b8546f213c22aff785afdf56f007c442b6cb2c",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/tasks/selinux.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5b48575676d826c5e6321daddab3713fbbd76ba575400f29eeafc277017b9fe2",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/tasks/tlspsk_auto.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bcf8266fdd6b7d01d5a6bb97a3f4c1982ca7dc099dedcaa407390c69bbeaa7e6",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_agent2.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1d0eafd6fefb287832c5b42c80c37c2b2e049b937aa50f44d7040ce56d81db86",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_agent2_common.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3f194c4720b68b5eb4003effe148b6c379a650f91d5325147d3e43485d37ba7b",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_agent2_linux.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3d651d820f7be9fd52d0b752f6ad07fc3e41a7ba52f178ae1221e1e2419980d9",
+ "chksum_sha256": "6b5494d41937dcbb34b1265a741f93d5a1f3731d969853b404394d6800256066",
"format": 1
},
{
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_agent2_windows.yml",
+ "name": "roles/zabbix_agent/tasks/psk_identity.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "416c701d66781d97032e1104e5c25abe4157740f517fdcc5ffea5f5bb14fc9cf",
+ "chksum_sha256": "907f08566fb0f5f7f9c3b0e870372670668a736ad901992d9a8636b47bc419cf",
"format": 1
},
{
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_common.yml",
+ "name": "roles/zabbix_agent/tasks/psk_secret.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4be3dadd8bfa5cc2aae3de2b975040f6352fee46127b7ccba2b746f11cfdd39a",
+ "chksum_sha256": "eb90d955ee19eec7588a60aa07734565c56aa91d1ddc615b8624ca50e078b73d",
"format": 1
},
{
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_linux.yml",
+ "name": "roles/zabbix_agent/tasks/remove.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b494062174c84263883f71ed50842255bc270246afbe12f8573bc2e81ea10a9",
+ "chksum_sha256": "a61614e0f51ef087ef82eb2319b8546f213c22aff785afdf56f007c442b6cb2c",
"format": 1
},
{
- "name": "roles/zabbix_agent/tasks/tlspsk_auto_windows.yml",
+ "name": "roles/zabbix_agent/tasks/selinux.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf3c3976e28fb3846de16e0a0e01eae0184d2cf57b79da0a342a5d5b35b187d8",
+ "chksum_sha256": "4e5323cf7121df09fffb2b53749f46bbd6fae95eb715febddc01a086330f55ee",
"format": 1
},
{
@@ -1408,17 +1422,10 @@
"format": 1
},
{
- "name": "roles/zabbix_agent/templates/zabbix_agent2.conf.j2",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "123866b68a1511d56e14a523035583e6a3adcf8d234abd086f4b7ba0e322df4b",
- "format": 1
- },
- {
- "name": "roles/zabbix_agent/templates/zabbix_agentd.conf.j2",
+ "name": "roles/zabbix_agent/templates/agent.conf.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3c56ebbe34908eaa981e1838ee17ab90729814dce584c1b3e66ab2455ac6f0f",
+ "chksum_sha256": "25f9094a4ad3df674d6c2a69b70b2533931e5dc1a3fa5b96e9165583e3f0546f",
"format": 1
},
{
@@ -1432,14 +1439,14 @@
"name": "roles/zabbix_agent/vars/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3fdab5e28c143947f965aa38b670a83726994f8e6fb37b7fac24350c9471a28",
+ "chksum_sha256": "ac1d08f8f30dcacd8fb63cd281bb86864df6fc75fa64bb259c172351361ebf25",
"format": 1
},
{
"name": "roles/zabbix_agent/vars/RedHat.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "587c2d75417faa8978fdcc729a4b940b4530c5a02df59a049985ab684742e90c",
+ "chksum_sha256": "39a539d0a9c528df8dae3d18f2c39c567f335c40a0da40d3f0798734ad0b78fa",
"format": 1
},
{
@@ -1450,6 +1457,20 @@
"format": 1
},
{
+ "name": "roles/zabbix_agent/vars/agent2_vars.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3fddc386eaaf3a02f856496a82ba80cfd55b5141be86e8065c1dfffd53d2c40b",
+ "format": 1
+ },
+ {
+ "name": "roles/zabbix_agent/vars/agent_vars.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c685651c7d6a30c6f777b6e03b127415be7fa3c117ac92a8deadf143b30c0eda",
+ "format": 1
+ },
+ {
"name": "roles/zabbix_agent/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -1467,7 +1488,7 @@
"name": "roles/zabbix_javagateway/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "168aae7236576e4baf77c7b056833d5c90527c99365d7c9487f05f7af4d7281a",
+ "chksum_sha256": "73778abc24b8eb27b6243c4d04bd38dc8030f8092bfde3297c15221ba565eec6",
"format": 1
},
{
@@ -1537,7 +1558,7 @@
"name": "roles/zabbix_javagateway/tasks/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44f9adfe675690b1901bf67f5c60da034bbbc4448476f3a2d69993a180f9dedc",
+ "chksum_sha256": "374d257d5e917832cb625815075a3cb2f2c9fed5f84a78d66b357ce5b49590ed",
"format": 1
},
{
@@ -1579,7 +1600,7 @@
"name": "roles/zabbix_javagateway/vars/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be7796f9ccb7e43552833863142669694201bba88e396406f29f8881e28f381a",
+ "chksum_sha256": "d10507c4456ec9dc27e7b7f40e30021b77c6fdeeda4a5d0d05c8e40c8e7130d7",
"format": 1
},
{
@@ -1607,7 +1628,7 @@
"name": "roles/zabbix_proxy/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fb7298372379197055d9221660fa37ec078d7b2c5aaa4545ed09a943daa7baf",
+ "chksum_sha256": "830cf0d2a76423c1146ab807bee53a80bb9f5839ee4954b1cc804ae20207321c",
"format": 1
},
{
@@ -1621,7 +1642,7 @@
"name": "roles/zabbix_proxy/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33dcb508919b88bf2ef0b1c6bc5b121a67392553e54a58b55bf572bf00ea5d53",
+ "chksum_sha256": "8d5ca9ac229870d22e2dca4de415d75bfacab2b28936226e2de5287f15231064",
"format": 1
},
{
@@ -1677,49 +1698,49 @@
"name": "roles/zabbix_proxy/tasks/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "92c82d7303b5ec4b9e688bdb8017e5f213993f9c66007ad95846b2db30cfb414",
+ "chksum_sha256": "57390fe9245af2542ec93677de19af09d5a87bd49118e814a032d3890a7e0d31",
"format": 1
},
{
"name": "roles/zabbix_proxy/tasks/RedHat.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b521fd4998782a7fd2e5e8c4bea84d5436dd9e63b0732a0590034181e5c6329",
+ "chksum_sha256": "933044c1ae7c99784e5136f6bedb840df2db4a7c6b7ab294438dda1041e42251",
"format": 1
},
{
- "name": "roles/zabbix_proxy/tasks/main.yml",
+ "name": "roles/zabbix_proxy/tasks/initialize-mysql.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3751caa6aaa36f2f878f60bd81a53ff4fb6dc1e45c0e9ec6c8c195fc3da765a1",
+ "chksum_sha256": "832de47bd3469c7c00fd3e5ec1550cf7a288b7e00b021b14e2e673231380bc2e",
"format": 1
},
{
- "name": "roles/zabbix_proxy/tasks/mysql.yml",
+ "name": "roles/zabbix_proxy/tasks/initialize-pgsql.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea5c5f6380e122e9b658d4acd5e8a382fa6c564673ec37c947a92f95f0b24ec5",
+ "chksum_sha256": "066bd0942f8b3e81ed6ef627f6ea8d15e4a487cdfcf94052c0cae32cefb04b92",
"format": 1
},
{
- "name": "roles/zabbix_proxy/tasks/postgresql.yml",
+ "name": "roles/zabbix_proxy/tasks/initialize-sqlite3.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b97ce83bb12d6f435c0363e4f39888c87d0be53db6561168771add354fdd41f",
+ "chksum_sha256": "039ea53c97e8493e8911b8cbec378fb88c960be4eb0c116b30ad75a45e9bc2ee",
"format": 1
},
{
- "name": "roles/zabbix_proxy/tasks/selinux.yml",
+ "name": "roles/zabbix_proxy/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fcaf2583198e89e108cc2b2a7b777fa6e2dac1a2a1efd88a6c44308d79a6362",
+ "chksum_sha256": "4302458bdf7683522790560b0d9f74c6f7ad3874ec3941e13516f556f26a63d5",
"format": 1
},
{
- "name": "roles/zabbix_proxy/tasks/sqlite3.yml",
+ "name": "roles/zabbix_proxy/tasks/selinux.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c3b276cddc6afa88a48224fa274ca8ea7b7b59d74655197bddec0545a38309d",
+ "chksum_sha256": "5fcaf2583198e89e108cc2b2a7b777fa6e2dac1a2a1efd88a6c44308d79a6362",
"format": 1
},
{
@@ -1733,7 +1754,7 @@
"name": "roles/zabbix_proxy/templates/zabbix_proxy.conf.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8133d6ef95a09b0e435324b3005c524745eb0e194cfa591cb3d6f4c36a312590",
+ "chksum_sha256": "491e64286da2b61338dffd1e5967dbdca598acdd16fb190b45f7d35046255791",
"format": 1
},
{
@@ -1747,21 +1768,14 @@
"name": "roles/zabbix_proxy/vars/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c3453870dcd59ed9f69f3fdac8a1b46bf683a87ab86e87cc69b28d3951cce43",
+ "chksum_sha256": "e1893711fa151d4072c1195d178fcaad62099e4d972220bece3ef76efc69f1c1",
"format": 1
},
{
"name": "roles/zabbix_proxy/vars/RedHat.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50f4bb5b1ab62a8f1128dde98e820349b164ef4c1583e6f78d61279d19e56c48",
- "format": 1
- },
- {
- "name": "roles/zabbix_proxy/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "599afb847ce1287ea279d2f5864a6fc6eb240323ef2b92a408e11c8472b08189",
+ "chksum_sha256": "9ba0777a21b8a56b138adba0270f51603d5f95c3c10e82d4b6351cc08bb9a8cd",
"format": 1
},
{
@@ -1782,7 +1796,7 @@
"name": "roles/zabbix_server/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7b83a2f01c4ef1388cba62476dbbe0596c483b345f4a79aabc2cd107cf16b61",
+ "chksum_sha256": "dd08d146c7fbbdef236be08abb9c035456e7d5f635a5748200fb7cbdb965f734",
"format": 1
},
{
@@ -1796,7 +1810,7 @@
"name": "roles/zabbix_server/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e77fe177f086c46a5fbce843360203fcd6b0bd3cb70cbce6582b73e50fb4f1d8",
+ "chksum_sha256": "da72f5ec897d8b46a5a73686d679955d63f7e50ca8877b8905beb1bd915adfb1",
"format": 1
},
{
@@ -1859,21 +1873,21 @@
"name": "roles/zabbix_server/tasks/RedHat.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c843b63e5dc9c44dfa748b6f6b5524f796d1b7a7bbf6151f89e7726770c399ce",
+ "chksum_sha256": "04cf0ce5034e975fcfe02cefd9ba91e58bd1b89c224a26c17da0a569f9c12f7e",
"format": 1
},
{
"name": "roles/zabbix_server/tasks/initialize-mysql.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f391750aff2b4e7c74c292036829fe7a44bb3fbfaed9bdb12393542b59132502",
+ "chksum_sha256": "5be33df40ae618c8bc7654155c7b2c697569c7cc16e5edecfd7805d3943fad24",
"format": 1
},
{
"name": "roles/zabbix_server/tasks/initialize-pgsql.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df0005a12cc1d89f1a361f1d5f2764600a24527a567cce4b38f69490651f5541",
+ "chksum_sha256": "dcfebdf9a3d57022bfc9287c44933fdb1675ebc6cd51c874941d6d1385da0a7f",
"format": 1
},
{
@@ -1894,7 +1908,7 @@
"name": "roles/zabbix_server/tasks/selinux.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f82baa01937f5e3e29b02be6fefd51f7c6cafcefebd44e9a3cf2a8c22d9c5e8c",
+ "chksum_sha256": "927e5e8dbd9766b738e96aa48c8282283e34d0a6a8b3887e73d6128af5c6b184",
"format": 1
},
{
@@ -1922,7 +1936,7 @@
"name": "roles/zabbix_server/vars/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "428c1f85055c5afdccf997e4c6ab8fd4b9999760f93517dcbea2f41c88f82cb5",
+ "chksum_sha256": "568d7280dc9153f941220ba07fc94d66f92669a6b64ca1fffc38c847e2748c77",
"format": 1
},
{
@@ -1957,7 +1971,7 @@
"name": "roles/zabbix_web/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c52def2249d4416ac228eb03d6b57322578e2fb5c118967fb1e11634ff66f64f",
+ "chksum_sha256": "836bbcc7e6ca298e93435670eefa94582bb44bfc7abf6a42e76ee814d41b88cb",
"format": 1
},
{
@@ -2076,7 +2090,7 @@
"name": "roles/zabbix_web/templates/php-fpm.conf.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d81d68fe5d62c8199e5d360be16bf1b1f27d72c4a3926d1e45abe720bb458cb8",
+ "chksum_sha256": "ac3253913106eb5e48e15335f95f3a7cbd8fe40eec84ca35fc023f296fd9341f",
"format": 1
},
{
@@ -2097,7 +2111,7 @@
"name": "roles/zabbix_web/vars/Debian.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42d65bb39698381f067773966ab6605f96b6b7ced0e5d63e217ac0877d7dc7ad",
+ "chksum_sha256": "8754339ac10bbedd824445da405c7725251784224922cfede41aa132ea302314",
"format": 1
},
{
@@ -2409,6 +2423,62 @@
"format": 1
},
{
+ "name": "tests/integration/targets/test_zabbix_discoveryrule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63ae23343a8854777c0e014be5fdd33db6a6b78f96d22a889ad78556dc0ffa81",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4eec6e2f6d02642f42b6f8b8f1e82d1928d24d796a598d739ba43d6b61778835",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b452b8a9c1c973f25dd408e2cc94966005496f9398ad3b4caf0daf63f603e034",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_teardown.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0784486108c91c1c600e6fe9274fe41c74cfac179cc85a682a6f96a860fb33f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ddd02d4b606be9ca946c00f373c9db8c205847555e1d73fd2cb9f65cc220e90a",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/test_zabbix_globalmacro",
"ftype": "dir",
"chksum_type": null,
@@ -2626,6 +2696,62 @@
"format": 1
},
{
+ "name": "tests/integration/targets/test_zabbix_host_events_update",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63ae23343a8854777c0e014be5fdd33db6a6b78f96d22a889ad78556dc0ffa81",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4a202829b4dcf0556eda80dcfdf315aa40540997e4286cfd441eac6ef95fa131",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3b0a0d8ba4b38321e60d42f0d93331473371e2dbd1e0e53a3f7a1ddce6cfa317",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_teardown.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f37b00a4d8ab112a8b6d43378eb9fc0c2a334ef12811ed4c69e20fe466339594",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "34d77f1f1cbb6e639aca44c1f91b55106091f8e2ac0c22f6fbb90ecaeb1b28f5",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/test_zabbix_host_info",
"ftype": "dir",
"chksum_type": null,
@@ -2731,6 +2857,118 @@
"format": 1
},
{
+ "name": "tests/integration/targets/test_zabbix_item",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63ae23343a8854777c0e014be5fdd33db6a6b78f96d22a889ad78556dc0ffa81",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "88728d40518ddfcaba88ae038b478cbb6758c601b93c7e384c4fc69ab3561915",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/tasks/zabbix_setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b452b8a9c1c973f25dd408e2cc94966005496f9398ad3b4caf0daf63f603e034",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/tasks/zabbix_teardown.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0784486108c91c1c600e6fe9274fe41c74cfac179cc85a682a6f96a860fb33f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_item/tasks/zabbix_tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bf05aaec5b1e5775d76151e653629c2fac4e5f28b73fb68e6a77faa2e20892ae",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63ae23343a8854777c0e014be5fdd33db6a6b78f96d22a889ad78556dc0ffa81",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eb08b68052b69e34134588369ffe3aed732f71416d7782a2a1148ac95a83e94d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "82f652382e99595d8c98490c12357554a80fb3f44e0d9266bbd7048f81b6683e",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_teardown.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0784486108c91c1c600e6fe9274fe41c74cfac179cc85a682a6f96a860fb33f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0b0361125b18999c1b70bc8794c06759651b7c84dd9dba77c9ce2e3b88d8c55b",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/test_zabbix_maintenance",
"ftype": "dir",
"chksum_type": null,
@@ -3333,6 +3571,118 @@
"format": 1
},
{
+ "name": "tests/integration/targets/test_zabbix_trigger",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63ae23343a8854777c0e014be5fdd33db6a6b78f96d22a889ad78556dc0ffa81",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "51268021168f4a1d99aedf7f8721ad5181edc8307b770daf5781b13193ce0849",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/tasks/zabbix_setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ad14bba00c032393d931c90dc09d984ddae41f37b444ce60a5a6262148f7f029",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/tasks/zabbix_teardown.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a8d4e25b566a1d73a8a5d60d4402836c129001a84c3b91091b8fd7ac7ec3b5f3",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_trigger/tasks/zabbix_tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "970d7065d3f99a4611ea746417e9a09c921821ab538e5911c4834f142d170dea",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63ae23343a8854777c0e014be5fdd33db6a6b78f96d22a889ad78556dc0ffa81",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d039addf6c1a402806568d87a796cc7c5cd65c84fe688e5a68b1fbb55a69f582",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d85a1ca01851714d4c4c4f5149ee6c414b553f062cb00ae1071f108e0cba3533",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_teardown.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e0a57d0d626d8a91bb298104a2dad6504054134224057ba4371cb3c362d524da",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "539f9a3ddec14e95a711e2622142d000ffd677cc77bf8f237d8ac196262e6986",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/test_zabbix_user",
"ftype": "dir",
"chksum_type": null,
@@ -3539,7 +3889,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "841378afa489d7ab18a26f89a6c8a91949c1fb0c05c54b00ab5518157d83f3fa",
+ "chksum_sha256": "4cb82a3565187516b368d9f5dfceeb756f5d9aff8eb9cd867bfea2bf96595cf7",
"format": 1
},
{
@@ -3581,7 +3931,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e78a31db573596c05c1b54e4da2ab59b53b57c8e19b41694e16fc8a600d366e8",
+ "chksum_sha256": "21818a52c2f3c431ee8614f87a2be5dcec3e8350b613d20028f83a3fe3085d8d",
"format": 1
},
{
diff --git a/ansible_collections/community/zabbix/MANIFEST.json b/ansible_collections/community/zabbix/MANIFEST.json
index 803b2a538..e4395622f 100644
--- a/ansible_collections/community/zabbix/MANIFEST.json
+++ b/ansible_collections/community/zabbix/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "community",
"name": "zabbix",
- "version": "2.4.0",
+ "version": "2.5.1",
"authors": [
"Dusan Matejka (@D3DeFi)",
"sky-joker (@sky-joker)",
@@ -35,7 +35,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48923f5d0d4edaa37a77a0dda92c0f41f762a24a9d7ab4bd04c816cf9f3bd73a",
+ "chksum_sha256": "2240b795e1121857fa2806c32e30cfc339d18ffad8914f097414ceaba4733eac",
"format": 1
},
"format": 1
diff --git a/ansible_collections/community/zabbix/README.md b/ansible_collections/community/zabbix/README.md
index 889645483..6663beb2d 100644
--- a/ansible_collections/community/zabbix/README.md
+++ b/ansible_collections/community/zabbix/README.md
@@ -104,7 +104,7 @@ You can also include it in a `requirements.yml` file along with other required c
---
collections:
- name: community.zabbix
- version: 2.4.0
+ version: 2.5.1
- name: ansible.posix
version: 1.3.0
- name: community.general
diff --git a/ansible_collections/community/zabbix/changelogs/.plugin-cache.yaml b/ansible_collections/community/zabbix/changelogs/.plugin-cache.yaml
index 78da79cd3..1fd62c57e 100644
--- a/ansible_collections/community/zabbix/changelogs/.plugin-cache.yaml
+++ b/ansible_collections/community/zabbix/changelogs/.plugin-cache.yaml
@@ -47,6 +47,11 @@ plugins:
name: zabbix_discovery_rule
namespace: ''
version_added: null
+ zabbix_discoveryrule:
+ description: Create/delete Zabbix discovery rules
+ name: zabbix_discoveryrule
+ namespace: ''
+ version_added: null
zabbix_globalmacro:
description: Create/update/delete Zabbix Global macros
name: zabbix_globalmacro
@@ -77,6 +82,11 @@ plugins:
name: zabbix_host_events_info
namespace: ''
version_added: null
+ zabbix_host_events_update:
+ description: update the status of event(s).
+ name: zabbix_host_events_update
+ namespace: ''
+ version_added: null
zabbix_host_info:
description: Gather information about Zabbix host
name: zabbix_host_info
@@ -92,6 +102,16 @@ plugins:
name: zabbix_housekeeping
namespace: ''
version_added: 1.6.0
+ zabbix_item:
+ description: Create/delete Zabbix items
+ name: zabbix_item
+ namespace: ''
+ version_added: null
+ zabbix_itemprototype:
+ description: Create/delete Zabbix item prototypes
+ name: zabbix_itemprototype
+ namespace: ''
+ version_added: null
zabbix_maintenance:
description: Create Zabbix maintenance windows
name: zabbix_maintenance
@@ -162,6 +182,16 @@ plugins:
name: zabbix_token
namespace: ''
version_added: 2.1.0
+ zabbix_trigger:
+ description: Create/delete Zabbix triggers
+ name: zabbix_trigger
+ namespace: ''
+ version_added: null
+ zabbix_triggerprototype:
+ description: Create/delete Zabbix triggerprototypes
+ name: zabbix_triggerprototype
+ namespace: ''
+ version_added: null
zabbix_user:
description: Create/update/delete Zabbix users
name: zabbix_user
@@ -196,4 +226,4 @@ plugins:
shell: {}
strategy: {}
vars: {}
-version: 2.4.0
+version: 2.5.1
diff --git a/ansible_collections/community/zabbix/changelogs/changelog.yaml b/ansible_collections/community/zabbix/changelogs/changelog.yaml
index 608160c0b..e83d608fc 100644
--- a/ansible_collections/community/zabbix/changelogs/changelog.yaml
+++ b/ansible_collections/community/zabbix/changelogs/changelog.yaml
@@ -3,1150 +3,1187 @@ releases:
0.1.0:
changes:
bugfixes:
- - zabbix_action - allow str values for ``esc_period`` options (see `#66841 <https://github.com/ansible/ansible/pull/66841>`_).
- - zabbix_action - no longer requires ``esc_period`` and ``event_source`` arguments
- when ``state=absent``.
- - zabbix_host - now supports configuring user macros and host tags on the managed
- host (see `#66777 <https://github.com/ansible/ansible/pull/66777>`_).
- - zabbix_host_info - ``host_name`` based search results now include host groups.
- - zabbix_hostmacro - ``macro_name`` now accepts macros in zabbix native format
- as well (e.g. ``{$MACRO}``).
- - zabbix_hostmacro - ``macro_value`` is no longer required when ``state=absent``.
- - zabbix_proxy (module) - ``interface`` sub-options ``type`` and ``main`` are
- now deprecated and will be removed in community.general 3.0.0. Also, the values
- passed to ``interface`` are now checked for correct types and unexpected keys.
- - zabbix_proxy (module) - added option proxy_address for comma-delimited list
- of IP/CIDR addresses or DNS names to accept active proxy requests from.
- - zabbix_template - add new option omit_date to remove date from exported/dumped
- template (see `#67302 <https://github.com/ansible/ansible/pull/67302>`_).
- - zabbix_template - adding new update rule templateLinkage.deleteMissing for
- newer zabbix versions (see `#66747 <https://github.com/ansible/ansible/pull/66747>`_).
- - zabbix_template_info - add new option omit_date to remove date from exported/dumped
- template (see `#67302 <https://github.com/ansible/ansible/pull/67302>`_).
+ - zabbix_action - allow str values for ``esc_period`` options (see `#66841 <https://github.com/ansible/ansible/pull/66841>`_).
+ - zabbix_action - no longer requires ``esc_period`` and ``event_source`` arguments
+ when ``state=absent``.
+ - zabbix_host - now supports configuring user macros and host tags on the managed
+ host (see `#66777 <https://github.com/ansible/ansible/pull/66777>`_).
+ - zabbix_host_info - ``host_name`` based search results now include host groups.
+ - zabbix_hostmacro - ``macro_name`` now accepts macros in zabbix native format
+ as well (e.g. ``{$MACRO}``).
+ - zabbix_hostmacro - ``macro_value`` is no longer required when ``state=absent``.
+ - zabbix_proxy (module) - ``interface`` sub-options ``type`` and ``main`` are
+ now deprecated and will be removed in community.general 3.0.0. Also, the values
+ passed to ``interface`` are now checked for correct types and unexpected keys.
+ - zabbix_proxy (module) - added option proxy_address for comma-delimited list
+ of IP/CIDR addresses or DNS names to accept active proxy requests from.
+ - zabbix_template - add new option omit_date to remove date from exported/dumped
+ template (see `#67302 <https://github.com/ansible/ansible/pull/67302>`_).
+ - zabbix_template - adding new update rule templateLinkage.deleteMissing for
+ newer zabbix versions (see `#66747 <https://github.com/ansible/ansible/pull/66747>`_).
+ - zabbix_template_info - add new option omit_date to remove date from exported/dumped
+ template (see `#67302 <https://github.com/ansible/ansible/pull/67302>`_).
deprecated_features:
- - zabbix_proxy (module) - deprecates ``interface`` sub-options ``type`` and
- ``main`` when proxy type is set to passive via ``status=passive``. Make sure
- these suboptions are removed from your playbook as they were never supported
- by Zabbix in the first place.
+ - zabbix_proxy (module) - deprecates ``interface`` sub-options ``type`` and
+ ``main`` when proxy type is set to passive via ``status=passive``. Make sure
+ these suboptions are removed from your playbook as they were never supported
+ by Zabbix in the first place.
minor_changes:
- - zabbix inventory plugin now no longer prints DeprecationWarning when used
- with Python3 due to SafeConfigParser.
- - zabbix_action - arguments ``event_source`` and ``esc_period`` no longer required
- when ``state=absent``.
- - zabbix_host - fixed inventory_mode key error, which occurs with Zabbix 4.4.1
- or more (see `#65304 <https://github.com/ansible/ansible/issues/65304>`_).
- - zabbix_host - was not possible to update a host where visible_name was not
- set in zabbix.
- - zabbix_mediatype - Fixed to support zabbix 4.4 or more and python3 (see `#67693
- <https://github.com/ansible/ansible/pull/67693>`_).
- - zabbix_template - fixed error when providing empty ``link_templates`` to the
- module (see `#66417 <https://github.com/ansible/ansible/issues/66417>`_).
- - zabbix_template - fixed invalid (non-importable) output provided by exporting
- XML (see `#66466 <https://github.com/ansible/ansible/issues/66466>`_).
- - zabbix_user - Fixed an issue where module failed with zabbix 4.4 or above
- (see `#67475 <https://github.com/ansible/ansible/pull/67475>`_).
- release_summary: '| Release date: 2020-06-15
+ - zabbix inventory plugin now no longer prints DeprecationWarning when used
+ with Python3 due to SafeConfigParser.
+ - zabbix_action - arguments ``event_source`` and ``esc_period`` no longer required
+ when ``state=absent``.
+ - zabbix_host - fixed inventory_mode key error, which occurs with Zabbix 4.4.1
+ or more (see `#65304 <https://github.com/ansible/ansible/issues/65304>`_).
+ - zabbix_host - was not possible to update a host where visible_name was not
+ set in zabbix.
+ - zabbix_mediatype - Fixed to support zabbix 4.4 or more and python3 (see `#67693
+ <https://github.com/ansible/ansible/pull/67693>`_).
+ - zabbix_template - fixed error when providing empty ``link_templates`` to the
+ module (see `#66417 <https://github.com/ansible/ansible/issues/66417>`_).
+ - zabbix_template - fixed invalid (non-importable) output provided by exporting
+ XML (see `#66466 <https://github.com/ansible/ansible/issues/66466>`_).
+ - zabbix_user - Fixed an issue where module failed with zabbix 4.4 or above
+ (see `#67475 <https://github.com/ansible/ansible/pull/67475>`_).
+ release_summary: "| Release date: 2020-06-15
- '
+ "
fragments:
- - 010release.yml
- release_date: '2020-06-15'
+ - 010release.yml
+ release_date: "2020-06-15"
0.2.0:
changes:
bugfixes:
- - zabbix_action - documented ``value2`` parameter and ``notify_all_involved``
- option.
- - zabbix_maintenance - changing value of ``description`` parameter now actually
- updates maintenance's description.
- - zabbix_template - is now able to perform ``state=dump`` when using ``ansible-playbook
- --check``.
- - zabbix_template - no longer imports template from ``template_json`` or ``template_xml``
- when using ``ansible-playbook --check``.
+ - zabbix_action - documented ``value2`` parameter and ``notify_all_involved``
+ option.
+ - zabbix_maintenance - changing value of ``description`` parameter now actually
+ updates maintenance's description.
+ - zabbix_template - is now able to perform ``state=dump`` when using ``ansible-playbook
+ --check``.
+ - zabbix_template - no longer imports template from ``template_json`` or ``template_xml``
+ when using ``ansible-playbook --check``.
minor_changes:
- - Documentation for roles moved to ``docs/`` sub-directory in the collection.
- - New **role zabbix_agent** - previously known as dj-wasabi/zabbix-agent (also
- see `UPGRADE.md <https://github.com/ansible-collections/community.zabbix/blob/main/docs/UPGRADE.md>`_
- for each role).
- - New **role zabbix_javagateway** - previously known as dj-wasabi/zabbix-javagateway.
- - New **role zabbix_proxy** - previously known as dj-wasabi/zabbix-proxy.
- - New **role zabbix_server** - previously known as dj-wasabi/zabbix-server.
- - New **role zabbix_web** - previously known as dj-wasabi/zabbix-web.
- - zabbix_action - new alias ``update_operations`` for ``acknowledge_operations``
- parameter.
- - zabbix_host - ``macros`` now support new macro types ``text`` and ``secret``.
- - zabbix_host - new option ``details`` (additional SNMP details) for ``interfaces``
- parameter.
- - zabbix_host - now supports Zabbix 5.0.
- - zabbix_proxy (module) - now supports Zabbix 5.0.
- - zabbix_screen - ``host_group`` parameter now accepts multiple groups.
- release_summary: '| Release date: 2020-06-15 '
+ - Documentation for roles moved to ``docs/`` sub-directory in the collection.
+ - New **role zabbix_agent** - previously known as dj-wasabi/zabbix-agent (also
+ see `UPGRADE.md <https://github.com/ansible-collections/community.zabbix/blob/main/docs/UPGRADE.md>`_
+ for each role).
+ - New **role zabbix_javagateway** - previously known as dj-wasabi/zabbix-javagateway.
+ - New **role zabbix_proxy** - previously known as dj-wasabi/zabbix-proxy.
+ - New **role zabbix_server** - previously known as dj-wasabi/zabbix-server.
+ - New **role zabbix_web** - previously known as dj-wasabi/zabbix-web.
+ - zabbix_action - new alias ``update_operations`` for ``acknowledge_operations``
+ parameter.
+ - zabbix_host - ``macros`` now support new macro types ``text`` and ``secret``.
+ - zabbix_host - new option ``details`` (additional SNMP details) for ``interfaces``
+ parameter.
+ - zabbix_host - now supports Zabbix 5.0.
+ - zabbix_proxy (module) - now supports Zabbix 5.0.
+ - zabbix_screen - ``host_group`` parameter now accepts multiple groups.
+ release_summary: "| Release date: 2020-06-15 "
fragments:
- - 020release.yml
- release_date: '2020-06-15'
+ - 020release.yml
+ release_date: "2020-06-15"
0.3.0:
changes:
bugfixes:
- - zabbix_action - choices for the ``inventory`` parameter sub option in ``*operations``
- arguments have been clarified to ``manual`` and ``automatic``.
- - zabbix_action - fixed error on changed API fields ``*default_message`` and
- ``*default_subject`` for Zabbix 5.0 (see `#92 <https://github.com/ansible-collections/community.zabbix/pull/92>`_).
- - zabbix_action - module will no longer fail when searching for global script
- provided to ``script_name`` parameter.
- - zabbix_action - now correctly selects mediatype for the (normal|recovery|update)
- operations with Zabbix 4.4 and newer.
- - zabbix_agent - fixed installation of agent on Windows to directories with
- spaces.
- - zabbix_agent - role should no longer fail when looking for ``getenforce``
- binary.
- - zabbix_host - module will no longer convert context part of user macro to
- upper case.
- - zabbix_proxy (role) - will now correctly install python3-libsemanage on RHEL
- OS family.
- - zabbix_service - fixed the zabbix_service has no idempotency with Zabbix 5.0.
- - zabbix_web - now no longer fails when rendering apache vhost template.
+        - zabbix_action - choices for the ``inventory`` parameter sub option in ``*operations``
+          arguments have been clarified to ``manual`` and ``automatic``.
+ - zabbix_action - fixed error on changed API fields ``*default_message`` and
+ ``*default_subject`` for Zabbix 5.0 (see `#92 <https://github.com/ansible-collections/community.zabbix/pull/92>`_).
+ - zabbix_action - module will no longer fail when searching for global script
+ provided to ``script_name`` parameter.
+ - zabbix_action - now correctly selects mediatype for the (normal|recovery|update)
+ operations with Zabbix 4.4 and newer.
+ - zabbix_agent - fixed installation of agent on Windows to directories with
+ spaces.
+ - zabbix_agent - role should no longer fail when looking for ``getenforce``
+ binary.
+ - zabbix_host - module will no longer convert context part of user macro to
+ upper case.
+ - zabbix_proxy (role) - will now correctly install python3-libsemanage on RHEL
+ OS family.
+        - zabbix_service - fixed lack of idempotency with Zabbix 5.0.
+ - zabbix_web - now no longer fails when rendering apache vhost template.
minor_changes:
- - All roles now **support Zabbix 5.0** and by default install this version (see
- `#131 <https://github.com/ansible-collections/community.zabbix/pull/131>`_
- and `#121 <https://github.com/ansible-collections/community.zabbix/pull/121>`_).
- - Roles will now install gnupg on Debian OS family if not present.
- - zabbix_action - no longer requires ``password`` and ``ssh_*key_file`` parameters
- at the same time for ``remote_command`` operations of type SSH.
- - zabbix_action - parameter ``ssh_auth_type`` for SSH ``remote_command`` operation
- now correctly identifies which other parameters are required.
- - zabbix_discovery_rule - refactoring module to use ``module_utils`` classes
- and functions, adjust return values on success, add documentation for return
- values.
- - zabbix_discovery_rule - refactoring the module to remove unnecessary variables
- and fix a variable typo.
- - zabbix_mediatype - new options ``message_templates``, ``description`` and
- many more related to ``type=webhook``.
- - zabbix_mediatype - now supports new ``webhook`` media type.
- release_summary: '| Release date: 2020-07-26
+ - All roles now **support Zabbix 5.0** and by default install this version (see
+ `#131 <https://github.com/ansible-collections/community.zabbix/pull/131>`_
+ and `#121 <https://github.com/ansible-collections/community.zabbix/pull/121>`_).
+ - Roles will now install gnupg on Debian OS family if not present.
+ - zabbix_action - no longer requires ``password`` and ``ssh_*key_file`` parameters
+ at the same time for ``remote_command`` operations of type SSH.
+ - zabbix_action - parameter ``ssh_auth_type`` for SSH ``remote_command`` operation
+ now correctly identifies which other parameters are required.
+ - zabbix_discovery_rule - refactoring module to use ``module_utils`` classes
+ and functions, adjust return values on success, add documentation for return
+ values.
+ - zabbix_discovery_rule - refactoring the module to remove unnecessary variables
+ and fix a variable typo.
+ - zabbix_mediatype - new options ``message_templates``, ``description`` and
+ many more related to ``type=webhook``.
+ - zabbix_mediatype - now supports new ``webhook`` media type.
+ release_summary: "| Release date: 2020-07-26
- '
+ "
fragments:
- - 030release.yml
+ - 030release.yml
modules:
- - description: Create/delete/update Zabbix discovery rules
- name: zabbix_discovery_rule
- namespace: ''
- - description: Create/delete/update Zabbix user groups
- name: zabbix_usergroup
- namespace: ''
- release_date: '2020-07-26'
+ - description: Create/delete/update Zabbix discovery rules
+ name: zabbix_discovery_rule
+ namespace: ""
+ - description: Create/delete/update Zabbix user groups
+ name: zabbix_usergroup
+ namespace: ""
+ release_date: "2020-07-26"
1.0.0:
changes:
breaking_changes:
- - zabbix_javagateway - options ``javagateway_pidfile``, ``javagateway_listenip``,
- ``javagateway_listenport`` and ``javagateway_startpollers`` renamed to ``zabbix_javagateway_xyz``
- (see `UPGRADE.md <https://github.com/ansible-collections/community.zabbix/blob/main/docs/UPGRADE.md>`_).
+ - zabbix_javagateway - options ``javagateway_pidfile``, ``javagateway_listenip``,
+ ``javagateway_listenport`` and ``javagateway_startpollers`` renamed to ``zabbix_javagateway_xyz``
+ (see `UPGRADE.md <https://github.com/ansible-collections/community.zabbix/blob/main/docs/UPGRADE.md>`_).
bugfixes:
- - all roles - a ``handler`` is configured when ``zabbix_http(s)_proxy`` is defined
- which will remove the proxy line from the repository files. This results that
- execution of the roles are not idempotent anymore.
- - zabbix_proxy (role) - ``StartPreprocessors`` only works with version 4.2 or
- higher. When a lower version is used, it will not be added to the configuration.
- - zabbix_proxy (role) - only install the sql files that needs to be executed
- for when ``zabbix_repo`` is set to ``epel``.
- - zabbix_server - ``StartPreprocessors`` only works with version 4.2 or higher.
- When a lower version is used, it will not be added to the configuration.
- - zabbix_server - only install the sql files that needs to be executed for when
- ``zabbix_repo`` is set to ``epel``.
+        - all roles - a ``handler`` is configured when ``zabbix_http(s)_proxy`` is defined
+          which will remove the proxy line from the repository files. As a result,
+          execution of the roles is no longer idempotent.
+ - zabbix_proxy (role) - ``StartPreprocessors`` only works with version 4.2 or
+ higher. When a lower version is used, it will not be added to the configuration.
+ - zabbix_proxy (role) - only install the sql files that needs to be executed
+ for when ``zabbix_repo`` is set to ``epel``.
+ - zabbix_server - ``StartPreprocessors`` only works with version 4.2 or higher.
+ When a lower version is used, it will not be added to the configuration.
+ - zabbix_server - only install the sql files that needs to be executed for when
+ ``zabbix_repo`` is set to ``epel``.
minor_changes:
- - Added the possibility to configure the ``mode`` for the ``zabbix_{agent,server,proxy}_include``
- directories.
- - all roles - added the possibility to configure the ``mode`` for the ``yum``
- repositories files in case it contains credentials.
- - zabbix_agent - ``zabbix-sender`` and ``zabbix-get`` will not be installed
- when ``zabbix_repo`` is set to ``epel``, as they are not part of the repository.
- - zabbix_agent - added option to change between HTTP/HTTPS with ``zabbix_repo_yum_schema``.
- - zabbix_agent - can also install the zabbix-agent2 application when ``zabbix_agent2``
- is set to ``true``.
- - zabbix_proxy (role) - a user and group are created on the host when ``zabbix_repo``
- is set to ``epel``.
- - zabbix_proxy (role) - now supports ``startpreprocessors`` setting and encryption
- when connecting to database (see `#164 <https://github.com/ansible-collections/community.zabbix/pull/164>`_).
- - zabbix_server - a user and group are created on the host when ``zabbix_repo``
- is set to ``epel``.
- - zabbix_server - added option to change between HTTP/HTTPS with ``zabbix_repo_yum_schema``.
- - zabbix_server - now supports ``startpreprocessors`` setting and encryption
- when connecting to database (see `#164 <https://github.com/ansible-collections/community.zabbix/pull/164>`_).
- - zabbix_web - a property is added ``zabbix_web_doubleprecision`` which currently
- is set to ``false`` for default installations. For new installations this
- should be set to ``True``. For upgraded installations, please read database
- `upgrade notes <https://www.zabbix.com/documentation/current/manual/installation/upgrade_notes_500>`_
- (Paragraph "Enabling extended range of numeric (float) values") before enabling
- this option.
- - zabbix_web - added option to change between HTTP/HTTPS with ``zabbix_repo_yum_schema``.
- - zabbix_web - don't remove the files that Zabbix will install during installation
- when you don't want to configure a virtual host configuration.
- release_summary: '| Release date: 2020-08-16
+ - Added the possibility to configure the ``mode`` for the ``zabbix_{agent,server,proxy}_include``
+ directories.
+ - all roles - added the possibility to configure the ``mode`` for the ``yum``
+ repositories files in case it contains credentials.
+ - zabbix_agent - ``zabbix-sender`` and ``zabbix-get`` will not be installed
+ when ``zabbix_repo`` is set to ``epel``, as they are not part of the repository.
+ - zabbix_agent - added option to change between HTTP/HTTPS with ``zabbix_repo_yum_schema``.
+ - zabbix_agent - can also install the zabbix-agent2 application when ``zabbix_agent2``
+ is set to ``true``.
+ - zabbix_proxy (role) - a user and group are created on the host when ``zabbix_repo``
+ is set to ``epel``.
+ - zabbix_proxy (role) - now supports ``startpreprocessors`` setting and encryption
+ when connecting to database (see `#164 <https://github.com/ansible-collections/community.zabbix/pull/164>`_).
+ - zabbix_server - a user and group are created on the host when ``zabbix_repo``
+ is set to ``epel``.
+ - zabbix_server - added option to change between HTTP/HTTPS with ``zabbix_repo_yum_schema``.
+ - zabbix_server - now supports ``startpreprocessors`` setting and encryption
+ when connecting to database (see `#164 <https://github.com/ansible-collections/community.zabbix/pull/164>`_).
+ - zabbix_web - a property is added ``zabbix_web_doubleprecision`` which currently
+ is set to ``false`` for default installations. For new installations this
+ should be set to ``True``. For upgraded installations, please read database
+ `upgrade notes <https://www.zabbix.com/documentation/current/manual/installation/upgrade_notes_500>`_
+ (Paragraph "Enabling extended range of numeric (float) values") before enabling
+ this option.
+ - zabbix_web - added option to change between HTTP/HTTPS with ``zabbix_repo_yum_schema``.
+ - zabbix_web - don't remove the files that Zabbix will install during installation
+ when you don't want to configure a virtual host configuration.
+ release_summary: "| Release date: 2020-08-16
- '
+ "
fragments:
- - 100-release-prep.yaml
- - role-agent-supports-zabbix-agent2.yaml
- - role-all-configure-mode-include.yaml
- - role-all-configure-mode-with-handlers.yaml
- - role-all-fix-epel-installations.yaml
- - role-web-add-doubleprecision.yaml
- - role-web-zabbix_vhost-dont-remove-files.yaml
- release_date: '2020-08-16'
+ - 100-release-prep.yaml
+ - role-agent-supports-zabbix-agent2.yaml
+ - role-all-configure-mode-include.yaml
+ - role-all-configure-mode-with-handlers.yaml
+ - role-all-fix-epel-installations.yaml
+ - role-web-add-doubleprecision.yaml
+ - role-web-zabbix_vhost-dont-remove-files.yaml
+ release_date: "2020-08-16"
1.1.0:
changes:
bugfixes:
- - all roles - missing ``become`` set to ``true`` was added to each task that
- requires admin privleges.
- - zabbix_agent - added new properties and updated documentation to allow for
- correct Zabbix Agent2 configuration.
- - zabbix_agent - fixed bug where Nginx prevented Apache from working as it was
- part of the FPM configuration.
+        - all roles - missing ``become`` set to ``true`` was added to each task that
+          requires admin privileges.
+ - zabbix_agent - added new properties and updated documentation to allow for
+ correct Zabbix Agent2 configuration.
+ - zabbix_agent - fixed bug where Nginx prevented Apache from working as it was
+ part of the FPM configuration.
minor_changes:
- - all roles - added ``zabbix_{agent,web,server,proxy,javagateway}_conf_mode``
- option for configuring a mode of the configuration file for each Zabbix service.
- - zabbix_proxy (role) - added an option ``innodb_default_row_format`` for MariaDB/MySQL
- if it isn't set to ``dynamic``.
- - zabbix_server - fixed installation output when using MySQL database to not
- print PostgreSQL.
- - zabbix_user - ``passwd`` no longer required when ALL groups in ``usrgrps``
- use LDAP as ``gui_access`` (see `#240 <https://github.com/ansible-collections/community.zabbix/issues/232>`_).
- - zabbix_user - no longer requires ``usrgrps`` when ``state=absent`` (see `#240
- <https://github.com/ansible-collections/community.zabbix/issues/232>`_).
- - zabbix_web - added several configuration options for the PHP-FPM setup to
- configure the listen (socket) file.
- - zabbix_web - added support for configuring Zabbix Web with Nginx, same way
- as with Apache.
- release_summary: '| Release date: 2020-10-22
+ - all roles - added ``zabbix_{agent,web,server,proxy,javagateway}_conf_mode``
+ option for configuring a mode of the configuration file for each Zabbix service.
+ - zabbix_proxy (role) - added an option ``innodb_default_row_format`` for MariaDB/MySQL
+ if it isn't set to ``dynamic``.
+ - zabbix_server - fixed installation output when using MySQL database to not
+ print PostgreSQL.
+ - zabbix_user - ``passwd`` no longer required when ALL groups in ``usrgrps``
+ use LDAP as ``gui_access`` (see `#240 <https://github.com/ansible-collections/community.zabbix/issues/232>`_).
+ - zabbix_user - no longer requires ``usrgrps`` when ``state=absent`` (see `#240
+ <https://github.com/ansible-collections/community.zabbix/issues/232>`_).
+ - zabbix_web - added several configuration options for the PHP-FPM setup to
+ configure the listen (socket) file.
+ - zabbix_web - added support for configuring Zabbix Web with Nginx, same way
+ as with Apache.
+ release_summary: "| Release date: 2020-10-22
- '
+ "
fragments:
- - 240-zabbix-user-nopass-ldap.yaml
- - role-agent-correct-usage-acl.yaml
- - role-agent-zabbix-agent2-props.yaml
- - role-server-screen-output.yaml
- - role-web-add-support-for-nginx.yaml
- - role-web-added-some-properties.yaml
- - roles-all-add-become.yaml
- - roles-all-configure-conf-mode.yaml
- - roles-server-proxy-add-innodb-check.yaml
- release_date: '2020-10-22'
+ - 240-zabbix-user-nopass-ldap.yaml
+ - role-agent-correct-usage-acl.yaml
+ - role-agent-zabbix-agent2-props.yaml
+ - role-server-screen-output.yaml
+ - role-web-add-support-for-nginx.yaml
+ - role-web-added-some-properties.yaml
+ - roles-all-add-become.yaml
+ - roles-all-configure-conf-mode.yaml
+ - roles-server-proxy-add-innodb-check.yaml
+ release_date: "2020-10-22"
1.2.0:
changes:
bugfixes:
- - When installing the Zabbix packages, we disable all other yum repositories
- except the one for the Zabbix.
- - zabbix_agent - Agent 2 also be able to use userparameters file.
- - zabbix_agent - Also work on SLES 12 sp5
- - zabbix_agent - Documented the property 'zabbix_proxy_ip' in the documentation.
- - zabbix_agent - There was an task that wasn't able to use an http(s)_proxy
- environment while installing an package.
- - zabbix_agent - Windows - Able to create PSK file
- - zabbix_agent - Windows - Fixing download links to proper version/url
- - zabbix_agent - Windows - Removal of not working property
- - zabbix_agent - Zabbix packages were not able to install properly on Fedora.
- When the packages are installed, the version will be appended to the package
- name. This is eofr all RedHat related OS'es.
- - zabbix_agent - fixed issue with zabbix_agent2_tlspsk_auto having no effect
- when using zabbix_agent2
- - zabbix_agent - fixed issue with zabbix_api_create_hosts and TLS configuration
- when using zabbix_agent2, where zabbix_agent_tls* settings were used instead
- of zabbix_agent2_tls*
- - zabbix_host - module will no longer require ``interfaces`` to be present when
- creating host with Zabbix 5.2 (https://github.com/ansible-collections/community.zabbix/pull/291).
- - zabbix_host - should no longer fail with 'host cannot have more than one default
- interface' error (https://github.com/ansible-collections/community.zabbix/pull/309).
- - zabbix_proxy (role) - Added missing paragraph for the SQLite3 as database.
- - zabbix_proxy (role) - The become option was missing in some essential tasks
- when installing the Zabbix Proxy with SQLite3 as database.
- - zabbix_proxy (role) - Various documentation fixes removing the Zabbix Server
- and replaced it with actual Zabbix Proxy information.
- - zabbix_proxy - Added new property 'zabbix_proxy_ip' to determine ip for host
- running the Zabbix Proxy.
- - zabbix_proxy - The 'interface' option was missing when creating an Proxy via
- the API.
- - zabbix_template - fixed documentation for ``macros`` argument (https://github.com/ansible-collections/community.zabbix/pull/296).
- - zabbix_template - fixed encode error when using Python2 (https://github.com/ansible-collections/community.zabbix/pull/297).
- - zabbix_template - fixed issue when importing templates to zabbix version.
- >= 5.2
- - zabbix_template_info - fixed encode error when using Python2 (https://github.com/ansible-collections/community.zabbix/pull/297).
- - zabbix_user - disable no_log warning for option override_password.
- - zabbix_user - fixed issue where module couldn't create a user since Zabbix
- 5.2 (https://github.com/ansible-collections/community.zabbix/pull/260).
- - zabbix_web - fixed issue Role cannot install Zabbix web 5.0 on RHEL 7 (https://github.com/ansible-collections/community.zabbix/issues/202).
+        - When installing the Zabbix packages, we disable all other yum repositories
+          except the one for Zabbix.
+        - zabbix_agent - Agent 2 is now also able to use the userparameters file.
+        - zabbix_agent - Also works on SLES 12 SP5.
+        - zabbix_agent - Documented the property 'zabbix_proxy_ip'.
+        - zabbix_agent - There was a task that wasn't able to use an http(s)_proxy
+          environment while installing a package.
+ - zabbix_agent - Windows - Able to create PSK file
+ - zabbix_agent - Windows - Fixing download links to proper version/url
+ - zabbix_agent - Windows - Removal of not working property
+        - zabbix_agent - Zabbix packages were not able to install properly on Fedora.
+          When the packages are installed, the version will be appended to the package
+          name. This is for all RedHat-related OSes.
+ - zabbix_agent - fixed issue with zabbix_agent2_tlspsk_auto having no effect
+ when using zabbix_agent2
+ - zabbix_agent - fixed issue with zabbix_api_create_hosts and TLS configuration
+ when using zabbix_agent2, where zabbix_agent_tls* settings were used instead
+ of zabbix_agent2_tls*
+ - zabbix_host - module will no longer require ``interfaces`` to be present when
+ creating host with Zabbix 5.2 (https://github.com/ansible-collections/community.zabbix/pull/291).
+ - zabbix_host - should no longer fail with 'host cannot have more than one default
+ interface' error (https://github.com/ansible-collections/community.zabbix/pull/309).
+ - zabbix_proxy (role) - Added missing paragraph for the SQLite3 as database.
+ - zabbix_proxy (role) - The become option was missing in some essential tasks
+ when installing the Zabbix Proxy with SQLite3 as database.
+ - zabbix_proxy (role) - Various documentation fixes removing the Zabbix Server
+ and replaced it with actual Zabbix Proxy information.
+        - zabbix_proxy - Added new property 'zabbix_proxy_ip' to determine the IP for
+          the host running the Zabbix Proxy.
+        - zabbix_proxy - The 'interface' option was missing when creating a proxy via
+          the API.
+ - zabbix_template - fixed documentation for ``macros`` argument (https://github.com/ansible-collections/community.zabbix/pull/296).
+ - zabbix_template - fixed encode error when using Python2 (https://github.com/ansible-collections/community.zabbix/pull/297).
+        - zabbix_template - fixed issue when importing templates to Zabbix versions
+          >= 5.2.
+ - zabbix_template_info - fixed encode error when using Python2 (https://github.com/ansible-collections/community.zabbix/pull/297).
+ - zabbix_user - disable no_log warning for option override_password.
+ - zabbix_user - fixed issue where module couldn't create a user since Zabbix
+ 5.2 (https://github.com/ansible-collections/community.zabbix/pull/260).
+ - zabbix_web - fixed issue Role cannot install Zabbix web 5.0 on RHEL 7 (https://github.com/ansible-collections/community.zabbix/issues/202).
minor_changes:
- - Updated the roles to support Zabbix 5.2.
- - zabbix_agent - Added a new property `zabbix_agent_dont_detect_ip` when set
- to true, it won't detect the ips and no need to install the python module
- `netaddr`.
- - zabbix_agent - Added parameter `zabbix_agent_package_remove` when set to `true`
- and `zabbix_agent2` is set to `true` it will uninstall the `zabbix-agent`
- service and package.
- - zabbix_agent - added `zabbix_agent_install_agent_only` Will only install the
- Zabbix Agent package and not the `zabbix-sender` or `zabbix-get` packages.
- - zabbix_template - Fixed to decode Unicode Escape of multibyte strings in an
- importing template data(https://github.com/ansible-collections/community.zabbix/pull/226).
- - zabbix_user - added new parameters to set timezone and role_name for users
- (https://github.com/ansible-collections/community.zabbix/pull/260).
- - zabbix_user - user_medias now defaults to None and is optional (https://github.com/ansible-collections/community.zabbix/pull/264).
- - zabbix_web - added `zabbix_web_rhel_release` which enable scl on RHEL (https://github.com/ansible-collections/community.zabbix/pull/266).
- - zabbix_web - quality of life improvements when using Nginx (https://github.com/ansible-collections/community.zabbix/pull/304).
- release_summary: '| Release date: 2021-01-11 | Last major release to support
- Zabbix server 3.X versions in plugins.'
+ - Updated the roles to support Zabbix 5.2.
+        - zabbix_agent - Added a new property `zabbix_agent_dont_detect_ip`; when set
+          to true, the IPs won't be detected and there is no need to install the python
+          module `netaddr`.
+        - zabbix_agent - Added parameter `zabbix_agent_package_remove`; when set to `true`
+          and `zabbix_agent2` is set to `true`, it will uninstall the `zabbix-agent`
+          service and package.
+        - zabbix_agent - added `zabbix_agent_install_agent_only`, which will only install
+          the Zabbix Agent package and not the `zabbix-sender` or `zabbix-get` packages.
+        - zabbix_template - Fixed decoding of Unicode escapes in multibyte strings when
+          importing template data (https://github.com/ansible-collections/community.zabbix/pull/226).
+ - zabbix_user - added new parameters to set timezone and role_name for users
+ (https://github.com/ansible-collections/community.zabbix/pull/260).
+ - zabbix_user - user_medias now defaults to None and is optional (https://github.com/ansible-collections/community.zabbix/pull/264).
+        - zabbix_web - added `zabbix_web_rhel_release` which enables SCL on RHEL (https://github.com/ansible-collections/community.zabbix/pull/266).
+ - zabbix_web - quality of life improvements when using Nginx (https://github.com/ansible-collections/community.zabbix/pull/304).
+ release_summary:
+ "| Release date: 2021-01-11 | Last major release to support
+ Zabbix server 3.X versions in plugins."
fragments:
- - 226-zabbix_template.yaml
- - 254-disable-no-log-warning.yml
- - 260-zabbix_user.yml
- - 264-allow-user_medias-to-be-optional.yaml
- - 266-zabbix_web.yml
- - 297-zabbix_template_modules.yml
- - 309-host-interfaces.yaml
- - agent-missing-proxy-task.yaml
- - agent-not-detect-ips.yml
- - agent-use-version-for-rh.yml
- - agent2-psk-fixes.yml
- - module-zabbix_host-interfaces-not-required.yaml
- - module-zabbix_template-argspec-doc.yaml
- - proxy-able-to-use-interface.yaml
- - role-agent-allow-uninstall-agent.yaml
- - role-agent-userparameter.yaml
- - role-agent-windows-service-handling.yaml
- - role-agent-work-with-sles.yaml
- - role-proxy-sqlite3-zabbix-proxy.yaml
- - roles-all-enable-zabbix-disable-rest.yaml
- - roles-all-update-to-zabbix-52.yml
- - templatescreens-fix.yml
- release_date: '2021-01-11'
+ - 226-zabbix_template.yaml
+ - 254-disable-no-log-warning.yml
+ - 260-zabbix_user.yml
+ - 264-allow-user_medias-to-be-optional.yaml
+ - 266-zabbix_web.yml
+ - 297-zabbix_template_modules.yml
+ - 309-host-interfaces.yaml
+ - agent-missing-proxy-task.yaml
+ - agent-not-detect-ips.yml
+ - agent-use-version-for-rh.yml
+ - agent2-psk-fixes.yml
+ - module-zabbix_host-interfaces-not-required.yaml
+ - module-zabbix_template-argspec-doc.yaml
+ - proxy-able-to-use-interface.yaml
+ - role-agent-allow-uninstall-agent.yaml
+ - role-agent-userparameter.yaml
+ - role-agent-windows-service-handling.yaml
+ - role-agent-work-with-sles.yaml
+ - role-proxy-sqlite3-zabbix-proxy.yaml
+ - roles-all-enable-zabbix-disable-rest.yaml
+ - roles-all-update-to-zabbix-52.yml
+ - templatescreens-fix.yml
+ release_date: "2021-01-11"
1.3.0:
changes:
bugfixes:
- - zabbix_action - now properly filters discovery rule checks by name (https://github.com/ansible-collections/community.zabbix/pull/349).
- - zabbix_agent - corrected version for Windows agents (https://github.com/ansible-collections/community.zabbix/pull/316).
- - zabbix_agent - fixed download URL for MacOS (https://github.com/ansible-collections/community.zabbix/pull/325).
- - zabbix_server - now installs correct MySQL client packages on RHEL8 systems
- (https://github.com/ansible-collections/community.zabbix/pull/343).
- - zabbix_template - fixed an issue with Python2 where module wouldn't decode
- Unicode characters (https://github.com/ansible-collections/community.zabbix/pull/322).
- - zabbix_web - fixed installation of python3-libsemanage package RHEL7 and older
- systems (https://github.com/ansible-collections/community.zabbix/pull/330).
- - zabbix_web - role should now correctly determine naming of PHP packages on
- older systems (https://github.com/ansible-collections/community.zabbix/pull/344).
- - zabbix_web - updated default PHP version for Debian10 (https://github.com/ansible-collections/community.zabbix/pull/323).
+ - zabbix_action - now properly filters discovery rule checks by name (https://github.com/ansible-collections/community.zabbix/pull/349).
+ - zabbix_agent - corrected version for Windows agents (https://github.com/ansible-collections/community.zabbix/pull/316).
+ - zabbix_agent - fixed download URL for MacOS (https://github.com/ansible-collections/community.zabbix/pull/325).
+ - zabbix_server - now installs correct MySQL client packages on RHEL8 systems
+ (https://github.com/ansible-collections/community.zabbix/pull/343).
+ - zabbix_template - fixed an issue with Python2 where module wouldn't decode
+ Unicode characters (https://github.com/ansible-collections/community.zabbix/pull/322).
+ - zabbix_web - fixed installation of python3-libsemanage package RHEL7 and older
+ systems (https://github.com/ansible-collections/community.zabbix/pull/330).
+ - zabbix_web - role should now correctly determine naming of PHP packages on
+ older systems (https://github.com/ansible-collections/community.zabbix/pull/344).
+ - zabbix_web - updated default PHP version for Debian10 (https://github.com/ansible-collections/community.zabbix/pull/323).
minor_changes:
- - zabbix_agent - added support for installations on arm64 systems (https://github.com/ansible-collections/community.zabbix/pull/320).
- - zabbix_proxy - now supports configuring StatsAllowedIP (https://github.com/ansible-collections/community.zabbix/pull/337).
- - zabbix_server - added support for installtions on arm64 systems (https://github.com/ansible-collections/community.zabbix/pull/320).
- - zabbix_web - added support for installtions on arm64 systems (https://github.com/ansible-collections/community.zabbix/pull/320).
- release_summary: '| Release date: 2021-03-20 | Last major release to support
- Zabbix server 3.X versions in plugins.'
+ - zabbix_agent - added support for installations on arm64 systems (https://github.com/ansible-collections/community.zabbix/pull/320).
+ - zabbix_proxy - now supports configuring StatsAllowedIP (https://github.com/ansible-collections/community.zabbix/pull/337).
+        - zabbix_server - added support for installations on arm64 systems (https://github.com/ansible-collections/community.zabbix/pull/320).
+        - zabbix_web - added support for installations on arm64 systems (https://github.com/ansible-collections/community.zabbix/pull/320).
+ release_summary:
+ "| Release date: 2021-03-20 | Last major release to support
+ Zabbix server 3.X versions in plugins."
security_fixes:
- - zabbix_action - no longer exposes remote SSH command password used in operations,
- recovery & acknowledge operations to system logs (https://github.com/ansible-collections/community.zabbix/pull/345).
- - zabbix_discovery_rule - no longer exposes SNMPv3 auth and priv passphrases
- to system logs (https://github.com/ansible-collections/community.zabbix/pull/345).
- - zabbix_host - no longer exposes SNMPv3 auth and priv passphrases to system
- logs (https://github.com/ansible-collections/community.zabbix/pull/345).
+ - zabbix_action - no longer exposes remote SSH command password used in operations,
+ recovery & acknowledge operations to system logs (https://github.com/ansible-collections/community.zabbix/pull/345).
+ - zabbix_discovery_rule - no longer exposes SNMPv3 auth and priv passphrases
+ to system logs (https://github.com/ansible-collections/community.zabbix/pull/345).
+ - zabbix_host - no longer exposes SNMPv3 auth and priv passphrases to system
+ logs (https://github.com/ansible-collections/community.zabbix/pull/345).
fragments:
- - 316-zbx-agent-windows-long-version.yaml
- - 322-zabbix_template.yml
- - 323-zabbix_web.yml
- - 325-zbx-agent-macos.yaml
- - 330-zabbix_web-selinux.yaml
- - 337-zabbix_proxy-statsAllowedIP.yaml
- - 343-zabbix_server-rhel8-mysql.yaml
- - 344-zabbix_web-legacyphp.yaml
- - 345-security-fixes-no_log.yaml
- - 349-zabbix_action-dcheck-filter.yaml
- - raspberry-server.yaml
- - raspberry.yaml
- release_date: '2021-03-20'
+ - 316-zbx-agent-windows-long-version.yaml
+ - 322-zabbix_template.yml
+ - 323-zabbix_web.yml
+ - 325-zbx-agent-macos.yaml
+ - 330-zabbix_web-selinux.yaml
+ - 337-zabbix_proxy-statsAllowedIP.yaml
+ - 343-zabbix_server-rhel8-mysql.yaml
+ - 344-zabbix_web-legacyphp.yaml
+ - 345-security-fixes-no_log.yaml
+ - 349-zabbix_action-dcheck-filter.yaml
+ - raspberry-server.yaml
+ - raspberry.yaml
+ release_date: "2021-03-20"
1.4.0:
changes:
bugfixes:
- - zabbix_agent - StatusPort will be configured only when `zabbix_agent2_statusport`
- is defined (https://github.com/ansible-collections/community.zabbix/pull/378)
- - zabbix_agent - fixed issue preventing installation of zabbix-agent 4.2 on
- Ubuntu Focal 20.04 (https://github.com/ansible-collections/community.zabbix/pull/390)
- - zabbix_agent - role will now configure correct port for hostinterface in Zabbix
- Server if `zabbix_agent2_listenport` is defined (https://github.com/ansible-collections/community.zabbix/pull/400)
- - zabbix_agent - should no longer be failing on Windows platform due to re-running
- all of the tasks for the 2nd time (https://github.com/ansible-collections/community.zabbix/pull/376)
- - zabbix_agent - should no longer fail while cleaning up zabbix_agent installation
- if Zabbix Agent2 is being used (https://github.com/ansible-collections/community.zabbix/pull/409)
- - zabbix_agent - will no longer install zabbix_get package on Debian systems
- when `zabbix_agent_install_agent_only` is defined (https://github.com/ansible-collections/community.zabbix/pull/363)
- - zabbix_host - fixed issue where module was idempotent when multiple host interfaces
- of the same type were present (https://github.com/ansible-collections/community.zabbix/pull/391)
- - zabbix_proxy (role) - will no longer fail on proxy creation in Zabbix Server
- when TLS parameters are used (https://github.com/ansible-collections/community.zabbix/pull/388)
- - zabbix_server - Removed the removal everything from /tmp directory command
- as it removes things that it shouldnt do.
- - zabbix_template - first time import of template now works with Zabbix 5.4
- (https://github.com/ansible-collections/community.zabbix/pull/407), please
- note that rerunning the task will fail as there are breaking changes in Zabbix
- 5.4 API that module not yet covers.
- - zabbix_user - now works with Zabbix 5.4 (https://github.com/ansible-collections/community.zabbix/pull/406)
+ - zabbix_agent - StatusPort will be configured only when `zabbix_agent2_statusport`
+ is defined (https://github.com/ansible-collections/community.zabbix/pull/378)
+ - zabbix_agent - fixed issue preventing installation of zabbix-agent 4.2 on
+ Ubuntu Focal 20.04 (https://github.com/ansible-collections/community.zabbix/pull/390)
+ - zabbix_agent - role will now configure correct port for hostinterface in Zabbix
+ Server if `zabbix_agent2_listenport` is defined (https://github.com/ansible-collections/community.zabbix/pull/400)
+ - zabbix_agent - should no longer be failing on Windows platform due to re-running
+ all of the tasks for the 2nd time (https://github.com/ansible-collections/community.zabbix/pull/376)
+ - zabbix_agent - should no longer fail while cleaning up zabbix_agent installation
+ if Zabbix Agent2 is being used (https://github.com/ansible-collections/community.zabbix/pull/409)
+ - zabbix_agent - will no longer install zabbix_get package on Debian systems
+ when `zabbix_agent_install_agent_only` is defined (https://github.com/ansible-collections/community.zabbix/pull/363)
+        - zabbix_host - fixed issue where module was not idempotent when multiple host
+          interfaces of the same type were present (https://github.com/ansible-collections/community.zabbix/pull/391)
+ - zabbix_proxy (role) - will no longer fail on proxy creation in Zabbix Server
+ when TLS parameters are used (https://github.com/ansible-collections/community.zabbix/pull/388)
+        - zabbix_server - Removed the command that removed everything from the /tmp
+          directory, as it removed things it shouldn't.
+        - zabbix_template - first-time import of a template now works with Zabbix 5.4
+          (https://github.com/ansible-collections/community.zabbix/pull/407); please
+          note that rerunning the task will fail, as there are breaking changes in the
+          Zabbix 5.4 API that the module does not yet cover.
+ - zabbix_user - now works with Zabbix 5.4 (https://github.com/ansible-collections/community.zabbix/pull/406)
minor_changes:
- - all roles were updated to support Zabbix 5.4 release (https://github.com/ansible-collections/community.zabbix/pull/405)
- - new inventory plugin zabbix_inventory (https://github.com/ansible-collections/community.zabbix/pull/373)
- - new module plugin zabbix_globalmacro (https://github.com/ansible-collections/community.zabbix/pull/377)
- - zabbix_agent - `zabbix_agent_src_reinstall` now defaults to `False` (https://github.com/ansible-collections/community.zabbix/pull/403)
- - zabbix_agent - now supports setting AllowKey (https://github.com/ansible-collections/community.zabbix/pull/358)
- - zabbix_globalmacros - it is now possible to create global macros using this
- module (https://github.com/ansible-collections/community.zabbix/pull/377).
- - zabbix_inventory - Created Ansible - Zabbix inventory plugin to create dynamic
- inventory from Zabbix.
- - zabbix_maintenance - it is now possible to target hosts by their technical
- name if it differs from the visible name
- - zabbix_proxy - Add MySQL Python 3 package installation.
- - zabbix_server - Add MySQL Python 3 package installation.
- - zabbix_server - now supports setting StartLLDProcessors (https://github.com/ansible-collections/community.zabbix/pull/361)
- - zabbix_user - now supports parameter `username` as an alternative to `alias`
- (https://github.com/ansible-collections/community.zabbix/pull/406)
- - zabbix_user - removed some of the default values because a configuration should
- be changed only if specified as a parameter (https://github.com/ansible-collections/community.zabbix/pull/382).
- - zabbix_web - now supports setting SAML certificates (https://github.com/ansible-collections/community.zabbix/pull/408)
+ - all roles were updated to support Zabbix 5.4 release (https://github.com/ansible-collections/community.zabbix/pull/405)
+ - new inventory plugin zabbix_inventory (https://github.com/ansible-collections/community.zabbix/pull/373)
+ - new module plugin zabbix_globalmacro (https://github.com/ansible-collections/community.zabbix/pull/377)
+ - zabbix_agent - `zabbix_agent_src_reinstall` now defaults to `False` (https://github.com/ansible-collections/community.zabbix/pull/403)
+ - zabbix_agent - now supports setting AllowKey (https://github.com/ansible-collections/community.zabbix/pull/358)
+ - zabbix_globalmacros - it is now possible to create global macros using this
+ module (https://github.com/ansible-collections/community.zabbix/pull/377).
+ - zabbix_inventory - Created Ansible - Zabbix inventory plugin to create dynamic
+ inventory from Zabbix.
+ - zabbix_maintenance - it is now possible to target hosts by their technical
+ name if it differs from the visible name
+ - zabbix_proxy - Add MySQL Python 3 package installation.
+ - zabbix_server - Add MySQL Python 3 package installation.
+ - zabbix_server - now supports setting StartLLDProcessors (https://github.com/ansible-collections/community.zabbix/pull/361)
+ - zabbix_user - now supports parameter `username` as an alternative to `alias`
+ (https://github.com/ansible-collections/community.zabbix/pull/406)
+ - zabbix_user - removed some of the default values because a configuration should
+ be changed only if specified as a parameter (https://github.com/ansible-collections/community.zabbix/pull/382).
+ - zabbix_web - now supports setting SAML certificates (https://github.com/ansible-collections/community.zabbix/pull/408)
fragments:
- - 350-zabbix_maintenance-visible_name.yaml
- - 373-create_zabbix_inventory.yaml
- - 377-create_globalmacros_module.yaml
- - 382-zabbix_user.yml
- - 391-zabbix_host-intf_idempotency_fix.yml
- - zabbix-proxy-package-installation.yaml
- - zabbix-server-tmp-dir.yml
- - zbx-catch-all.yml
+ - 350-zabbix_maintenance-visible_name.yaml
+ - 373-create_zabbix_inventory.yaml
+ - 377-create_globalmacros_module.yaml
+ - 382-zabbix_user.yml
+ - 391-zabbix_host-intf_idempotency_fix.yml
+ - zabbix-proxy-package-installation.yaml
+ - zabbix-server-tmp-dir.yml
+ - zbx-catch-all.yml
modules:
- - description: Create/update/delete Zabbix Global macros
- name: zabbix_globalmacro
- namespace: ''
+ - description: Create/update/delete Zabbix Global macros
+ name: zabbix_globalmacro
+ namespace: ""
plugins:
inventory:
- - description: Zabbix Inventory Plugin
- name: zabbix_inventory
- namespace: null
- release_date: '2021-06-29'
+ - description: Zabbix Inventory Plugin
+ name: zabbix_inventory
+ namespace: null
+ release_date: "2021-06-29"
1.5.0:
changes:
breaking_changes:
- - all roles now reference other roles and modules via their fully qualified
- collection names, which makes Ansible 2.10 minimum supported version for roles
- (See https://github.com/ansible-collections/community.zabbix/pull/477).
+ - all roles now reference other roles and modules via their fully qualified
+ collection names, which makes Ansible 2.10 minimum supported version for roles
+ (See https://github.com/ansible-collections/community.zabbix/pull/477).
bugfixes:
- - all roles now support installing zabbix 4.0 version on Ubuntu 20.04.
- - all roles now supports installations on Debian 11.
- - zabbix inventory - Change default value for host_zapi_query from list "[]"
- to dict "{}".
- - zabbix_action - should no longer fail with Zabbix version 5.4.
- - zabbix_agent - `zabbix_win_install_dir` no longer ignored for zabbix_agentd.d
- and zabbix log directories.
- - zabbix_agent - auto-recovery for Windows installation has been fixed (https://github.com/ansible-collections/community.zabbix/pull/470).
- - zabbix_agent - deploying zabbix_agent2 under Windows should now be possible
- (Thanks to https://github.com/ansible-collections/community.zabbix/pull/433
- and https://github.com/ansible-collections/community.zabbix/pull/453).
- - zabbix_agent - fixed AutoPSK for Windows deployments (https://github.com/ansible-collections/community.zabbix/pull/450).
- - zabbix_host - Fix error when updating hosts caused by Zabbix bug not returning
- the inventory_mode field for hosts(https://github.com/ansible-collections/community.zabbix/issues/385).
- - zabbix_host - will not break when `tls_psk*` parameters are set with Zabbix
- version 5.4.
- - zabbix_proxy (module) - now supports configuring `tls_psk*` parameters.
- - zabbix_proxy (role) - TLS config should now properly configure certificates.
- - zabbix_proxy (role) - should no longer fail on permission problems wren configured
- to use SQLite database and now installs correct package sqlite3 on Debian
- systems.
- - zabbix_web - `zabbix_nginx_vhost_*` parameters are no longer ignored.
- - zabbix_web - executing role with `--tags` should now correctly include distribution
- specific variables (https://github.com/ansible-collections/community.zabbix/pull/448).
- - zabbix_web - now correctly restarts php-fpm service (https://github.com/ansible-collections/community.zabbix/pull/427).
- - zabbix_web - permissions for accesing php-fpm socket has been fixed (See https://github.com/ansible-collections/community.zabbix/pull/426).
+ - all roles now support installing zabbix 4.0 version on Ubuntu 20.04.
+        - all roles now support installations on Debian 11.
+ - zabbix inventory - Change default value for host_zapi_query from list "[]"
+ to dict "{}".
+ - zabbix_action - should no longer fail with Zabbix version 5.4.
+ - zabbix_agent - `zabbix_win_install_dir` no longer ignored for zabbix_agentd.d
+ and zabbix log directories.
+ - zabbix_agent - auto-recovery for Windows installation has been fixed (https://github.com/ansible-collections/community.zabbix/pull/470).
+ - zabbix_agent - deploying zabbix_agent2 under Windows should now be possible
+ (Thanks to https://github.com/ansible-collections/community.zabbix/pull/433
+ and https://github.com/ansible-collections/community.zabbix/pull/453).
+ - zabbix_agent - fixed AutoPSK for Windows deployments (https://github.com/ansible-collections/community.zabbix/pull/450).
+        - zabbix_host - Fix error when updating hosts caused by Zabbix bug not returning
+          the inventory_mode field for hosts (https://github.com/ansible-collections/community.zabbix/issues/385).
+ - zabbix_host - will not break when `tls_psk*` parameters are set with Zabbix
+ version 5.4.
+ - zabbix_proxy (module) - now supports configuring `tls_psk*` parameters.
+ - zabbix_proxy (role) - TLS config should now properly configure certificates.
+        - zabbix_proxy (role) - should no longer fail on permission problems when configured
+ to use SQLite database and now installs correct package sqlite3 on Debian
+ systems.
+ - zabbix_web - `zabbix_nginx_vhost_*` parameters are no longer ignored.
+ - zabbix_web - executing role with `--tags` should now correctly include distribution
+ specific variables (https://github.com/ansible-collections/community.zabbix/pull/448).
+ - zabbix_web - now correctly restarts php-fpm service (https://github.com/ansible-collections/community.zabbix/pull/427).
+        - zabbix_web - permissions for accessing the php-fpm socket have been fixed (See https://github.com/ansible-collections/community.zabbix/pull/426).
minor_changes:
- - Added requirements.txt to collection root to be used with Ansible Builder.
- See https://ansible-builder.readthedocs.io/en/latest/collection_metadata.html
- - some roles are now using new naming for API connection parameters (https://github.com/ansible-collections/community.zabbix/pull/492
- and https://github.com/ansible-collections/community.zabbix/pull/495).
- - some roles can now utilize an option `zabbix_repo_yum_gpgcheck` to enable/disable
- GPG check for YUM repository (https://github.com/ansible-collections/community.zabbix/pull/438).
- - zabbix inventory - Enabled the usage of environment variables in zabbix inventory
- plugin.
- - zabbix inventory plugin - can now use environment variables ZABBIX_SERVER,
- ZABBIX_USERNAME and ZABBIX_PASSWORD for connection purposes to the Zabbix
- API.
- - zabbix_agent - `zabbix_agent_loadmodule` can also be configured with a list.
- - zabbix_agent - new `zabbix_api_timeout` option.
- - zabbix_agent - now supports DenyKeys configuration.
- - zabbix_hostmacro - now supports creating macros of type secret and vault.
- - zabbix_proxy (role) - new `zabbix_api_timeout` option.
- - zabbix_proxy_info - new module that allows to retrieve information about configured
- Zabbix Proxies.
- - zabbix_server - added support for TimescaleDB (https://github.com/ansible-collections/community.zabbix/pull/428).
+ - Added requirements.txt to collection root to be used with Ansible Builder.
+ See https://ansible-builder.readthedocs.io/en/latest/collection_metadata.html
+ - some roles are now using new naming for API connection parameters (https://github.com/ansible-collections/community.zabbix/pull/492
+ and https://github.com/ansible-collections/community.zabbix/pull/495).
+ - some roles can now utilize an option `zabbix_repo_yum_gpgcheck` to enable/disable
+ GPG check for YUM repository (https://github.com/ansible-collections/community.zabbix/pull/438).
+ - zabbix inventory - Enabled the usage of environment variables in zabbix inventory
+ plugin.
+ - zabbix inventory plugin - can now use environment variables ZABBIX_SERVER,
+ ZABBIX_USERNAME and ZABBIX_PASSWORD for connection purposes to the Zabbix
+ API.
+ - zabbix_agent - `zabbix_agent_loadmodule` can also be configured with a list.
+ - zabbix_agent - new `zabbix_api_timeout` option.
+ - zabbix_agent - now supports DenyKeys configuration.
+ - zabbix_hostmacro - now supports creating macros of type secret and vault.
+ - zabbix_proxy (role) - new `zabbix_api_timeout` option.
+        - zabbix_proxy_info - new module that allows retrieving information about configured
+          Zabbix Proxies.
+ - zabbix_server - added support for TimescaleDB (https://github.com/ansible-collections/community.zabbix/pull/428).
fragments:
- - 0-copy_ignore_txt.yml
- - 415-inventory_mode-fix.yaml
- - 456-zabbix-hostmacro-secret-vault.yml
- - 505-awx_compatibility-fixes.yml
- - zbx-missing-1.5.0.yml
- - zbx-proxy-info.yaml
+ - 0-copy_ignore_txt.yml
+ - 415-inventory_mode-fix.yaml
+ - 456-zabbix-hostmacro-secret-vault.yml
+ - 505-awx_compatibility-fixes.yml
+ - zbx-missing-1.5.0.yml
+ - zbx-proxy-info.yaml
modules:
- - description: Gather information about Zabbix proxy
- name: zabbix_proxy_info
- namespace: ''
- release_date: '2021-11-02'
+ - description: Gather information about Zabbix proxy
+ name: zabbix_proxy_info
+ namespace: ""
+ release_date: "2021-11-02"
1.5.1:
changes:
bugfixes:
- - template - use templateid property when linking templates for ``template.create``
- and ``template.update`` API calls.
- - zabbix inventory - Moved ZABBIX_VALIDATE_CERTS to correct option, validate_certs.
- - zabbix_agent - Create the actual configuration file for Windows setups.
- - zabbix_agent - Fix typo for correct using the zabbix_windows_service.exists
- - zabbix_agent - tlspsk_auto to support become on Linux and ignore on windows
- - zabbix_user - fix zabbix_user require password only on internal.
+ - template - use templateid property when linking templates for ``template.create``
+ and ``template.update`` API calls.
+ - zabbix inventory - Moved ZABBIX_VALIDATE_CERTS to correct option, validate_certs.
+ - zabbix_agent - Create the actual configuration file for Windows setups.
+        - zabbix_agent - Fix typo to correctly use zabbix_windows_service.exists
+        - zabbix_agent - tlspsk_auto to support become on Linux and ignore on Windows
+ - zabbix_user - fix zabbix_user require password only on internal.
minor_changes:
- - Enabled usage of environment variables for modules by adding a fallback lookup
- in the module_utils/helpers.py - zabbix_common_argument_spec
+ - Enabled usage of environment variables for modules by adding a fallback lookup
+ in the module_utils/helpers.py - zabbix_common_argument_spec
fragments:
- - 508-zabbix-env-vars.yml
- - 512-inventory-bugfix.yaml
- - 521-agent-typo.yaml
- - 523-templateid-property.yml
- - 527-agent-windows-config-creation.yaml
- - 537-TLSPSK.yaml
- - 543-internal-user.yaml
- release_date: '2021-12-05'
+ - 508-zabbix-env-vars.yml
+ - 512-inventory-bugfix.yaml
+ - 521-agent-typo.yaml
+ - 523-templateid-property.yml
+ - 527-agent-windows-config-creation.yaml
+ - 537-TLSPSK.yaml
+ - 543-internal-user.yaml
+ release_date: "2021-12-05"
1.6.0:
changes:
bugfixes:
- - 'Various modules and plugins - use vendored version of ``distutils.version``
- instead of the deprecated Python standard library ``distutils`` (https://github.com/ansible-collections/community.zabbix/pull/603).
- This superseedes #597.'
- - ZapiWrapper (module_utils) - fix only partial zabbix version is returned.
- - zabbix_agent - Install Zabbix packages when zabbix_repo == other is used with
- yum.
- - zabbix_agent - Install the Agent for MacOSX sooner than its configuration.
- - zabbix_agent - The ``Install gpg key`` task for Debian did not work when a
- http proxy is configured.
- - zabbix_agent - Use the correct URL with correct version.
- - zabbix_agent - Use the correct path to determine Zabbix Agent 2 installation
- on Windows.
- - zabbix_agent - Using the correct hostgroup as default now.
- - zabbix_agent - fix for the autopsk, incl. tests with Molecule.
- - zabbix_host - Added small notification that an user should have read access
- to get hostgroups overview.
- - zabbix_host - adapter changed properties for interface comparisson
- - zabbix_maintenance - should now work when creating maintenace on Zabbix 6.0
- server
- - zabbix_proxy - 'zcat' the zipped sql files to /tmp before executing it.
- - zabbix_proxy - Check MySQL version before settings mysql_innodb_default_row_format
- value.
- - zabbix_proxy - Install Zabbix packages when zabbix_repo == other is used with
- yum.
- - zabbix_server - 'zcat' the zipped sql files to /tmp before executing it.
- - zabbix_server - Check MySQL version before settings mysql_innodb_default_row_format
- value.
- - zabbix_server - Install Zabbix packages when zabbix_repo == other is used
- with yum.
- - zabbix_template - setting correct null values to fix unintentional changes
- - zabbix_web - Added some default variables if the geerlingguys apache role
- is not used.
- - zabbix_web - Specified the correct versions for php.
+ - "Various modules and plugins - use vendored version of ``distutils.version``
+ instead of the deprecated Python standard library ``distutils`` (https://github.com/ansible-collections/community.zabbix/pull/603).
+          This supersedes #597."
+        - ZapiWrapper (module_utils) - fix issue where only a partial Zabbix version was returned.
+ - zabbix_agent - Install Zabbix packages when zabbix_repo == other is used with
+ yum.
+ - zabbix_agent - Install the Agent for MacOSX sooner than its configuration.
+        - zabbix_agent - The ``Install gpg key`` task for Debian did not work when an
+          HTTP proxy was configured.
+ - zabbix_agent - Use the correct URL with correct version.
+ - zabbix_agent - Use the correct path to determine Zabbix Agent 2 installation
+ on Windows.
+ - zabbix_agent - Using the correct hostgroup as default now.
+ - zabbix_agent - fix for the autopsk, incl. tests with Molecule.
+        - zabbix_host - Added a small notification that a user should have read access
+          to get the hostgroups overview.
+        - zabbix_host - adapted changed properties for interface comparison
+        - zabbix_maintenance - should now work when creating maintenance on a Zabbix 6.0
+          server
+ - zabbix_proxy - 'zcat' the zipped sql files to /tmp before executing it.
+        - zabbix_proxy - Check MySQL version before setting mysql_innodb_default_row_format
+          value.
+ - zabbix_proxy - Install Zabbix packages when zabbix_repo == other is used with
+ yum.
+ - zabbix_server - 'zcat' the zipped sql files to /tmp before executing it.
+        - zabbix_server - Check MySQL version before setting mysql_innodb_default_row_format
+          value.
+ - zabbix_server - Install Zabbix packages when zabbix_repo == other is used
+ with yum.
+ - zabbix_template - setting correct null values to fix unintentional changes
+        - zabbix_web - Added some default variables if the geerlingguy's apache role
+          is not used.
+ - zabbix_web - Specified the correct versions for php.
minor_changes:
- - all modules - prepare for deprecation of distutils LooseVersion.
- - collection - Add dependencies to other collections. This helps Ansible Galaxy
- automatically downloading collections that this collection relies on to run.
- - connection.httpapi (plugin) - add initial httpapi connection plugin.
- - httpapi.jsonrpc (plugin) - add initial httpapi for future handling of json-rpc.
- - new module zabbix authentication for configuring global authentication settings
- in Zabbix Server's Settings section of GUI.
- - new module zabbix_autoregister for configuring global autoregistration settings
- in Zabbix Server's Settings section of GUI.
- - new module zabbix_housekeeping for configuring global housekeeping settings
- in Zabbix Server's Settings section of GUI.
- - test_zabbix_host_info - fix Template/Group names for 5.4
- - test_zabbix_screen - disable testing for screen in 5.4 (deprecated)
- - zabbix_action - additional fixes to make module work with Zabbix 6.0 (https://github.com/ansible-collections/community.zabbix/pull/664)
- - zabbix_action - module ported to work with Zabbix 6.0 (https://github.com/ansible-collections/community.zabbix/pull/648,
- https://github.com/ansible-collections/community.zabbix/pull/653)
- - zabbix_action - should now correctly actions with maintenance_status conditions
- (https://github.com/ansible-collections/community.zabbix/pull/667)
- - zabbix_agent - Check if 'firewalld' exist and is running when handler is executed.
- - zabbix_agent - Fixed use of bare variables in conditions (https://github.com/ansible-collections/community.zabbix/pull/663)
- - zabbix_agent - Install the correct Python libxml2 package on SLES15
- - zabbix_agent - Move inclusion of the apache.yml tasks to later stage during
- execution of role.
- - zabbix_agent - Prepare for Zabbix 6.0.
- - zabbix_agent - Specify a minor version with zabbix_agent_version_minor for
- RH systems.
- - zabbix_agent - There was no way to configure a specific type for the macro.
- - zabbix_agent - Use multiple aliases in the configuration file with ``zabbix_agent_zabbix_alias``
- or ``zabbix_agent2_zabbix_alias``.
- - zabbix_maintenance - added new module parameter `tags`, which allows configuring
- Problem Tags on maintenances.
- - zabbix_maintenance - fixed to work with Zabbix 6.0+ and Python 3.9+ (https://github.com/ansible-collections/community.zabbix/pull/665)
- - zabbix_proxy - Prepare for Zabbix 6.0.
- - zabbix_proxy - Specify a minor version with zabbix_proxy_version_minor for
- RH systems.
- - zabbix_proxy - Support for Sangoma and treat it like a RHEL system.
- - zabbix_server - Check the 'zabbix_server_install_database_client' variable
- in RedHat tasks.
- - zabbix_server - Prepare for Zabbix 6.0.
- - zabbix_server - Specify a minor version with zabbix_server_version_minor for
- RH systems.
- - zabbix_user - change alias property to username (changed in 5.4) (alias is
- now an alias for username)
- - zabbix_user_info - change alias property to username (changed in 5.4) (alias
- is now an alias for username)
- - zabbix_web - Change format ENCRYPTION, VERIFY_HOST from string to boolean.
- - zabbix_web - Specify a minor version with zabbix_web_version_minor for RH
- systems.
+ - all modules - prepare for deprecation of distutils LooseVersion.
+        - collection - Add dependencies to other collections. This helps Ansible Galaxy
+          automatically download collections that this collection relies on to run.
+ - connection.httpapi (plugin) - add initial httpapi connection plugin.
+ - httpapi.jsonrpc (plugin) - add initial httpapi for future handling of json-rpc.
+ - new module zabbix authentication for configuring global authentication settings
+ in Zabbix Server's Settings section of GUI.
+ - new module zabbix_autoregister for configuring global autoregistration settings
+ in Zabbix Server's Settings section of GUI.
+ - new module zabbix_housekeeping for configuring global housekeeping settings
+ in Zabbix Server's Settings section of GUI.
+ - test_zabbix_host_info - fix Template/Group names for 5.4
+ - test_zabbix_screen - disable testing for screen in 5.4 (deprecated)
+ - zabbix_action - additional fixes to make module work with Zabbix 6.0 (https://github.com/ansible-collections/community.zabbix/pull/664)
+ - zabbix_action - module ported to work with Zabbix 6.0 (https://github.com/ansible-collections/community.zabbix/pull/648,
+ https://github.com/ansible-collections/community.zabbix/pull/653)
+ - zabbix_action - should now correctly handle actions with maintenance_status conditions
+ (https://github.com/ansible-collections/community.zabbix/pull/667)
+ - zabbix_agent - Check if 'firewalld' exists and is running when the handler is executed.
+ - zabbix_agent - Fixed use of bare variables in conditions (https://github.com/ansible-collections/community.zabbix/pull/663)
+ - zabbix_agent - Install the correct Python libxml2 package on SLES15
+ - zabbix_agent - Move inclusion of the apache.yml tasks to later stage during
+ execution of role.
+ - zabbix_agent - Prepare for Zabbix 6.0.
+ - zabbix_agent - Specify a minor version with zabbix_agent_version_minor for
+ RH systems.
+ - zabbix_agent - Added the ability to configure a specific type for the macro (previously there was no way to set it).
+ - zabbix_agent - Use multiple aliases in the configuration file with ``zabbix_agent_zabbix_alias``
+ or ``zabbix_agent2_zabbix_alias``.
+ - zabbix_maintenance - added new module parameter `tags`, which allows configuring
+ Problem Tags on maintenances.
+ - zabbix_maintenance - fixed to work with Zabbix 6.0+ and Python 3.9+ (https://github.com/ansible-collections/community.zabbix/pull/665)
+ - zabbix_proxy - Prepare for Zabbix 6.0.
+ - zabbix_proxy - Specify a minor version with zabbix_proxy_version_minor for
+ RH systems.
+ - zabbix_proxy - Support for Sangoma and treat it like a RHEL system.
+ - zabbix_server - Check the 'zabbix_server_install_database_client' variable
+ in RedHat tasks.
+ - zabbix_server - Prepare for Zabbix 6.0.
+ - zabbix_server - Specify a minor version with zabbix_server_version_minor for
+ RH systems.
+ - zabbix_user - change alias property to username (changed in 5.4) (alias is
+ now an alias for username)
+ - zabbix_user_info - change alias property to username (changed in 5.4) (alias
+ is now an alias for username)
+ - zabbix_web - Change format ENCRYPTION, VERIFY_HOST from string to boolean.
+ - zabbix_web - Specify a minor version with zabbix_web_version_minor for RH
+ systems.
fragments:
- - 144-check-if-service-is-running.yml
- - 329-only-do-apache-stuff.yml
- - 356-zbx-maint-tags.yml
- - 360-fix-macos-install.yml
- - 367-add-dependencies.yml
- - 414-specify-minor-version.yml
- - 444-add-initial-httpapi.yml
- - 560-prepare-zabbix-60-roles.yml
- - 570-fix-autopsk.yml
- - 574-fix-windows-url.yml
- - 583-change-var-type.yml
- - 587-host-cannot-have-same-type.yml
- - 588-template-not-found.yml
- - 589-failed-to-get-screen.yml
- - 591-invalid-parameter.yml
- - 592-has-no-attribute.yml
- - 597-prepare-distutils.yml
- - 599-multiple-aliases.yml
- - 603-vendor-distutils.yml
- - 612-use-correct-value.yml
- - 620-add-macro-type.yml
- - 622-fix-environmentsettings.yml
- - 630-zabbix-autoreg.yml
- - 638-sangoma-rhel.yml
- - 645-zabbix-authentication.yml
- - 647-zabbix-housekeeping.yml
- - 648-653-zbx_action.yaml
- - 654-check-zabbix_server_install_database_client-rhel.yml
- - 664-zbx_action.yaml
- - 665-zbx_maint.yaml
- - 668-zabbix_agent-sles15.yml
- - various-wd.yml
+ - 144-check-if-service-is-running.yml
+ - 329-only-do-apache-stuff.yml
+ - 356-zbx-maint-tags.yml
+ - 360-fix-macos-install.yml
+ - 367-add-dependencies.yml
+ - 414-specify-minor-version.yml
+ - 444-add-initial-httpapi.yml
+ - 560-prepare-zabbix-60-roles.yml
+ - 570-fix-autopsk.yml
+ - 574-fix-windows-url.yml
+ - 583-change-var-type.yml
+ - 587-host-cannot-have-same-type.yml
+ - 588-template-not-found.yml
+ - 589-failed-to-get-screen.yml
+ - 591-invalid-parameter.yml
+ - 592-has-no-attribute.yml
+ - 597-prepare-distutils.yml
+ - 599-multiple-aliases.yml
+ - 603-vendor-distutils.yml
+ - 612-use-correct-value.yml
+ - 620-add-macro-type.yml
+ - 622-fix-environmentsettings.yml
+ - 630-zabbix-autoreg.yml
+ - 638-sangoma-rhel.yml
+ - 645-zabbix-authentication.yml
+ - 647-zabbix-housekeeping.yml
+ - 648-653-zbx_action.yaml
+ - 654-check-zabbix_server_install_database_client-rhel.yml
+ - 664-zbx_action.yaml
+ - 665-zbx_maint.yaml
+ - 668-zabbix_agent-sles15.yml
+ - various-wd.yml
modules:
- - description: Update Zabbix authentication
- name: zabbix_authentication
- namespace: ''
- - description: Update Zabbix autoregistration
- name: zabbix_autoregister
- namespace: ''
- - description: Update Zabbix housekeeping
- name: zabbix_housekeeping
- namespace: ''
+ - description: Update Zabbix authentication
+ name: zabbix_authentication
+ namespace: ""
+ - description: Update Zabbix autoregistration
+ name: zabbix_autoregister
+ namespace: ""
+ - description: Update Zabbix housekeeping
+ name: zabbix_housekeeping
+ namespace: ""
plugins:
connection:
- - description: Use httpapi to run command on network appliances
- name: httpapi
- namespace: null
+ - description: Use httpapi to run command on network appliances
+ name: httpapi
+ namespace: null
httpapi:
- - description: HttpApi Plugin for Zabbix
- name: jsonrpc
- namespace: null
- release_date: '2022-03-21'
+ - description: HttpApi Plugin for Zabbix
+ name: jsonrpc
+ namespace: null
+ release_date: "2022-03-21"
1.7.0:
changes:
bugfixes:
- - Include ``PSF-license.txt`` file for ``plugins/module_utils/_version.py``.
- - zabbix_action - will no longer wipe `esc_step_to` and `esc_step_from` (https://github.com/ansible-collections/community.zabbix/issues/692)
- - zabbix_agent - added support for zabbix-agent on Ubuntu 22.04 (https://github.com/ansible-collections/community.zabbix/pull/681)
- - zabbix_agent - now properly creates webroot for issuing LE certificates (https://github.com/ansible-collections/community.zabbix/pull/677,
- https://github.com/ansible-collections/community.zabbix/pull/682)
- - zabbix_proxy (module) - passive proxy should be now correctly created in Zabbix
- 6.0 (https://github.com/ansible-collections/community.zabbix/pull/697)
- - zabbix_proxy (role) - fixed accidental regression of TLS psk file being generated
- for passive agent (#528) caused in (#663) (https://github.com/ansible-collections/community.zabbix/issues/680)
+ - Include ``PSF-license.txt`` file for ``plugins/module_utils/_version.py``.
+ - zabbix_action - will no longer wipe `esc_step_to` and `esc_step_from` (https://github.com/ansible-collections/community.zabbix/issues/692)
+ - zabbix_agent - added support for zabbix-agent on Ubuntu 22.04 (https://github.com/ansible-collections/community.zabbix/pull/681)
+ - zabbix_agent - now properly creates webroot for issuing LE certificates (https://github.com/ansible-collections/community.zabbix/pull/677,
+ https://github.com/ansible-collections/community.zabbix/pull/682)
+ - zabbix_proxy (module) - passive proxy should be now correctly created in Zabbix
+ 6.0 (https://github.com/ansible-collections/community.zabbix/pull/697)
+ - zabbix_proxy (role) - fixed accidental regression of TLS psk file being generated
+ for passive agent (#528) caused in (#663) (https://github.com/ansible-collections/community.zabbix/issues/680)
minor_changes:
- - helpers.helper_compare_lists() changed logic to not consider the order of
- elements in lists. (https://github.com/ansible-collections/community.zabbix/pull/683)
- - zabbix_action, zabbix_maintenance, zabbix_mediatype, zabbix_proxy, zabbix_service
- - updated to work with Zabbix 6.0. (https://github.com/ansible-collections/community.zabbix/pull/683)
- - zabbix_script module added (https://github.com/ansible-collections/community.zabbix/issues/634)
+ - helpers.helper_compare_lists() changed logic to not consider the order of
+ elements in lists. (https://github.com/ansible-collections/community.zabbix/pull/683)
+ - zabbix_action, zabbix_maintenance, zabbix_mediatype, zabbix_proxy, zabbix_service
+ - updated to work with Zabbix 6.0. (https://github.com/ansible-collections/community.zabbix/pull/683)
+ - zabbix_script module added (https://github.com/ansible-collections/community.zabbix/issues/634)
fragments:
- - 1.7.0-missing-fragments.yml
- - 683-zbx60.yml
- - 685-zabbix-script.yml
- - 699-proxy-role-regression.yml
- - psf-license.yml
+ - 1.7.0-missing-fragments.yml
+ - 683-zbx60.yml
+ - 685-zabbix-script.yml
+ - 699-proxy-role-regression.yml
+ - psf-license.yml
modules:
- - description: Create/update/delete Zabbix scripts
- name: zabbix_script
- namespace: ''
- release_date: '2022-05-24'
+ - description: Create/update/delete Zabbix scripts
+ name: zabbix_script
+ namespace: ""
+ release_date: "2022-05-24"
1.8.0:
changes:
bugfixes:
- - zabbix_host - fixed idempotency of the module when hostmacros or snmp interfaces
- are used
- - zabbix_script - fix compatibility with Zabbix <5.4.
- - zabbix_script - should no longer fail when description is not set
+ - zabbix_host - fixed idempotency of the module when hostmacros or snmp interfaces
+ are used
+ - zabbix_script - fix compatibility with Zabbix <5.4.
+ - zabbix_script - should no longer fail when description is not set
minor_changes:
- - roles - Minimized the config templates for the zabbix_agent, zabbix_javagateway,
- zabbix_proxy, and zabbix_server roles to make them version independent.
- - roles - Support for Zabbix 6.2 has been added
- - roles - Updated the version defaults to select the latest version supported
- by an operating system.
- - zabbix_action - added another condition operator naming options (contains,
- does not contain,...)
- - zabbix_agent - Set a ansible_python_interpreter to localhost based on the
- env the playbook is executed from.
- - zabbix_agent - add option to set host tags using ``zabbix_agent_tags``.
- - zabbix_agent - add possiblity to set include file pattern using ``zabbix_agent(2)_include_pattern``
- variable.
- - zabbix_agent - is now able to manage directories and upload files for TLS
- PSK configuration used with Windows operating systems
- - zabbix_agent - new options for Windows installations zabbix_win_install_dir_conf/bin
- - zabbix_agent - when configuring firewalld, make sure the new rule is applied
- immediately
- - zabbix_authentication - module updated to support Zabbix 6.2
- - zabbix_host - using ``tls_psk_identity`` or ``tls_psk`` parameters with Zabbix
- >= 5.4 makes this module non-idempotent
- - zabbix_host - will no longer wipe tls_connect en tls_accept settings when
- not specified on update
- - zabbix_mediatype - added support for time units in ``attempt_interval`` parameter
- - zabbix_template - added support for template groups (Zabbix >= 6.2)
- - zabbix_template_info - add template_id return value
- - zabbix_template_info - add yaml and none formats
- - zabbix_user_directory - added new module to support multiple sources for LDAP
- authentication
+ - roles - Minimized the config templates for the zabbix_agent, zabbix_javagateway,
+ zabbix_proxy, and zabbix_server roles to make them version independent.
+ - roles - Support for Zabbix 6.2 has been added
+ - roles - Updated the version defaults to select the latest version supported
+ by an operating system.
+ - zabbix_action - added another condition operator naming options (contains,
+ does not contain,...)
+ - zabbix_agent - Set ansible_python_interpreter for localhost based on the
+ environment the playbook is executed from.
+ - zabbix_agent - add option to set host tags using ``zabbix_agent_tags``.
+ - zabbix_agent - add possibility to set include file pattern using ``zabbix_agent(2)_include_pattern``
+ variable.
+ - zabbix_agent - is now able to manage directories and upload files for TLS
+ PSK configuration used with Windows operating systems
+ - zabbix_agent - new options for Windows installations zabbix_win_install_dir_conf/bin
+ - zabbix_agent - when configuring firewalld, make sure the new rule is applied
+ immediately
+ - zabbix_authentication - module updated to support Zabbix 6.2
+ - zabbix_host - using ``tls_psk_identity`` or ``tls_psk`` parameters with Zabbix
+ >= 5.4 makes this module non-idempotent
+ - zabbix_host - will no longer wipe tls_connect and tls_accept settings when
+ not specified on update
+ - zabbix_mediatype - added support for time units in ``attempt_interval`` parameter
+ - zabbix_template - added support for template groups (Zabbix >= 6.2)
+ - zabbix_template_info - add template_id return value
+ - zabbix_template_info - add yaml and none formats
+ - zabbix_user_directory - added new module to support multiple sources for LDAP
+ authentication
fragments:
- - 602-change-intrepeter.yml
- - 711-minimize-role-config-templates.yml
- - 727-script-compatibility.yml
- - 731-template-info-formats.yml
- - 738-template-groups.yml
- - 741-host-tls-psk-fix.yml
- - 743-agent-tags.yml
- - 744-mediatype-attemptinterval-timeunits.yml
- - 749-action-operator-naming.yml
- - 753-host-tls-settings.yml
- - 755-user-directory-module.yml
- - 770-agent-include-pattern.yml
- - 771-auto-select-version.yml
- - firewalld_immediate.yaml
- - missing.yml
- release_date: '2022-08-15'
+ - 602-change-intrepeter.yml
+ - 711-minimize-role-config-templates.yml
+ - 727-script-compatibility.yml
+ - 731-template-info-formats.yml
+ - 738-template-groups.yml
+ - 741-host-tls-psk-fix.yml
+ - 743-agent-tags.yml
+ - 744-mediatype-attemptinterval-timeunits.yml
+ - 749-action-operator-naming.yml
+ - 753-host-tls-settings.yml
+ - 755-user-directory-module.yml
+ - 770-agent-include-pattern.yml
+ - 771-auto-select-version.yml
+ - firewalld_immediate.yaml
+ - missing.yml
+ release_date: "2022-08-15"
1.9.0:
changes:
bugfixes:
- - The inventory script had insufficient error handling in case the Zabbix API
- provided an empty interfaces list. This bugfix checks for an exisiting interfaces
- element, then for the minimal length of 1 so that the first interface will
- only be accessed when it really exists in the api response. (https://github.com/ansible-collections/community.zabbix/issues/826)
- - zabbix-proxy - updated to install correct sources for Debian arm64 family
- - zabbix_agent - Filter IPv6 addresses from list of IP as Zabbix host creation
- expects IPv4
- - zabbix_agent - installation on Windows will no longer fail when zabbix_agent2
- is used
- - zabbix_host - fix updating of host without interfaces
- - zabbix_proxy - correctly provision tls_accept and tls_connect on Zabbix backend
- - zabbix_proxy - updated the datafiles_path fact for the zabbix_proxy and zabbix_server
- roles due to upstream change
- - zabbix_server - move location of the fping(6) variables to distribution specific
- files (https://github.com/ansible-collections/community.zabbix/issues/812)
- - zabbix_server - updated the datafiles_path fact for the zabbix_proxy and zabbix_server
- roles due to upstream change
+ - The inventory script had insufficient error handling in case the Zabbix API
+ provided an empty interfaces list. This bugfix checks for an existing interfaces
+ element, then for a minimum length of 1 so that the first interface will
+ only be accessed when it really exists in the API response. (https://github.com/ansible-collections/community.zabbix/issues/826)
+ - zabbix-proxy - updated to install correct sources for Debian arm64 family
+ - zabbix_agent - Filter IPv6 addresses from list of IP as Zabbix host creation
+ expects IPv4
+ - zabbix_agent - installation on Windows will no longer fail when zabbix_agent2
+ is used
+ - zabbix_host - fix updating of host without interfaces
+ - zabbix_proxy - correctly provision tls_accept and tls_connect on Zabbix backend
+ - zabbix_proxy - updated the datafiles_path fact for the zabbix_proxy and zabbix_server
+ roles due to upstream change
+ - zabbix_server - move location of the fping(6) variables to distribution specific
+ files (https://github.com/ansible-collections/community.zabbix/issues/812)
+ - zabbix_server - updated the datafiles_path fact for the zabbix_proxy and zabbix_server
+ roles due to upstream change
major_changes:
- - all modules are opting away from zabbix-api and using httpapi ansible.netcommon
- plugin. We will support zabbix-api for backwards compatibility until next
- major release. See our README.md for more information about how to migrate
- - zabbix_agent and zabbix_proxy roles are opting away from zabbix-api and use
- httpapi ansible.netcommon plugin. We will support zabbix-api for backwards
- compatibility until next major release. See our README.md for more information
- about how to migrate
+ - all modules are opting away from zabbix-api and using httpapi ansible.netcommon
+ plugin. We will support zabbix-api for backwards compatibility until next
+ major release. See our README.md for more information about how to migrate
+ - zabbix_agent and zabbix_proxy roles are opting away from zabbix-api and use
+ httpapi ansible.netcommon plugin. We will support zabbix-api for backwards
+ compatibility until next major release. See our README.md for more information
+ about how to migrate
minor_changes:
- - ansible_zabbix_url_path introduced to be able to specify non-default Zabbix
- WebUI path, e.g. http://<FQDN>/zabbixeu
- - collection now supports creating ``module_defaults`` for ``group/community.zabbix.zabbix``
- (see https://github.com/ansible-collections/community.zabbix/issues/326)
- - fixed ``zabbix_server`` role failure running in check_mode (see https://github.com/ansible-collections/community.zabbix/issues/804)
- - zabbix_agent - give Zabbix Agent access to the Linux DMI table allowing system.hw.chassis
- info to populate.
- - zabbix_template - add support for template tags
- - zabbix_user_role module added
- - zabbix_web - add support for Ubuntu 22.04 jammy
+ - ansible_zabbix_url_path introduced to be able to specify non-default Zabbix
+ WebUI path, e.g. http://<FQDN>/zabbixeu
+ - collection now supports creating ``module_defaults`` for ``group/community.zabbix.zabbix``
+ (see https://github.com/ansible-collections/community.zabbix/issues/326)
+ - fixed ``zabbix_server`` role failure running in check_mode (see https://github.com/ansible-collections/community.zabbix/issues/804)
+ - zabbix_agent - give Zabbix Agent access to the Linux DMI table allowing system.hw.chassis
+ info to populate.
+ - zabbix_template - add support for template tags
+ - zabbix_user_role module added
+ - zabbix_web - add support for Ubuntu 22.04 jammy
fragments:
- - 326-module_defaults-group.yml
- - 526-swap-tls-accept-connect-in-proxy.yaml
- - 778-zbx-agent2-win.yml
- - 784-allow-system.hw.chassis-to-populate-on-linux.yml
- - 786-web-add-ubuntu-22-04-support.yml
- - 792-update-host-without-iface-fix.yml
- - 795-zbx-agent-win-ipv6.yml
- - 800-add-template-tags.yml
- - 806-httpapi-connection-plugin.yml
- - 812-fping-binary-location.yml
- - 813-sql-scripts-moved-to-usr-share.yml
- - 821-zabbix-proxy-debian-arm64.yml
- - 822-roles-to-httpapi.yml
- - 823-zabbix_server-role-in-check-mode.yml
- - 827-fixing-bug-for-empty-interfaces-list.yml
- - module_zabbix-user-role.yml
- - modules-url-path.yml
- release_date: '2022-11-07'
+ - 326-module_defaults-group.yml
+ - 526-swap-tls-accept-connect-in-proxy.yaml
+ - 778-zbx-agent2-win.yml
+ - 784-allow-system.hw.chassis-to-populate-on-linux.yml
+ - 786-web-add-ubuntu-22-04-support.yml
+ - 792-update-host-without-iface-fix.yml
+ - 795-zbx-agent-win-ipv6.yml
+ - 800-add-template-tags.yml
+ - 806-httpapi-connection-plugin.yml
+ - 812-fping-binary-location.yml
+ - 813-sql-scripts-moved-to-usr-share.yml
+ - 821-zabbix-proxy-debian-arm64.yml
+ - 822-roles-to-httpapi.yml
+ - 823-zabbix_server-role-in-check-mode.yml
+ - 827-fixing-bug-for-empty-interfaces-list.yml
+ - module_zabbix-user-role.yml
+ - modules-url-path.yml
+ release_date: "2022-11-07"
1.9.1:
changes:
bugfixes:
- - all modules - remove deprecation warnings for modules parameters related to
- zabbix-api when these parapmeters are not explicetely defined
- - all roles and modules integration tests - replace deprecated include module
- whith include_tasks
- - zabbix_agent, zabbix_proxy roles, all modules - make httpapi connection work
- with HTTP Basic Authorization
- - zabbix_proxy - do not set ServerPort config parameter which was removed in
- Zabbix 6.0
- - 'zabbix_server role Debian.yml task - remove warn: arg for shell module as
- the arg is deprecated since ansible-core above 2.13'
- - zabbix_user_role module - creation of a User Role with Super Admin type
+ - all modules - remove deprecation warnings for module parameters related to
+ zabbix-api when these parameters are not explicitly defined
+ - all roles and modules integration tests - replace deprecated include module
+ with include_tasks
+ - zabbix_agent, zabbix_proxy roles, all modules - make httpapi connection work
+ with HTTP Basic Authorization
+ - zabbix_proxy - do not set ServerPort config parameter which was removed in
+ Zabbix 6.0
+ - "zabbix_server role Debian.yml task - remove warn: arg for shell module as
+ the arg is deprecated in ansible-core versions above 2.13"
+ - zabbix_user_role module - creation of a User Role with Super Admin type
minor_changes:
- - zabbix suport for rhel 9
+ - zabbix support for RHEL 9
fragments:
- - 845-rhel-9-support.yml
- - 850-proxy-serverport.yml
- - 857-deprecation-warnings.yml
- - 859-remove-warn-arg.yml
- - 866-user-role-superadmin.yml
- - 868-include-to-include-tasks.yml
- - fix-httpapi-http-auth.yml
- release_date: '2023-01-09'
+ - 845-rhel-9-support.yml
+ - 850-proxy-serverport.yml
+ - 857-deprecation-warnings.yml
+ - 859-remove-warn-arg.yml
+ - 866-user-role-superadmin.yml
+ - 868-include-to-include-tasks.yml
+ - fix-httpapi-http-auth.yml
+ release_date: "2023-01-09"
1.9.2:
changes:
bugfixes:
- - zabbix_agent and zabbix_proxy roles - fixed a bug whith ansible_python_interpreter
- not being set correctly in some corner cases
- - zabbix_agent role - Fix MacOS install never executed because of the missing
- include_tasks "Darwin.yml" in the "main.yml" task file and wrong user permission
- on folder/files.
- - zabbix_agent, zabbix_proxy and zabbix_server roles - make Ansible 2.14 compatible
- by removing warn parameter
+ - zabbix_agent and zabbix_proxy roles - fixed a bug with ansible_python_interpreter
+ not being set correctly in some corner cases
+ - zabbix_agent role - Fix macOS install never being executed because of the missing
+ include_tasks "Darwin.yml" in the "main.yml" task file and wrong user permissions
+ on folders/files.
+ - zabbix_agent, zabbix_proxy and zabbix_server roles - make Ansible 2.14 compatible
+ by removing warn parameter
fragments:
- - 897.yml
- - missing.yml
- release_date: '2023-02-11'
+ - 897.yml
+ - missing.yml
+ release_date: "2023-02-11"
1.9.3:
changes:
bugfixes:
- - compatibility with ansible.netcommon 5.0.0
- - treat sendto parameter in module zabbix_user according to real media type,
- do not rely on media name
- - zabbix-proxy role - fix tags for postgresql task.
- - zabbix_agent role - Fix MacOS install never executed because of the missing
- include_tasks "Darwin.yml" in the "main.yml" task file and wrong user permission
- on folder/files.
- - zabbix_user module - ability to specify several e-mail addresses in Zabbix
- User's media
+ - compatibility with ansible.netcommon 5.0.0
+ - treat sendto parameter in module zabbix_user according to real media type,
+ do not rely on media name
+ - zabbix-proxy role - fix tags for postgresql task.
+ - zabbix_agent role - Fix macOS install never being executed because of the missing
+ include_tasks "Darwin.yml" in the "main.yml" task file and wrong user permissions
+ on folders/files.
+ - zabbix_user module - ability to specify several e-mail addresses in Zabbix
+ User's media
minor_changes:
- - httpapi plugin - updated to work with Zabbix 6.4.
- - zabbix_action, zabbix_authentication, zabbix_discovery_rule, zabbix_mediatype,
- zabbix_user, zabbix_user_directory, zabbix_usergroup - updated to work with
- Zabbix 6.4.
- - zabbix_agent role - Add support for SUSE Linux Enterprise Server for SAP Applications
- ("SLES_SAP").
- - zabbix_host - add missing variants for SNMPv3 authprotocol and privprotocol
- introduced by Zabbix 6
- - zabbix_proxy role - Add variable zabbix_proxy_dbpassword_hash_method to control
- whether you want postgresql user password to be hashed with md5 or want to
- use db default. When zabbix_proxy_dbpassword_hash_method is set to anything
- other than md5 then do not hash the password with md5 so you could use postgresql
- scram-sha-256 hashing method.
- - zabbix_server role - Add variable zabbix_server_dbpassword_hash_method to
- control whether you want postgresql user password to be hashed with md5 or
- want to use db default. When zabbix_server_dbpassword_hash_method is set to
- anything other than md5 then do not hash the password with md5 so you could
- use postgresql scram-sha-256 hashing method.
- - zabbix_usergroup module - userdirectory, hostgroup_rights and templategroup_rights
- parameters added (Zabbix >= 6.2)
- - zabbix_web role - possibility to add custom includes in apache vhost config
+ - httpapi plugin - updated to work with Zabbix 6.4.
+ - zabbix_action, zabbix_authentication, zabbix_discovery_rule, zabbix_mediatype,
+ zabbix_user, zabbix_user_directory, zabbix_usergroup - updated to work with
+ Zabbix 6.4.
+ - zabbix_agent role - Add support for SUSE Linux Enterprise Server for SAP Applications
+ ("SLES_SAP").
+ - zabbix_host - add missing variants for SNMPv3 authprotocol and privprotocol
+ introduced by Zabbix 6
+ - zabbix_proxy role - Add variable zabbix_proxy_dbpassword_hash_method to control
+ whether you want postgresql user password to be hashed with md5 or want to
+ use db default. When zabbix_proxy_dbpassword_hash_method is set to anything
+ other than md5 then do not hash the password with md5 so you could use postgresql
+ scram-sha-256 hashing method.
+ - zabbix_server role - Add variable zabbix_server_dbpassword_hash_method to
+ control whether you want postgresql user password to be hashed with md5 or
+ want to use db default. When zabbix_server_dbpassword_hash_method is set to
+ anything other than md5 then do not hash the password with md5 so you could
+ use postgresql scram-sha-256 hashing method.
+ - zabbix_usergroup module - userdirectory, hostgroup_rights and templategroup_rights
+ parameters added (Zabbix >= 6.2)
+ - zabbix_web role - possibility to add custom includes in apache vhost config
fragments:
- - 871.yml
- - 880.yml
- - 886.yml
- - 890.yml
- - 897.yml
- - 899.yml
- - 910-postgres-proxy.yml
- - 935-custom-includes-apache.yml
- - netcommon50-fix.yaml
- - zabbix64.yml
- - zabbix_user_media_mailto.yml
- - zabbix_user_sendto.yaml
- release_date: '2023-04-03'
+ - 871.yml
+ - 880.yml
+ - 886.yml
+ - 890.yml
+ - 897.yml
+ - 899.yml
+ - 910-postgres-proxy.yml
+ - 935-custom-includes-apache.yml
+ - netcommon50-fix.yaml
+ - zabbix64.yml
+ - zabbix_user_media_mailto.yml
+ - zabbix_user_sendto.yaml
+ release_date: "2023-04-03"
2.0.0:
changes:
breaking_changes:
- - agent role - removed support for Darwin, Amazon, Fedora, XCP-ng, Suse, Mint,
- and Sangoma operating systems
- - agent role - removed support for zabbix_create_host and replaced it with zabbix_agent_host_state
- - agent role - removed support for zabbix_create_hostgroup and replaced it with
- zabbix_agent_hostgroups_state
- - agent role - removed support for zabbix_http_password, zabbix_api_http_password,
- zabbix_api_pass, and zabbix_api_login_pass and replaced it with zabbix_api_login_pass
- - agent role - removed support for zabbix_http_user, zabbix_api_http_user, zabbix_api_user,
- and zabbix_api_login_user and replaced it with zabbix_api_login_user
- - agent role - removed support for zabbix_inventory_mode and replaced it with
- zabbix_agent_inventory_mode
- - agent role - removed support for zabbix_link_templates adn replaced it with
- zabbix_agent_link_templates
- - agent role - removed support for zabbix_macros and replaced it with zabbix_agent_macros
- - agent role - removed support for zabbix_proxy and replaced it with zabbix_agent_proxy
- - agent role - removed support for zabbix_update_host and replaced it with zabbix_agent_host_update
- - all modules - dropped support of Zabbix versions < 6.0
- - all roles - removed support for the zabbix_version variable.
- - all roles - removed support for all versions of Zabbix < 6.0.
- - all roles - removed support for installation from epel and non-standard repositories
- - dropped support of zabbix-api to make REST API calls to Zabbix
- - proxy role - removed support for zabbix_database_creation and replaced it
- with zabbix_proxy_database_creation
- - proxy role - removed support for zabbix_database_sqlload and replaced it
- with zabbix_proxy_database_sqlload
- - proxy role - removed support for zabbix_selinux and replaced it with zabbix_proxy_selinux
- - server role - removed support for zabbix_server_mysql_login_password and replaced
- with zabbix_server_dbpassword
- - server role - removed support for zabbix_server_mysql_login_user and replaced
- with zabbix_server_dbuser
- - stopped supporting Ansible < 2.12
- - stopped supporting Python < 3.9
- - zabbix_action - message parameter renamed to op_message
- - zabbix_group_facts module - removed in favour of zabbix_group_info
- - zabbix_host_facts module - removed in favour of zabbix_host_info
+ - agent role - removed support for Darwin, Amazon, Fedora, XCP-ng, Suse, Mint,
+ and Sangoma operating systems
+ - agent role - removed support for zabbix_create_host and replaced it with zabbix_agent_host_state
+ - agent role - removed support for zabbix_create_hostgroup and replaced it with
+ zabbix_agent_hostgroups_state
+ - agent role - removed support for zabbix_http_password, zabbix_api_http_password,
+ zabbix_api_pass, and zabbix_api_login_pass and replaced it with zabbix_api_login_pass
+ - agent role - removed support for zabbix_http_user, zabbix_api_http_user, zabbix_api_user,
+ and zabbix_api_login_user and replaced it with zabbix_api_login_user
+ - agent role - removed support for zabbix_inventory_mode and replaced it with
+ zabbix_agent_inventory_mode
+ - agent role - removed support for zabbix_link_templates and replaced it with
+ zabbix_agent_link_templates
+ - agent role - removed support for zabbix_macros and replaced it with zabbix_agent_macros
+ - agent role - removed support for zabbix_proxy and replaced it with zabbix_agent_proxy
+ - agent role - removed support for zabbix_update_host and replaced it with zabbix_agent_host_update
+ - all modules - dropped support of Zabbix versions < 6.0
+ - all roles - removed support for the zabbix_version variable.
+ - all roles - removed support for all versions of Zabbix < 6.0.
+ - all roles - removed support for installation from epel and non-standard repositories
+ - dropped support of zabbix-api to make REST API calls to Zabbix
+ - proxy role - removed support for zabbix_database_creation and replaced it
+ with zabbix_proxy_database_creation
+ - proxy role - removed support for zabbix_database_sqlload and replaced it
+ with zabbix_proxy_database_sqlload
+ - proxy role - removed support for zabbix_selinux and replaced it with zabbix_proxy_selinux
+ - server role - removed support for zabbix_server_mysql_login_password and replaced
+ with zabbix_server_dbpassword
+ - server role - removed support for zabbix_server_mysql_login_user and replaced
+ with zabbix_server_dbuser
+ - stopped supporting Ansible < 2.12
+ - stopped supporting Python < 3.9
+ - zabbix_action - message parameter renamed to op_message
+ - zabbix_group_facts module - removed in favour of zabbix_group_info
+ - zabbix_host_facts module - removed in favour of zabbix_host_info
minor_changes:
- - Replaced usage of deprecated apt key management in Debian based distros -
- See https://wiki.debian.org/DebianRepository/UseThirdParty
- - Standardized tags across all roles.
- - Updated all roles to default to version 6.4 for install.
- - all roles - removed unused variables from defaults
- - all roles - standardized testing matrix to check all supported versions and
- operating systems.
- - all roles - temporarily disable epel repo on zabbix installation tasks
- - all roles - updated documentation.
- - inventory plugin - switched from using zabbix-api to custom implementation
- adding authentication with tokens
- - inventory script - re-coded to stop using zabbix-api. API tokens support added.
- - web role - removed support for htpasswd
+ - Replaced usage of deprecated apt key management in Debian based distros -
+ See https://wiki.debian.org/DebianRepository/UseThirdParty
+ - Standardized tags across all roles.
+ - Updated all roles to default to version 6.4 for install.
+ - all roles - removed unused variables from defaults
+ - all roles - standardized testing matrix to check all supported versions and
+ operating systems.
+ - all roles - temporarily disable epel repo on zabbix installation tasks
+ - all roles - updated documentation.
+ - inventory plugin - switched from using zabbix-api to custom implementation
+ adding authentication with tokens
+ - inventory script - re-coded to stop using zabbix-api. API tokens support added.
+ - web role - removed support for htpasswd
removed_features:
- - agent role - removed support to configure firewall
- - web role - removed installation of apache, debian, and php
+ - agent role - removed support to configure firewall
+ - web role - removed installation of apache, debian, and php
fragments:
- - 2_0_update.yml
- - 941_github_runner.yml
- - 963-drop-zabbix-api.yml
- - 964_tag_cleanup.yml
- - 969_replace_apt_key.yml
- - 976_old_zabbix_versions_removal.yml
- - default_ver.yml
- - inventory_script.yml
- - python-ansible.yml
- release_date: '2023-05-04'
+ - 2_0_update.yml
+ - 941_github_runner.yml
+ - 963-drop-zabbix-api.yml
+ - 964_tag_cleanup.yml
+ - 969_replace_apt_key.yml
+ - 976_old_zabbix_versions_removal.yml
+ - default_ver.yml
+ - inventory_script.yml
+ - python-ansible.yml
+ release_date: "2023-05-04"
2.0.1:
changes:
bugfixes:
- - Proxy and Agent Roles - Added `zabbix_api_use_ssl` variable to allow secure
- API connections
- - Web Role - Added defaults and documentation for `zabbix_apache_custom_includes`
- - agent - Handled undefined variable error for Windows default versions
- - all roles - Added option to selectively disable a repo on Redhat installs
+ - Proxy and Agent Roles - Added `zabbix_api_use_ssl` variable to allow secure
+ API connections
+ - Web Role - Added defaults and documentation for `zabbix_apache_custom_includes`
+ - agent - Handled undefined variable error for Windows default versions
+ - all roles - Added option to selectively disable a repo on Redhat installs
fragments:
- - 986.yml
- - 990.yml
- - api_ssl.yml
- - disable_repo.yml
- release_date: '2023-05-26'
+ - 986.yml
+ - 990.yml
+ - api_ssl.yml
+ - disable_repo.yml
+ release_date: "2023-05-26"
2.1.0:
changes:
bugfixes:
- - agent role - Added missing become statement to allow run to role as nonroot
- - zabbix_host module - fix updating hosts that were discovered via LLD
- - zabbix_proxy role - failed at version validation. Fix adds cast of zabbix_proxy_version
- to float, similarly to the other roles.
- - zabbix_proxy role - undefined vars at updating proxy definition. Fix adds
- null defaults for zabbix_proxy_tlsaccept and zabbix_proxy_tlsconnect.
- - zabbix_web role - removed 'ssl on;' nginx configuration, which is no longer
- supported since nginx version 1.25.1.
+ - agent role - Added missing become statement to allow the role to run as nonroot
+ - zabbix_host module - fix updating hosts that were discovered via LLD
+ - zabbix_proxy role - failed at version validation. Fix adds cast of zabbix_proxy_version
+ to float, similarly to the other roles.
+ - zabbix_proxy role - undefined vars when updating proxy definition. Fix adds
+ null defaults for zabbix_proxy_tlsaccept and zabbix_proxy_tlsconnect.
+ - zabbix_web role - removed 'ssl on;' nginx configuration, which is no longer
+ supported since nginx version 1.25.1.
minor_changes:
- - Multiple Roles - Replaced depricated 'include' statements with 'include_tasks'
- - Update action_groups variable in runtime.yml
- - all roles - Added support for Debian 12 (Bookworm)
- - all roles - Delete gpg ids variable.
- - all roles - Modified to allow a non-root user to run the role.
- - all roles - Updated testing to account for the correct version of Zabbix
- - zabbix_hostmacro module - Add description property for Host macro creation/update.
- Allow to set/update description of Zabbix host macros.
- - zabbix_proxy - Added installation of PyMySQL pip package
- - zabbix_proxy - Modified installation of Centos 7 MySQL client
- - zabbix_proxy - Standardized MySQL client installed on Debian and Ubuntu
- - zabbix_regexp module added
- - zabbix_settings module added
- - zabbix_token module added
+ - Multiple Roles - Replaced deprecated 'include' statements with 'include_tasks'
+ - Update action_groups variable in runtime.yml
+ - all roles - Added support for Debian 12 (Bookworm)
+ - all roles - Delete gpg ids variable.
+ - all roles - Modified to allow a non-root user to run the role.
+ - all roles - Updated testing to account for the correct version of Zabbix
+ - zabbix_hostmacro module - Add description property for Host macro creation/update.
+ Allow to set/update description of Zabbix host macros.
+ - zabbix_proxy - Added installation of PyMySQL pip package
+ - zabbix_proxy - Modified installation of Centos 7 MySQL client
+ - zabbix_proxy - Standardized MySQL client installed on Debian and Ubuntu
+ - zabbix_regexp module added
+ - zabbix_settings module added
+ - zabbix_token module added
fragments:
- - 1025-delete_vars_zabbix.yml
- - 1029-update_action_groups.yml
- - 732-zabbix-regexp.yml
- - 991-zabbix-token.yml
- - 993-zabbix-settings.yml
- - agent_become.yml
- - debian12.yml
- - discovered_hosts_update.yml
- - hostmacro_modules.yml
- - include_tasks.yml
- - missing.yml
- - nginx_ssl_fix.yml
- - non_root.yml
- - proxy_role_fix.yml
+ - 1025-delete_vars_zabbix.yml
+ - 1029-update_action_groups.yml
+ - 732-zabbix-regexp.yml
+ - 991-zabbix-token.yml
+ - 993-zabbix-settings.yml
+ - agent_become.yml
+ - debian12.yml
+ - discovered_hosts_update.yml
+ - hostmacro_modules.yml
+ - include_tasks.yml
+ - missing.yml
+ - nginx_ssl_fix.yml
+ - non_root.yml
+ - proxy_role_fix.yml
modules:
- - description: Create/update/delete Zabbix regular expression
- name: zabbix_regexp
- namespace: ''
- - description: Update Zabbix global settings.
- name: zabbix_settings
- namespace: ''
- - description: Create/Update/Generate/Delete Zabbix token.
- name: zabbix_token
- namespace: ''
- release_date: '2023-06-01'
+ - description: Create/update/delete Zabbix regular expression
+ name: zabbix_regexp
+ namespace: ""
+ - description: Update Zabbix global settings.
+ name: zabbix_settings
+ namespace: ""
+ - description: Create/Update/Generate/Delete Zabbix token.
+ name: zabbix_token
+ namespace: ""
+ release_date: "2023-06-01"
2.2.0:
changes:
bugfixes:
- - zabbix_inventory - fixed handeling of add_zabbix_groups option
- - zabbix_template - fix template export when template's content has "error"
- word
- - zabbix_web role - fix variable naming issues (undefined) to zabbix_web_version
- and zabbix_web_apt_repository
+ - zabbix_inventory - fixed handling of the add_zabbix_groups option
+ - zabbix_template - fix template export when the template's content contains
+ the word "error"
+ - zabbix_web role - fix variable naming issues (undefined) to zabbix_web_version
+ and zabbix_web_apt_repository
minor_changes:
- - Added zabbix_group_events_info module
- - action module - Added notify_if_canceled property
- - agent and proxy roles - Set default `zabbix_api_server_port` to 80 or 443
- based on `zabbix_api_use_ssl`
- - agent role - Removed duplicative Windows agent task
- - agent role - Standardized default yum priority to 99
- - all roles - Re-added ability to override Debian repo source
- - all roles - Updated Debian repository format to 822 standard
- - various - updated testing modules
- - various - updated to fully qualified module names
- - zabbix agent - Added capability to add additional configuration includes
- - zabbix_api_info module added
- - zabbix_user module - add current_passwd optional parameter to enable password
- updating of the currently logged in user (https://www.zabbix.com/documentation/6.4/en/manual/api/reference/user/update)
+ - Added zabbix_group_events_info module
+ - action module - Added notify_if_canceled property
+ - agent and proxy roles - Set default `zabbix_api_server_port` to 80 or 443
+ based on `zabbix_api_use_ssl`
+ - agent role - Removed duplicative Windows agent task
+ - agent role - Standardized default yum priority to 99
+ - all roles - Re-added ability to override Debian repo source
+ - all roles - Updated Debian repository format to 822 standard
+ - various - updated testing modules
+ - various - updated to fully qualified module names
+ - zabbix agent - Added capability to add additional configuration includes
+ - zabbix_api_info module added
+ - zabbix_user module - add current_passwd optional parameter to enable password
+ updating of the currently logged in user (https://www.zabbix.com/documentation/6.4/en/manual/api/reference/user/update)
fragments:
- - 1058-zabbix_template_corner_case.yml
- - 1081_fqmn.yml
- - 733-zabbix-api_info.yml
- - 921-action_notify_if_cancled.yml
- - additional_includes.yml
- - api-port.yml
- - current_passwd_user_module.yml
- - deb822.yml
- - duplicate-windows-agent.yml
- - module_group_events_info.yml
- - web_role_vars_fix.yml
- - yum-priority.yml
- release_date: '2023-10-06'
+ - 1058-zabbix_template_corner_case.yml
+ - 1081_fqmn.yml
+ - 733-zabbix-api_info.yml
+ - 921-action_notify_if_cancled.yml
+ - additional_includes.yml
+ - api-port.yml
+ - current_passwd_user_module.yml
+ - deb822.yml
+ - duplicate-windows-agent.yml
+ - module_group_events_info.yml
+ - web_role_vars_fix.yml
+ - yum-priority.yml
+ release_date: "2023-10-06"
2.3.0:
changes:
bugfixes:
- - api module - Fixed certificiate errors
- - proxy and server roles - Defaulted location of fping and fping6 based on OS.
- - proxy role - Removed requirement for mysql group definition.
- - server role - typo in configuration var StasAllowedIP to StatsAllowedIP
- - zabbix-{agent, javagateway, proxy, server, web} - support raspberry pi without
- repository url specification
+ - api module - Fixed certificate errors
+ - proxy and server roles - Defaulted location of fping and fping6 based on OS.
+ - proxy role - Removed requirement for mysql group definition.
+ - server role - fixed typo in configuration var StasAllowedIP (corrected to StatsAllowedIP)
+ - zabbix-{agent, javagateway, proxy, server, web} - support raspberry pi without
+ repository url specification
minor_changes:
- - api_requests - Handled error from depricated CertificateError class
- - multiple roles - Removed unneeded Apt Clean commands.
- - proxy role - Updated MariaDB version for Centos 7 to 10.11
- - zabbix web - Allowed the independent configuration of php-fpm without creating
- vhost.
- - zabbix_host_info - added ability to get all the hosts configured in Zabbix
- - zabbix_proxy role - Add variable zabbix_proxy_dbpassword_hash_method to control
- whether you want postgresql user password to be hashed with md5 or want to
- use db default. When zabbix_proxy_dbpassword_hash_method is set to anything
- other than md5 then do not hash the password with md5 so you could use postgresql
- scram-sha-256 hashing method.
- - zabbix_server role - Add variable zabbix_server_dbpassword_hash_method to
- control whether you want postgresql user password to be hashed with md5 or
- want to use db default. When zabbix_server_dbpassword_hash_method is set to
- anything other than md5 then do not hash the password with md5 so you could
- use postgresql scram-sha-256 hashing method.
- - zabbix_templategroup module added
+ - api_requests - Handled error from deprecated CertificateError class
+ - multiple roles - Removed unneeded Apt Clean commands.
+ - proxy role - Updated MariaDB version for Centos 7 to 10.11
+ - zabbix web - Allowed the independent configuration of php-fpm without creating
+ vhost.
+ - zabbix_host_info - added ability to get all the hosts configured in Zabbix
+ - zabbix_proxy role - Add variable zabbix_proxy_dbpassword_hash_method to control
+ whether you want postgresql user password to be hashed with md5 or want to
+ use db default. When zabbix_proxy_dbpassword_hash_method is set to anything
+ other than md5 then do not hash the password with md5 so you could use postgresql
+ scram-sha-256 hashing method.
+ - zabbix_server role - Add variable zabbix_server_dbpassword_hash_method to
+ control whether you want postgresql user password to be hashed with md5 or
+ want to use db default. When zabbix_server_dbpassword_hash_method is set to
+ anything other than md5 then do not hash the password with md5 so you could
+ use postgresql scram-sha-256 hashing method.
+ - zabbix_templategroup module added
fragments:
- - 1134.yml
- - 1136.yml
- - 1142.yml
- - fping.yml
- - host_info_all_hosts.yml
- - php_fpm_config.yml
- - pr_1104.yml
- - pr_647.yml
- - proxy_mysql.yml
- - pymysql.yml
- - raspberrypi.yml
- release_date: '2024-01-02'
+ - 1134.yml
+ - 1136.yml
+ - 1142.yml
+ - fping.yml
+ - host_info_all_hosts.yml
+ - php_fpm_config.yml
+ - pr_1104.yml
+ - pr_647.yml
+ - proxy_mysql.yml
+ - pymysql.yml
+ - raspberrypi.yml
+ release_date: "2024-01-02"
2.3.1:
changes:
bugfixes:
- - Avoid to update user-directory configuration in dry run.
+ - Avoid updating the user-directory configuration in dry run.
fragments:
- - 1156-bugfix_zabbix_user_directory_dryrun.yml
- release_date: '2024-01-10'
+ - 1156-bugfix_zabbix_user_directory_dryrun.yml
+ release_date: "2024-01-10"
2.4.0:
changes:
bugfixes:
- - zabbix_agent - Fixed IPMI authentication algorithm default setting
- - zabbix_agent - Fixed issue to where scripts can be deployed alongside userparameters
- - zabbix_host - Don't reset IPMI setting when update inventory data of a host
- - zabbix_host - Finish task with failed if host_group parameter is empty list
- - zabbix_server - proper indentaion of become in selinux.yaml
- - zabbix_web - Added missing semicolon to nginx vhost template.
- - zabbix_web role, Add missing selinux.yml tasks.
+ - zabbix_agent - Fixed IPMI authentication algorithm default setting
+ - zabbix_agent - Fixed issue so that scripts can be deployed alongside userparameters
+ - zabbix_host - Don't reset IPMI setting when updating inventory data of a host
+ - zabbix_host - Finish task as failed if the host_group parameter is an empty list
+ - zabbix_server - proper indentation of become in selinux.yaml
+ - zabbix_web - Added missing semicolon to nginx vhost template.
+ - zabbix_web role, Add missing selinux.yml tasks.
minor_changes:
- - Add slash at the end of the location directives, to prevent path traversal
- attacks.
- - Added active_since and active_till in zabbix_maintenance
- - Added content_type for email in zabbix_mediatypes
- - Introduce flag `enable_version_check` to allow installations on non-supported
- platforms.
- - agent, javagateway, proxy, server, and web role - added the http_proxy and
- https_proxy environment variables to "Debian | Download gpg key" analog to
- other tasks
- - agent, javagateway, proxy, server, and web role - introduced default variable
- zabbix_repo_deb_gpg_key_url with value http://repo.zabbix.com/zabbix-official-repo.key
- - agent, javagateway, proxy, server, and web role - introduced default variable
- zabbix_repo_deb_include_deb_src with value true
- - agent, javagateway, proxy, server, and web role - removed superfluous slash
- in zabbix_gpg_key of the Debian vars and renamed key to zabbix-repo instead
- of zabbix-official-repo
- - agent, javagateway, proxy, server, and web role - used variable zabbix_repo_deb_include_deb_src
- in "Debian | Installing repository" to determine whether deb-src should be
- added to /etc/apt/sources.list.d/zabbix.sources
- - agent, javagateway, proxy, server, and web role - used zabbix_repo_deb_gpg_key_url
- in "Debian | Download gpg key" instead of hardcoded url
- - zabbix_correlation module added
- - zabbix_service_info module added
- - zabbix_template - Add template_yaml parameter.
- - zabbix_web role, Refactored zabbix_selinux variable names to correlate with
- selinux boolean names.
+ - Add slash at the end of the location directives, to prevent path traversal
+ attacks.
+ - Added active_since and active_till in zabbix_maintenance
+ - Added content_type for email in zabbix_mediatypes
+ - Introduce flag `enable_version_check` to allow installations on non-supported
+ platforms.
+ - agent, javagateway, proxy, server, and web role - added the http_proxy and
+ https_proxy environment variables to "Debian | Download gpg key" analogous to
+ other tasks
+ - agent, javagateway, proxy, server, and web role - introduced default variable
+ zabbix_repo_deb_gpg_key_url with value http://repo.zabbix.com/zabbix-official-repo.key
+ - agent, javagateway, proxy, server, and web role - introduced default variable
+ zabbix_repo_deb_include_deb_src with value true
+ - agent, javagateway, proxy, server, and web role - removed superfluous slash
+ in zabbix_gpg_key of the Debian vars and renamed key to zabbix-repo instead
+ of zabbix-official-repo
+ - agent, javagateway, proxy, server, and web role - used variable zabbix_repo_deb_include_deb_src
+ in "Debian | Installing repository" to determine whether deb-src should be
+ added to /etc/apt/sources.list.d/zabbix.sources
+ - agent, javagateway, proxy, server, and web role - used zabbix_repo_deb_gpg_key_url
+ in "Debian | Download gpg key" instead of hardcoded url
+ - zabbix_correlation module added
+ - zabbix_service_info module added
+ - zabbix_template - Add template_yaml parameter.
+ - zabbix_web role, Refactored zabbix_selinux variable names to correlate with
+ selinux boolean names.
fragments:
- - 1157-add_template_yaml.yml
- - 1162-do_not_update_ipmi_options.yml
- - 1165-indent-become.yml
- - 1168-finish-task-with-failed-if-host_group-parameter-is-empty_list.yml
- - 1169-add_zabbix_correlation.yml
- - 1176-content_type-in-mediatype.yml
- - 1180-active_since-in-maintenance.yml
- - 1183-nginx-fix-against-path-traversal.yml
- - 1186-custom_gpg_key_url.yml
- - 1188-agent_ipmi_authtype_fix.yml
- - 1205-fix-zabbix-agent-scripts.yml
- - 1206-fix-nginx-template-ssl-params.yml
- - 1210-add_zabbix_service_info.yml
- - 1220-refactor_selinux_tasks_and_variable_names_role_zabbix_web.yml
- - 1224-enable-version-check.yml
+ - 1157-add_template_yaml.yml
+ - 1162-do_not_update_ipmi_options.yml
+ - 1165-indent-become.yml
+ - 1168-finish-task-with-failed-if-host_group-parameter-is-empty_list.yml
+ - 1169-add_zabbix_correlation.yml
+ - 1176-content_type-in-mediatype.yml
+ - 1180-active_since-in-maintenance.yml
+ - 1183-nginx-fix-against-path-traversal.yml
+ - 1186-custom_gpg_key_url.yml
+ - 1188-agent_ipmi_authtype_fix.yml
+ - 1205-fix-zabbix-agent-scripts.yml
+ - 1206-fix-nginx-template-ssl-params.yml
+ - 1210-add_zabbix_service_info.yml
+ - 1220-refactor_selinux_tasks_and_variable_names_role_zabbix_web.yml
+ - 1224-enable-version-check.yml
modules:
- - description: Create/update/delete Zabbix correlation
- name: zabbix_correlation
- namespace: ''
- release_date: '2024-05-13'
+ - description: Create/update/delete Zabbix correlation
+ name: zabbix_correlation
+ namespace: ""
+ release_date: "2024-05-13"
+ 2.5.0:
+ changes:
+ bugfixes:
+ - zabbix_web - make the FPM socket group-writable so the web server can properly
+ forward requests to the FPM process
+ minor_changes:
+ - agent role - Standardized all configuration variables using the `zabbix_agent`
+ prefix instead of `zabbix_agent2`. Support for `zabbix_agent2` variables will be removed in 3.0.0
+ - agent role - Standardized templating of agent.conf file
+ - all roles - Added support for Ubuntu 24.04 (Noble Numbat)
+ - zabbix_discoveryrule module added
+ - zabbix_host_events_update module added
+ - zabbix_item - add support for setting master items by name
+ - zabbix_item module added
+ - zabbix_itemprototype - add support for setting master items by name
+ - zabbix_itemprototype module added
+ - zabbix_trigger module added
+ - zabbix_triggerprototype module added
+ fragments:
+ - 1221-add_support_for_ubuntu_24.04.yml
+ - 1227-fpm-socket-permissions.yml
+ - agent.yml
+ - pr_1222.yml
+ - pr_1234.yml
+ - pr_1238.yml
+ release_date: "2024-05-30"
+ 2.5.1:
+ changes:
+ bugfixes:
+ - zabbix_agent - Fix reading existing psk
+ - zabbix_agent - Fix role when zabbix_agent_listenip is undefined
+ fragments:
+ - pr_1245.yml
+ - pr_1246.yml
+ release_date: "2024-05-30"
diff --git a/ansible_collections/community/zabbix/docs/ZABBIX_AGENT_ROLE.md b/ansible_collections/community/zabbix/docs/ZABBIX_AGENT_ROLE.md
index fe4a601b3..f9a2fb5a5 100644
--- a/ansible_collections/community/zabbix/docs/ZABBIX_AGENT_ROLE.md
+++ b/ansible_collections/community/zabbix/docs/ZABBIX_AGENT_ROLE.md
@@ -28,7 +28,6 @@
* [proxy](#proxy)
- [Dependencies](#dependencies)
- [Example Playbook](#example-playbook)
- * [zabbix_agent2_plugins](#zabbix-agent2-plugins)
* [agent_interfaces](#agent-interfaces)
* [Other interfaces](#other-interfaces)
* [Vars in role configuration](#vars-in-role-configuration)
@@ -95,6 +94,7 @@ See the following list of supported Operating systems with the Zabbix releases:
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
| Red Hat Fam 7 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | V | V | V |
@@ -104,24 +104,6 @@ See the following list of supported Operating systems with the Zabbix releases:
You can bypass this matrix by setting `enable_version_check: false`
-# Getting started
-
-## Minimal Configuration
-
-In order to get the Zabbix Agent running, you'll have to define the following properties before executing the role:
-
-* `zabbix_agent_version`
-* `zabbix_agent(2)_server`
-* `zabbix_agent(2)_serveractive` (When using active checks)
-
-The `zabbix_agent_version` is optional. The latest available major.minor version of Zabbix will be installed on the host(s). If you want to use an older version, please specify this in the major.minor format. Example: `zabbix_agent_version: 6.0`.
-
-The `zabbix_agent(2)_server` (and `zabbix_agent(2)_serveractive`) should contain the ip or fqdn of the host running the Zabbix Server.
-
-## Issues
-
-Due to issue discussed on [#291](https://github.com/dj-wasabi/ansible-zabbix-agent/issues/291), the Ansible Version 2.9.{0,1,2} isn't working correctly on Windows related targets.
-
# Role Variables
## Main variables
@@ -133,114 +115,66 @@ The following is an overview of all available configuration default for this rol
* `zabbix_agent_version`: This is the version of zabbix. Default: The highest supported version for the operating system. Can be overridden to 6.4, 6.2, or 6.0
* `zabbix_agent_version_minor`: When you want to specify a minor version to be installed. Is also used for `zabbix_sender` and `zabbix_get`. RedHat only. Default set to: `*` (latest available)
* `zabbix_repo_yum`: A list with Yum repository configuration.
+* `zabbix_repo_yum_gpgcheck`: If Yum should check GPG keys on installation
* `zabbix_repo_yum_schema`: Default: `https`. Option to change the web schema for the yum repository(http/https)
* `zabbix_agent_disable_repo`: A list of repos to disable during install. Default `epel`.
* `zabbix_repo_deb_url`: The URL to the Zabbix repository. Default `http://repo.zabbix.com/zabbix/{{ zabbix_agent_version }}/{{ ansible_distribution.lower() }}`
* `zabbix_repo_deb_component`: The repository component for Debian installs. Default `main`.
-* `zabbix_repo_deb_gpg_key_url`: The URL to download the Zabbix GPG key from. Default `http://repo.zabbix.com/zabbix-official-repo.key`.
+* `zabbix_repo_deb_gpg_key_url`: The URL to download the Zabbix GPG key from. Default `http://repo.zabbix.com/zabbix-official-repo.key`.
* `zabbix_repo_deb_include_deb_src`: True, if deb-src should be included in the zabbix.sources entry. Default `true`.
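A hedged sketch tying a few of the variables above together; `zabbix_agent_server` and `zabbix_agent_serveractive` come from the Minimal Configuration section removed above and from the role defaults, so verify the names for your release.

```yaml
# A minimal sketch, assuming the community.zabbix collection is installed and the
# play targets the hosts that should run the agent.
- hosts: all
  roles:
    - role: community.zabbix.zabbix_agent
      vars:
        zabbix_agent_version: 6.4              # major.minor; omit to install the highest supported version
        zabbix_agent_version_minor: "*"        # RedHat family only, latest available minor release
        zabbix_agent_server: 192.0.2.10        # assumption: Zabbix server address (see removed Minimal Configuration)
        zabbix_agent_serveractive: 192.0.2.10  # only needed when active checks are used
```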
### SElinux
* `zabbix_selinux`: Default: `False`. Enables an SELinux policy so that the server will run.
+* `selinux_allow_zabbix_run_sudo`: Default: `False`. Enable Zabbix root access on system.
### Zabbix Agent
-* `zabbix_agent_ip`: The IP address of the host. When not provided, it will be determined via the `ansible_default_ipv4` fact.
* `zabbix_agent2`: Default: `False`. When you want to install the `Zabbix Agent2` instead of the "old" `Zabbix Agent`.
+* `zabbix_agent_apt_priority`: Add a weight (`Pin-Priority`) for the APT repository.
+* `zabbix_agent_chassis`: Default: `false`. When set to `true`, it will give Zabbix Agent access to the Linux DMI table allowing system.hw.chassis info to populate.
+* `zabbix_agent_conf_mode`: Default: `0644`. The "mode" for the Zabbix configuration file.
+* `zabbix_agent_dont_detect_ip`: Default `false`. When set to `true`, the role won't detect available IP addresses on the host, and the Python module `netaddr` does not need to be installed.
+* `zabbix_agent_get_package`: The name of the zabbix-get package. Default: `zabbix-get`.
+* `zabbix_agent_include_mode`: The mode for the `zabbix_agent_include` directory.
+* `zabbix_agent_install_agent_only`: Only install the Zabbix Agent and not the `zabbix-sender` and `zabbix-get` packages. Default: `False`
* `zabbix_agent_listeninterface`: Interface zabbix-agent listens on. Leave blank for all.
* `zabbix_agent_package_remove`: If `zabbix_agent2: True` and you want to remove the old installation. Default: `False`.
-* `zabbix_agent_package`: The name of the zabbix-agent package. Default: `zabbix-agent`. In case for EPEL, it is automatically renamed.
-* `zabbix_sender_package`: The name of the zabbix-sender package. Default: `zabbix-sender`. In case for EPEL, it is automatically renamed.
-* `zabbix_get_package`: The name of the zabbix-get package. Default: `zabbix-get`. In case for EPEL, it is automatically renamed.
-* `zabbix_agent_package_state`: If Zabbix-agent needs to be `present` or `latest`.
-* `zabbix_agent_interfaces`: A list that configured the interfaces you can use when configuring via API.
-* `zabbix_agent_install_agent_only`: Only install the Zabbix Agent and not the `zabbix-sender` and `zabbix-get` packages. Default: `False`
+* `zabbix_agent_package_state`: If Zabbix-agent needs to be `present` (default) or `latest`.
+* `zabbix_agent_package`: The name of the zabbix-agent package. Default: `zabbix-agent` if `zabbix_agent2` is `false` and `zabbix-agent2` if `true`.
+* `zabbix_agent_sender_package`: The name of the zabbix-sender package. Default: `zabbix-sender`.
* `zabbix_agent_userparameters`: Default: `[]`. List of userparameter names and scripts (if any). A detailed description is given in the [Deploying Userparameters](#deploying-userparameters) section; see also the sketch after this list.
- * `name`: Userparameter name (should be the same with userparameter template file name)
- * `scripts_dir`: Directory name of the custom scripts needed for userparameters
-* `zabbix_agent_userparameters_templates_src`: indicates the relative path (from `templates/`) where userparameter templates are searched
+ * `name`: Userparameter name (should be the same with userparameter template file name)
+ * `scripts_dir`: Directory name of the custom scripts needed for userparameters
* `zabbix_agent_userparameters_scripts_src`: indicates the relative path (from `files/`) where userparameter scripts are searched
-* `zabbix_agent_runas_user`: Drop privileges to a specific, existing user on the system. Only has effect if run as 'root' and AllowRoot is disabled.
-* `zabbix_agent_become_on_localhost`: Default: `True`. Set to `False` if you don't need to elevate privileges on localhost to install packages locally with pip.
-* `zabbix_agent_apt_priority`: Add a weight (`Pin-Priority`) for the APT repository.
-* `zabbix_agent_conf_mode`: Default: `0644`. The "mode" for the Zabbix configuration file.
-* `zabbix_agent_dont_detect_ip`: Default `false`. When set to `true`, it won't detect available ip addresses on the host and no need for the Python module `netaddr` to be installed.
-* `zabbix_agent_chassis`: Default: `false`. When set to `true`, it will give Zabbix Agent access to the Linux DMI table allowing system.hw.chassis info to populate.
-
-### Zabbix Agent vs Zabbix Agent 2 configuration
-
-The following provides an overview of all the properties that can be set in the Zabbix Agent configuration file. When `(2)` is used in the name of the property, like `zabbix_agent(2)_pidfile`, it will show that you can configure `zabbix_agent_pidfile` for the Zabbix Agent configuration file and `zabbix_agent2_pidfile` for the Zabbix Agent 2 configuration file.
-
-Otherwise it just for the Zabbix Agent or for the Zabbix Agent 2.
-
-* `zabbix_agent(2)_server`: The ip address for the zabbix-server or zabbix-proxy.
-* `zabbix_agent(2)_serveractive`: The ip address for the zabbix-server or zabbix-proxy for active checks.
-* `zabbix_agent(2)_allow_key`: list of AllowKey configurations.
-* `zabbix_agent(2)_deny_key`: list of DenyKey configurations.
-* `zabbix_agent(2)_pidfile`: name of pid file.
-* `zabbix_agent(2)_logfile`: name of log file.
-* `zabbix_agent(2)_logfilesize`: maximum size of log file in mb.
-* `zabbix_agent(2)_additional_include`: A list of additional complete paths to include in configuration
-* `zabbix_agent(2)_logtype`: Specifies where log messages are written to
-* `zabbix_agent(2)_debuglevel`: specifies debug level
-* `zabbix_agent(2)_sourceip`: source ip address for outgoing connections.
-* `zabbix_agent_enableremotecommands`: whether remote commands from zabbix server are allowed.
-* `zabbix_agent_logremotecommands`: enable logging of executed shell commands as warnings.
-* `zabbix_agent(2)_listenport`: agent will listen on this port for connections from the server.
-* `zabbix_agent2_statusport`: Agent will listen on this port for HTTP status requests.
-* `zabbix_agent(2)_listenip`: list of comma delimited ip addresses that the agent should listen on.
-* `zabbix_agent_startagents`: number of pre-forked instances of zabbix_agentd that process passive checks.
-* `zabbix_agent(2)_hostname`: unique, case sensitive hostname.
-* `zabbix_agent(2)_hostnameitem`: item used for generating hostname if it is undefined.
-* `zabbix_agent(2)_hostmetadata`: optional parameter that defines host metadata.
-* `zabbix_agent(2)_hostmetadataitem`: optional parameter that defines an item used for getting the metadata.
-* `zabbix_agent(2)_refreshactivechecks`: how often list of active checks is refreshed, in seconds.
-* `zabbix_agent(2)_buffersend`: do not keep data longer than n seconds in buffer.
-* `zabbix_agent(2)_buffersize`: maximum number of values in a memory buffer. the agent will send all collected data to zabbix server or proxy if the buffer is full.
-* `zabbix_agent2_enablepersistentbuffer`: 0 - disabled, in-memory buffer is used (default); 1 - use persistent buffer
-* `zabbix_agent2_persistentbufferperiod`: Zabbix Agent2 will keep data for this time period in case of no connectivity with Zabbix server or proxy. Older data will be lost. Log data will be preserved.
-* `zabbix_agent2_persistentbufferfile`: Zabbix Agent2 will keep SQLite database in this file * n is valid if `EnablePersistentBuffer=1`
-* `zabbix_agent_maxlinespersecond`: maximum number of new lines the agent will send per second to zabbix server or proxy processing 'log' and 'logrt' active checks.
-* `zabbix_agent_allowroot`: allow the agent to run as 'root'. if disabled and the agent is started by 'root', the agent will try to switch to user 'zabbix' instead. has no effect if started under a regular user.
-* `zabbix_agent(2)_zabbix_alias`: sets an alias for parameter. it can be useful to substitute long and complex parameter name with a smaller and simpler one. Can be both a string as an list.
-* `zabbix_agent(2)_timeout`: spend no more than timeout seconds on processing
-* `zabbix_agent(2)_include`: you may include individual files or all files in a directory in the configuration file.
-* `zabbix_agent(2)_include_pattern`: Optional file pattern used for included files.
-* `zabbix_agent(2)_include_mode`: The mode for the directory mentioned above.
-* `zabbix_agent(2)_unsafeuserparameters`: allow all characters to be passed in arguments to user-defined parameters.
-* `zabbix_agent_loadmodulepath`: Full path to location of agent modules.
-* `zabbix_agent_loadmodule`: Module to load at agent startup. Modules are used to extend functionality of the agent.
-* `zabbix_agent2_controlsocket`: The control socket, used to send runtime commands with '-R' option.
-* `zabbix_agent_allowroot`: Allow the agent to run as 'root'. 0 - do not allow, 1 - allow
-* `zabbix_agent2_plugins`: A list containing plugin configuration.
-* `zabbix_agent(2)_listenbacklog`: The maximum number of pending connections in the queue.
+* `zabbix_agent_userparameters_templates_src`: indicates the relative path (from `templates/`) where userparameter templates are searched
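+
+As a minimal sketch, assuming a hypothetical `mysql` userparameter, `zabbix_agent_userparameters` could be set as follows (the name and script directory are illustrative, not role defaults):
+
+```yaml
+zabbix_agent_userparameters:
+  # The role looks for a template named after this entry under templates/
+  # (see zabbix_agent_userparameters_templates_src) and for the custom scripts
+  # under files/ (see zabbix_agent_userparameters_scripts_src).
+  - name: mysql
+    scripts_dir: mysql
+```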
## TLS Specific configuration
+* `zabbix_agent_tlspsk_auto`: Enables auto generation and storing of individual pre-shared keys and identities on clients. Is false by default. If set to true and if `zabbix_agent_tlspskfile` and `zabbix_agent_tlspsk_secret` are undefined, it generates the files `/etc/zabbix/tls_psk_auto.identity` and `/etc/zabbix/tls_psk_auto.secret`, which are populated by values automatically (identity is set to hostname, underscore and 4 random alphanumeric digits; secret is 64 random alphanumeric digits) in such a way that the values are generated once and are never overwritten.
-These variables are specific for Zabbix 3.0 and higher. When `(2)` is used in the name of the property, like `zabbix_agent(2)_tlsconnect`, it will show that you can configure `zabbix_agent_tlsconnect` for the Zabbix Agent configuration file and `zabbix_agent2_tlsconnect` for the Zabbix Agent 2 configuration file.
-
-* `zabbix_agent(2)_tlsconnect`: How the agent should connect to server or proxy. Used for active checks.
+* `zabbix_agent_tlsconnect`: How the agent should connect to server or proxy. Used for active checks.
Possible values:
* unencrypted
* psk
* cert
-* `zabbix_agent(2)_tlsaccept`: What incoming connections to accept.
+* `zabbix_agent_tlsaccept`: What incoming connections to accept.
Possible values:
* unencrypted
* psk
* cert
-* `zabbix_agent(2)_tlscafile`: Full pathname of a file containing the top-level CA(s) certificates for peer certificate verification.
-* `zabbix_agent(2)_tlscrlfile`: Full pathname of a file containing revoked certificates.
-* `zabbix_agent(2)_tlsservercertissuer`: Allowed server certificate issuer.
-* `zabbix_agent(2)_tlsservercertsubject`: Allowed server certificate subject.
-* `zabbix_agent(2)_tlscertfile`: Full pathname of a file containing the agent certificate or certificate chain.
-* `zabbix_agent(2)_tlskeyfile`: Full pathname of a file containing the agent private key.
-* `zabbix_agent(2)_tlspskidentity`: Unique, case sensitive string used to identify the pre-shared key.
-* `zabbix_agent(2)_tlspskidentity_file`: Full pathname of a file containing the pre-shared key identity.
-* `zabbix_agent(2)_tlspskfile`: Full pathname of a file containing the pre-shared key.
-* `zabbix_agent(2)_tlspsk_secret`: The pre-shared secret key that should be placed in the file configured with `agent_tlspskfile`.
-* `zabbix_agent(2)_tlspsk_auto`: Enables auto generation and storing of individual pre-shared keys and identities on clients. Is false by default. If set to true and if `zabbix_agent_tlspskfile` and `zabbix_agent_tlspsk_secret` are undefined, it generates the files `/etc/zabbix/tls_psk_auto.identity` and `/etc/zabbix/tls_psk_auto.secret`, which are populated by values automatically (identity is set to hostname, underscore and 4 random alphanumeric digits; secret is 64 random alphanumeric digits) in such a way that the values are generated once and are never overwritten.
+
+* `zabbix_agent_tlscafile`: Full pathname of a file containing the top-level CA(s) certificates for peer certificate verification.
+* `zabbix_agent_tlscertfile`: Full pathname of a file containing the agent certificate or certificate chain.
+* `zabbix_agent_tlscrlfile`: Full pathname of a file containing revoked certificates.
+* `zabbix_agent_tlskeyfile`: Full pathname of a file containing the agent private key.
+* `zabbix_agent_tlspskfile`: Full pathname of a file containing the pre-shared key.
+* `zabbix_agent_tlspskidentity`: Unique, case sensitive string used to identify the pre-shared key.
+* `zabbix_agent_tlspskidentity_file`: Full pathname of a file containing the pre-shared key identity.
+* `zabbix_agent_tlspsk_secret`: The pre-shared secret key for the agent.
+* `zabbix_agent_tlsservercertissuer`: Allowed server certificate issuer.
+* `zabbix_agent_tlsservercertsubject`: Allowed server certificate subject.
+* `zabbix_agent_tls_subject`: The subject of the TLS certificate.
+* `zabbix_agent_visible_hostname`: Configure Zabbix visible name inside Zabbix web UI for the node.
The results are stored in the Ansible variables `zabbix_agent_tlspskidentity` and `zabbix_agent_tlspsk_secret`, so that they may be used later in the code, for example with [zabbix_host](https://docs.ansible.com/ansible/latest/collections/community/zabbix/zabbix_host_module.html) to configure the Zabbix server or with `debug: msg:` to display them to the user.
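+
+A hedged sketch of how those variables could be consumed after the role has run, for example by simply echoing them back (the task below is illustrative only):
+
+```yaml
+- name: Show the PSK identity and secret generated by zabbix_agent_tlspsk_auto
+  ansible.builtin.debug:
+    msg: "identity={{ zabbix_agent_tlspskidentity }} secret={{ zabbix_agent_tlspsk_secret }}"
+```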
@@ -250,51 +184,49 @@ These variables need to be overridden when you want to make use of the Zabbix AP
Host encryption configuration will be set to match agent configuration.
-* `zabbix_api_server_host`: The IP or hostname/FQDN of Zabbix server. Example: zabbix.example.com
-* `zabbix_api_use_ssl`: Is SSL required to connect to the Zabbix API server? Default: `false`
-* `zabbix_api_server_port`: 80 if `zabbix_api_use_ssl` is `false` and 443 if `true` (Default) TCP port to use to connect to Zabbix server. Example: 8080
-* `zabbix_api_login_user`: Username of user which has API access.
-* `zabbix_api_login_pass`: Password for the user which has API access.
-* `zabbix_api_http_user`: The http user to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
-* `zabbix_api_http_password`: The http password to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
-* `zabbix_api_validate_certs`: yes (Default) if we need to validate tls certificates of the API. Use `no` in case self-signed certificates are used.
-* `zabbix_api_timeout`: How many seconds to wait for API response (default 30s).
-* `zabbix_api_create_hosts`: Default: `False`. When you want to enable the Zabbix API to create/delete the host. This has to be set to `True` if you want to make use of `zabbix_agent_host_state`.
-* `zabbix_api_create_hostgroup`: When you want to enable the Zabbix API to create/delete the hostgroups. This has to be set to `True` if you want to make use of `zabbix_agent_hostgroups_state`.Default: `False`
-* `ansible_zabbix_url_path`: URL path if Zabbix WebUI running on non-default (zabbix) path, e.g. if http://<FQDN>/zabbixeu then set to `zabbixeu`
-* `zabbix_agent_hostgroups_state`: present (Default) if the hostgroup needs to be created or absent if you want to delete it. This only works when `zabbix_api_create_hostgroup` is set to `True`.
-* `zabbix_host_status`: enabled (Default) when host in monitored, disabled when host is disabled for monitoring.
+* `zabbix_agent_description`: Description of the host in Zabbix.
* `zabbix_agent_host_state`: present (Default) if the host needs to be created or absent if you want to delete it. This only works when `zabbix_api_create_hosts` is set to `True`.
* `zabbix_agent_host_update`: yes (Default) if the host should be updated if already present. This only works when `zabbix_api_create_hosts` is set to `True`.
-* `zabbix_useuip`: 1 if connection to zabbix-agent is made via ip, 0 for fqdn.
-* `zabbix_host_groups`: A list of hostgroups which this host belongs to.
-* `zabbix_agent_link_templates`: A list of templates which needs to be link to this host. The templates should exist.
+* `zabbix_agent_interfaces`: A list of interfaces and their configurations you can use when configuring via API.
+* `zabbix_agent_inventory_mode`: Configure Zabbix inventory mode. Needed for building inventory data, either manually when configuring a host or automatically by using some automatic population options. This has to be set to `automatic` if you want inventory data to be built automatically. Default `disabled`
+* `zabbix_agent_inventory_zabbix`: Adds Facts for a zabbix inventory. Default `{}`
+* `zabbix_agent_ip`: The IP address of the host. When not provided, it will be determined via the `ansible_default_ipv4` fact.
+* `zabbix_agent_link_templates`: A list of templates which need to be linked to this host. The templates should exist. Default: "Templated Linux by Zabbix agent"
* `zabbix_agent_macros`: A list with macro_key and macro_value for creating hostmacro's.
+* `zabbix_agent_proxy`: The name of the Zabbix proxy (if used). Default `null`
* `zabbix_agent_tags`: A list with tag and (optionally) value for creating host tags.
-* `zabbix_agent_inventory_mode`: Configure Zabbix inventory mode. Needed for building inventory data, manually when configuring a host or automatically by using some automatic population options. This has to be set to `automatic` if you want to make automatically building inventory data.
-* `zabbix_agent_visible_hostname` : Configure Zabbix visible name inside Zabbix web UI for the node.
-* `zabbix_agent_description`: Description of the host in Zabbix.
-* `zabbix_agent_inventory_zabbix`: Adds Facts for a zabbix inventory
+* `zabbix_api_create_hostgroup`: When you want to enable the Zabbix API to create/delete the hostgroups. Default: `False`
+* `zabbix_api_create_hosts`: Default: `False`. When you want to enable the Zabbix API to create/delete the host. This has to be set to `True` if you want to make use of `zabbix_agent_host_state`.
+* `zabbix_api_http_password`: The http password to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
+* `zabbix_api_http_user`: The http user to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
+* `zabbix_api_login_pass`: Password for the user which has API access.
+* `zabbix_api_login_user`: Username of user which has API access.
+* `zabbix_api_server_host`: The IP or hostname/FQDN of Zabbix server. Example: zabbix.example.com
+* `zabbix_api_server_port`: TCP port to use to connect to the Zabbix server. Default: 80 if `zabbix_api_use_ssl` is `false`, 443 if `true`. Example: 8080
+* `zabbix_api_use_ssl`: Is SSL required to connect to the Zabbix API server? Default: `false`
+* `zabbix_api_validate_certs`: `True` if we need to validate tls certificates of the API. Use `False` in case self-signed certificates are used. Default: `False`
+* `zabbix_host_groups`: A list of hostgroups which this host belongs to. Default: "Linux Servers"
+* `zabbix_host_status`: enabled (Default) when the host is monitored, disabled when the host is disabled for monitoring.
+* `zabbix_useuip`: 1 if connection to zabbix-agent is made via ip, 0 for fqdn.
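+
+A minimal sketch of letting the role register the host through the Zabbix API with the variables above; the server hostname, credentials and vault variable are placeholders, while the group and template values restate the documented defaults:
+
+```yaml
+zabbix_api_create_hosts: true
+zabbix_api_server_host: zabbix.example.com   # placeholder FQDN
+zabbix_api_use_ssl: true
+zabbix_api_login_user: Admin
+zabbix_api_login_pass: "{{ vault_zabbix_api_password }}"   # hypothetical vault variable
+zabbix_agent_host_state: present
+zabbix_host_groups:
+  - Linux Servers
+zabbix_agent_link_templates:
+  - Templated Linux by Zabbix agent
+```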
## Windows Variables
**NOTE**
-_Supporting Windows is a best effort (We don't have the possibility to either test/verify changes on the various amount of available Windows instances). PRs specific to Windows will almost immediately be merged, unless someone is able to provide a Windows test mechanism via Travis for Pull Requests._
-When `(2)` is used in the name of the property, like `zabbix_agent(2)_win_logfile`, it will show that you can configure `zabbix_agent_win_logfile` for the Zabbix Agent configuration file and `zabbix_agent2_win_logfile` for the Zabbix Agent 2 configuration file.
+Supporting Windows is a best effort (we are not able to test/verify changes on the wide variety of available Windows instances). PRs specific to Windows will almost immediately be merged, unless someone is able to provide a Windows test mechanism via Travis for Pull Requests.
+Properties such as `zabbix_agent_win_logfile` configure the Zabbix Agent configuration file, while their `zabbix_agent2_` counterparts, such as `zabbix_agent2_win_logfile`, configure the Zabbix Agent 2 configuration file.
Otherwise it is just for the Zabbix Agent or for the Zabbix Agent 2.
-* `zabbix(2)_win_package`: file name pattern (zip only). This will be used to generate the `zabbix(2)_win_download_link` variable.
-* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix(2)_win_package` and `zabbix(2)_win_download_link` variables. This takes precedence over `zabbix_agent_version`.
-* `zabbix(2)_win_download_link`: The download url to the `win.zip` file.
+* `zabbix_agent_win_include`: The directory in which the Zabbix Agent specific configuration files are stored.
+* `zabbix_agent_win_logfile`: The full path to the logfile for the Zabbix Agent.
+* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix_win_package` and `zabbix_win_download_link` variables. This takes precedence over `zabbix_agent_version`.
+* `zabbix_win_download_link`: The download url to the `win.zip` file.
+* `zabbix_win_firewall_management`: Enable Windows firewall management (add service and port to allow rules). Default: `True`
* `zabbix_win_install_dir`: The directory where Zabbix needs to be installed.
* `zabbix_win_install_dir_conf`: The directory where Zabbix configuration file needs to be installed.
* `zabbix_win_install_dir_bin`: The directory where Zabbix binary file needs to be installed.
-* `zabbix_agent(2)_win_logfile`: The full path to the logfile for the Zabbix Agent.
-* `zabbix_agent_win_include`: The directory in which the Zabbix Agent specific configuration files are stored.
-* `zabbix_agent_win_svc_recovery`: Enable Zabbix Agent service auto-recovery settings.
-* `zabbix_win_firewall_management`: Enable Windows firewall management (add service and port to allow rules). Default: `True`
+* `zabbix_win_package`: file name pattern (zip only). This will be used to generate the `zabbix_win_download_link` variable.
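+
+As a hedged sketch, a Windows host could override a few of these variables as follows; the install path and version are illustrative values, not necessarily the role defaults:
+
+```yaml
+zabbix_win_install_dir: 'C:\Zabbix'   # illustrative path
+zabbix_win_firewall_management: true
+zabbix_version_long: "6.4.12"         # illustrative patch version
+```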
## macOS Variables
@@ -302,8 +234,10 @@ Otherwise it just for the Zabbix Agent or for the Zabbix Agent 2.
_Supporting macOS is a best effort (we are not able to test/verify changes on the wide variety of available macOS instances). PRs specific to macOS will almost immediately be merged, unless someone is able to provide a macOS test mechanism via Travis for Pull Requests._
-* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix_mac_download_link` link.
* `zabbix_mac_download_link`: The download url to the `pkg` file.
+* `zabbix_mac_download_url`: The download url. Default `https://cdn.zabbix.com/zabbix/binaries/stable`
+* `zabbix_mac_package`: The name of the mac install package. Default `zabbix_agent-{{ zabbix_version_long }}-macos-amd64-openssl.pkg`
+* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix_mac_download_link` link.
## Docker Variables
@@ -324,19 +258,19 @@ The following directories are mounted in the Container:
Keep in mind that using the Zabbix Agent in a Container requires changes to the Zabbix Template for Linux as `/proc`, `/sys` and `/etc` are mounted in a directory `/hostfs`.
* `zabbix_agent_docker`: Default: `False`. When set to `True`, it will install a Docker container on the target host instead of installation on the target.
-* `zabbix_agent_docker_state`: Default: `started`
-* `zabbix_agent_docker_name`: The name of the Container. Default: `zabbix-agent`
+* `zabbix_agent_docker_env`: A dict with all environment variables that needs to be set for the Container.
* `zabbix_agent_docker_image`: The name of the Docker image. Default: `zabbix/zabbix-agent`
* `zabbix_agent_docker_image_tag`: The tag of the Docker image.
-* `zabbix_agent_docker_user_gid`: The group id of the zabbix user in the Container.
-* `zabbix_agent_docker_user_uid`: The user id of the zabbix user in the Container.
+* `zabbix_agent_docker_name`: The name of the Container. Default: `zabbix-agent`
* `zabbix_agent_docker_network_mode`: The name of the (Docker) network that should be used for the Container. Default `host`.
* `zabbix_agent_docker_restart_policy`: Default: `unless-stopped`. The restart policy of the Container.
-* `zabbix_agent_docker_privileged`: When set to `True`, the container is running in privileged mode.
-* `zabbix_agent_docker_ports`: A list with `<PORT>:<PORT>` values to open ports to the container.
+* `zabbix_agent_docker_ports`: A list with `<PORT>:<PORT>` values to open ports to the container. Default `10050`
+* `zabbix_agent_docker_privileged`: When set to `True`, the container is running in privileged mode. Default `false`
* `zabbix_agent_docker_security_opts`: A list with available security options.
+* `zabbix_agent_docker_state`: Default: `started`
+* `zabbix_agent_docker_user_gid`: The group id of the zabbix user in the Container.
+* `zabbix_agent_docker_user_uid`: The user id of the zabbix user in the Container.
* `zabbix_agent_docker_volumes`: A list with all directories that need to be available in the Container.
-* `zabbix_agent_docker_env`: A dict with all environment variables that needs to be set for the Container.
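+
+A minimal sketch of running the agent as a container with the variables above; only the image tag is a placeholder, the other values restate the documented defaults:
+
+```yaml
+zabbix_agent_docker: true
+zabbix_agent_docker_image: zabbix/zabbix-agent
+zabbix_agent_docker_image_tag: latest        # placeholder tag
+zabbix_agent_docker_network_mode: host
+zabbix_agent_docker_restart_policy: unless-stopped
+```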
## IPMI variables
@@ -345,6 +279,77 @@ Keep in mind that using the Zabbix Agent in a Container requires changes to the
* `zabbix_agent_ipmi_privilege`: IPMI privilege level. Possible values are 1 (callback), 2 (user), 3 (operator), 4 (admin), 5 (OEM), with 2 being the API default.
* `zabbix_agent_ipmi_username`: IPMI username.
+## Configuration Variables
+The following table lists all variables that are exposed to modify the configuration of the zabbix_agent.conf file. Specific details of each variable can be found in the Zabbix documentation.
+
+**NOTE**: Only variables with a default value appear in the defaults file, all others must be added.
+
+| Zabbix Name | Variable Name | Default Value | Notes |
+|-----------|------------------|--------|--------|
+| Alias | zabbix_agent_aliases | | Can be a string or list |
+| AllowKey | zabbix_agent_allowkeys | | |
+| AllowRoot | zabbix_agent_allowroot | | Linux Systems Only |
+| BufferSend | zabbix_agent_buffersend | 5 | |
+| BufferSize | zabbix_agent_buffersize | 100 | |
+| ControlSocket | zabbix_agent_controlsocket | /tmp/agent.sock | Agent 2 Only |
+| DebugLevel | zabbix_agent_debuglevel | 3 | |
+| DenyKey | zabbix_agent_denykeys | | Can be a string or a list |
+| EnableRemoteCommands | zabbix_agent_enableremotecommands | 0 | Agent Only |
+| ForceActiveChecksOnStart | zabbix_agent_forceactivechecksonstart | | Agent 2 Only |
+| HeartbeatFrequency | zabbix_agent_heartbeatfrequency | 60 | Version >= 6.2 |
+| HostInterface | zabbix_agent_hostinterface | | |
+| HostInterfaceItem | zabbix_agent_hostinterfaceitem | | |
+| HostMetadata | zabbix_agent_hostmetadata | | |
+| HostMetadataItem | zabbix_agent_hostmetadataitem | | |
+| Hostname | zabbix_agent_hostname | | `{{ inventory_hostname }}` |
+| HostnameItem | zabbix_agent_hostnameitem | | |
+| Include | zabbix_agent_include | /etc/zabbix/`{ agent version specific }`.d | |
+| ListenBacklog | zabbix_agent_listenbacklog | | Agent Only |
+| ListenIP | zabbix_agent_listenip | | list of IPs |
+| ListenPort | zabbix_agent_listenport | 10050 | |
+| LoadModule | zabbix_agent_loadmodule | | Agent On Linux Only |
+| LoadModulePath | zabbix_agent_loadmodulepath | | Agent On Linux Only |
+| LogFile | zabbix_agent_logfile | /var/log/zabbix/`{ agent version specific }`.log | |
+| LogFileSize | zabbix_agent_logfilesize | 100 | |
+| LogRemoteCommands | zabbix_agent_logremotecommands | | Agent Only |
+| LogType | zabbix_agent_logtype | file | |
+| MaxLinesPerSecond | zabbix_agent_maxlinespersecond | | Agent Only |
+| PerfCounter | zabbix_agent_perfcounter | | Agent Only |
+| PerfCounterEn | zabbix_agent_perfcounteren | | Agent Only |
+| PersistentBufferFile | zabbix_agent_persistentbufferfile | | Agent 2 Only |
+| PersistentBufferPeriod | zabbix_agent_persistentbufferperiod | 1h | Agent 2 Only |
+| PidFile | zabbix_agent_pidfile | /var/run/zabbix/`{ agent version specific }`.pid | Linux Systems Only |
+| Plugin | zabbix_agent_plugins | | |
+| PluginSocket | zabbix_agent_pluginsocket | | |
+| PluginTimeout | zabbix_agent_plugintimeout | | |
+| RefreshActiveChecks | zabbix_agent_refreshactivechecks | 120 | |
+| Server | zabbix_agent_server | | |
+| ServerActive | zabbix_agent_serveractive | | |
+| SourceIP | zabbix_agent_sourceip | | |
+| StartAgents | zabbix_agent_startagents | | Agent Only |
+| StatusPort | zabbix_agent_statusport | 9999 | Agent 2 Only |
+| Timeout | zabbix_agent_timeout | 3 | |
+| TLSAccept | zabbix_agent_tlsaccept | | |
+| TLSCAFile | zabbix_agent_tlscafile | /etc/zabbix/tls_psk_auto.secret | |
+| TLSCertFile | zabbix_agent_tlscertfile | | |
+| TLSCipherAll | zabbix_agent_tlscipherall | | Agent on Linux Only |
+| TLSCipherAll13 | zabbix_agent_tlscipherall13 | | Agent on Linux Only |
+| TLSCipherCert | zabbix_agent_tlsciphercert | | Agent on Linux Only |
+| TLSCipherCert13 | zabbix_agent_tlsciphercert13 | | Agent on Linux Only |
+| TLSCipherPSK | zabbix_agent_tlscipherpsk | | Agent on Linux Only |
+| TLSCipherPSK13 | zabbix_agent_tlscipherpsk13 | | Agent on Linux Only |
+| TLSConnect | zabbix_agent_tlsconnect | | |
+| TLSCRLFile | zabbix_agent_tlscrlfile | | |
+| TLSKeyFile | zabbix_agent_tlskeyfile | | |
+| TLSPSKFile | zabbix_agent_tlspskfile | | |
+| TLSPSKIdentity | zabbix_agent_tlspskidentity | | |
+| TLSServerCertIssuer | zabbix_agent_tlsservercertissuer | | |
+| TLSServerCertSubject | zabbix_agent_tlsservercertsubject | | |
+| UnsafeUserParameters | zabbix_agent_unsafeuserparameters | | |
+| User | zabbix_agent_runas_user | | Agent on Linux Only |
+| UserParameter | zabbix_agent_userparamater | 0 | |
+
+
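+As a hedged example, a handful of options from this table could be overridden like so; the server address is a placeholder and the remaining values simply restate the documented defaults:
+
+```yaml
+zabbix_agent_server: 192.0.2.10        # placeholder address
+zabbix_agent_serveractive: 192.0.2.10  # placeholder address
+zabbix_agent_listenport: 10050
+zabbix_agent_timeout: 3
+zabbix_agent_debuglevel: 3
+```
+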
## proxy
When the target host does not have access to the internet, but you do have a proxy available, then the following properties need to be set to download the packages via the proxy:
@@ -369,18 +374,6 @@ There are no dependencies on other roles.
# Example Playbook
-## zabbix_agent2_plugins
-
-Specifically for the Zabbix Agent 2, a list of extra plugins can be configured. The following provides an overview of configuring the `SystemRun` plugin by setting the `LogRemoteCommands` to `0`:
-
-```yaml
-zabbix_agent2_plugins:
- - name: SystemRun
- options:
- - parameter: LogRemoteCommands
- value: 0
-```
-
In the `zabbix_agent2.conf` an entry will be created with the following content:
```
diff --git a/ansible_collections/community/zabbix/docs/ZABBIX_JAVAGATEWAY_ROLE.md b/ansible_collections/community/zabbix/docs/ZABBIX_JAVAGATEWAY_ROLE.md
index 47092a6a0..0de7af814 100644
--- a/ansible_collections/community/zabbix/docs/ZABBIX_JAVAGATEWAY_ROLE.md
+++ b/ansible_collections/community/zabbix/docs/ZABBIX_JAVAGATEWAY_ROLE.md
@@ -39,6 +39,7 @@ See the following list of supported Operating systems with the Zabbix releases.
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
| Red Hat Fam 7 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | V | V | V |
diff --git a/ansible_collections/community/zabbix/docs/ZABBIX_PROXY_ROLE.md b/ansible_collections/community/zabbix/docs/ZABBIX_PROXY_ROLE.md
index ee558c8b7..703ec6059 100644
--- a/ansible_collections/community/zabbix/docs/ZABBIX_PROXY_ROLE.md
+++ b/ansible_collections/community/zabbix/docs/ZABBIX_PROXY_ROLE.md
@@ -82,6 +82,7 @@ See the following list of supported Operating systems with the Zabbix releases.
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
| Red Hat Fam 7 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | V | V | V |
diff --git a/ansible_collections/community/zabbix/docs/ZABBIX_SERVER_ROLE.md b/ansible_collections/community/zabbix/docs/ZABBIX_SERVER_ROLE.md
index 9557281c3..20f462c48 100644
--- a/ansible_collections/community/zabbix/docs/ZABBIX_SERVER_ROLE.md
+++ b/ansible_collections/community/zabbix/docs/ZABBIX_SERVER_ROLE.md
@@ -79,6 +79,7 @@ See the following list of supported Operating systems with the Zabbix releases:
|---------------------|-----|-----|-----|
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | | | V |
diff --git a/ansible_collections/community/zabbix/docs/ZABBIX_WEB_ROLE.md b/ansible_collections/community/zabbix/docs/ZABBIX_WEB_ROLE.md
index aac6f9dc2..c1875f208 100644
--- a/ansible_collections/community/zabbix/docs/ZABBIX_WEB_ROLE.md
+++ b/ansible_collections/community/zabbix/docs/ZABBIX_WEB_ROLE.md
@@ -59,6 +59,7 @@ See the following list of supported Operating Systems with the Zabbix releases.
|---------------------|-----|-----|-----|
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | | | V |
diff --git a/ansible_collections/community/zabbix/molecule/requirements.txt b/ansible_collections/community/zabbix/molecule/requirements.txt
index 9ccb5e157..814a46a91 100644
--- a/ansible_collections/community/zabbix/molecule/requirements.txt
+++ b/ansible_collections/community/zabbix/molecule/requirements.txt
@@ -1,7 +1,7 @@
# Install CI dependencies for the Zabbix Roles
-ansible==7.0.0
+#ansible==8.0.0 # commented out to avoid installing collections in site-packages/ansible_collections
ansible-compat==3.0.0
-ansible-core==2.14.2
+ansible-core==2.15.11
docker==6.1.3
molecule<5
molecule-docker @ git+https://github.com/ansible-community/molecule-docker@main
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/common/molecule.yml b/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/common/molecule.yml
index a48b6ae60..4666f0192 100644
--- a/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/common/molecule.yml
+++ b/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/common/molecule.yml
@@ -28,8 +28,6 @@ provisioner:
inventory:
group_vars:
all:
- zabbix_agent_src_reinstall: false
- zabbix_install_pip_packages: false
zabbix_agent_server: 192.168.3.33
zabbix_agent_serveractive: 192.168.3.33
zabbix_agent_listenip: 0.0.0.0
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/molecule/agent2/molecule.yml b/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/molecule/agent2/molecule.yml
index 76edc902e..3d28d94c9 100644
--- a/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/molecule/agent2/molecule.yml
+++ b/ansible_collections/community/zabbix/molecule/zabbix_agent_tests/molecule/agent2/molecule.yml
@@ -9,6 +9,7 @@ provisioner:
zabbix_agent2: true
zabbix_agent2_tlsconnect: psk
zabbix_agent2_tlsaccept: psk
+ zabbix_agent2_tlspsk_auto: True
zabbix_agent2_tlspskidentity: my_Identity
zabbix_agent2_tlspskfile: /data/certs/zabbix.psk
zabbix_agent2_tlspsk_secret: 97defd6bd126d5ba7fa5f296595f82eac905d5eda270207a580ab7c0cb9e8eab
@@ -16,4 +17,4 @@ provisioner:
- name: SystemRun
options:
- parameter: LogRemoteCommands
- value: 0 \ No newline at end of file
+ value: 0
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.debian.j2 b/ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.debian.j2
new file mode 100644
index 000000000..b3d90f2a5
--- /dev/null
+++ b/ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.debian.j2
@@ -0,0 +1,3 @@
+FROM {{ item.base_image }}
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y python3-pip systemd-sysv
+CMD ["/sbin/init"]
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.redhat.j2 b/ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.redhat.j2
new file mode 100644
index 000000000..dd48ee3b7
--- /dev/null
+++ b/ansible_collections/community/zabbix/molecule/zabbix_proxy/Dockerfile.redhat.j2
@@ -0,0 +1,3 @@
+FROM {{ item.base_image }}
+RUN yum install -y python3-pip
+CMD ["/sbin/init"]
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_proxy/molecule.yml b/ansible_collections/community/zabbix/molecule/zabbix_proxy/molecule.yml
index 4eadd60ab..0002a0a49 100644
--- a/ansible_collections/community/zabbix/molecule/zabbix_proxy/molecule.yml
+++ b/ansible_collections/community/zabbix/molecule/zabbix_proxy/molecule.yml
@@ -3,10 +3,11 @@ driver:
name: docker
platforms:
- name: zabbix-proxy-${MY_MOLECULE_VERSION:-v64}-${MY_MOLECULE_DATABASE:-mysql}-${MY_MOLECULE_CONTAINER:-rockylinux8}
- image: geerlingguy/docker-${MY_MOLECULE_IMAGE:-rockylinux8}-ansible:latest
+ dockerfile: Dockerfile.${MY_MOLECULE_OS_FAMILY:-redhat}.j2
+ base_image: ${MY_MOLECULE_IMAGE:-rockylinux/rockylinux:8-ubi-init}
+ image: ${MY_MOLECULE_CONTAINER:-rockylinux8}
privileged: true
- pre_build_image: true
- command: ${MOLECULE_DOCKER_COMMAND:-""}
+ command: /sbin/init
networks:
- name: zabbix
volumes:
@@ -22,10 +23,8 @@ provisioner:
ANSIBLE_ROLES_PATH: $HOME/.ansible/collections/ansible_collections/community/zabbix/roles
inventory:
group_vars:
- python3:
+ all:
ansible_python_interpreter: /usr/bin/python3
- python:
- ansible_python_interpreter: /usr/bin/python
v64:
zabbix_proxy_version: 6.4
v62:
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_proxy/prepare.yml b/ansible_collections/community/zabbix/molecule/zabbix_proxy/prepare.yml
index 1ff492d62..5a3c3139f 100644
--- a/ansible_collections/community/zabbix/molecule/zabbix_proxy/prepare.yml
+++ b/ansible_collections/community/zabbix/molecule/zabbix_proxy/prepare.yml
@@ -95,19 +95,3 @@
dest: /etc/sudoers
line: "Defaults !requiretty"
state: present
-
- - name: "Make sure the docs can be installed. (RedHat)"
- ansible.builtin.lineinfile:
- dest: /etc/yum.conf
- line: "tsflags=nodocs"
- state: absent
- when:
- - ansible_os_family == 'RedHat'
-
- - name: "Make sure the docs can be installed. (Debian)"
- ansible.builtin.lineinfile:
- path: /etc/dpkg/dpkg.cfg.d/excludes
- state: absent
- regexp: "path-exclude=/usr/share/doc/*"
- when:
- - ansible_os_family != 'RedHat'
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_server/molecule.yml b/ansible_collections/community/zabbix/molecule/zabbix_server/molecule.yml
index 52df5ac4e..4638adfe2 100644
--- a/ansible_collections/community/zabbix/molecule/zabbix_server/molecule.yml
+++ b/ansible_collections/community/zabbix/molecule/zabbix_server/molecule.yml
@@ -14,7 +14,6 @@ platforms:
groups:
- ${MY_MOLECULE_DATABASE:-mysql}
- ${MY_MOLECULE_VERSION:-v64}
- - ${MY_MOLECULE_INTERPRETER:-python3}
provisioner:
name: ansible
@@ -23,23 +22,18 @@ provisioner:
ANSIBLE_ROLES_PATH: $HOME/.ansible/collections/ansible_collections/community/zabbix/roles
inventory:
group_vars:
- python3:
+ all:
ansible_python_interpreter: /usr/bin/python3
- python:
- ansible_python_interpreter: /usr/bin/python
v64:
zabbix_server_version: 6.4
v62:
zabbix_server_version: 6.2
v60:
zabbix_server_version: 6.0
- v50:
- zabbix_server_version: 5.0
mysql:
zabbix_server_dbname: zabbix
zabbix_server_dbuser: zabbix-dbuser
zabbix_server_database: mysql
- zabbix_server_dbport: 3306
zabbix_server_dbhost: "{{ inventory_hostname }}-db"
zabbix_server_dbhost_run_install: false
zabbix_server_privileged_host: "%"
@@ -49,7 +43,6 @@ provisioner:
zabbix_server_mysql_login_port: 3306
pgsql:
zabbix_server_database: pgsql
- zabbix_server_dbport: 5432
zabbix_server_dbhost: "{{ inventory_hostname }}-db"
zabbix_server_dbhost_run_install: false
zabbix_server_pgsql_login_host: "{{ inventory_hostname }}-db"
diff --git a/ansible_collections/community/zabbix/molecule/zabbix_web/prepare.yml b/ansible_collections/community/zabbix/molecule/zabbix_web/prepare.yml
index d3dcc1ad0..7d8142763 100644
--- a/ansible_collections/community/zabbix/molecule/zabbix_web/prepare.yml
+++ b/ansible_collections/community/zabbix/molecule/zabbix_web/prepare.yml
@@ -83,7 +83,7 @@
when: ansible_version.full is version("2.14", "<")
- name: "Apt update"
- ansible.builtin.shell: "apt-get update && echo exit 0 > /usr/sbin/policy-rc.d"
+ ansible.builtin.shell: "apt-get update"
args:
warn: "{{ produce_warn | default(omit) }}"
register: installation_dependencies
@@ -162,7 +162,14 @@
- ansible_distribution_major_version >= '22'
- ansible_os_family == "Debian"
- - name: Set PHP packages (Ubuntu 2204)
+ - name: Set PHP Version (Ubuntu 2404)
+ ansible.builtin.set_fact:
+ __php_default_version_debian: "8.3"
+ when:
+ - ansible_distribution_major_version >= '24'
+ - ansible_os_family == "Debian"
+
+ - name: Set PHP packages (Ubuntu 2204 & 2404)
ansible.builtin.set_fact:
__php_packages:
- php{{ __php_default_version_debian }}-common
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_discoveryrule.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_discoveryrule.py
new file mode 100644
index 000000000..725fd43e3
--- /dev/null
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_discoveryrule.py
@@ -0,0 +1,460 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+---
+module: zabbix_discoveryrule
+short_description: Create/delete Zabbix discovery rules
+description:
+ - Create discoveryrules if they do not exist.
+ - Delete existing discoveryrules if they exist.
+author:
+ - "Andrew Lathrop (@aplathrop)"
+requirements:
+ - "python >= 2.6"
+
+options:
+ state:
+ description:
+ - Create or delete discovery rule.
+ required: false
+ type: str
+ default: "present"
+ choices: [ "present", "absent" ]
+ name:
+ description:
+ - Name of discovery rule to create or delete.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host to add discovery rule to.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template to add discovery rule to.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+ params:
+ description:
+ - Parameters to create/update discovery rule with.
+ - Required if state is "present".
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/discoveryrule/object
+ - Additionally supported parameters are below
+ required: false
+ type: dict
+ suboptions:
+ key:
+ description:
+ - LLD rule key.
+ - Alias for "key_" in API docs
+ required: false
+ type: str
+ interval:
+ description:
+ - Update interval of the LLD rule.
+ - Alias for "delay" in API docs
+ required: false
+ type: str
+ status:
+ description:
+ - Status of the LLD rule.
+ required: false
+ type: str
+ choices: [ "enabled", "disabled" ]
+ enabled:
+ description:
+ - Status of the LLD rule.
+ - Overrides "status" in API docs
+ required: false
+ type: bool
+ type:
+ description:
+ - Type of the LLD rule.
+ - Required if state is "present".
+ required: false
+ type: str
+ choices:
+ - zabbix_agent
+ - zabbix_trapper
+ - simple_check
+ - zabbix_internal
+ - zabbix_agent_active
+ - web_item
+ - external_check
+ - database_monitor
+ - ipmi_agent
+ - ssh_agent
+ - telnet_agent
+ - calculated
+ - jmx_agent
+ - snmp_trap
+ - dependent_item
+ - http_agent
+ - snmp_agent
+ - script
+ preprocessing:
+ description:
+ - discovery rules preprocessing options.
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/discoveryrule/object#lld-rule-preprocessing
+ - Additionally supported parameters are below
+ required: false
+ type: list
+ elements: dict
+ suboptions:
+ type:
+ description:
+ - The preprocessing option type.
+ required: true
+ type: str
+ choices:
+ - xml_xpath
+ - jsonpath
+ - does_not_match_regular_expression
+ - not_match_regex
+ - check_for_error_in_json
+ - check_for_json_error
+ - check_for_error_in_xml
+ - check_for_xml_error
+ - discard_unchanged_with_heartbeat
+ - javascript
+ - prometheus_to_json
+ - csv_to_json
+ - replace
+ - xml_to_json
+ - snmp_walk_value
+ - snmp_walk_to_json
+ error_handler:
+ description:
+ - Action type used in case of preprocessing step failure.
+ required: false
+ type: str
+ choices:
+ - zabbix_server
+ - discard
+ - set_custom_value
+ - set_custom_error_message
+
+extends_documentation_fragment:
+- community.zabbix.zabbix
+'''
+
+EXAMPLES = r'''
+
+# If you want to use Username and Password to be authenticated by Zabbix Server
+- name: Set credentials to access Zabbix Server API
+ ansible.builtin.set_fact:
+ ansible_user: Admin
+ ansible_httpapi_pass: zabbix
+
+# If you want to use API token to be authenticated by Zabbix Server
+# https://www.zabbix.com/documentation/current/en/manual/web_interface/frontend_sections/administration/general#api-tokens
+- name: Set API token
+ ansible.builtin.set_fact:
+ ansible_zabbix_auth_key: 8ec0d52432c15c91fcafe9888500cf9a607f44091ab554dbee860f6b44fac895
+
+# Create LLD rule on example_host
+- name: create rule
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path ,e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_discoveryrule:
+ name: mounted_filesystem_discovery
+ host_name: example_host
+ params:
+ type: zabbix_agent
+ key: 'vfs.fs.discovery'
+ interval: 1h
+ enabled: True
+ state: present
+
+# Create LLD rule on example_template
+- name: create rule
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path ,e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_discoveryrule:
+ name: mounted_filesystem_discovery
+ template_name: example_template
+ params:
+ type: zabbix_agent
+ key: 'vfs.fs.discovery'
+ interval: 1h
+ enabled: True
+ state: present
+
+# Add tags to the existing Zabbix LLD rule
+- name: update rule
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path ,e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_discoveryrule:
+ name: mounted_filesystem_discovery
+ template_name: example_template
+ params:
+ type: zabbix_agent
+ key: 'vfs.fs.discovery'
+ interval: 1h
+ enabled: True
+ tags:
+ - tag: class
+ value: application
+ state: present
+
+# Delete LLD rule
+- name: delete rule
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path ,e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_discoveryrule:
+ name: mounted_filesystem_discovery
+ template_name: example_template
+ state: absent
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+from ansible_collections.community.zabbix.plugins.module_utils.base import ZabbixBase
+import ansible_collections.community.zabbix.plugins.module_utils.helpers as zabbix_utils
+
+
+class Discoveryrule(ZabbixBase):
+ ITEM_TYPES = {'zabbix_agent': 0,
+ 'zabbix_trapper': 2,
+ 'simple_check': 3,
+ 'zabbix_internal': 5,
+ 'zabbix_agent_active': 7,
+ 'web_item': 9,
+ 'external_check': 10,
+ 'database_monitor': 11,
+ 'ipmi_agent': 12,
+ 'ssh_agent': 13,
+ 'telnet_agent': 14,
+ 'calculated': 15,
+ 'jmx_agent': 16,
+ 'snmp_trap': 17,
+ 'dependent_item': 18,
+ 'http_agent': 19,
+ 'snmp_agent': 20,
+ 'script': 21}
+
+ PREPROCESSING_TYPES = {'regex': 5,
+ 'xml_xpath': 11,
+ 'jsonpath': 12,
+ 'does_not_match_regular_expression': 15,
+ 'not_match_regex': 15,
+ 'check_for_error_in_json': 16,
+ 'check_for_json_error': 16,
+ 'check_for_error_in_xml': 17,
+ 'check_for_xml_error': 17,
+ 'discard_unchanged_with_heartbeat': 20,
+ 'javascript': 21,
+ 'prometheus_to_json': 23,
+ 'csv_to_json': 24,
+ 'replace': 25,
+ 'xml_to_json': 27,
+ 'snmp_walk_value': 28,
+ 'snmp_walk_to_json': 29}
+
+ PREPROCESSING_ERROR_HANDLERS = {'zabbix_server': 0,
+ 'discard': 1,
+ 'set_custom_value': 2,
+ 'set_custom_error_message': 3}
+
+ def get_hosts_templates(self, host_name, template_name):
+ if host_name is not None:
+ try:
+ return self._zapi.host.get({"filter": {"host": host_name}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get host: %s" % e)
+ else:
+ try:
+ return self._zapi.template.get({"filter": {"host": template_name}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get template: %s" % e)
+
+ def get_discoveryrules(self, discoveryrule_name, host_name, template_name):
+ if host_name is not None:
+ host = host_name
+ else:
+ host = template_name
+ discoveryrules = []
+ try:
+ discoveryrules = self._zapi.discoveryrule.get({'filter': {'name': discoveryrule_name, 'host': host}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get discovery rules: %s" % e)
+ return discoveryrules
+
+ def sanitize_params(self, name, params):
+ params['name'] = name
+ if 'key' in params:
+ params['key_'] = params['key']
+ params.pop("key")
+ if 'type' in params:
+ item_type_int = self.ITEM_TYPES[params['type']]
+ params['type'] = item_type_int
+ if 'interval' in params:
+ params['delay'] = params['interval']
+ params.pop("interval")
+ if 'enabled' in params:
+ if params['enabled']:
+ params['status'] = 'enabled'
+ else:
+ params['status'] = 'disabled'
+ params.pop("enabled")
+ if 'status' in params:
+ status = params['status']
+ if status == 'enabled':
+ params['status'] = 0
+ elif status == 'disabled':
+ params['status'] = 1
+ else:
+ self._module.fail_json(msg="Status must be 'enabled' or 'disabled', got %s" % status)
+ if 'preprocessing' in params:
+ for param in params['preprocessing']:
+ preprocess_type_int = self.PREPROCESSING_TYPES[param['type']]
+ param['type'] = preprocess_type_int
+ if 'error_handler' in param:
+ error_handler_int = self.PREPROCESSING_ERROR_HANDLERS[param['error_handler']]
+ param['error_handler'] = error_handler_int
+
+ def add_discoveryrule(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.discoveryrule.create(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to create discoveryrule: %s" % e)
+ return results
+
+ def update_discoveryrule(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.discoveryrule.update(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to update discoveryrule: %s" % e)
+ return results
+
+ def check_discoveryrule_changed(self, old_discoveryrule):
+ try:
+ new_discoveryrule = self._zapi.discoveryrule.get({'itemids': "%s" % old_discoveryrule['itemid']})[0]
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get discoveryrule: %s" % e)
+ return old_discoveryrule != new_discoveryrule
+
+ def delete_discoveryrule(self, discoveryrule_id):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.discoveryrule.delete(discoveryrule_id)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to delete discoveryrule: %s" % e)
+ return results
+
+
+def main():
+ argument_spec = zabbix_utils.zabbix_common_argument_spec()
+ argument_spec.update(dict(
+ name=dict(type='str', required=True),
+ host_name=dict(type='str', required=False),
+ template_name=dict(type='str', required=False),
+ params=dict(type='dict', required=False),
+ state=dict(type='str', default="present", choices=['present', 'absent']),
+ ))
+ module = AnsibleModule(
+ argument_spec=argument_spec,
+ required_one_of=[
+ ['host_name', 'template_name']
+ ],
+ mutually_exclusive=[
+ ['host_name', 'template_name']
+ ],
+ required_if=[
+ ['state', 'present', ['params']]
+ ],
+ supports_check_mode=True
+ )
+
+ name = module.params['name']
+ host_name = module.params['host_name']
+ template_name = module.params['template_name']
+ params = module.params['params']
+ state = module.params['state']
+
+ discoveryrule = Discoveryrule(module)
+
+ if state == "absent":
+ discoveryrules = discoveryrule.get_discoveryrules(name, host_name, template_name)
+ if len(discoveryrules) == 0:
+ module.exit_json(changed=False, result="No discoveryrule to delete.")
+ else:
+ delete_ids = []
+ for d in discoveryrules:
+ delete_ids.append(d['itemid'])
+ results = discoveryrule.delete_discoveryrule(delete_ids)
+ module.exit_json(changed=True, result=results)
+
+ elif state == "present":
+ discoveryrule.sanitize_params(name, params)
+ discoveryrules = discoveryrule.get_discoveryrules(name, host_name, template_name)
+ results = []
+ if len(discoveryrules) == 0:
+ hosts_templates = discoveryrule.get_hosts_templates(host_name, template_name)
+ for host_template in hosts_templates:
+ if 'hostid' in host_template:
+ params['hostid'] = host_template['hostid']
+ elif 'templateid' in host_template:
+ params['hostid'] = host_template['templateid']
+ else:
+ module.fail_json(msg="host/template did not return id")
+ results.append(discoveryrule.add_discoveryrule(params))
+ module.exit_json(changed=True, result=results)
+ else:
+ changed = False
+ for d in discoveryrules:
+ params['itemid'] = d['itemid']
+ results.append(discoveryrule.update_discoveryrule(params))
+ changed_rule = discoveryrule.check_discoveryrule_changed(d)
+ if changed_rule:
+ changed = True
+ module.exit_json(changed=changed, result=results)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_host_events_update.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_host_events_update.py
new file mode 100644
index 000000000..ec0025467
--- /dev/null
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_host_events_update.py
@@ -0,0 +1,244 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+---
+module: zabbix_host_events_update
+short_description: update the status of event(s).
+description:
+ - Updates the status of event(s).
+author:
+ - "Andrew Lathrop (@aplathrop)"
+requirements:
+ - "python >= 2.6"
+
+options:
+ params:
+ description:
+ - Parameters to update event(s) with.
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/event/acknowledge
+ - Additionally supported parameters are below
+ required: true
+ type: dict
+ suboptions:
+ action:
+ description:
+ - action to update the event with
+ - Overrides "action" in API docs
+ - Required when I(actions) is not used.
+ - Mutually exclusive with I(actions).
+ required: false
+ type: str
+ choices:
+ - close_problem
+ - close
+ - acknowledge_event
+ - acknowledge
+ - ack
+ - add_message
+ - message
+ - msg
+ - change_severity
+ - severity
+ - unacknowledge_event
+ - unacknowledge
+ - unack
+ - suppress_event
+ - suppress
+ - unsuppress_event
+ - unsuppress
+ - change_event_rank_to_cause
+ - convert_to_cause
+ - change_event_rank_to_symptom
+ - convert_to_symptom
+ actions:
+ description:
+ - actions to update the event with
+ - Overrides "action" in API docs
+ - Required when I(action) is not used.
+ - Mutually exclusive with I(action).
+ required: false
+ type: list
+ elements: str
+ choices:
+ - close_problem
+ - close
+ - acknowledge_event
+ - acknowledge
+ - ack
+ - add_message
+ - message
+ - msg
+ - change_severity
+ - severity
+ - unacknowledge_event
+ - unacknowledge
+ - unack
+ - suppress_event
+ - suppress
+ - unsuppress_event
+ - unsuppress
+ - change_event_rank_to_cause
+ - convert_to_cause
+ - change_event_rank_to_symptom
+ - convert_to_symptom
+ severity:
+ description:
+ - New severity for events.
+ - Overrides "severity" in API docs
+ required: False
+ type: str
+ choices:
+ - not_classified
+ - information
+ - warning
+ - average
+ - high
+ - disaster
+ msg:
+ description:
+ - Text of the message.
+ - Alias for "message" in API docs
+ required: False
+ type: str
+
+extends_documentation_fragment:
+- community.zabbix.zabbix
+'''
+
+EXAMPLES = r'''
+
+# If you want to authenticate to the Zabbix Server with a username and password
+- name: Set credentials to access Zabbix Server API
+ ansible.builtin.set_fact:
+ ansible_user: Admin
+ ansible_httpapi_pass: zabbix
+
+# If you want to authenticate to the Zabbix Server with an API token
+# https://www.zabbix.com/documentation/current/en/manual/web_interface/frontend_sections/administration/general#api-tokens
+- name: Set API token
+ ansible.builtin.set_fact:
+ ansible_zabbix_auth_key: 8ec0d52432c15c91fcafe9888500cf9a607f44091ab554dbee860f6b44fac895
+
+# Acknowledge single event
+- name: ack event
+ community.zabbix.zabbix_host_events_update:
+ params:
+ eventids: 12345
+ actions: ack
+
+- name: ack and close event with a message
+ community.zabbix.zabbix_host_events_update:
+ params:
+ eventids: [12345, 67890]
+ actions: ['ack', 'msg', 'close']
+ msg: 'closed by user'
+
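+# Change the severity of an event and add a note explaining why
+# (illustrative sketch; the event id below is a placeholder)
+- name: change event severity
+  community.zabbix.zabbix_host_events_update:
+    params:
+      eventids: 12345
+      actions: ['msg', 'severity']
+      msg: 'severity raised by automation'
+      severity: high
+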
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+from ansible_collections.community.zabbix.plugins.module_utils.base import ZabbixBase
+import ansible_collections.community.zabbix.plugins.module_utils.helpers as zabbix_utils
+
+
+class Hosteventsupdate(ZabbixBase):
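+    # Zabbix's event.acknowledge "action" parameter is a bit mask; when a list of
+    # actions is supplied, the flags below are summed into a single value.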
+ ACTIONS = {'close_problem': 1,
+ 'close': 1,
+ 'acknowledge_event': 2,
+ 'acknowledge': 2,
+ 'ack': 2,
+ 'add_message': 4,
+ 'message': 4,
+ 'msg': 4,
+ 'change_severity': 8,
+ 'severity': 8,
+ 'unacknowledge_event': 16,
+ 'unacknowledge': 16,
+ 'unack': 16,
+ 'suppress_event': 32,
+ 'suppress': 32,
+ 'unsuppress_event': 64,
+ 'unsuppress': 64,
+ 'change_event_rank_to_cause': 128,
+ 'convert_to_cause': 128,
+ 'change_event_rank_to_symptom': 256,
+ 'convert_to_symptom': 256}
+
+ SEVERITY_TYPES = {'not_classified': 0,
+ 'information': 1,
+ 'warning': 2,
+ 'average': 3,
+ 'high': 4,
+ 'disaster': 5}
+
+ def get_events(self, eventids):
+ try:
+ results = self._zapi.event.get({'eventids': eventids})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get event: %s" % e)
+ return results
+
+ def update_event(self, params):
+ if 'severity' in params:
+ if params['severity'] not in self.SEVERITY_TYPES:
+ self._module.fail_json(msg="%s is not a valid severity type" % params['severity'])
+ severity = self.SEVERITY_TYPES[params['severity']]
+ params['severity'] = severity
+ if 'action' in params:
+ if params['action'] not in self.ACTIONS:
+ self._module.fail_json(msg="%s is not a valid action" % params['action'])
+ action_id = self.ACTIONS[params['action']]
+ elif 'actions' in params:
+ action_id = 0
+ for action in params['actions']:
+ if action not in self.ACTIONS:
+ self._module.fail_json(msg="%s is not a valid action" % action)
+ action_id += self.ACTIONS[action]
+ params.pop('actions')
+ else:
+ self._module.fail_json(msg="params must contain either 'action' or 'actions'")
+ params['action'] = action_id
+ if 'msg' in params:
+ params['message'] = params['msg']
+ params.pop('msg')
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.event.acknowledge(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to update event: %s" % e)
+ return results
+
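+    # Changed detection: re-fetch the events after the update and compare them with
+    # the copies fetched before the acknowledge call.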
+ def check_events_changed(self, eventids, old_events):
+ new_events = self.get_events(eventids)
+ return old_events != new_events
+
+
+def main():
+ argument_spec = zabbix_utils.zabbix_common_argument_spec()
+ argument_spec.update(
+ params=dict(type='dict', required=True))
+ module = AnsibleModule(
+ argument_spec=argument_spec,
+ supports_check_mode=True
+ )
+
+ params = module.params['params']
+
+ hosteventsupdate = Hosteventsupdate(module)
+
+ events = hosteventsupdate.get_events(params['eventids'])
+ results = hosteventsupdate.update_event(params)
+ changed = hosteventsupdate.check_events_changed(params['eventids'], events)
+ module.exit_json(changed=changed, result=results)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_item.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_item.py
new file mode 100644
index 000000000..2d6bb95eb
--- /dev/null
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_item.py
@@ -0,0 +1,579 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+---
+module: zabbix_item
+short_description: Create/delete Zabbix items
+description:
+ - Create items if they do not exist.
+ - Delete existing items if they exist.
+author:
+ - "Andrew Lathrop (@aplathrop)"
+requirements:
+ - "python >= 2.6"
+
+options:
+ state:
+ description:
+ - Create or delete item.
+ required: false
+ type: str
+ default: "present"
+ choices: [ "present", "absent" ]
+ name:
+ description:
+ - Name of item to create or delete.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host to add item to.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template to add item to.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+ params:
+ description:
+ - Parameters to create/update item with.
+ - Required if state is "present".
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/item/object
+ - Additionally supported parameters are below
+ required: false
+ type: dict
+ suboptions:
+ key:
+ description:
+ - Item key.
+ - Alias for "key_" in API docs
+ required: false
+ type: str
+ interval:
+ description:
+ - Update interval of the item.
+ - Alias for "delay" in API docs
+ required: false
+ type: str
+ status:
+ description:
+ - Status of the item.
+ required: false
+ type: str
+ choices: [ "enabled", "disabled" ]
+ enabled:
+ description:
+ - Status of the item.
+ - Overrides "status" in API docs
+ required: false
+ type: bool
+ type:
+ description:
+ - Type of the item.
+ - Required if state is "present".
+ required: false
+ type: str
+ choices:
+ - zabbix_agent
+ - zabbix_trapper
+ - simple_check
+ - zabbix_internal
+ - zabbix_agent_active
+ - web_item
+ - external_check
+ - database_monitor
+ - ipmi_agent
+ - ssh_agent
+ - telnet_agent
+ - calculated
+ - jmx_agent
+ - snmp_trap
+ - dependent_item
+ - http_agent
+ - snmp_agent
+ - script
+ value_type:
+ description:
+ - Type of information of the item.
+ - Required if state is "present".
+ required: false
+ type: str
+ choices:
+ - numeric_float
+ - character
+ - log
+ - numeric_unsigned
+ - text
+ master_item:
+ description:
+ - item that is the master of the current one
+ - Overrides "master_itemid" in API docs
+ required: false
+ type: dict
+ suboptions:
+ item_name:
+ description:
+ - name of the master item
+ required: true
+ type: str
+ host_name:
+ description:
+ - name of the host the master item belongs to
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ template_name:
+ description:
+ - name of the template the master item belongs to
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ preprocessing:
+ description:
+ - Item preprocessing options.
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/item/object#item-preprocessing
+ - Additionally supported parameters are below
+ required: false
+ type: list
+ elements: dict
+ suboptions:
+ type:
+ description:
+ - The preprocessing option type.
+ required: true
+ type: str
+ choices:
+ - custom_multiplier
+ - right_trim
+ - left_trim
+ - trim
+ - regular_expressions
+ - regex
+ - boolean_to_decimal
+ - octal_to_decimal
+ - hexadecimal_to_decimal
+ - simple_change
+ - change_per_second
+ - xml_xpath
+ - jsonpath
+ - in_range
+ - matches_regular_expression
+ - matches_regex
+ - does_not_match_regular_expression
+ - not_match_regex
+ - check_for_error_in_json
+ - check_for_json_error
+ - check_for_error_in_xml
+ - check_for_xml_error
+ - check_for_error_using_regular_expression
+ - check_for_error_regex
+ - discard_unchanged
+ - discard_unchanged_with_heartbeat
+ - javascript
+ - prometheus_pattern
+ - prometheus_to_json
+ - csv_to_json
+ - replace
+ - check_unsupported
+ - xml_to_json
+ - snmp_walk_value
+ - snmp_walk_to_json
+ error_handler:
+ description:
+ - Action type used in case of preprocessing step failure.
+ required: false
+ type: str
+ choices:
+ - zabbix_server
+ - discard
+ - set_custom_value
+ - set_custom_error_message
+
+extends_documentation_fragment:
+- community.zabbix.zabbix
+'''
+
+EXAMPLES = r'''
+
+# If you want to authenticate to the Zabbix Server with a username and password
+- name: Set credentials to access Zabbix Server API
+ ansible.builtin.set_fact:
+ ansible_user: Admin
+ ansible_httpapi_pass: zabbix
+
+# If you want to authenticate to the Zabbix Server with an API token
+# https://www.zabbix.com/documentation/current/en/manual/web_interface/frontend_sections/administration/general#api-tokens
+- name: Set API token
+ ansible.builtin.set_fact:
+ ansible_zabbix_auth_key: 8ec0d52432c15c91fcafe9888500cf9a607f44091ab554dbee860f6b44fac895
+
+# Create ping item on example_host
+- name: create ping item
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_item:
+ name: agent_ping
+ host_name: example_host
+ params:
+ type: zabbix_agent
+ key: agent.ping
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
+
+# Create ping item on example_template
+- name: create ping item
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_item:
+ name: agent_ping
+ template_name: example_template
+ params:
+ type: zabbix_agent
+ key: agent.ping
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
+
+- name: Add tags to the existing Zabbix item
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: "zabbixeu" # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_item:
+ name: agent_ping
+ template_name: example_template
+ params:
+ type: zabbix_agent
+ key: agent.ping
+ value_type: numeric_unsigned
+ interval: 1m
+ tags:
+ - tag: class
+ value: application
+ state: present
+
+- name: create a dependent item
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: "zabbixeu" # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_item:
+ name: depend_item
+ host_name: example_host
+ params:
+ type: dependent_item
+ key: vfs.fs.pused
+ value_type: numeric_float
+ units: '%'
+ master_item:
+ item_name: example_item
+ host_name: example_host
+ preprocessing:
+ - type: jsonpath
+ params: '$[?(@.fstype == "ext4")]'
+ error_handler: zabbix_server
+ - type: jsonpath
+ params: "$[*].['bytes', 'inodes'].pused.max()"
+ error_handler: zabbix_server
+ state: present
+
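+# Disable the ping item created above (illustrative sketch; the task level
+# connection vars shown in the earlier examples are omitted here for brevity)
+- name: disable ping item
+  community.zabbix.zabbix_item:
+    name: agent_ping
+    host_name: example_host
+    params:
+      type: zabbix_agent
+      key: agent.ping
+      value_type: numeric_unsigned
+      interval: 1m
+      enabled: false
+    state: present
+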
+- name: Delete Zabbix item
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: "zabbixeu" # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_item:
+ name: agent_ping
+ template_name: example_template
+ state: absent
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+from ansible_collections.community.zabbix.plugins.module_utils.base import ZabbixBase
+import ansible_collections.community.zabbix.plugins.module_utils.helpers as zabbix_utils
+
+
+class Item(ZabbixBase):
+ ITEM_TYPES = {'zabbix_agent': 0,
+ 'zabbix_trapper': 2,
+ 'simple_check': 3,
+ 'zabbix_internal': 5,
+ 'zabbix_agent_active': 7,
+ 'web_item': 9,
+ 'external_check': 10,
+ 'database_monitor': 11,
+ 'ipmi_agent': 12,
+ 'ssh_agent': 13,
+ 'telnet_agent': 14,
+ 'calculated': 15,
+ 'jmx_agent': 16,
+ 'snmp_trap': 17,
+ 'dependent_item': 18,
+ 'http_agent': 19,
+ 'snmp_agent': 20,
+ 'script': 21}
+
+ VALUE_TYPES = {'numeric_float': 0,
+ 'character': 1,
+ 'log': 2,
+ 'numeric_unsigned': 3,
+ 'text': 4}
+
+ PREPROCESSING_TYPES = {'custom_multiplier': 1,
+ 'right_trim': 2,
+ 'left_trim': 3,
+ 'trim': 4,
+ 'regular_expressions': 5,
+ 'regex': 5,
+ 'boolean_to_decimal': 6,
+ 'octal_to_decimal': 7,
+ 'hexadecimal_to_decimal': 8,
+ 'simple_change': 9,
+ 'change_per_second': 10,
+ 'xml_xpath': 11,
+ 'jsonpath': 12,
+ 'in_range': 13,
+ 'matches_regular_expression': 14,
+ 'matches_regex': 14,
+ 'does_not_match_regular_expression': 15,
+ 'not_match_regex': 15,
+ 'check_for_error_in_json': 16,
+ 'check_for_json_error': 16,
+ 'check_for_error_in_xml': 17,
+ 'check_for_xml_error': 17,
+ 'check_for_error_using_regular_expression': 18,
+ 'check_for_error_regex': 18,
+ 'discard_unchanged': 19,
+ 'discard_unchanged_with_heartbeat': 20,
+ 'javascript': 21,
+ 'prometheus_pattern': 22,
+ 'prometheus_to_json': 23,
+ 'csv_to_json': 24,
+ 'replace': 25,
+ 'check_unsupported': 26,
+ 'xml_to_json': 27,
+ 'snmp_walk_value': 28,
+ 'snmp_walk_to_json': 29}
+
+ PREPROCESSING_ERROR_HANDLERS = {'zabbix_server': 0,
+ 'discard': 1,
+ 'set_custom_value': 2,
+ 'set_custom_error_message': 3}
+
+ def get_hosts_templates(self, host_name, template_name):
+ if host_name is not None:
+ try:
+ return self._zapi.host.get({"filter": {"host": host_name}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get host: %s" % e)
+ else:
+ try:
+ return self._zapi.template.get({"filter": {"host": template_name}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get template: %s" % e)
+
+ def get_items(self, item_name, host_name, template_name):
+ if host_name is not None:
+ host = host_name
+ else:
+ host = template_name
+ items = []
+ try:
+ items = self._zapi.item.get({'filter': {'name': item_name, 'host': host}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get item: %s" % e)
+ return items
+
+ def sanitize_params(self, name, params):
+ params['name'] = name
+ if 'key' in params:
+ params['key_'] = params['key']
+ params.pop("key")
+ if 'type' in params:
+ item_type_int = self.ITEM_TYPES[params['type']]
+ params['type'] = item_type_int
+ if 'value_type' in params:
+ value_type_int = self.VALUE_TYPES[params['value_type']]
+ params['value_type'] = value_type_int
+ if 'interval' in params:
+ params['delay'] = params['interval']
+ params.pop("interval")
+ if 'enabled' in params:
+ if params['enabled']:
+ params['status'] = 'enabled'
+ else:
+ params['status'] = 'disabled'
+ params.pop("enabled")
+ if 'status' in params:
+ status = params['status']
+ if status == 'enabled':
+ params['status'] = 0
+ elif status == 'disabled':
+ params['status'] = 1
+ else:
+ self._module.fail_json(msg="Status must be 'enabled' or 'disabled', got %s" % status)
+ if 'master_item' in params:
+ if 'host_name' not in params['master_item']:
+ params['master_item']['host_name'] = None
+ if 'template_name' not in params['master_item']:
+ params['master_item']['template_name'] = None
+ master_items = self.get_items(params['master_item']['item_name'], params['master_item']['host_name'], params['master_item']['template_name'])
+ if len(master_items) == 0:
+ self._module.fail_json(msg="No items with the name %s exist to depend on" % params['master_item']['item_name'])
+ params['master_itemid'] = master_items[0]['itemid']
+ params.pop('master_item')
+ if 'preprocessing' in params:
+ for param in params['preprocessing']:
+ preprocess_type_int = self.PREPROCESSING_TYPES[param['type']]
+ param['type'] = preprocess_type_int
+ if 'error_handler' in param:
+ error_handler_int = self.PREPROCESSING_ERROR_HANDLERS[param['error_handler']]
+ param['error_handler'] = error_handler_int
+
+ def add_item(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.item.create(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to create item: %s" % e)
+ return results
+
+ def update_item(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.item.update(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to update item: %s" % e)
+ return results
+
+ def check_item_changed(self, old_item):
+ try:
+ new_item = self._zapi.item.get({'itemids': "%s" % old_item['itemid']})[0]
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get item: %s" % e)
+ return old_item != new_item
+
+ def delete_item(self, item_id):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.item.delete(item_id)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to delete item: %s" % e)
+ return results
+
+
+def main():
+ argument_spec = zabbix_utils.zabbix_common_argument_spec()
+ argument_spec.update(dict(
+ name=dict(type='str', required=True),
+ host_name=dict(type='str', required=False),
+ template_name=dict(type='str', required=False),
+ params=dict(type='dict', required=False),
+ state=dict(type='str', default="present", choices=['present', 'absent']),
+ ))
+ module = AnsibleModule(
+ argument_spec=argument_spec,
+ required_one_of=[
+ ['host_name', 'template_name']
+ ],
+ mutually_exclusive=[
+ ['host_name', 'template_name']
+ ],
+ required_if=[
+ ['state', 'present', ['params']]
+ ],
+ supports_check_mode=True
+ )
+
+ name = module.params['name']
+ host_name = module.params['host_name']
+ template_name = module.params['template_name']
+ params = module.params['params']
+ state = module.params['state']
+
+ item = Item(module)
+
+ if state == "absent":
+ items = item.get_items(name, host_name, template_name)
+ if len(items) == 0:
+ module.exit_json(changed=False, result="No item to delete.")
+ else:
+ delete_ids = []
+ for i in items:
+ delete_ids.append(i['itemid'])
+ results = item.delete_item(delete_ids)
+ module.exit_json(changed=True, result=results)
+
+ elif state == "present":
+ item.sanitize_params(name, params)
+ items = item.get_items(name, host_name, template_name)
+ results = []
+ if len(items) == 0:
+ hosts_templates = item.get_hosts_templates(host_name, template_name)
+ for host_template in hosts_templates:
+ if 'hostid' in host_template:
+ params['hostid'] = host_template['hostid']
+ elif 'templateid' in host_template:
+ params['hostid'] = host_template['templateid']
+ else:
+ module.fail_json(msg="host/template did not return id")
+ results.append(item.add_item(params))
+ module.exit_json(changed=True, result=results)
+ else:
+ changed = False
+ for i in items:
+ params['itemid'] = i['itemid']
+ results.append(item.update_item(params))
+ changed_item = item.check_item_changed(i)
+ if changed_item:
+ changed = True
+ module.exit_json(changed=changed, result=results)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_itemprototype.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_itemprototype.py
new file mode 100644
index 000000000..1fe5b9560
--- /dev/null
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_itemprototype.py
@@ -0,0 +1,609 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+---
+module: zabbix_itemprototype
+short_description: Create/delete Zabbix item prototypes
+description:
+ - Create item prototypes if they do not exist.
+ - Delete existing item prototypes if they exist.
+author:
+ - "Andrew Lathrop (@aplathrop)"
+requirements:
+ - "python >= 2.6"
+
+options:
+ state:
+ description:
+ - Create or delete item prototype.
+ required: false
+ type: str
+ default: "present"
+ choices: [ "present", "absent" ]
+ name:
+ description:
+ - Name of item prototype to create or delete.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host to add item prototype to.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template to add item prototype to.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+ discoveryrule_name:
+ description:
+ - Name of the LLD rule that the item belongs to.
+ required: true
+ type: str
+ params:
+ description:
+ - Parameters to create/update item prototype with.
+ - Required if state is "present".
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/itemprototype/object
+ - Additionally supported parameters are below
+ required: false
+ type: dict
+ suboptions:
+ key:
+ description:
+ - Item prototype key.
+ - Alias for "key_" in API docs
+ required: false
+ type: str
+ interval:
+ description:
+ - Update interval of the item prototype.
+ - Alias for "delay" in API docs
+ required: false
+ type: str
+ status:
+ description:
+ - Status of the item prototype.
+ required: false
+ type: str
+ choices: [ "enabled", "disabled" ]
+ enabled:
+ description:
+ - Status of the item prototype.
+ - Overrides "status" in API docs
+ required: false
+ type: bool
+ type:
+ description:
+ - Type of the item prototype.
+ - Required if state is "present".
+ required: false
+ type: str
+ choices:
+ - zabbix_agent
+ - zabbix_trapper
+ - simple_check
+ - zabbix_internal
+ - zabbix_agent_active
+ - web_item
+ - external_check
+ - database_monitor
+ - ipmi_agent
+ - ssh_agent
+ - telnet_agent
+ - calculated
+ - jmx_agent
+ - snmp_trap
+ - dependent_item
+ - http_agent
+ - snmp_agent
+ - script
+ value_type:
+ description:
+ - Type of information of the item prototype.
+ - Required if state is "present".
+ required: false
+ type: str
+ choices:
+ - numeric_float
+ - character
+ - log
+ - numeric_unsigned
+ - text
+ master_item:
+ description:
+ - item that is the master of the current one
+ - Overrides "master_itemid" in API docs
+ required: false
+ type: dict
+ suboptions:
+ item_name:
+ description:
+ - name of the master item
+ required: true
+ type: str
+ discovery_rule:
+ description:
+ - name of the discovery rule the master item belongs to
+ required: true
+ type: str
+ host_name:
+ description:
+ - name of the host the master item belongs to
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ template_name:
+ description:
+ - name of the template the master item belongs to
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ preprocessing:
+ description:
+ - Item preprocessing options.
+ - Parameters as defined at
+ - https://www.zabbix.com/documentation/current/en/manual/api/reference/itemprototype/object#item-prototype-preprocessing
+ - Additionally supported parameters are below
+ required: false
+ type: list
+ elements: dict
+ suboptions:
+ type:
+ description:
+ - The preprocessing option type.
+ required: true
+ type: str
+ choices:
+ - custom_multiplier
+ - right_trim
+ - left_trim
+ - trim
+ - regular_expressions
+ - regex
+ - boolean_to_decimal
+ - octal_to_decimal
+ - hexadecimal_to_decimal
+ - simple_change
+ - change_per_second
+ - xml_xpath
+ - jsonpath
+ - in_range
+ - matches_regular_expression
+ - matches_regex
+ - does_not_match_regular_expression
+ - not_match_regex
+ - check_for_error_in_json
+ - check_for_json_error
+ - check_for_error_in_xml
+ - check_for_xml_error
+ - check_for_error_using_regular_expression
+ - check_for_error_regex
+ - discard_unchanged
+ - discard_unchanged_with_heartbeat
+ - javascript
+ - prometheus_pattern
+ - prometheus_to_json
+ - csv_to_json
+ - replace
+ - check_unsupported
+ - xml_to_json
+ - snmp_walk_value
+ - snmp_walk_to_json
+ error_handler:
+ description:
+ - Action type used in case of preprocessing step failure.
+ required: false
+ type: str
+ choices:
+ - zabbix_server
+ - discard
+ - set_custom_value
+ - set_custom_error_message
+
+extends_documentation_fragment:
+- community.zabbix.zabbix
+'''
+
+EXAMPLES = r'''
+
+
+# If you want to authenticate to the Zabbix Server with a username and password
+- name: Set credentials to access Zabbix Server API
+ ansible.builtin.set_fact:
+ ansible_user: Admin
+ ansible_httpapi_pass: zabbix
+
+# If you want to authenticate to the Zabbix Server with an API token
+# https://www.zabbix.com/documentation/current/en/manual/web_interface/frontend_sections/administration/general#api-tokens
+- name: Set API token
+ ansible.builtin.set_fact:
+ ansible_zabbix_auth_key: 8ec0d52432c15c91fcafe9888500cf9a607f44091ab554dbee860f6b44fac895
+
+# Create item prototype on example_host using example_rule
+- name: create item prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:Used space{% endraw %}'
+ discoveryrule_name: example_rule
+ host_name: example_host
+ params:
+ type: zabbix_agent
+ key: "{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}"
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
+
+# Create item prototype on example_template using example_rule
+- name: create item prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:Used space{% endraw %}'
+ discoveryrule_name: example_rule
+ template_name: example_template
+ params:
+ type: zabbix_agent
+ key: "{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}"
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
+
+
+# Add tags to the existing Zabbix item prototype
+- name: update item prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:Used space{% endraw %}'
+ discoveryrule_name: example_rule
+ template_name: example_template
+ params:
+ type: zabbix_agent
+ key: "{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}"
+ value_type: numeric_unsigned
+ interval: 1m
+ tags:
+ - tag: class
+ value: application
+ state: present
+
+- name: create dependent item
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:example_depend_item_prototype{% endraw %}'
+ discoveryrule_name: example_rule
+ host_name: example_host
+ params:
+ type: dependent_item
+ key: '{% raw %}vfs.fs.size.half[{#FSNAME}]{% endraw %}'
+ value_type: numeric_float
+ units: B
+ master_item:
+ item_name: '{% raw %}{#FSNAME}:example_item_prototype{% endraw %}'
+ discoveryrule_name: example_rule
+ host_name: example_host
+ preprocessing:
+ - type: javascript
+ params: 'return value / 2;'
+ error_handler: zabbix_server
+ state: present
+
+- name: Delete Zabbix item prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:Used space{% endraw %}'
+ discoveryrule_name: example_rule
+ template_name: example_template
+ state: absent
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+from ansible_collections.community.zabbix.plugins.module_utils.base import ZabbixBase
+import ansible_collections.community.zabbix.plugins.module_utils.helpers as zabbix_utils
+
+
+class Itemprototype(ZabbixBase):
+ ITEM_TYPES = {'zabbix_agent': 0,
+ 'zabbix_trapper': 2,
+ 'simple_check': 3,
+ 'zabbix_internal': 5,
+ 'zabbix_agent_active': 7,
+ 'web_item': 9,
+ 'external_check': 10,
+ 'database_monitor': 11,
+ 'ipmi_agent': 12,
+ 'ssh_agent': 13,
+ 'telnet_agent': 14,
+ 'calculated': 15,
+ 'jmx_agent': 16,
+ 'snmp_trap': 17,
+ 'dependent_item': 18,
+ 'http_agent': 19,
+ 'snmp_agent': 20,
+ 'script': 21}
+
+ VALUE_TYPES = {'numeric_float': 0,
+ 'character': 1,
+ 'log': 2,
+ 'numeric_unsigned': 3,
+ 'text': 4}
+
+ PREPROCESSING_TYPES = {'custom_multiplier': 1,
+ 'right_trim': 2,
+ 'left_trim': 3,
+ 'trim': 4,
+ 'regular_expressions': 5,
+ 'regex': 5,
+ 'boolean_to_decimal': 6,
+ 'octal_to_decimal': 7,
+ 'hexadecimal_to_decimal': 8,
+ 'simple_change': 9,
+ 'change_per_second': 10,
+ 'xml_xpath': 11,
+ 'jsonpath': 12,
+ 'in_range': 13,
+ 'matches_regular_expression': 14,
+ 'matches_regex': 14,
+ 'does_not_match_regular_expression': 15,
+ 'not_match_regex': 15,
+ 'check_for_error_in_json': 16,
+ 'check_for_json_error': 16,
+ 'check_for_error_in_xml': 17,
+ 'check_for_xml_error': 17,
+ 'check_for_error_using_regular_expression': 18,
+ 'check_for_error_regex': 18,
+ 'discard_unchanged': 19,
+ 'discard_unchanged_with_heartbeat': 20,
+ 'javascript': 21,
+ 'prometheus_pattern': 22,
+ 'prometheus_to_json': 23,
+ 'csv_to_json': 24,
+ 'replace': 25,
+ 'check_unsupported': 26,
+ 'xml_to_json': 27,
+ 'snmp_walk_value': 28,
+ 'snmp_walk_to_json': 29}
+
+ PREPROCESSING_ERROR_HANDLERS = {'zabbix_server': 0,
+ 'discard': 1,
+ 'set_custom_value': 2,
+ 'set_custom_error_message': 3}
+
+ def get_hosts_templates(self, host_name, template_name):
+ if host_name is not None:
+ try:
+ return self._zapi.host.get({"filter": {"host": host_name}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get host: %s" % e)
+ else:
+ try:
+ return self._zapi.template.get({"filter": {"host": template_name}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get template: %s" % e)
+
+ def get_discoveryrules(self, discoveryrule_name, host_name, template_name):
+ if host_name is not None:
+ host = host_name
+ else:
+ host = template_name
+ discoveryrules = []
+ try:
+ discoveryrules = self._zapi.discoveryrule.get({'filter': {'name': discoveryrule_name, 'host': host}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get discovery rules: %s" % e)
+ return discoveryrules
+
+ def get_itemprototypes(self, itemprototype_name, discoveryrule_name, host_name, template_name):
+ if host_name is not None:
+ host = host_name
+ else:
+ host = template_name
+ discoveryrules = self.get_discoveryrules(discoveryrule_name, host_name, template_name)
+ rule_ids = []
+ for d in discoveryrules:
+ rule_ids.append(d['itemid'])
+ itemprototypes = []
+ try:
+            itemprototypes = self._zapi.itemprototype.get({'filter': {'name': itemprototype_name, 'host': host}, 'discoveryids': rule_ids})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get item: %s" % e)
+ return itemprototypes
+
+ def sanitize_params(self, name, discoveryrule_name, params, host_name=None, template_name=None):
+ params['name'] = name
+ if 'key' in params:
+ params['key_'] = params['key']
+ params.pop("key")
+ rules = self.get_discoveryrules(discoveryrule_name, host_name, template_name)
+ if len(rules) == 0:
+ self._module.fail_json(msg="Failed to get discoveryrule: %s" % discoveryrule_name)
+        params['ruleid'] = rules[0]['itemid']
+ if 'type' in params:
+ item_type_int = self.ITEM_TYPES[params['type']]
+ params['type'] = item_type_int
+ if 'value_type' in params:
+ value_type_int = self.VALUE_TYPES[params['value_type']]
+ params['value_type'] = value_type_int
+ if 'interval' in params:
+ params['delay'] = params['interval']
+ params.pop("interval")
+        if 'enabled' in params:
+            # the Zabbix API expects 0 (enabled) or 1 (disabled) for status, not a boolean
+            params['status'] = 0 if params['enabled'] else 1
+            params.pop('enabled')
+ if 'master_item' in params:
+ if 'host_name' not in params['master_item']:
+ params['master_item']['host_name'] = None
+ if 'template_name' not in params['master_item']:
+ params['master_item']['template_name'] = None
+ master_items = self.get_itemprototypes(params['master_item']['item_name'], params['master_item']['discoveryrule_name'],
+ params['master_item']['host_name'], params['master_item']['template_name'])
+ if len(master_items) == 0:
+ self._module.fail_json(msg="No items with the name %s exist to depend on" % params['master_item']['item_name'])
+ params['master_itemid'] = master_items[0]['itemid']
+ params.pop('master_item')
+ if 'preprocessing' in params:
+ for param in params['preprocessing']:
+ preprocess_type_int = self.PREPROCESSING_TYPES[param['type']]
+ param['type'] = preprocess_type_int
+ if 'error_handler' in param:
+ error_handler_int = self.PREPROCESSING_ERROR_HANDLERS[param['error_handler']]
+ param['error_handler'] = error_handler_int
+
+ def add_itemprototype(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.itemprototype.create(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to create itemprototype: %s" % e)
+ return results
+
+ def update_itemprototype(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.itemprototype.update(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to update itemprototype: %s" % e)
+ return results
+
+ def check_itemprototype_changed(self, old_itemprototype):
+ try:
+ new_itemprototype = self._zapi.itemprototype.get({'itemids': '%s' % old_itemprototype['itemid']})[0]
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get itemprototype: %s" % e)
+ return old_itemprototype != new_itemprototype
+
+ def delete_itemprototype(self, itemprototype_id):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.itemprototype.delete(itemprototype_id)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to delete itemprototype: %s" % e)
+ return results
+
+
+def main():
+ argument_spec = zabbix_utils.zabbix_common_argument_spec()
+ argument_spec.update(dict(
+ name=dict(type='str', required=True),
+ discoveryrule_name=dict(type='str', required=True),
+ host_name=dict(type='str', required=False),
+ template_name=dict(type='str', required=False),
+ params=dict(type='dict', required=False),
+ state=dict(type='str', default="present", choices=['present', 'absent']),
+ ))
+ module = AnsibleModule(
+ argument_spec=argument_spec,
+ required_one_of=[
+ ['host_name', 'template_name']
+ ],
+ mutually_exclusive=[
+ ['host_name', 'template_name']
+ ],
+ required_if=[
+ ['state', 'present', ['params']]
+ ],
+ supports_check_mode=True
+ )
+
+ name = module.params['name']
+ discoveryrule_name = module.params['discoveryrule_name']
+ host_name = module.params['host_name']
+ template_name = module.params['template_name']
+ params = module.params['params']
+ state = module.params['state']
+
+ itemprototype = Itemprototype(module)
+
+ if state == "absent":
+ itemprototypes = itemprototype.get_itemprototypes(name, discoveryrule_name, host_name, template_name)
+ if len(itemprototypes) == 0:
+ module.exit_json(changed=False, result="No itemprototype to delete.")
+ else:
+ delete_ids = []
+ for i in itemprototypes:
+ delete_ids.append(i['itemid'])
+ results = itemprototype.delete_itemprototype(delete_ids)
+ module.exit_json(changed=True, result=results)
+
+ elif state == "present":
+ itemprototype.sanitize_params(name, discoveryrule_name, params, host_name, template_name)
+ itemprototypes = itemprototype.get_itemprototypes(name, discoveryrule_name, host_name, template_name)
+ results = []
+ if len(itemprototypes) == 0:
+ hosts_templates = itemprototype.get_hosts_templates(host_name, template_name)
+ for host_template in hosts_templates:
+ if 'hostid' in host_template:
+ params['hostid'] = host_template['hostid']
+ elif 'templateid' in host_template:
+ params['hostid'] = host_template['templateid']
+ else:
+ module.fail_json(msg="host/template did not return id")
+ results.append(itemprototype.add_itemprototype(params))
+ module.exit_json(changed=True, result=results)
+ else:
+ changed = False
+ params.pop('ruleid')
+ for i in itemprototypes:
+ params['itemid'] = i['itemid']
+ results.append(itemprototype.update_itemprototype(params))
+ changed_item = itemprototype.check_itemprototype_changed(i)
+ if changed_item:
+ changed = True
+ module.exit_json(changed=changed, result=results)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_script.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_script.py
index 643851885..402ec10a8 100644
--- a/ansible_collections/community/zabbix/plugins/modules/zabbix_script.py
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_script.py
@@ -26,15 +26,15 @@ options:
type: str
script_type:
description:
- - Script type.
+ - Script type. Required when state is 'present'.
type: str
- required: true
+ required: false
choices: ["script", "ipmi", "ssh", "telnet", "webhook"]
command:
description:
- - Command to run.
+      - Command to run. Required when state is 'present'.
type: str
- required: true
+ required: false
scope:
description:
- Script scope.
@@ -137,9 +137,9 @@ options:
suboptions:
name:
description:
- - Parameter name.
+ - Parameter name. Required when 'parameters' is specified for a 'webhook' script.
type: str
- required: true
+ required: false
value:
description:
- Parameter value. Supports macros.
@@ -292,24 +292,16 @@ class Script(ZabbixBase):
request["confirmation"] = confirmation
if script_type == "ssh":
- if authtype is None:
- self._module.fail_json(changed=False, msg="authtype must be provided for ssh script type")
request["authtype"] = str(zabbix_utils.helper_to_numeric_value([
"password",
"public_key"], authtype))
if authtype == "public_key":
- if publickey is None or privatekey is None:
- self._module.fail_json(changed=False, msg="publickey and privatekey must be provided for ssh script type with publickey authtype")
request["publickey"] = publickey
request["privatekey"] = privatekey
if script_type in ["ssh", "telnet"]:
- if username is None:
- self._module.fail_json(changed=False, msg="username must be provided for 'ssh' and 'telnet' script types")
request["username"] = username
if (script_type == "ssh" and authtype == "password") or script_type == "telnet":
- if password is None:
- self._module.fail_json(changed=False, msg="password must be provided for telnet script type or ssh script type with password autheype")
request["password"] = password
if port is not None:
request["port"] = port
@@ -317,6 +309,9 @@ class Script(ZabbixBase):
if script_type == "webhook":
request["timeout"] = script_timeout
if parameters:
+ for parameter in parameters:
+ if "name" not in parameter.keys() or parameter["name"] is None:
+ self._module.fail_json(msg="When providing parameters to a webhook script, the 'name' option is required.")
request["parameters"] = parameters
return request
@@ -347,56 +342,59 @@ def main():
name=dict(type="str", required=True),
script_type=dict(
type="str",
- required=True,
choices=["script", "ipmi", "ssh", "telnet", "webhook"]),
- command=dict(type="str", required=True),
+ command=dict(type="str"),
scope=dict(
type="str",
- required=False,
choices=["action_operation", "manual_host_action", "manual_event_action"],
default="action_operation"),
execute_on=dict(
type="str",
- required=False,
choices=["zabbix_agent", "zabbix_server", "zabbix_server_proxy"],
default="zabbix_server_proxy"),
- menu_path=dict(type="str", required=False),
+ menu_path=dict(type="str"),
authtype=dict(
type="str",
- required=False,
choices=["password", "public_key"]),
- username=dict(type="str", required=False),
- password=dict(type="str", required=False, no_log=True),
- publickey=dict(type="str", required=False),
- privatekey=dict(type="str", required=False, no_log=True),
- port=dict(type="str", required=False),
- host_group=dict(type="str", required=False, default="all"),
- user_group=dict(type="str", required=False, default="all"),
+ username=dict(type="str"),
+ password=dict(type="str", no_log=True),
+ publickey=dict(type="str"),
+ privatekey=dict(type="str", no_log=True),
+ port=dict(type="str"),
+ host_group=dict(type="str", default="all"),
+ user_group=dict(type="str", default="all"),
host_access=dict(
type="str",
- required=False,
choices=["read", "write"],
default="read"),
- confirmation=dict(type="str", required=False),
- script_timeout=dict(type="str", default="30s", required=False),
+ confirmation=dict(type="str"),
+ script_timeout=dict(type="str", default="30s"),
parameters=dict(
type="list",
elements="dict",
options=dict(
- name=dict(type="str", required=True),
- value=dict(type="str", required=False, default="")
+ name=dict(type="str"),
+ value=dict(type="str", default="")
)
),
- description=dict(type="str", required=False),
+ description=dict(type="str"),
state=dict(
type="str",
- required=False,
default="present",
choices=["present", "absent"])
))
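+    # The explicit fail_json checks removed above are replaced by argument-spec
+    # level validation via required_if, so missing options fail before any API call.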
+ required_if = [
+ ("state", "present", ("script_type", "command",)),
+ ("script_type", "ssh", ("authtype", "username",)),
+ ("authtype", "password", ("password",)),
+ ("authtype", "public_key", ("publickey", "privatekey",)),
+ ("script_type", "telnet", ("username", "password")),
+ ]
+
module = AnsibleModule(
argument_spec=argument_spec,
+ required_if=required_if,
supports_check_mode=True
)
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_trigger.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_trigger.py
new file mode 100644
index 000000000..9a84448c6
--- /dev/null
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_trigger.py
@@ -0,0 +1,444 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+---
+module: zabbix_trigger
+short_description: Create/delete Zabbix triggers
+description:
+ - Create triggers if they do not exist.
+ - Delete existing triggers if they exist.
+author:
+ - "Andrew Lathrop (@aplathrop)"
+requirements:
+ - "python >= 2.6"
+
+options:
+ state:
+ description:
+ - Create or delete trigger.
+ required: false
+ type: str
+ default: "present"
+ choices: [ "present", "absent" ]
+ name:
+ description:
+ - Name of trigger to create or delete.
+ - Overrides "description" in API docs.
+      - Cannot be changed. If a trigger's name needs to be changed, it needs to be deleted and recreated.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host to add trigger to.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template to add trigger to.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+ desc:
+ description:
+ - Additional description of the trigger.
+ - Overrides "comments" in API docs.
+ required: false
+ type: str
+ aliases: [ "description" ]
+ dependencies:
+ description:
+ - list of triggers that this trigger is dependent on
+ required: false
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - Name of dependent trigger.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host containing dependent trigger.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template containing dependent trigger.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+ params:
+ description:
+ - Parameters to create/update trigger with.
+ - Required if state is "present".
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/trigger/object
+ - Additionally supported parameters are below.
+ required: false
+ type: dict
+ suboptions:
+ severity:
+ description:
+ - Severity of the trigger.
+ - Alias for "priority" in API docs.
+ required: false
+ type: str
+ aliases: [ "priority" ]
+ choices:
+ - not_classified
+ - information
+ - warning
+ - average
+ - high
+ - disaster
+ status:
+ description:
+ - Status of the trigger.
+ required: false
+ type: str
+ choices: [ "enabled", "disabled" ]
+ enabled:
+ description:
+ - Status of the trigger.
+ - Overrides "status" in API docs.
+ required: false
+ type: bool
+ generate_multiple_events:
+ description:
+ - Whether the trigger can generate multiple problem events.
+ - Alias for "type" in API docs.
+ required: false
+ type: bool
+ recovery_mode:
+ description:
+ - OK event generation mode.
+ - Overrides "recovery_mode" in API docs.
+ required: false
+ type: str
+ choices:
+ - expression
+ - recovery_expression
+ - none
+ correlation_mode:
+ description:
+ - OK event closes.
+ - Overrides "correlation_mode" in API docs.
+ required: false
+ type: str
+ choices: [ "all", "tag" ]
+ manual_close:
+ description:
+ - Allow manual close.
+ - Overrides "manual_close" in API docs.
+ required: false
+ type: bool
+
+extends_documentation_fragment:
+- community.zabbix.zabbix
+'''
+
+EXAMPLES = r'''
+
+# If you want to authenticate to the Zabbix Server with a username and password
+- name: Set credentials to access Zabbix Server API
+ ansible.builtin.set_fact:
+ ansible_user: Admin
+ ansible_httpapi_pass: zabbix
+
+# If you want to authenticate to the Zabbix Server with an API token
+# https://www.zabbix.com/documentation/current/en/manual/web_interface/frontend_sections/administration/general#api-tokens
+- name: Set API token
+ ansible.builtin.set_fact:
+ ansible_zabbix_auth_key: 8ec0d52432c15c91fcafe9888500cf9a607f44091ab554dbee860f6b44fac895
+
+# Create ping trigger on example_host
+- name: create ping trigger
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_trigger:
+ name: agent_ping
+ host_name: example_host
+ params:
+ severity: high
+ expression: 'nodata(/example_host/agent.ping,1m)=1'
+ manual_close: True
+ enabled: True
+ state: present
+
+# Create ping trigger on example_template
+- name: create ping trigger
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_trigger:
+ name: agent_ping
+ host_name: example_template
+ params:
+ severity: high
+ expression: 'nodata(/example_template/agent.ping,1m)=1'
+ manual_close: True
+ enabled: True
+ state: present
+
+# Add tags to the existing Zabbix trigger
+- name: update ping trigger
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_trigger:
+ name: agent_ping
+ host_name: example_template
+ params:
+ severity: high
+ expression: 'nodata(/example_template/agent.ping,1m)=1'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: class
+ value: application
+ state: present
+
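+# Create a trigger that depends on another trigger
+# (illustrative sketch; the expression is an assumption and the task level
+# connection vars shown in the earlier examples are omitted for brevity)
+- name: create dependent trigger
+  community.zabbix.zabbix_trigger:
+    name: disk_space_low
+    host_name: example_host
+    params:
+      severity: warning
+      expression: 'last(/example_host/vfs.fs.size[/,pused])>80'
+      enabled: True
+    dependencies:
+      - name: agent_ping
+        host_name: example_host
+    state: present
+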
+# delete Zabbix trigger
+- name: delete ping trigger
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+    ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_trigger:
+ name: agent_ping
+ host_name: example_template
+ state: absent
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+from ansible_collections.community.zabbix.plugins.module_utils.base import ZabbixBase
+import ansible_collections.community.zabbix.plugins.module_utils.helpers as zabbix_utils
+
+
+class Trigger(ZabbixBase):
+
+ PRIORITY_TYPES = {'not_classified': 0,
+ 'information': 1,
+ 'warning': 2,
+ 'average': 3,
+ 'high': 4,
+ 'disaster': 5}
+
+ RECOVERY_MODES = {'expression': 0,
+ 'recovery_expression': 1,
+ 'none': 2}
+
+ def get_triggers(self, trigger_name, host_name, template_name):
+ if host_name is not None:
+ host = host_name
+ else:
+ host = template_name
+ triggers = []
+ try:
+ triggers = self._zapi.trigger.get({'filter': {'description': trigger_name, 'host': host}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get trigger: %s" % e)
+ return triggers
+
+ def sanitize_params(self, name, params, desc=None, dependencies=None):
+ params['description'] = name
+ if desc is not None:
+ params['comments'] = desc
+ if 'severity' in params:
+ params['priority'] = params['severity']
+ params.pop("severity")
+ if 'priority' in params:
+ priority_id = self.PRIORITY_TYPES[params['priority']]
+ params['priority'] = priority_id
+ if 'enabled' in params:
+ if params['enabled']:
+ params['status'] = 'enabled'
+ else:
+ params['status'] = 'disabled'
+ params.pop("enabled")
+ if 'status' in params:
+ status = params['status']
+ if status == 'enabled':
+ params['status'] = 0
+ elif status == 'disabled':
+ params['status'] = 1
+ else:
+ self._module.fail_json(msg="Status must be 'enabled' or 'disabled', got %s" % status)
+ if 'generate_multiple_events' in params:
+ multiple_event_type = params['generate_multiple_events']
+ if multiple_event_type:
+ params['type'] = 1
+ else:
+ params['type'] = 0
+ if 'recovery_mode' in params:
+ recovery_mode_id = self.RECOVERY_MODES[params['recovery_mode']]
+ params['recovery_mode'] = recovery_mode_id
+ if 'correlation_mode' in params:
+ correlation_mode = params['correlation_mode']
+ if correlation_mode == 'all':
+ params['correlation_mode'] = 0
+ elif correlation_mode == 'tag':
+ params['correlation_mode'] = 1
+ else:
+ self._module.fail_json(msg="correlation_mode must be all or tag, got %s" % correlation_mode)
+ if 'manual_close' in params:
+ manual_close = params['manual_close']
+ if manual_close:
+ params['manual_close'] = 1
+ else:
+ params['manual_close'] = 0
+ if dependencies is not None:
+ params['dependencies'] = []
+ for dependency in dependencies:
+ host_name = None
+ template_name = None
+                if 'host_name' in dependency:
+                    host_name = dependency['host_name']
+                elif 'template_name' in dependency:
+                    template_name = dependency['template_name']
+ else:
+ self._module.fail_json(msg="Each dependency must contain either the host_name or the template_name")
+ triggers = self.get_triggers(dependency['name'], host_name, template_name)
+ for trigger in triggers:
+ params['dependencies'].append({'triggerid': trigger['triggerid']})
+
+ def add_trigger(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.trigger.create(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to create trigger: %s" % e)
+ return results
+
+ def update_trigger(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.trigger.update(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to update trigger: %s" % e)
+ return results
+
+ def check_trigger_changed(self, old_trigger):
+ try:
+ new_trigger = self._zapi.trigger.get({"triggerids": "%s" % old_trigger['triggerid']})[0]
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get trigger: %s" % e)
+ return old_trigger != new_trigger
+
+ def delete_trigger(self, trigger_id):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.trigger.delete(trigger_id)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to delete trigger: %s" % e)
+ return results
+
+
+def main():
+ argument_spec = zabbix_utils.zabbix_common_argument_spec()
+ argument_spec.update(dict(
+ name=dict(type='str', required=True),
+ host_name=dict(type='str', required=False),
+ template_name=dict(type='str', required=False),
+ params=dict(type='dict', required=False),
+ desc=dict(type='str', required=False, aliases=['description']),
+ dependencies=dict(type='list', elements='dict', required=False),
+ state=dict(type='str', default="present", choices=['present', 'absent']),
+ ))
+ module = AnsibleModule(
+ argument_spec=argument_spec,
+ required_one_of=[
+ ['host_name', 'template_name']
+ ],
+ mutually_exclusive=[
+ ['host_name', 'template_name']
+ ],
+ required_if=[
+ ['state', 'present', ['params']]
+ ],
+ supports_check_mode=True
+ )
+
+ name = module.params['name']
+ host_name = module.params['host_name']
+ template_name = module.params['template_name']
+ params = module.params['params']
+ desc = module.params['desc']
+ dependencies = module.params['dependencies']
+ state = module.params['state']
+
+ trigger = Trigger(module)
+
+ if state == "absent":
+ triggers = trigger.get_triggers(name, host_name, template_name)
+ if len(triggers) == 0:
+ module.exit_json(changed=False, result="No trigger to delete.")
+ else:
+ delete_ids = []
+ for t in triggers:
+ delete_ids.append(t['triggerid'])
+ results = trigger.delete_trigger(delete_ids)
+ module.exit_json(changed=True, result=results)
+
+ elif state == "present":
+ trigger.sanitize_params(name, params, desc, dependencies)
+ triggers = trigger.get_triggers(name, host_name, template_name)
+ if len(triggers) == 0:
+ results = trigger.add_trigger(params)
+ module.exit_json(changed=True, result=results)
+ else:
+ results = []
+ changed = False
+ for t in triggers:
+ params['triggerid'] = t['triggerid']
+ params.pop('description', None)
+ results.append(trigger.update_trigger(params))
+ changed_trigger = trigger.check_trigger_changed(t)
+ if changed_trigger:
+ changed = True
+ module.exit_json(changed=changed, result=results)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/zabbix/plugins/modules/zabbix_triggerprototype.py b/ansible_collections/community/zabbix/plugins/modules/zabbix_triggerprototype.py
new file mode 100644
index 000000000..410955be9
--- /dev/null
+++ b/ansible_collections/community/zabbix/plugins/modules/zabbix_triggerprototype.py
@@ -0,0 +1,448 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+#
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+---
+module: zabbix_triggerprototype
+short_description: Create/delete Zabbix triggerprototypes
+description:
+ - Create triggerprototypes if they do not exist.
+ - Delete existing triggerprototypes if they exist.
+author:
+ - "Andrew Lathrop (@aplathrop)"
+requirements:
+ - "python >= 2.6"
+
+options:
+ state:
+ description:
+ - Create or delete trigger prototype.
+ required: false
+ type: str
+ default: "present"
+ choices: [ "present", "absent" ]
+ name:
+ description:
+ - Name of trigger prototype to create or delete.
+ - Overrides "description" in API docs.
+ - Cannot be changed. If a trigger prototype's name needs to be changed, it needs to be deleted and recreated.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host to add trigger prototype to.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template to add trigger prototype to.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+ desc:
+ description:
+ - Additional description of the trigger prototype.
+ - Overrides "comments" in API docs.
+ required: false
+ type: str
+ aliases: [ "description" ]
+ dependencies:
+ description:
+ - List of trigger prototypes that this trigger prototype depends on.
+ required: false
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - Name of dependent trigger.
+ required: true
+ type: str
+ host_name:
+ description:
+ - Name of host containing dependent trigger.
+ - Required when I(template_name) is not used.
+ - Mutually exclusive with I(template_name).
+ required: false
+ type: str
+ template_name:
+ description:
+ - Name of template containing dependent trigger.
+ - Required when I(host_name) is not used.
+ - Mutually exclusive with I(host_name).
+ required: false
+ type: str
+
+ params:
+ description:
+ - Parameters to create/update trigger prototype with.
+ - Required if state is "present".
+ - Parameters as defined at https://www.zabbix.com/documentation/current/en/manual/api/reference/triggerprototype/object
+ - Additionally supported parameters are below.
+ required: false
+ type: dict
+ suboptions:
+ severity:
+ description:
+ - Severity of the trigger prototype.
+ - Alias for "priority" in API docs.
+ required: false
+ type: str
+ aliases: [ "priority" ]
+ choices:
+ - not_classified
+ - information
+ - warning
+ - average
+ - high
+ - disaster
+ status:
+ description:
+ - Status of the trigger prototype.
+ required: false
+ type: str
+ choices: [ "enabled", "disabled" ]
+ enabled:
+ description:
+ - Status of the trigger prototype.
+ - Overrides "status" in API docs.
+ required: false
+ type: bool
+ generate_multiple_events:
+ description:
+ - Whether the trigger prototype can generate multiple problem events.
+ - Alias for "type" in API docs.
+ required: false
+ type: bool
+ recovery_mode:
+ description:
+ - OK event generation mode.
+ - Overrides "recovery_mode" in API docs.
+ required: false
+ type: str
+ choices:
+ - expression
+ - recovery_expression
+ - none
+ correlation_mode:
+ description:
+ - Which problem events an OK event closes (all problems, or only those with matching tags).
+ - Overrides "correlation_mode" in API docs.
+ required: false
+ type: str
+ choices: [ "all", "tag" ]
+ manual_close:
+ description:
+ - Allow manual close.
+ - Overrides "manual_close" in API docs.
+ required: false
+ type: bool
+
+extends_documentation_fragment:
+- community.zabbix.zabbix
+'''
+
+EXAMPLES = r'''
+
+# If you want to authenticate to the Zabbix Server API with a username and password
+- name: Set credentials to access Zabbix Server API
+ ansible.builtin.set_fact:
+ ansible_user: Admin
+ ansible_httpapi_pass: zabbix
+
+# If you want to authenticate to the Zabbix Server API with an API token
+# https://www.zabbix.com/documentation/current/en/manual/web_interface/frontend_sections/administration/general#api-tokens
+- name: Set API token
+ ansible.builtin.set_fact:
+ ansible_zabbix_auth_key: 8ec0d52432c15c91fcafe9888500cf9a607f44091ab554dbee860f6b44fac895
+
+# Create trigger prototype on example_host using example_rule
+- name: create trigger prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ host_name: example_host
+ params:
+ severity: high
+ expression: "{% raw %}last(/example_host/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}"
+ recovery_mode: none
+ manual_close: True
+ enabled: True
+ state: present
+
+# Create trigger prototype on example_template using example_rule
+- name: create trigger prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: example_template
+ params:
+ severity: high
+ expression: "{% raw %}last(/example_host/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}"
+ recovery_mode: none
+ manual_close: True
+ enabled: True
+ state: present
+
+# Add tags to the existing Zabbix trigger prototype
+- name: update trigger prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: example_template
+ params:
+ severity: high
+ expression: "{% raw %}last(/example_host/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}"
+ recovery_mode: none
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: class
+ value: application
+ state: present
+
+# Delete Zabbix trigger prototype
+- name: delete trigger prototype
+ # set task level variables as we change ansible_connection plugin here
+ vars:
+ ansible_network_os: community.zabbix.zabbix
+ ansible_connection: httpapi
+ ansible_httpapi_port: 443
+ ansible_httpapi_use_ssl: true
+ ansible_httpapi_validate_certs: false
+ ansible_zabbix_url_path: 'zabbixeu' # If Zabbix WebUI runs on non-default (zabbix) path, e.g. http://<FQDN>/zabbixeu
+ ansible_host: zabbix-example-fqdn.org
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: example_template
+ state: absent
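+
+# Hypothetical sketch (not one of the shipped examples): creating a trigger
+# prototype that depends on another trigger prototype via the dependencies
+# option. Prototype and host names are illustrative; connection vars
+# (ansible_connection, ansible_network_os, ...) are omitted for brevity,
+# see the examples above.
+- name: create trigger prototype with a dependency
+  community.zabbix.zabbix_triggerprototype:
+    name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+    host_name: example_host
+    params:
+      severity: high
+      expression: "{% raw %}last(/example_host/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}"
+    dependencies:
+      - name: '{% raw %}Volume {#FSNAME} is read-only{% endraw %}'
+        host_name: example_host
+    state: present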
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+
+from ansible_collections.community.zabbix.plugins.module_utils.base import ZabbixBase
+import ansible_collections.community.zabbix.plugins.module_utils.helpers as zabbix_utils
+
+
+class Triggerprototype(ZabbixBase):
+
+ PRIORITY_TYPES = {'not_classified': 0,
+ 'information': 1,
+ 'warning': 2,
+ 'average': 3,
+ 'high': 4,
+ 'disaster': 5}
+
+ RECOVERY_MODES = {'expression': 0,
+ 'recovery_expression': 1,
+ 'none': 2}
+
+ def get_triggerprototypes(self, triggerprototype_name, host_name, template_name):
+ if host_name is not None:
+ host = host_name
+ else:
+ host = template_name
+ triggerprototypes = []
+ try:
+ triggerprototypes = self._zapi.triggerprototype.get({'filter': {'description': triggerprototype_name, 'host': host}})
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get triggerprototype: %s" % e)
+ return triggerprototypes
+
+ def sanitize_params(self, name, params, desc=None, dependencies=None):
+ params['description'] = name
+ if desc is not None:
+ params['comments'] = desc
+ if 'severity' in params:
+ params['priority'] = params['severity']
+ params.pop('severity')
+ if 'priority' in params:
+ priority_id = self.PRIORITY_TYPES[params['priority']]
+ params['priority'] = priority_id
+ if 'enabled' in params:
+ if params['enabled']:
+ params['status'] = 'enabled'
+ else:
+ params['status'] = 'disabled'
+ params.pop('enabled')
+ if 'status' in params:
+ status = params['status']
+ if status == 'enabled':
+ params['status'] = 0
+ elif status == 'disabled':
+ params['status'] = 1
+ else:
+ self._module.fail_json(msg="Status must be 'enabled' or 'disabled', got %s" % status)
+ if 'generate_multiple_events' in params:
+ multiple_event_type = params['generate_multiple_events']
+ if multiple_event_type:
+ params['type'] = 1
+ else:
+ params['type'] = 0
+ if 'recovery_mode' in params:
+ recovery_mode_id = self.RECOVERY_MODES[params['recovery_mode']]
+ params['recovery_mode'] = recovery_mode_id
+ if 'correlation_mode' in params:
+ correlation_mode = params['correlation_mode']
+ if correlation_mode == 'all':
+ params['correlation_mode'] = 0
+ elif correlation_mode == 'tag':
+ params['correlation_mode'] = 1
+ else:
+ self._module.fail_json(msg="correlation_mode must be all or tag, got %s" % correlation_mode)
+ if 'manual_close' in params:
+ manual_close = params['manual_close']
+ if manual_close:
+ params['manual_close'] = 1
+ else:
+ params['manual_close'] = 0
+ if dependencies is not None:
+ params['dependencies'] = []
+ for dependency in dependencies:
+ host_name = None
+ template_name = None
+ if 'host_name' in dependency:
+ host_name = dependency['host_name']
+ elif 'template_name' in dependency:
+ template_name = dependency['template_name']
+ else:
+ self._module.fail_json(msg="Each dependency must contain either the host_name or the template_name")
+ triggers = self.get_triggerprototypes(dependency['name'], host_name, template_name)
+ for trigger in triggers:
+ params['dependencies'].append({'triggerid': trigger['triggerid']})
+
+ def add_triggerprototype(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.triggerprototype.create(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to create triggerprototype: %s" % e)
+ return results
+
+ def update_triggerprototype(self, params):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.triggerprototype.update(params)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to update triggerprototype: %s" % e)
+ return results
+
+ def check_triggerprototype_changed(self, old_triggerprototype):
+ try:
+ new_triggerprototype = self._zapi.triggerprototype.get({'triggerids': '%s' % old_triggerprototype['triggerid']})[0]
+ except Exception as e:
+ self._module.fail_json(msg="Failed to get triggerprototype: %s" % e)
+ return old_triggerprototype != new_triggerprototype
+
+ def delete_triggerprototype(self, trigger_id):
+ if self._module.check_mode:
+ self._module.exit_json(changed=True)
+ try:
+ results = self._zapi.triggerprototype.delete(trigger_id)
+ except Exception as e:
+ self._module.fail_json(msg="Failed to delete triggerprototype: %s" % e)
+ return results
+
+
+def main():
+ argument_spec = zabbix_utils.zabbix_common_argument_spec()
+ argument_spec.update(dict(
+ name=dict(type='str', required=True),
+ host_name=dict(type='str', required=False),
+ template_name=dict(type='str', required=False),
+ params=dict(type='dict', required=False),
+ desc=dict(type='str', required=False, aliases=['description']),
+ dependencies=dict(type='list', elements='dict', required=False),
+ state=dict(type='str', default="present", choices=['present', 'absent']),
+ ))
+ module = AnsibleModule(
+ argument_spec=argument_spec,
+ required_one_of=[
+ ['host_name', 'template_name']
+ ],
+ mutually_exclusive=[
+ ['host_name', 'template_name']
+ ],
+ required_if=[
+ ['state', 'present', ['params']]
+ ],
+ supports_check_mode=True
+ )
+
+ name = module.params['name']
+ host_name = module.params['host_name']
+ template_name = module.params['template_name']
+ params = module.params['params']
+ desc = module.params['desc']
+ dependencies = module.params['dependencies']
+ state = module.params['state']
+
+ triggerprototype = Triggerprototype(module)
+
+ if state == "absent":
+ triggerprototypes = triggerprototype.get_triggerprototypes(name, host_name, template_name)
+ if len(triggerprototypes) == 0:
+ module.exit_json(changed=False, result="No triggerprototype to delete.")
+ else:
+ delete_ids = []
+ for t in triggerprototypes:
+ delete_ids.append(t['triggerid'])
+ results = triggerprototype.delete_triggerprototype(delete_ids)
+ module.exit_json(changed=True, result=results)
+
+ elif state == "present":
+ triggerprototype.sanitize_params(name, params, desc, dependencies)
+ triggerprototypes = triggerprototype.get_triggerprototypes(name, host_name, template_name)
+ if len(triggerprototypes) == 0:
+ results = triggerprototype.add_triggerprototype(params)
+ module.exit_json(changed=True, result=results)
+ else:
+ results = []
+ changed = False
+ for t in triggerprototypes:
+ params['triggerid'] = t['triggerid']
+ params.pop('description', None)
+ results.append(triggerprototype.update_triggerprototype(params))
+ changed_trigger = triggerprototype.check_triggerprototype_changed(t)
+ if changed_trigger:
+ changed = True
+ module.exit_json(changed=changed, result=results)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/README.md b/ansible_collections/community/zabbix/roles/zabbix_agent/README.md
index fe4a601b3..f9a2fb5a5 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/README.md
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/README.md
@@ -28,7 +28,6 @@
* [proxy](#proxy)
- [Dependencies](#dependencies)
- [Example Playbook](#example-playbook)
- * [zabbix_agent2_plugins](#zabbix-agent2-plugins)
* [agent_interfaces](#agent-interfaces)
* [Other interfaces](#other-interfaces)
* [Vars in role configuration](#vars-in-role-configuration)
@@ -95,6 +94,7 @@ See the following list of supported Operating systems with the Zabbix releases:
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
| Red Hat Fam 7 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | V | V | V |
@@ -104,24 +104,6 @@ See the following list of supported Operating systems with the Zabbix releases:
You can bypass this matrix by setting `enable_version_check: false`
-# Getting started
-
-## Minimal Configuration
-
-In order to get the Zabbix Agent running, you'll have to define the following properties before executing the role:
-
-* `zabbix_agent_version`
-* `zabbix_agent(2)_server`
-* `zabbix_agent(2)_serveractive` (When using active checks)
-
-The `zabbix_agent_version` is optional. The latest available major.minor version of Zabbix will be installed on the host(s). If you want to use an older version, please specify this in the major.minor format. Example: `zabbix_agent_version: 6.0`.
-
-The `zabbix_agent(2)_server` (and `zabbix_agent(2)_serveractive`) should contain the ip or fqdn of the host running the Zabbix Server.
-
-## Issues
-
-Due to issue discussed on [#291](https://github.com/dj-wasabi/ansible-zabbix-agent/issues/291), the Ansible Version 2.9.{0,1,2} isn't working correctly on Windows related targets.
-
# Role Variables
## Main variables
@@ -133,114 +115,66 @@ The following is an overview of all available configuration default for this rol
* `zabbix_agent_version`: This is the version of zabbix. Default: The highest supported version for the operating system. Can be overridden to 6.4, 6.2, or 6.0
* `zabbix_agent_version_minor`: When you want to specify a minor version to be installed. Is also used for `zabbix_sender` and `zabbix_get`. RedHat only. Default set to: `*` (latest available)
* `zabbix_repo_yum`: A list with Yum repository configuration.
+* `zabbix_repo_yum_gpgcheck`: If Yum should check GPG keys on installation
* `zabbix_repo_yum_schema`: Default: `https`. Option to change the web schema for the yum repository(http/https)
* `zabbix_agent_disable_repo`: A list of repos to disable during install. Default `epel`.
* `zabbix_repo_deb_url`: The URL to the Zabbix repository. Default `http://repo.zabbix.com/zabbix/{{ zabbix_agent_version }}/{{ ansible_distribution.lower() }}`
* `zabbix_repo_deb_component`: The repository component for Debian installs. Default `main`.
-* `zabbix_repo_deb_gpg_key_url`: The URL to download the Zabbix GPG key from. Default `http://repo.zabbix.com/zabbix-official-repo.key`.
+* `zabbix_repo_deb_gpg_key_url`: The URL to download the Zabbix GPG key from. Default `http://repo.zabbix.com/zabbix-official-repo.key`.
* `zabbix_repo_deb_include_deb_src`: True, if deb-src should be included in the zabbix.sources entry. Default `true`.
### SElinux
* `zabbix_selinux`: Default: `False`. Enables an SELinux policy so that the server will run.
+* `selinux_allow_zabbix_run_sudo`: Default: `False`. Enable Zabbix root access on system.
### Zabbix Agent
-* `zabbix_agent_ip`: The IP address of the host. When not provided, it will be determined via the `ansible_default_ipv4` fact.
* `zabbix_agent2`: Default: `False`. When you want to install the `Zabbix Agent2` instead of the "old" `Zabbix Agent`.
+* `zabbix_agent_apt_priority`: Add a weight (`Pin-Priority`) for the APT repository.
+* `zabbix_agent_chassis`: Default: `false`. When set to `true`, it will give Zabbix Agent access to the Linux DMI table allowing system.hw.chassis info to populate.
+* `zabbix_agent_conf_mode`: Default: `0644`. The "mode" for the Zabbix configuration file.
+* `zabbix_agent_dont_detect_ip`: Default `false`. When set to `true`, available IP addresses on the host are not detected, and the Python module `netaddr` does not need to be installed.
+* `zabbix_agent_get_package`: The name of the zabbix-get package. Default: `zabbix-get`.
+* `zabbix_agent_include_mode`: The mode for the Zabbix Agent include directory.
+* `zabbix_agent_install_agent_only`: Only install the Zabbix Agent and not the `zabbix-sender` and `zabbix-get` packages. Default: `False`
* `zabbix_agent_listeninterface`: Interface zabbix-agent listens on. Leave blank for all.
* `zabbix_agent_package_remove`: If `zabbix_agent2: True` and you want to remove the old installation. Default: `False`.
-* `zabbix_agent_package`: The name of the zabbix-agent package. Default: `zabbix-agent`. In case for EPEL, it is automatically renamed.
-* `zabbix_sender_package`: The name of the zabbix-sender package. Default: `zabbix-sender`. In case for EPEL, it is automatically renamed.
-* `zabbix_get_package`: The name of the zabbix-get package. Default: `zabbix-get`. In case for EPEL, it is automatically renamed.
-* `zabbix_agent_package_state`: If Zabbix-agent needs to be `present` or `latest`.
-* `zabbix_agent_interfaces`: A list that configured the interfaces you can use when configuring via API.
-* `zabbix_agent_install_agent_only`: Only install the Zabbix Agent and not the `zabbix-sender` and `zabbix-get` packages. Default: `False`
+* `zabbix_agent_package_state`: If Zabbix-agent needs to be `present` (default) or `latest`.
+* `zabbix_agent_package`: The name of the zabbix-agent package. Default: `zabbix-agent` if `zabbix_agent2` is `false` and `zabbix-agent2` if `true`.
+* `zabbix_agent_sender_package`: The name of the zabbix-sender package. Default: `zabbix-sender`.
* `zabbix_agent_userparameters`: Default: `[]`. List of userparameter names and scripts (if any). Detailed description is given in the [Deploying Userparameters](#deploying-userparameters) section; a short sketch follows at the end of this list.
- * `name`: Userparameter name (should be the same with userparameter template file name)
- * `scripts_dir`: Directory name of the custom scripts needed for userparameters
-* `zabbix_agent_userparameters_templates_src`: indicates the relative path (from `templates/`) where userparameter templates are searched
+ * `name`: Userparameter name (should be the same with userparameter template file name)
+ * `scripts_dir`: Directory name of the custom scripts needed for userparameters
* `zabbix_agent_userparameters_scripts_src`: indicates the relative path (from `files/`) where userparameter scripts are searched
-* `zabbix_agent_runas_user`: Drop privileges to a specific, existing user on the system. Only has effect if run as 'root' and AllowRoot is disabled.
-* `zabbix_agent_become_on_localhost`: Default: `True`. Set to `False` if you don't need to elevate privileges on localhost to install packages locally with pip.
-* `zabbix_agent_apt_priority`: Add a weight (`Pin-Priority`) for the APT repository.
-* `zabbix_agent_conf_mode`: Default: `0644`. The "mode" for the Zabbix configuration file.
-* `zabbix_agent_dont_detect_ip`: Default `false`. When set to `true`, it won't detect available ip addresses on the host and no need for the Python module `netaddr` to be installed.
-* `zabbix_agent_chassis`: Default: `false`. When set to `true`, it will give Zabbix Agent access to the Linux DMI table allowing system.hw.chassis info to populate.
-
-### Zabbix Agent vs Zabbix Agent 2 configuration
-
-The following provides an overview of all the properties that can be set in the Zabbix Agent configuration file. When `(2)` is used in the name of the property, like `zabbix_agent(2)_pidfile`, it will show that you can configure `zabbix_agent_pidfile` for the Zabbix Agent configuration file and `zabbix_agent2_pidfile` for the Zabbix Agent 2 configuration file.
-
-Otherwise it just for the Zabbix Agent or for the Zabbix Agent 2.
-
-* `zabbix_agent(2)_server`: The ip address for the zabbix-server or zabbix-proxy.
-* `zabbix_agent(2)_serveractive`: The ip address for the zabbix-server or zabbix-proxy for active checks.
-* `zabbix_agent(2)_allow_key`: list of AllowKey configurations.
-* `zabbix_agent(2)_deny_key`: list of DenyKey configurations.
-* `zabbix_agent(2)_pidfile`: name of pid file.
-* `zabbix_agent(2)_logfile`: name of log file.
-* `zabbix_agent(2)_logfilesize`: maximum size of log file in mb.
-* `zabbix_agent(2)_additional_include`: A list of additional complete paths to include in configuration
-* `zabbix_agent(2)_logtype`: Specifies where log messages are written to
-* `zabbix_agent(2)_debuglevel`: specifies debug level
-* `zabbix_agent(2)_sourceip`: source ip address for outgoing connections.
-* `zabbix_agent_enableremotecommands`: whether remote commands from zabbix server are allowed.
-* `zabbix_agent_logremotecommands`: enable logging of executed shell commands as warnings.
-* `zabbix_agent(2)_listenport`: agent will listen on this port for connections from the server.
-* `zabbix_agent2_statusport`: Agent will listen on this port for HTTP status requests.
-* `zabbix_agent(2)_listenip`: list of comma delimited ip addresses that the agent should listen on.
-* `zabbix_agent_startagents`: number of pre-forked instances of zabbix_agentd that process passive checks.
-* `zabbix_agent(2)_hostname`: unique, case sensitive hostname.
-* `zabbix_agent(2)_hostnameitem`: item used for generating hostname if it is undefined.
-* `zabbix_agent(2)_hostmetadata`: optional parameter that defines host metadata.
-* `zabbix_agent(2)_hostmetadataitem`: optional parameter that defines an item used for getting the metadata.
-* `zabbix_agent(2)_refreshactivechecks`: how often list of active checks is refreshed, in seconds.
-* `zabbix_agent(2)_buffersend`: do not keep data longer than n seconds in buffer.
-* `zabbix_agent(2)_buffersize`: maximum number of values in a memory buffer. the agent will send all collected data to zabbix server or proxy if the buffer is full.
-* `zabbix_agent2_enablepersistentbuffer`: 0 - disabled, in-memory buffer is used (default); 1 - use persistent buffer
-* `zabbix_agent2_persistentbufferperiod`: Zabbix Agent2 will keep data for this time period in case of no connectivity with Zabbix server or proxy. Older data will be lost. Log data will be preserved.
-* `zabbix_agent2_persistentbufferfile`: Zabbix Agent2 will keep SQLite database in this file * n is valid if `EnablePersistentBuffer=1`
-* `zabbix_agent_maxlinespersecond`: maximum number of new lines the agent will send per second to zabbix server or proxy processing 'log' and 'logrt' active checks.
-* `zabbix_agent_allowroot`: allow the agent to run as 'root'. if disabled and the agent is started by 'root', the agent will try to switch to user 'zabbix' instead. has no effect if started under a regular user.
-* `zabbix_agent(2)_zabbix_alias`: sets an alias for parameter. it can be useful to substitute long and complex parameter name with a smaller and simpler one. Can be both a string as an list.
-* `zabbix_agent(2)_timeout`: spend no more than timeout seconds on processing
-* `zabbix_agent(2)_include`: you may include individual files or all files in a directory in the configuration file.
-* `zabbix_agent(2)_include_pattern`: Optional file pattern used for included files.
-* `zabbix_agent(2)_include_mode`: The mode for the directory mentioned above.
-* `zabbix_agent(2)_unsafeuserparameters`: allow all characters to be passed in arguments to user-defined parameters.
-* `zabbix_agent_loadmodulepath`: Full path to location of agent modules.
-* `zabbix_agent_loadmodule`: Module to load at agent startup. Modules are used to extend functionality of the agent.
-* `zabbix_agent2_controlsocket`: The control socket, used to send runtime commands with '-R' option.
-* `zabbix_agent_allowroot`: Allow the agent to run as 'root'. 0 - do not allow, 1 - allow
-* `zabbix_agent2_plugins`: A list containing plugin configuration.
-* `zabbix_agent(2)_listenbacklog`: The maximum number of pending connections in the queue.
+* `zabbix_agent_userparameters_templates_src`: indicates the relative path (from `templates/`) where userparameter templates are searched
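+
+A hypothetical sketch of the `zabbix_agent_userparameters` structure (template and script directory names are illustrative):
+
+```yaml
+zabbix_agent_userparameters:
+  - name: mysql                # template looked up under templates/userparameters/
+  - name: galera
+    scripts_dir: galera        # custom scripts looked up under files/scripts/
+```
+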
## TLS Specific configuration
+* `zabbix_agent_tlspsk_auto`: Enables auto generation and storing of individual pre-shared keys and identities on clients. Is false by default. If set to true and if `zabbix_agent_tlspskfile` and `zabbix_agent_tlspsk_secret` are undefined, it generates the files `/etc/zabbix/tls_psk_auto.identity` and `/etc/zabbix/tls_psk_auto.secret`, which are populated by values automatically (identity is set to hostname, underscore and 4 random alphanumeric digits; secret is 64 random alphanumeric digits) in such a way that the values are generated once and are never overwritten.
-These variables are specific for Zabbix 3.0 and higher. When `(2)` is used in the name of the property, like `zabbix_agent(2)_tlsconnect`, it will show that you can configure `zabbix_agent_tlsconnect` for the Zabbix Agent configuration file and `zabbix_agent2_tlsconnect` for the Zabbix Agent 2 configuration file.
-
-* `zabbix_agent(2)_tlsconnect`: How the agent should connect to server or proxy. Used for active checks.
+* `zabbix_agent_tlsconnect`: How the agent should connect to server or proxy. Used for active checks.
Possible values:
* unencrypted
* psk
* cert
-* `zabbix_agent(2)_tlsaccept`: What incoming connections to accept.
+* `zabbix_agent_tlsaccept`: What incoming connections to accept.
Possible values:
* unencrypted
* psk
* cert
-* `zabbix_agent(2)_tlscafile`: Full pathname of a file containing the top-level CA(s) certificates for peer certificate verification.
-* `zabbix_agent(2)_tlscrlfile`: Full pathname of a file containing revoked certificates.
-* `zabbix_agent(2)_tlsservercertissuer`: Allowed server certificate issuer.
-* `zabbix_agent(2)_tlsservercertsubject`: Allowed server certificate subject.
-* `zabbix_agent(2)_tlscertfile`: Full pathname of a file containing the agent certificate or certificate chain.
-* `zabbix_agent(2)_tlskeyfile`: Full pathname of a file containing the agent private key.
-* `zabbix_agent(2)_tlspskidentity`: Unique, case sensitive string used to identify the pre-shared key.
-* `zabbix_agent(2)_tlspskidentity_file`: Full pathname of a file containing the pre-shared key identity.
-* `zabbix_agent(2)_tlspskfile`: Full pathname of a file containing the pre-shared key.
-* `zabbix_agent(2)_tlspsk_secret`: The pre-shared secret key that should be placed in the file configured with `agent_tlspskfile`.
-* `zabbix_agent(2)_tlspsk_auto`: Enables auto generation and storing of individual pre-shared keys and identities on clients. Is false by default. If set to true and if `zabbix_agent_tlspskfile` and `zabbix_agent_tlspsk_secret` are undefined, it generates the files `/etc/zabbix/tls_psk_auto.identity` and `/etc/zabbix/tls_psk_auto.secret`, which are populated by values automatically (identity is set to hostname, underscore and 4 random alphanumeric digits; secret is 64 random alphanumeric digits) in such a way that the values are generated once and are never overwritten.
+
+* `zabbix_agent_tlscafile`: Full pathname of a file containing the top-level CA(s) certificates for peer certificate verification.
+* `zabbix_agent_tlscertfile`: Full pathname of a file containing the agent certificate or certificate chain.
+* `zabbix_agent_tlscrlfile`: Full pathname of a file containing revoked certificates.
+* `zabbix_agent_tlskeyfile`: Full pathname of a file containing the agent private key.
+* `zabbix_agent_tlspskfile`: Full pathname of a file containing the pre-shared key.
+* `zabbix_agent_tlspskidentity`: Unique, case sensitive string used to identify the pre-shared key.
+* `zabbix_agent_tlspskidentity_file`: Full pathname of a file containing the pre-shared key identity.
+* `zabbix_agent_tlspsk_secret`: The pre-shared secret key for the agent.
+* `zabbix_agent_tlsservercertissuer`: Allowed server certificate issuer.
+* `zabbix_agent_tlsservercertsubject`: Allowed server certificate subject.
+* `zabbix_agent_tls_subject`: The subject of the TLS certificate.
+* `zabbix_agent_visible_hostname`: Configure Zabbix visible name inside Zabbix web UI for the node.
The results are stored in the Ansible variables `zabbix_agent_tlspskidentity` and `zabbix_agent_tlspsk_secret`, so that they may be used later in the code, for example with [zabbix_host](https://docs.ansible.com/ansible/latest/collections/community/zabbix/zabbix_host_module.html) to configure the Zabbix server or with `debug: msg:` to display them to the user.
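
As a minimal sketch (assuming `zabbix_agent_tlspsk_auto: true` and that the role has already run), the stored values could be displayed later in the same play; the task below is illustrative only:

```yaml
- name: Show the auto-generated PSK identity and secret
  ansible.builtin.debug:
    msg: "identity={{ zabbix_agent_tlspskidentity }} secret={{ zabbix_agent_tlspsk_secret }}"
```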
@@ -250,51 +184,49 @@ These variables need to be overridden when you want to make use of the Zabbix AP
Host encryption configuration will be set to match agent configuration.
-* `zabbix_api_server_host`: The IP or hostname/FQDN of Zabbix server. Example: zabbix.example.com
-* `zabbix_api_use_ssl`: Is SSL required to connect to the Zabbix API server? Default: `false`
-* `zabbix_api_server_port`: 80 if `zabbix_api_use_ssl` is `false` and 443 if `true` (Default) TCP port to use to connect to Zabbix server. Example: 8080
-* `zabbix_api_login_user`: Username of user which has API access.
-* `zabbix_api_login_pass`: Password for the user which has API access.
-* `zabbix_api_http_user`: The http user to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
-* `zabbix_api_http_password`: The http password to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
-* `zabbix_api_validate_certs`: yes (Default) if we need to validate tls certificates of the API. Use `no` in case self-signed certificates are used.
-* `zabbix_api_timeout`: How many seconds to wait for API response (default 30s).
-* `zabbix_api_create_hosts`: Default: `False`. When you want to enable the Zabbix API to create/delete the host. This has to be set to `True` if you want to make use of `zabbix_agent_host_state`.
-* `zabbix_api_create_hostgroup`: When you want to enable the Zabbix API to create/delete the hostgroups. This has to be set to `True` if you want to make use of `zabbix_agent_hostgroups_state`.Default: `False`
-* `ansible_zabbix_url_path`: URL path if Zabbix WebUI running on non-default (zabbix) path, e.g. if http://<FQDN>/zabbixeu then set to `zabbixeu`
-* `zabbix_agent_hostgroups_state`: present (Default) if the hostgroup needs to be created or absent if you want to delete it. This only works when `zabbix_api_create_hostgroup` is set to `True`.
-* `zabbix_host_status`: enabled (Default) when host in monitored, disabled when host is disabled for monitoring.
+* `zabbix_agent_description`: Description of the host in Zabbix.
* `zabbix_agent_host_state`: present (Default) if the host needs to be created or absent if you want to delete it. This only works when `zabbix_api_create_hosts` is set to `True`.
* `zabbix_agent_host_update`: yes (Default) if the host should be updated if already present. This only works when `zabbix_api_create_hosts` is set to `True`.
-* `zabbix_useuip`: 1 if connection to zabbix-agent is made via ip, 0 for fqdn.
-* `zabbix_host_groups`: A list of hostgroups which this host belongs to.
-* `zabbix_agent_link_templates`: A list of templates which needs to be link to this host. The templates should exist.
+* `zabbix_agent_interfaces`: A list of interfaces and their configurations you can use when configuring via API.
+* `zabbix_agent_inventory_mode`: Configure Zabbix inventory mode. Needed for building inventory data, manually when configuring a host or automatically by using some automatic population options. This has to be set to `automatic` if you want inventory data to be built automatically. Default `disabled`
+* `zabbix_agent_inventory_zabbix`: Adds Facts for a zabbix inventory. Default `{}`
+* `zabbix_agent_ip`: The IP address of the host. When not provided, it will be determined via the `ansible_default_ipv4` fact.
+* `zabbix_agent_link_templates`: A list of templates which need to be linked to this host. The templates should exist. Default: "Templated Linux by Zabbix agent"
* `zabbix_agent_macros`: A list with macro_key and macro_value for creating host macros.
+* `zabbix_agent_proxy`: The name of the Zabbix proxy (if used). Default `null`
* `zabbix_agent_tags`: A list with tag and (optionally) value for creating host tags.
-* `zabbix_agent_inventory_mode`: Configure Zabbix inventory mode. Needed for building inventory data, manually when configuring a host or automatically by using some automatic population options. This has to be set to `automatic` if you want to make automatically building inventory data.
-* `zabbix_agent_visible_hostname` : Configure Zabbix visible name inside Zabbix web UI for the node.
-* `zabbix_agent_description`: Description of the host in Zabbix.
-* `zabbix_agent_inventory_zabbix`: Adds Facts for a zabbix inventory
+* `zabbix_api_create_hostgroup`: When you want to enable the Zabbix API to create/delete the hostgroups. Default: `False`
+* `zabbix_api_create_hosts`: Default: `False`. When you want to enable the Zabbix API to create/delete the host. This has to be set to `True` if you want to make use of `zabbix_agent_host_state`.
+* `zabbix_api_http_password`: The http password to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
+* `zabbix_api_http_user`: The http user to access zabbix url with Basic Auth (if your Zabbix is behind a proxy with HTTP Basic Auth).
+* `zabbix_api_login_pass`: Password for the user which has API access.
+* `zabbix_api_login_user`: Username of user which has API access.
+* `zabbix_api_server_host`: The IP or hostname/FQDN of Zabbix server. Example: zabbix.example.com
+* `zabbix_api_server_port`: TCP port to use to connect to the Zabbix server. Default: 80 if `zabbix_api_use_ssl` is `false`, 443 if `true`. Example: 8080
+* `zabbix_api_use_ssl`: Is SSL required to connect to the Zabbix API server? Default: `false`
+* `zabbix_api_validate_certs`: `True` if we need to validate tls certificates of the API. Use `False` in case self-signed certificates are used. Default: `False`
+* `zabbix_host_groups`: A list of hostgroups which this host belongs to. Default: "Linux Servers"
+* `zabbix_host_status`: enabled (Default) when the host is monitored, disabled when the host is disabled for monitoring.
+* `zabbix_useuip`: 1 if connection to zabbix-agent is made via ip, 0 for fqdn.
## Windows Variables
**NOTE**
-_Supporting Windows is a best effort (We don't have the possibility to either test/verify changes on the various amount of available Windows instances). PRs specific to Windows will almost immediately be merged, unless someone is able to provide a Windows test mechanism via Travis for Pull Requests._
-When `(2)` is used in the name of the property, like `zabbix_agent(2)_win_logfile`, it will show that you can configure `zabbix_agent_win_logfile` for the Zabbix Agent configuration file and `zabbix_agent2_win_logfile` for the Zabbix Agent 2 configuration file.
+Supporting Windows is a best effort (we don't have the possibility to test/verify changes on the wide variety of available Windows instances). PRs specific to Windows will almost immediately be merged, unless someone is able to provide a Windows test mechanism via Travis for Pull Requests.
+For properties that exist for both agents, such as the log file, you can configure `zabbix_agent_win_logfile` for the Zabbix Agent configuration file and `zabbix_agent2_win_logfile` for the Zabbix Agent 2 configuration file.
Otherwise it is just for the Zabbix Agent or for the Zabbix Agent 2.
-* `zabbix(2)_win_package`: file name pattern (zip only). This will be used to generate the `zabbix(2)_win_download_link` variable.
-* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix(2)_win_package` and `zabbix(2)_win_download_link` variables. This takes precedence over `zabbix_agent_version`.
-* `zabbix(2)_win_download_link`: The download url to the `win.zip` file.
+* `zabbix_agent_win_include`: The directory in which the Zabbix Agent specific configuration files are stored.
+* `zabbix_agent_win_logfile`: The full path to the logfile for the Zabbix Agent.
+* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix_win_package` and `zabbix_win_download_link` variables. This takes precedence over `zabbix_agent_version`.
+* `zabbix_win_download_link`: The download url to the `win.zip` file.
+* `zabbix_win_firewall_management`: Enable Windows firewall management (add service and port to allow rules). Default: `True`
* `zabbix_win_install_dir`: The directory where Zabbix needs to be installed.
* `zabbix_win_install_dir_conf`: The directory where Zabbix configuration file needs to be installed.
* `zabbix_win_install_dir_bin`: The directory where Zabbix binary file needs to be installed.
-* `zabbix_agent(2)_win_logfile`: The full path to the logfile for the Zabbix Agent.
-* `zabbix_agent_win_include`: The directory in which the Zabbix Agent specific configuration files are stored.
-* `zabbix_agent_win_svc_recovery`: Enable Zabbix Agent service auto-recovery settings.
-* `zabbix_win_firewall_management`: Enable Windows firewall management (add service and port to allow rules). Default: `True`
+* `zabbix_win_package`: file name pattern (zip only). This will be used to generate the `zabbix_win_download_link` variable.
## macOS Variables
@@ -302,8 +234,10 @@ Otherwise it just for the Zabbix Agent or for the Zabbix Agent 2.
_Supporting Windows is a best effort (We don't have the possibility to either test/verify changes on the various amount of available Windows instances). PRs specific to Windows will almost immediately be merged, unless someone is able to provide a Windows test mechanism via Travis for Pull Requests._
-* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix_mac_download_link` link.
* `zabbix_mac_download_link`: The download url to the `pkg` file.
+* `zabbix_mac_download_url`: The download url. Default `https://cdn.zabbix.com/zabbix/binaries/stable`
+* `zabbix_mac_package`: The name of the mac install package. Default `zabbix_agent-{{ zabbix_version_long }}-macos-amd64-openssl.pkg`
+* `zabbix_version_long`: The long (major.minor.patch) version of the Zabbix Agent. This will be used to generate the `zabbix_mac_download_link` link.
## Docker Variables
@@ -324,19 +258,19 @@ The following directories are mounted in the Container:
Keep in mind that using the Zabbix Agent in a Container requires changes to the Zabbix Template for Linux as `/proc`, `/sys` and `/etc` are mounted in a directory `/hostfs`.
* `zabbix_agent_docker`: Default: `False`. When set to `True`, it will install a Docker container on the target host instead of installation on the target.
-* `zabbix_agent_docker_state`: Default: `started`
-* `zabbix_agent_docker_name`: The name of the Container. Default: `zabbix-agent`
+* `zabbix_agent_docker_env`: A dict with all environment variables that need to be set for the Container.
* `zabbix_agent_docker_image`: The name of the Docker image. Default: `zabbix/zabbix-agent`
* `zabbix_agent_docker_image_tag`: The tag of the Docker image.
-* `zabbix_agent_docker_user_gid`: The group id of the zabbix user in the Container.
-* `zabbix_agent_docker_user_uid`: The user id of the zabbix user in the Container.
+* `zabbix_agent_docker_name`: The name of the Container. Default: `zabbix-agent`
* `zabbix_agent_docker_network_mode`: The name of the (Docker) network that should be used for the Container. Default `host`.
* `zabbix_agent_docker_restart_policy`: Default: `unless-stopped`. The restart policy of the Container.
-* `zabbix_agent_docker_privileged`: When set to `True`, the container is running in privileged mode.
-* `zabbix_agent_docker_ports`: A list with `<PORT>:<PORT>` values to open ports to the container.
+* `zabbix_agent_docker_ports`: A list with `<PORT>:<PORT>` values to open ports to the container. Default `10050`
+* `zabbix_agent_docker_privileged`: When set to `True`, the container is running in privileged mode. Default `false`
* `zabbix_agent_docker_security_opts`: A list with available security options.
+* `zabbix_agent_docker_state`: Default: `started`
+* `zabbix_agent_docker_user_gid`: The group id of the zabbix user in the Container.
+* `zabbix_agent_docker_user_uid`: The user id of the zabbix user in the Container.
* `zabbix_agent_docker_volumes`: A list with all directories that need to be available in the Container.
-* `zabbix_agent_docker_env`: A dict with all environment variables that needs to be set for the Container.
## IPMI variables
@@ -345,6 +279,77 @@ Keep in mind that using the Zabbix Agent in a Container requires changes to the
* `zabbix_agent_ipmi_privilege`: IPMI privilege level. Possible values are 1 (callback), 2 (user), 3 (operator), 4 (admin), 5 (OEM), with 2 being the API default.
* `zabbix_agent_ipmi_username`: IPMI username.
+## Configuration Variables
+The following table lists all variables that are exposed to modify the configuration of the zabbix_agent.conf file. Specific details of each variable can be found in the Zabbix documentation.
+
+**NOTE**: Only variables with a default value appear in the defaults file; all others must be added. A minimal example of overriding some of them follows the table.
+
+| Zabbix Name | Variable Name | Default Value | Notes |
+|-----------|------------------|--------|--------|
+| Alias | zabbix_agent_aliases | | Can be a string or list |
+| AllowKey | zabbix_agent_allowkeys | | |
+| AllowRoot | zabbix_agent_allowroot | | Linux Systems Only |
+| BufferSend | zabbix_agent_buffersend | 5 | |
+| BufferSize | zabbix_agent_buffersize | 100 | |
+| ControlSocket | zabbix_agent_controlsocket | /tmp/agent.sock | Agent 2 Only |
+| DebugLevel | zabbix_agent_debuglevel | 3 | |
+| DenyKey | zabbix_agent_denykeys | | Can be a string or a list |
+| EnableRemoteCommands | zabbix_agent_enableremotecommands | 0 | Agent Only |
+| ForceActiveChecksOnStart | zabbix_agent_forceactivechecksonstart | | Agent 2 Only |
+| HeartbeatFrequency | zabbix_agent_heartbeatfrequency | 60 | Version >= 6.2 |
+| HostInterface | zabbix_agent_hostinterface | | |
+| HostInterfaceItem | zabbix_agent_hostinterfaceitem | | |
+| HostMetadata | zabbix_agent_hostmetadata | | |
+| HostMetadataItem | zabbix_agent_hostmetadataitem | | |
+| Hostname | zabbix_agent_hostname | | `{{ inventory_hostname }}` |
+| HostnameItem | zabbix_agent_hostnameitem | | |
+| Include | zabbix_agent_include | /etc/zabbix/`{ agent version specific }`.d | |
+| ListenBacklog | zabbix_agent_listenbacklog | | Agent Only |
+| ListenIP | zabbix_agent_listenip | | list of IPs |
+| ListenPort | zabbix_agent_listenport | 10050 | |
+| LoadModule | zabbix_agent_loadmodule | | Agent On Linux Only |
+| LoadModulePath | zabbix_agent_loadmodulepath | | Agent On Linux Only |
+| LogFile | zabbix_agent_logfile | /var/log/zabbix/`{ agent version specific }`.log | |
+| LogFileSize | zabbix_agent_logfilesize | 100 | |
+| LogRemoteCommands | zabbix_agent_logremotecommands | | Agent Only |
+| LogType | zabbix_agent_logtype | file | |
+| MaxLinesPerSecond | zabbix_agent_maxlinespersecond | | Agent Only |
+| PerfCounter | zabbix_agent_perfcounter | | Agent Only |
+| PerfCounterEn | zabbix_agent_perfcounteren | | Agent Only |
+| PersistentBufferFile | zabbix_agent_persistentbufferfile | | Agent 2 Only |
+| PersistentBufferPeriod | zabbix_agent_persistentbufferperiod | 1h | Agent 2 Only |
+| PidFile | zabbix_agent_pidfile | /var/run/zabbix/`{ agent version specific }`.pid | Linux Systems Only |
+| Plugin | zabbix_agent_plugins | | |
+| PluginSocket | zabbix_agent_pluginsocket | | |
+| PluginTimeout | zabbix_agent_plugintimeout | | |
+| RefreshActiveChecks | zabbix_agent_refreshactivechecks | 120 | |
+| Server | zabbix_agent_server | | |
+| ServerActive | zabbix_agent_serveractive | | |
+| SourceIP | zabbix_agent_sourceip | | |
+| StartAgents | zabbix_agent_startagents | | Agent Only |
+| StatusPort | zabbix_agent_statusport | 9999 | Agent 2 Only |
+| Timeout | zabbix_agent_timeout | 3 | |
+| TLSAccept | zabbix_agent_tlsaccept | | |
+| TLSCAFile | zabbix_agent_tlscafile | /etc/zabbix/tls_psk_auto.secret | |
+| TLSCertFile | zabbix_agent_tlscertfile | | |
+| TLSCipherAll | zabbix_agent_tlscipherall | | Agent on Linux Only |
+| TLSCipherAll13 | zabbix_agent_tlscipherall13 | | Agent on Linux Only |
+| TLSCipherCert | zabbix_agent_tlsciphercert | | Agent on Linux Only |
+| TLSCipherCert13 | zabbix_agent_tlsciphercert13 | | Agent on Linux Only |
+| TLSCipherPSK | zabbix_agent_tlscipherpsk | | Agent on Linux Only |
+| TLSCipherPSK13 | zabbix_agent_tlscipherpsk13 | | Agent on Linux Only |
+| TLSConnect | zabbix_agent_tlsconnect | | |
+| TLSCRLFile | zabbix_agent_tlscrlfile | | |
+| TLSKeyFile | zabbix_agent_tlskeyfile | | |
+| TLSPSKFile | zabbix_agent_tlspskfile | | |
+| TLSPSKIdentity | zabbix_agent_tlspskidentity | | |
+| TLSServerCertIssuer | zabbix_agent_tlsservercertissuer | | |
+| TLSServerCertSubject | zabbix_agent_tlsservercertsubject | | |
+| UnsafeUserParameters | zabbix_agent_unsafeuserparameters | | |
+| User | zabbix_agent_runas_user | | Agent on Linux Only |
+| UserParameter | zabbix_agent_userparamater | 0 | |
+
+
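+As a minimal, illustrative sketch (the values below are examples, not defaults), any variable from the table can be set in inventory or `group_vars` and the role will render it into the agent configuration file:
+
+```yaml
+# group_vars/all.yml -- example values only
+zabbix_agent_server: 192.168.1.1
+zabbix_agent_serveractive: 192.168.1.1
+zabbix_agent_listenport: 10050
+zabbix_agent_debuglevel: 3
+```
+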
## proxy
When the target host does not have access to the internet, but you do have a proxy available, the following properties need to be set to download the packages via the proxy:
@@ -369,18 +374,6 @@ There are no dependencies on other roles.
# Example Playbook
-## zabbix_agent2_plugins
-
-Specifically for the Zabbix Agent 2, a list of extra plugins can be configured. The following provides an overview of configuring the `SystemRun` plugin by setting the `LogRemoteCommands` to `0`:
-
-```yaml
-zabbix_agent2_plugins:
- - name: SystemRun
- options:
- - parameter: LogRemoteCommands
- value: 0
-```
-
In the `zabbix_agent2.conf` an entry will be created with the following content:
```
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/defaults/main.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/defaults/main.yml
index 12424a6da..219182e96 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/defaults/main.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/defaults/main.yml
@@ -2,27 +2,19 @@
# defaults file for zabbix_agent
zabbix_agent2: false
-# zabbix_agent_version: 6.4
zabbix_agent_version_minor: "*"
zabbix_version_patch: 0
zabbix_agent_package_remove: false
-zabbix_agent_package: zabbix-agent
-zabbix_sender_package: zabbix-sender
-zabbix_get_package: zabbix-get
+zabbix_sender_package: zabbix-sender # Deprecated in 3.0
+zabbix_agent_sender_package: "{{ zabbix_sender_package }}"
+zabbix_get_package: zabbix-get # Deprecated in 3.0
+zabbix_agent_get_package: "{{ zabbix_get_package }}"
zabbix_agent_package_state: present
-zabbix_agent_server:
-zabbix_agent_serveractive:
-zabbix_agent2_server: "{{ zabbix_agent_server }}"
-zabbix_agent2_serveractive: "{{ zabbix_agent_serveractive }}"
zabbix_selinux: false
-zabbix_agent_apt_priority:
zabbix_agent_conf_mode: "0644"
zabbix_agent_dont_detect_ip: false
-zabbix_agent_allow_key: []
-zabbix_agent_deny_key: []
-zabbix_agent2_allow_key: "{{ zabbix_agent_allow_key }}"
-zabbix_agent2_deny_key: "{{ zabbix_agent_deny_key }}"
+zabbix_agent_tlspskidentity_file: "/etc/zabbix/tls_psk_auto.identity"
# Selinux related vars
selinux_allow_zabbix_run_sudo: false
@@ -30,18 +22,12 @@ zabbix_repo_deb_gpg_key_url: http://repo.zabbix.com/zabbix-official-repo.key
zabbix_repo_deb_include_deb_src: true
zabbix_agent_install_agent_only: false
-zabbix_agent_packages:
- - "{{ zabbix_agent_package }}"
- - "{{ zabbix_sender_package }}"
- - "{{ zabbix_get_package }}"
# Zabbix role related vars
zabbix_apt_force_apt_get: true
zabbix_apt_install_recommends: false
# Override Ansible specific facts
-zabbix_agent_distribution_major_version: "{{ ansible_distribution_major_version }}"
-zabbix_agent_distribution_release: "{{ ansible_distribution_release }}"
zabbix_repo_yum_gpgcheck: 0
zabbix_repo_yum_schema: https
zabbix_agent_disable_repo:
@@ -49,21 +35,21 @@ zabbix_agent_disable_repo:
zabbix_repo_yum:
- name: zabbix
description: Zabbix Official Repository - $basearch
- baseurl: "{{ zabbix_repo_yum_schema }}://repo.zabbix.com/zabbix/{{ zabbix_agent_version }}/rhel/{{ zabbix_agent_distribution_major_version }}/$basearch/"
+ baseurl: "{{ zabbix_repo_yum_schema }}://repo.zabbix.com/zabbix/{{ zabbix_agent_version }}/rhel/{{ ansible_distribution_major_version }}/$basearch/"
mode: "0644"
gpgcheck: "{{ zabbix_repo_yum_gpgcheck }}"
gpgkey: file:///etc/pki/rpm-gpg/RPM-GPG-KEY-ZABBIX
state: present
- name: zabbix-non-supported
description: Zabbix Official Repository non-supported - $basearch
- baseurl: "{{ zabbix_repo_yum_schema }}://repo.zabbix.com/non-supported/rhel/{{ zabbix_agent_distribution_major_version }}/$basearch/"
+ baseurl: "{{ zabbix_repo_yum_schema }}://repo.zabbix.com/non-supported/rhel/{{ ansible_distribution_major_version }}/$basearch/"
mode: "0644"
gpgcheck: "{{ zabbix_repo_yum_gpgcheck }}"
gpgkey: file:///etc/pki/rpm-gpg/RPM-GPG-KEY-ZABBIX
state: present
- name: zabbix-agent2-plugins
description: Zabbix Official Repository (Agent2 Plugins) - $basearch
- baseurl: "{{ zabbix_repo_yum_schema }}://repo.zabbix.com/zabbix-agent2-plugins/1/rhel/{{ zabbix_agent_distribution_major_version }}/$basearch/"
+ baseurl: "{{ zabbix_repo_yum_schema }}://repo.zabbix.com/zabbix-agent2-plugins/1/rhel/{{ ansible_distribution_major_version }}/$basearch/"
mode: "0644"
gpgcheck: "{{ zabbix_repo_yum_gpgcheck }}"
gpgkey: file:///etc/pki/rpm-gpg/RPM-GPG-KEY-ZABBIX
@@ -74,7 +60,6 @@ zabbix_repo_deb_component: main
# Zabbix API stuff
zabbix_api_server_host: localhost
-# zabbix_api_server_port: 80
zabbix_api_login_user: Admin
zabbix_api_use_ssl: false
zabbix_api_login_pass: !unsafe zabbix
@@ -82,10 +67,10 @@ zabbix_api_validate_certs: false
ansible_httpapi_pass: "{{ zabbix_api_login_pass }}"
ansible_httpapi_port: "{{ zabbix_api_server_port }}"
ansible_httpapi_validate_certs: "{{ zabbix_api_validate_certs }}"
-zabbix_api_timeout: 30
+
+# API Related Variables
zabbix_api_create_hostgroup: false
zabbix_api_create_hosts: false
-zabbix_agent_hostgroups_state: present # or absent
zabbix_agent_host_state: present # or absent
zabbix_agent_host_update: true
zabbix_host_status: enabled # or disabled
@@ -106,67 +91,19 @@ zabbix_agent_interfaces:
port: "{{ (zabbix_agent2 == True) | ternary(zabbix_agent2_listenport, zabbix_agent_listenport) }}"
# Zabbix configuration variables
-zabbix_agent_pidfile: /var/run/zabbix/zabbix_agentd.pid
-zabbix_agent_logtype: file
-zabbix_agent_logfile: /var/log/zabbix/zabbix_agentd.log
-zabbix_agent_logfilesize: 100
-zabbix_agent_debuglevel: 3
-zabbix_agent_sourceip:
-zabbix_agent_enableremotecommands: 0
-zabbix_agent_allowkeys:
-zabbix_agent_denykeys:
-zabbix_agent_logremotecommands: 0
-zabbix_agent_listenport: 10050
-zabbix_agent_jmx_listenport:
zabbix_agent_listeninterface:
-zabbix_agent_listenip:
-zabbix_agent_startagents: 3
-zabbix_agent_hostname: "{{ inventory_hostname }}"
-zabbix_agent_hostnameitem:
-zabbix_agent_hostmetadata:
-zabbix_agent_hostmetadataitem:
-zabbix_agent_refreshactivechecks: 120
-zabbix_agent_buffersend: 5
-zabbix_agent_buffersize: 100
-zabbix_agent_maxlinespersecond: 100
-zabbix_agent_allowroot: 0
-zabbix_agent_zabbix_alias:
-zabbix_agent_timeout: 3
-zabbix_agent_include: /etc/zabbix/zabbix_agentd.d
-zabbix_agent_include_pattern:
-zabbix_agent_include_mode: "0750"
-zabbix_agent_unsafeuserparameters: 0
-zabbix_agent_userparameters: []
+
+# zabbix_agent_zabbix_alias: # Deprecated after 3.0
+zabbix_agent_alias: "{{ zabbix_agent_zabbix_alias | default(omit) }}"
zabbix_agent_userparameters_templates_src: "userparameters"
zabbix_agent_userparameters_scripts_src: "scripts"
-zabbix_agent_custom_scripts: false
-zabbix_agent_loadmodulepath: ${libdir}/modules
-zabbix_agent_loadmodule:
-zabbix_agent_become_on_localhost: true
-zabbix_agent_description:
-zabbix_agent_inventory_zabbix: {}
-zabbix_agent_heartbeatfrequency: 60
-zabbix_agent_macros: []
-zabbix_agent_tags: []
zabbix_agent_chassis: false
-# TLS settings
-zabbix_agent_tlsconnect:
-zabbix_agent_tlsaccept:
-zabbix_agent_tlscafile:
-zabbix_agent_tlscrlfile:
-zabbix_agent_tlsservercertissuer:
-zabbix_agent_tlsservercertsubject:
-zabbix_agent_tls_subject: "{{ zabbix_agent_tlsservercertsubject }}" # FIXME this is not correct and should be removed with 2.0.0, here only to prevent regression
-zabbix_agent_tlscertfile:
-zabbix_agent_tlskeyfile:
-zabbix_agent_tlspskidentity:
-zabbix_agent_tlspsk_auto: false
-
zabbix_agent_tls_config:
unencrypted: "1"
psk: "2"
- cert: "4"
+ cert:
+ "4"
# IPMI settings
zabbix_agent_ipmi_authtype: -1
@@ -174,66 +111,22 @@ zabbix_agent_ipmi_password:
zabbix_agent_ipmi_privilege: 2
zabbix_agent_ipmi_username:
-# Zabbix Agent2
-zabbix_agent2_pidfile: /var/run/zabbix/zabbix_agent2.pid
-zabbix_agent2_logfile: /var/log/zabbix/zabbix_agent2.log
-zabbix_agent2_logtype: file
-zabbix_agent2_statusport: 9999
-zabbix_agent2_include: /etc/zabbix/zabbix_agent2.d
-zabbix_agent2_include_pattern:
-zabbix_agent2_logfilesize: 100
-zabbix_agent2_debuglevel: 3
-zabbix_agent2_sourceip:
-zabbix_agent2_listenport: 10050
-zabbix_agent2_listenip:
-zabbix_agent2_hostname: "{{ inventory_hostname }}"
-zabbix_agent2_hostnameitem:
-zabbix_agent2_hostmetadata:
-zabbix_agent2_hostmetadataitem:
-zabbix_agent2_hostinterface:
-zabbix_agent2_hostinterfaceitem:
-zabbix_agent2_enablepersistentbuffer: 0
-zabbix_agent2_persistentbufferperiod: 1h
-zabbix_agent2_persistentbufferfile:
-zabbix_agent2_refreshactivechecks: 120
-zabbix_agent2_buffersend: 5
-zabbix_agent2_buffersize: 100
-zabbix_agent2_zabbix_alias:
-zabbix_agent2_timeout: 3
-zabbix_agent2_include_mode: "0750"
-zabbix_agent2_unsafeuserparameters: 0
-zabbix_agent2_controlsocket: /tmp/agent.sock
-zabbix_agent2_plugins: []
+####### Agent 1 Stuff Only
-# Zabbix Agent2 TLS settings
-zabbix_agent2_tlsconnect:
-zabbix_agent2_tlsaccept:
-zabbix_agent2_tlscafile:
-zabbix_agent2_tlscrlfile:
-zabbix_agent2_tlsservercertissuer:
-zabbix_agent2_tlsservercertsubject:
-zabbix_agent2_tls_subject: "{{ zabbix_agent2_tlsservercertsubject }}" # FIXME this is not correct and should be removed with 2.0.0, here only to prevent regression
-zabbix_agent2_tlscertfile:
-zabbix_agent2_tlskeyfile:
-zabbix_agent2_tlspskidentity:
-zabbix_agent2_tlspsk_auto: false
+zabbix_agent_loadmodulepath: ${libdir}/modules
+zabbix_agent_logremotecommands: 0
+zabbix_agent_maxlinespersecond: 100
+zabbix_agent_startagents: 3
# Windows/macOS Related
zabbix_version_long: 5.2.4
# Windows Related
-zabbix_win_package: zabbix_agent-{{ zabbix_version_long }}-windows-amd64-openssl.zip
-zabbix2_win_package: zabbix_agent2-{{ zabbix_version_long }}-windows-amd64-openssl-static.zip
zabbix_win_download_url: https://cdn.zabbix.com/zabbix/binaries/stable
-zabbix_win_download_link: "{{ zabbix_win_download_url }}/{{ zabbix_version_long | regex_search('^\\d+\\.\\d+') }}/{{ zabbix_version_long }}/{{ zabbix_win_package }}"
-zabbix2_win_download_link: "{{ zabbix_win_download_url }}/{{ zabbix_version_long | regex_search('^\\d+\\.\\d+') }}/{{ zabbix_version_long }}/{{ zabbix2_win_package }}"
zabbix_win_install_dir: 'C:\Zabbix'
zabbix_win_install_dir_conf: '{{ zabbix_win_install_dir }}\\conf'
zabbix_win_install_dir_bin: '{{ zabbix_win_install_dir }}\\bin'
-zabbix_agent_win_logfile: "{{ zabbix_win_install_dir }}\\zabbix_agentd.log"
zabbix_agent_win_include: "{{ zabbix_win_install_dir }}\\zabbix_agent.d\\"
-zabbix_agent2_win_logfile: "{{ zabbix_win_install_dir }}\\zabbix_agent2.log"
-zabbix_agent_win_svc_recovery: true
zabbix_win_firewall_management: true
# macOS Related
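
For reference, the zabbix_agent_tls_config mapping retained above translates the readable TLS mode names into the numeric values the Zabbix API expects for tls_accept/tls_connect (the api.yml tasks later in this diff index into it). A minimal standalone sketch, with the tlsaccept value assumed for illustration:

- hosts: localhost
  gather_facts: false
  vars:
    zabbix_agent_tls_config:
      unencrypted: "1"
      psk: "2"
      cert: "4"
    zabbix_agent_tlsaccept: psk            # assumed value
  tasks:
    - name: Resolve the numeric TLS mode the Zabbix API expects
      ansible.builtin.debug:
        msg: "{{ zabbix_agent_tls_config[zabbix_agent_tlsaccept | default('unencrypted')] }}"   # prints "2"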
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Debian.yml
index 92d56b179..940f1e2f0 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Debian.yml
@@ -38,7 +38,7 @@
(ansible_distribution == "Debian" and ansible_distribution_major_version < "12")
- name: "Debian | Download gpg key"
- when: not ansible_check_mode # Because get_url always has changed status in check_mode.
+ when: not ansible_check_mode # Because get_url always has changed status in check_mode.
ansible.builtin.get_url:
url: "{{ zabbix_repo_deb_gpg_key_url }}"
dest: "{{ zabbix_gpg_key }}"
@@ -69,29 +69,28 @@
tags:
- install
-- name: "Debian | Create /etc/apt/preferences.d/"
- ansible.builtin.file:
- path: /etc/apt/preferences.d/
- state: directory
- mode: "0755"
+- name: Configure APT Preference
when:
+ - zabbix_agent_apt_priority is defined
- zabbix_agent_apt_priority | int
- become: true
- tags:
- - install
+ block:
+ - name: "Debian | Create /etc/apt/preferences.d/"
+ ansible.builtin.file:
+ path: /etc/apt/preferences.d/
+ state: directory
+ mode: "0755"
+ become: true
-- name: "Debian | Configuring the weight for APT"
- ansible.builtin.copy:
- dest: "/etc/apt/preferences.d/zabbix-agent-{{ zabbix_underscore_version }}"
- content: |
- Package: {{ zabbix_agent_package }}
- Pin: origin repo.zabbix.com
- Pin-Priority: {{ zabbix_agent_apt_priority | int }}
- owner: root
- mode: "0644"
- when:
- - zabbix_agent_apt_priority | int
- become: true
+ - name: "Debian | Configuring the weight for APT"
+ ansible.builtin.copy:
+ dest: "/etc/apt/preferences.d/zabbix-agent-{{ zabbix_underscore_version }}"
+ content: |
+ Package: {{ zabbix_agent_package }}
+ Pin: origin repo.zabbix.com
+ Pin-Priority: {{ zabbix_agent_apt_priority | int }}
+ owner: root
+ mode: "0644"
+ become: true
tags:
- install
@@ -115,8 +114,8 @@
- name: "Debian | Installing zabbix-{sender,get}"
ansible.builtin.apt:
pkg:
- - "{{ zabbix_sender_package }}"
- - "{{ zabbix_get_package }}"
+ - "{{ zabbix_agent_sender_package }}"
+ - "{{ zabbox_agent_get_package }}"
state: "{{ zabbix_agent_package_state }}"
update_cache: true
cache_valid_time: 0
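
For reference, once zabbix_agent_apt_priority is set, the block above renders a pin file roughly like the following; the package name, version suffix and priority shown here are assumptions for illustration only:

# /etc/apt/preferences.d/zabbix-agent-64
Package: zabbix-agent
Pin: origin repo.zabbix.com
Pin-Priority: 1001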
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Linux.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Linux.yml
index c4c8fc401..aa8bca132 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Linux.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Linux.yml
@@ -20,10 +20,6 @@
- name: "Set first public ip address for zabbix_agent_ip"
ansible.builtin.set_fact:
zabbix_agent_ip: "{{ ansible_all_ipv4_addresses | ansible.netcommon.ipaddr('public') | first }}"
- zabbix_agent_server: "{{ zabbix_agent_server_public_ip | default(zabbix_agent_server) }}"
- zabbix_agent_serveractive: "{{ zabbix_agent_serveractive_public_ip | default(zabbix_agent_serveractive) }}"
- zabbix_agent2_server: "{{ zabbix_agent_server_public_ip | default(zabbix_agent2_server) }}"
- zabbix_agent2_serveractive: "{{ zabbix_agent_serveractive_public_ip | default(zabbix_agent2_serveractive) }}"
when:
- zabbix_agent_ip is not defined
- total_private_ip_addresses is defined
@@ -55,7 +51,7 @@
network_interface: ansible_{{ zabbix_agent_listeninterface }}
when:
- (zabbix_agent_listeninterface)
- - not zabbix_agent_listenip
+ - zabbix_agent_listenip is undefined
tags:
- config
@@ -64,7 +60,7 @@
zabbix_agent_listenip: "{{ hostvars[inventory_hostname][network_interface]['ipv4'].address | default('0.0.0.0') }}"
when:
- (zabbix_agent_listeninterface)
- - not zabbix_agent_listenip
+ - zabbix_agent_listenip is undefined
tags:
- config
- api
@@ -73,7 +69,7 @@
ansible.builtin.set_fact:
zabbix_agent_listenip: "0.0.0.0"
when:
- - not (zabbix_agent_listenip)
+ - zabbix_agent_listenip is undefined
tags:
- config
@@ -120,8 +116,8 @@
- name: "Configure zabbix-agent"
ansible.builtin.template:
- src: "{{ 'zabbix_agentd.conf.j2' if not zabbix_agent2 else 'zabbix_agent2.conf.j2' }}"
- dest: "/etc/zabbix/{{ zabbix_agent_conf if not zabbix_agent2 else zabbix_agent2_conf }}"
+ src: agent.conf.j2
+ dest: "/etc/zabbix/zabbix_agent{{ (zabbix_agent2 | bool) | ternary('2', 'd') }}.conf"
owner: root
group: root
mode: "{{ zabbix_agent_conf_mode }}"
@@ -133,74 +129,12 @@
tags:
- config
-- name: "Create directory for PSK file if not exist."
- ansible.builtin.file:
- path: "{{ zabbix_agent_tlspskfile | dirname }}"
- mode: 0755
- state: directory
- become: true
- when:
- - zabbix_agent_tlspskfile is defined
- - zabbix_agent_tlspskfile # https://github.com/ansible-collections/community.zabbix/issues/680
- - not (zabbix_agent2 | bool)
- tags:
- - config
-
-- name: "Create directory for PSK file if not exist (zabbix-agent2)"
- ansible.builtin.file:
- path: "{{ zabbix_agent2_tlspskfile | dirname }}"
- mode: 0755
- state: directory
- become: true
- when:
- - zabbix_agent2_tlspskfile is defined
- - zabbix_agent2_tlspskfile # https://github.com/ansible-collections/community.zabbix/issues/680
- - zabbix_agent2 | bool
- tags:
- - config
-
-- name: "Place TLS PSK File"
- ansible.builtin.copy:
- dest: "{{ zabbix_agent_tlspskfile }}"
- content: "{{ zabbix_agent_tlspsk_secret }}"
- owner: zabbix
- group: zabbix
- mode: 0400
- become: true
- when:
- - zabbix_agent_tlspskfile is defined
- - zabbix_agent_tlspskfile # https://github.com/ansible-collections/community.zabbix/issues/680
- - zabbix_agent_tlspsk_secret is defined
- - not (zabbix_agent2 | bool)
- notify:
- - restart zabbix-agent
- tags:
- - config
-
-- name: "Place TLS PSK File (zabbix-agent2)"
- ansible.builtin.copy:
- dest: "{{ zabbix_agent2_tlspskfile }}"
- content: "{{ zabbix_agent2_tlspsk_secret }}"
- owner: zabbix
- group: zabbix
- mode: 0400
- become: true
- when:
- - zabbix_agent2_tlspskfile is defined
- - zabbix_agent2_tlspskfile # https://github.com/ansible-collections/community.zabbix/issues/680
- - zabbix_agent2_tlspsk_secret is defined
- - zabbix_agent2 | bool
- notify:
- - restart zabbix-agent
- tags:
- - config
-
- name: "Create include dir zabbix-agent"
ansible.builtin.file:
- path: "{{ zabbix_agent_include if not zabbix_agent2 else zabbix_agent2_include }}"
+ path: "{{ zabbix_agent_include }}"
owner: root
group: zabbix
- mode: "{{ zabbix_agent_include_mode if not zabbix_agent2 else zabbix_agent2_include_mode }}"
+ mode: "{{ zabbix_agent_include_mode }}"
state: directory
become: true
tags:
@@ -234,6 +168,8 @@
owner: root
group: zabbix
become: true
- when: zabbix_agent_chassis | bool
+ when:
+ - zabbix_agent_chassis is defined
+ - zabbix_agent_chassis | bool
tags:
- config
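
For reference, the consolidated agent.conf.j2 template introduced in the hunk above is written to a destination derived from zabbix_agent2. A minimal sketch of that ternary outside the role, with the boolean value assumed:

- hosts: localhost
  gather_facts: false
  vars:
    zabbix_agent2: false                   # assumed; true selects zabbix_agent2.conf
  tasks:
    - name: Show the rendered config destination
      ansible.builtin.debug:
        msg: "/etc/zabbix/zabbix_agent{{ (zabbix_agent2 | bool) | ternary('2', 'd') }}.conf"   # prints /etc/zabbix/zabbix_agentd.conf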
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows.yml
index 9b7501d9a..2f9758249 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows.yml
@@ -1,4 +1,10 @@
---
+- name: "Windows | Set some variables"
+ ansible.builtin.set_fact:
+ zabbix_agent_win_download_link: "{{ zabbix_agent_win_download_link is defined | ternary(zabbix_agent_win_download_link, zabbix_agent2_win_download_link) | default(_win_download_link) }}"
+ zabbix_agent_win_logfile: "{{ zabbix_agent_win_logfile is defined | ternary(zabbix_agent_win_logfile, zabbix_agent2_win_logfile) | default(_win_logfile) }}"
+ zabbix_agent_win_package: "{{ zabbix_agent_win_package is defined | ternary(zabbix_agent_win_package, zabbix_agent2_win_package) | default(_win_package) }}"
+
- name: "Windows | Set default architecture"
ansible.builtin.set_fact:
windows_arch: 32
@@ -26,7 +32,7 @@
zabbix_win_config_name: "zabbix_agentd.conf"
zabbix2_win_svc_name: Zabbix Agent 2
zabbix2_win_exe_path: '{{ zabbix_win_install_dir }}\bin\zabbix_agent2.exe'
- zabbix2_win_config_name: "zabbix_agent2.conf"
+ zabbix2_win_config_name: "zabbix_agentd2.conf"
tags:
- always
@@ -288,56 +294,6 @@
tags:
- install
-- name: "Create directory for PSK file if not exist."
- ansible.windows.win_file:
- path: "{{ zabbix_agent_tlspskfile | win_dirname }}"
- state: directory
- when:
- - zabbix_agent_tlspskfile is defined
- - zabbix_agent_tlspskfile
- - not (zabbix_agent2 | bool)
- tags:
- - config
-
-- name: "Create directory for PSK file if not exist (zabbix-agent2)"
- ansible.windows.win_file:
- path: "{{ zabbix_agent2_tlspskfile | win_dirname }}"
- state: directory
- when:
- - zabbix_agent2_tlspskfile is defined
- - zabbix_agent2_tlspskfile
- - zabbix_agent2 | bool
- tags:
- - config
-
-- name: "Place TLS PSK File"
- ansible.windows.win_copy:
- dest: "{{ zabbix_agent_tlspskfile }}"
- content: "{{ zabbix_agent_tlspsk_secret }}"
- when:
- - zabbix_agent_tlspskfile is defined
- - zabbix_agent_tlspskfile
- - zabbix_agent_tlspsk_secret is defined
- - not (zabbix_agent2 | bool)
- notify:
- - restart win zabbix agent
- tags:
- - config
-
-- name: "Place TLS PSK File (zabbix-agent2)"
- ansible.windows.win_copy:
- dest: "{{ zabbix_agent2_tlspskfile }}"
- content: "{{ zabbix_agent2_tlspsk_secret }}"
- when:
- - zabbix_agent2_tlspskfile is defined
- - zabbix_agent2_tlspskfile
- - zabbix_agent2_tlspsk_secret is defined
- - zabbix_agent2 | bool
- notify:
- - restart win zabbix agent
- tags:
- - config
-
- name: "Windows | Check if windows service exist"
ansible.windows.win_service:
name: "{{ zabbix_win_svc_name }}"
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows_conf.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows_conf.yml
index 72dee230f..cc7c09d50 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows_conf.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/Windows_conf.yml
@@ -1,4 +1,9 @@
---
+- name: "Set Log File Info"
+ ansible.builtin.set_fact:
+ zabbix_agent_logfile: "{{ zabbix_agent_win_logfile is defined | ternary(zabbix_agent_win_logfile, zabbix_agent2_win_logfile) | default(_win_logfile) }}"
+ when: zabbix_agent_logfile is undefined
+
- name: "Set default ip address for zabbix_agent_ip"
ansible.builtin.set_fact:
zabbix_agent_ip: "{{ hostvars[inventory_hostname]['ansible_ip_addresses'] | ansible.utils.ipv4 | first }}"
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/api.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/api.yml
index 4de342645..0013f1783 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/api.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/api.yml
@@ -1,8 +1,8 @@
---
- name: "API | Create host groups"
community.zabbix.zabbix_group:
- host_group: "{{ zabbix_host_groups }}"
- state: "{{ zabbix_agent_hostgroups_state }}"
+ host_groups: "{{ zabbix_host_groups }}"
+ state: present
when:
- zabbix_api_create_hostgroup | bool
register: zabbix_api_hostgroup_created
@@ -35,43 +35,7 @@
ipmi_password: "{{ zabbix_agent_ipmi_password| default(omit) }}"
ipmi_privilege: "{{ zabbix_agent_ipmi_privilege | default(omit) }}"
ipmi_username: "{{ zabbix_agent_ipmi_username | default(omit) }}"
- tags: "{{ zabbix_agent_tags }}"
- when:
- - not zabbix_agent2
- register: zabbix_api_host_created
- until: zabbix_api_host_created is succeeded
- delegate_to: "{{ zabbix_api_server_host }}"
- changed_when: false
- tags:
- - api
-
-- name: "API | Create a new host using agent2 or update an existing host's info"
- community.zabbix.zabbix_host:
- host_name: "{{ zabbix_agent2_hostname }}"
- host_groups: "{{ zabbix_host_groups }}"
- link_templates: "{{ zabbix_agent_link_templates }}"
- status: "{{ zabbix_host_status }}"
- state: "{{ zabbix_agent_host_state }}"
- force: "{{ zabbix_agent_host_update }}"
- proxy: "{{ zabbix_agent_proxy }}"
- inventory_mode: "{{ zabbix_agent_inventory_mode }}"
- interfaces: "{{ zabbix_agent_interfaces }}"
- visible_name: "{{ zabbix_agent_visible_hostname | default(zabbix_agent2_hostname) }}"
- tls_psk: "{{ zabbix_agent2_tlspsk_secret | default(omit) }}"
- tls_psk_identity: "{{ zabbix_agent2_tlspskidentity | default(omit) }}"
- tls_issuer: "{{ zabbix_agent2_tlsservercertissuer | default(omit) }}"
- tls_subject: "{{ zabbix_agent2_tls_subject | default(omit) }}"
- tls_accept: "{{ zabbix_agent_tls_config[zabbix_agent2_tlsaccept if zabbix_agent2_tlsaccept else 'unencrypted'] }}"
- tls_connect: "{{ zabbix_agent_tls_config[zabbix_agent2_tlsconnect if zabbix_agent2_tlsconnect else 'unencrypted'] }}"
- description: "{{ zabbix_agent_description | default(omit) }}"
- inventory_zabbix: "{{ zabbix_agent_inventory_zabbix | default({}) }}"
- ipmi_authtype: "{{ zabbix_agent_ipmi_authtype | default(omit) }}"
- ipmi_password: "{{ zabbix_agent_ipmi_password| default(omit) }}"
- ipmi_privilege: "{{ zabbix_agent_ipmi_privilege | default(omit) }}"
- ipmi_username: "{{ zabbix_agent_ipmi_username | default(omit) }}"
- tags: "{{ zabbix_agent_tags }}"
- when:
- - zabbix_agent2 | bool
+ tags: "{{ zabbix_agent_tags | default(omit) }}"
register: zabbix_api_host_created
until: zabbix_api_host_created is succeeded
delegate_to: "{{ zabbix_api_server_host }}"
@@ -81,13 +45,12 @@
- name: "API | Updating host configuration with macros"
community.zabbix.zabbix_hostmacro:
- host_name: "{{ (zabbix_agent2 | bool) | ternary(zabbix_agent2_hostname, zabbix_agent_hostname) }}"
+ host_name: "{{ zabbix_agent_hostname }}"
macro_name: "{{ item.macro_key }}"
macro_value: "{{ item.macro_value }}"
macro_type: "{{ item.macro_type|default('text') }}"
with_items: "{{ zabbix_agent_macros | default([]) }}"
when:
- - zabbix_agent_macros is defined
- item.macro_key is defined
register: zabbix_api_hostmarcro_created
until: zabbix_api_hostmarcro_created is succeeded
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/main.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/main.yml
index c5fd06480..f6bbd0f2a 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/main.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/main.yml
@@ -21,41 +21,89 @@
tags:
- always
+- name: Load Appropriate Defaults
+ ansible.builtin.include_vars: "agent{{ '2' if zabbix_agent2 is defined and zabbix_agent2|bool }}_vars.yml"
+
+- name: Set Variables
+ ansible.builtin.set_fact:
+ zabbix_agent_include: "{{ zabbix_agent_include is defined | ternary(zabbix_agent_include, zabbix_agent2_include) | default(_include) }}"
+ zabbix_agent_logfile: "{{ zabbix_agent_logfilee is defined | ternary(zabbix_agent_logfile, zabbix_agent2_logfile) | default(_logfile) }}"
+ zabbix_agent_package: "{{ zabbix_agent_package is defined | ternary(zabbix_agent_package, zabbix_agent2_package) | default(_agent_package) }}"
+ zabbix_agent_pidfile: "{{ zabbix_agent_pidfile is defined | ternary(zabbix_agent_pidfile, zabbix_agent2_pidfile) | default(_pidfile) }}"
+ zabbix_agent_service: "{{ zabbix_agent_service is defined | ternary(zabbix_agent_service, zabbix_agent2_service) | default(_agent_service) }}"
+ zabbix_agent_tls_subject: "{{ zabbix_agent_tls_subject is defined | ternary(zabbix_agent_tls_subject, zabbix_agent2_tls_subject) | default(_tls_subject) }}"
+
+- name: Set More Variables # Move to defaults after 3.0
+ ansible.builtin.set_fact:
+ zabbix_agent_buffersend: "{{ zabbix_agent_buffersend is defined | ternary(zabbix_agent_buffersend, zabbix_agent2_buffersend) | default(5) }}"
+ zabbix_agent_buffersize: "{{ zabbix_agent_buffersize is defined | ternary(zabbix_agent_buffersize, zabbix_agent2_buffersize) | default(100) }}"
+ zabbix_agent_controlsocket: "{{ zabbix_agent_controlsocket is defined | ternary(zabbix_agent_controlsocket, zabbix_agent2_controlsocket) | default('/tmp/agent.sock') }}"
+ zabbix_agent_debuglevel: "{{ zabbix_agent_debuglevel is defined | ternary(zabbix_agent_debuglevel, zabbix_agent2_debuglevel) | default(3) }}"
+ zabbix_agent_enableremotecommands: "{{ zabbix_agent_enableremotecommands is defined | ternary(zabbix_agent_enableremotecommands, zabbix_agent2_enableremotecommands) | default(0) }}"
+ zabbix_agent_heartbeatfrequency: "{{ zabbix_agent_heartbeatfrequency is defined | ternary(zabbix_agent_heartbeatfrequency, zabbix_agent2_heartbeatfrequency) | default(60) }}"
+ zabbix_agent_hostname: "{{ zabbix_agent_hostname is defined | ternary(zabbix_agent_hostname, zabbix_agent2_hostname) | default(inventory_hostname) }}"
+ zabbix_agent_include_mode: "{{ zabbix_agent_include_mode is defined | ternary(zabbix_agent_include_mode, zabbix_agent2_include_mode) | default('0750') }}"
+ zabbix_agent_listenport: "{{ zabbix_agent_listenport is defined | ternary(zabbix_agent_listenport, zabbix_agent2_listenport) | default(10050) }}"
+ zabbix_agent_logfilesize: "{{ zabbix_agent_logfilesize is defined | ternary(zabbix_agent_logfilesize, zabbix_agent2_logfilesize) | default(100) }}"
+ zabbix_agent_logtype: "{{ zabbix_agent_logtype is defined | ternary(zabbix_agent_logtype, zabbix_agent2_logtype) | default('file') }}"
+ zabbix_agent_persistentbufferperiod: "{{ zabbix_agent_persistentbufferperiod is defined | ternary(zabbix_agent_persistentbufferperiod, zabbix_agent2_persistentbufferperiod) | default('1h') }}"
+ zabbix_agent_refreshactivechecks: "{{ zabbix_agent_refreshactivechecks is defined | ternary(zabbix_agent_refreshactivechecks, zabbix_agent2_refreshactivechecks) | default(120) }}"
+ zabbix_agent_statusport: "{{ zabbix_agent_statusport is defined | ternary(zabbix_agent_statusport, zabbix_agent2_statusport) | default(9999) }}"
+ zabbix_agent_timeout: "{{ zabbix_agent_timeout is defined | ternary(zabbix_agent_timeout, zabbix_agent2_timeout) | default(3) }}"
+ zabbix_agent_tlspsk_auto: "{{ zabbix_agent_tlspsk_auto is defined | ternary(zabbix_agent_tlspsk_auto, zabbix_agent2_tlspsk_auto) | default(false) }}"
+ zabbix_agent_tlspskfile: "{{ zabbix_agent_tlspskfile is defined | ternary(zabbix_agent_tlspskfile, zabbix_agent2_tlspskfile) | default('/etc/zabbix/tls_psk_auto.secret') }}"
+ zabbix_agent_unsafeuserparameters: "{{ zabbix_agent_unsafeuserparameters is defined | ternary(zabbix_agent_unsafeuserparameters, zabbix_agent2_unsafeuserparameters) | default(0) }}"
+
+- name: Set More Variables # Remove for 3.0 release
+ ansible.builtin.set_fact:
+ zabbix_agent_allowkeys: "{{ zabbix_agent_allowkeys is defined | ternary(zabbix_agent_allowkeys, zabbix_agent2_allow_key) | default(omit) }}"
+ zabbix_agent_denykeys: "{{ zabbix_agent_denykeys is defined | ternary(zabbix_agent_denykeys, zabbix_agent2_deny_key) | default(omit) }}"
+ zabbix_agent_hostinterface: "{{ zabbix_agent_hostinterface is defined | ternary(zabbix_agent_hostinterface, zabbix_agent2_hostinterface) | default(omit) }}"
+ zabbix_agent_hostinterfaceitem: "{{ zabbix_agent_hostinterfaceitem is defined | ternary(zabbix_agent_hostinterfaceitem, zabbix_agent2_hostinterfaceitem) | default(omit) }}"
+ zabbix_agent_hostmetadata: "{{ zabbix_agent_hostmetadata is defined | ternary(zabbix_agent_hostmetadata, zabbix_agent2_hostmetadata) | default(omit) }}"
+ zabbix_agent_hostmetadataitem: "{{ zabbix_agent_hostmetadataitem is defined | ternary(zabbix_agent_hostmetadataitem, zabbix_agent2_hostmetadataitem) | default(omit) }}"
+ zabbix_agent_hostnameitem: "{{ zabbix_agent_hostnameitem is defined | ternary(zabbix_agent_hostnameitem, zabbix_agent2_hostnameitem) | default(omit) }}"
+ zabbix_agent_listenip: "{{ zabbix_agent_listenip is defined | ternary(zabbix_agent_listenip, zabbix_agent2_listenip) | default(omit) }}"
+ zabbix_agent_persistentbufferfile: "{{ zabbix_agent_persistentbufferfile is defined | ternary(zabbix_agent_persistentbufferfile, zabbix_agent2_persistentbufferfile) | default(omit) }}"
+ zabbix_agent_plugins: "{{ zabbix_agent_plugins is defined | ternary(zabbix_agent_plugins, zabbix_agent2_plugins) | default(omit) }}"
+ zabbix_agent_server: "{{ zabbix_agent_server is defined | ternary(zabbix_agent_server, zabbix_agent2_server) | default(omit) }}"
+ zabbix_agent_serveractive: "{{ zabbix_agent_serveractive is defined | ternary(zabbix_agent_serveractive, zabbix_agent2_serveractive) | default(omit) }}"
+ zabbix_agent_sourceip: "{{ zabbix_agent_sourceip is defined | ternary(zabbix_agent_sourceip, zabbix_agent2_sourceip) | default(omit) }}"
+ zabbix_agent_tlsaccept: "{{ zabbix_agent_tlsaccept is defined | ternary(zabbix_agent_tlsaccept, zabbix_agent2_tlsaccept) | default(omit) }}"
+ zabbix_agent_tlscafile: "{{ zabbix_agent_tlscafile is defined | ternary(zabbix_agent_tlscafile, zabbix_agent2_tlscafile) | default(omit) }}"
+ zabbix_agent_tlscertfile: "{{ zabbix_agent_tlscertfile is defined | ternary(zabbix_agent_tlscertfile, zabbix_agent2_tlscertfile) | default(omit) }}"
+ zabbix_agent_tlsconnect: "{{ zabbix_agent_tlsconnect is defined | ternary(zabbix_agent_tlsconnect, zabbix_agent2_tlsconnect) | default(omit) }}"
+ zabbix_agent_tlscrlfile: "{{ zabbix_agent_tlscrlfile is defined | ternary(zabbix_agent_tlscrlfile, zabbix_agent2_tlscrlfile) | default(omit) }}"
+ zabbix_agent_tlskeyfile: "{{ zabbix_agent_tlskeyfile is defined | ternary(zabbix_agent_tlskeyfile, zabbix_agent2_tlskeyfile) | default(omit) }}"
+ zabbix_agent_tlspskidentity: "{{ zabbix_agent_tlspskidentity is defined | ternary(zabbix_agent_tlspskidentity, zabbix_agent2_tlspskidentity) | default(omit) }}"
+ zabbix_agent_tlspsk_secret: "{{ zabbix_agent_tlspsk_secret is defined | ternary(zabbix_agent_tlspsk_secret, zabbix_agent2_tlspsk_secret) | default(omit) }}"
+ zabbix_agent_tlsservercertissuer: "{{ zabbix_agent_tlsservercertissuer is defined | ternary(zabbix_agent_tlsservercertissuer, zabbix_agent2_tlsservercertissuer) | default(omit) }}"
+ zabbix_agent_tlsservercertsubject: "{{ zabbix_agent_tlsservercertsubject is defined | ternary(zabbix_agent_tlsservercertsubject, zabbix_agent2_tlsservercertsubject) | default(omit) }}"
+ zabbix_agent_aliases: "{{ zabbix_agent_aliases is defined | ternary(zabbix_agent_aliases, zabbix_agent_zabbix_alias) | default(omit) }}"
+
- name: Setting Zabbix API Server Port
ansible.builtin.set_fact:
zabbix_api_server_port: "{{ '443' if zabbix_api_use_ssl|bool else '80' }}"
when: zabbix_api_server_port is undefined
-- name: "Set variables specific for Zabbix Agent 2"
- ansible.builtin.set_fact:
- zabbix_agent_service: zabbix-agent2
- zabbix_agent_package: zabbix-agent2
- when:
- - zabbix_agent2 is defined
- - zabbix_agent2
- tags:
- - always
-
- name: "Install the correct repository"
ansible.builtin.include_tasks: "{{ ansible_os_family }}.yml"
when:
- not (zabbix_agent_docker | bool)
-- name: "Encrypt with TLS PSK auto management"
- ansible.builtin.include_tasks: tlspsk_auto.yml
- when:
- - not zabbix_agent2
- - zabbix_agent_tlspsk_auto | bool
- - (zabbix_agent_tlspskfile is undefined) or (zabbix_agent_tlspskfile | length == '0')
- - (zabbix_agent_tlspsk_secret is undefined) or (zabbix_agent_tlspsk_secret | length == '0')
+- name: Gather PSK Secret Info
+ ansible.builtin.include_tasks: psk_secret.yml
-- name: "Encrypt with TLS PSK auto management"
- ansible.builtin.include_tasks: tlspsk_auto_agent2.yml
- when:
- - zabbix_agent2 | bool
- - zabbix_agent2_tlspsk_auto | bool
- - (zabbix_agent2_tlspskfile is undefined) or (zabbix_agent2_tlspskfile | length == '0')
- - (zabbix_agent2_tlspsk_secret is undefined) or (zabbix_agent2_tlspsk_secret | length == '0')
+- name: Gather PSK Identity Info
+ ansible.builtin.include_tasks: psk_identity.yml
+
+- name: AutoPSK | Default tlsaccept and tlsconnect to enforce PSK
+ ansible.builtin.set_fact:
+ zabbix_agent_tlsaccept: psk
+ zabbix_agent_tlsconnect: psk
+ when: zabbix_agent_tlspsk_auto | bool
+ tags:
+ - config
- name: "Configure Agent"
ansible.builtin.include_tasks: Windows_conf.yml
@@ -85,6 +133,6 @@
- name: "Including userparameters"
ansible.builtin.include_tasks: "userparameter.yml"
- when: zabbix_agent_userparameters|length > 0
+ when: zabbix_agent_userparameters | default([]) | length > 0
tags:
- config
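
For reference, the set_fact blocks above all follow the same back-compat pattern: keep an explicitly set zabbix_agent_* value, otherwise fall back to the matching zabbix_agent2_* value, otherwise apply the role default or omit. A minimal sketch with assumed values, showing how one such line evaluates:

- hosts: localhost
  gather_facts: false
  vars:
    zabbix_agent2_listenport: 10051        # assumed; the agent1 variable is deliberately left unset
  tasks:
    - name: Demonstrate the agent/agent2 fallback pattern
      ansible.builtin.set_fact:
        zabbix_agent_listenport: "{{ zabbix_agent_listenport is defined | ternary(zabbix_agent_listenport, zabbix_agent2_listenport) | default(10050) }}"
    - name: Show the result
      ansible.builtin.debug:
        var: zabbix_agent_listenport       # 10051 here; 10050 only if both variables were unset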
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_identity.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_identity.yml
new file mode 100644
index 000000000..12df8572b
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_identity.yml
@@ -0,0 +1,84 @@
+- name: AutoPSK | Check for existing TLS PSK identity
+ block:
+ - name: AutoPSK | Check for existing TLS PSK identity | Windows
+ ansible.windows.win_stat:
+ path: "{{ zabbix_agent_tlspskidentity_file }}"
+ register: zabbix_agent_tlspskidentity_check
+ when:
+ - ansible_os_family == "Windows"
+
+ - name: AutoPSK | Check for existing TLS PSK identity | Linux
+ ansible.builtin.stat:
+ path: "{{ zabbix_agent_tlspskidentity_file }}"
+ register: zabbix_agent_tlspskidentity_check
+ become: true
+ when:
+ - ansible_os_family != "Windows"
+ tags:
+ - config
+
+- name: AutoPSK | Read existing TLS PSK identity file
+ ansible.builtin.slurp:
+ src: "{{ zabbix_agent_tlspskidentity_file }}"
+ register: zabbix_agent_tlspskidentity_base64
+ become: true
+ when: zabbix_agent_tlspskidentity_check.stat.exists
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Use existing TLS PSK identity
+ ansible.builtin.set_fact:
+ zabbix_agent_tlspskidentity: "{{ zabbix_agent_tlspskidentity_base64['content'] | b64decode | trim }}"
+ when:
+ - zabbix_agent_tlspskidentity_check.stat.exists
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Generate new TLS PSK identity
+ ansible.builtin.set_fact:
+ zabbix_agent_tlspskidentity: >-
+ {{
+ zabbix_agent_visible_hostname
+ | default(zabbix_agent_hostname)
+ + '_'
+ + lookup('password', '/dev/null chars=hexdigits length=4')
+ }}
+ when:
+ - not zabbix_agent_tlspskidentity_check.stat.exists
+ - zabbix_agent_tlspskidentity is undefined
+ - zabbix_agent_tlspsk_auto | bool
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Template PSK Identity
+ block:
+ - name: AutoPSK | Template PSK Identity | Windows
+ ansible.windows.win_copy:
+ dest: "{{ zabbix_agent_tlspskidentity_file }}"
+ content: "{{ zabbix_agent_tlspskidentity }}"
+ when:
+ - ansible_os_family == "Windows"
+ notify:
+ - restart win zabbix agent
+
+ - name: AutoPSK | Template PSK Identity | Linux
+ ansible.builtin.copy:
+ dest: "{{ zabbix_agent_tlspskidentity_file }}"
+ content: "{{ zabbix_agent_tlspskidentity }}"
+ owner: zabbix
+ group: zabbix
+ mode: 0400
+ become: true
+ when:
+ - ansible_os_family != "Windows"
+ notify:
+ - restart zabbix-agent
+ - restart mac zabbix agent
+ when:
+ - zabbix_agent_tlspskidentity_file is defined
+ - zabbix_agent_tlspskidentity is defined
+ tags:
+ - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_secret.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_secret.yml
new file mode 100644
index 000000000..4fedb605a
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/psk_secret.yml
@@ -0,0 +1,112 @@
+- name: AutoPSK | Set Path Variables for Windows
+ ansible.builtin.set_fact:
+ zabbix_agent_tlspskfile: "{{ zabbix_win_install_dir }}\\tls_psk_auto.secret.txt"
+ zabbix_agent_tlspskidentity_file: "{{ zabbix_win_install_dir }}\\tls_psk_auto.identity.txt"
+ when:
+ - ansible_os_family == "Windows"
+ tags:
+ - config
+
+- name: AutoPSK | Check for existing TLS PSK file
+ block:
+ - name: AutoPSK | Check for existing TLS PSK file | Windows
+ ansible.windows.win_stat:
+ path: "{{ zabbix_agent_tlspskfile }}"
+ register: zabbix_agent_tlspskcheck
+ when:
+ - ansible_os_family == "Windows"
+
+ - name: AutoPSK | Check for existing TLS PSK file | Linux
+ ansible.builtin.stat:
+ path: "{{ zabbix_agent_tlspskfile }}"
+ register: zabbix_agent_tlspskcheck
+ become: true
+ when:
+ - ansible_os_family != "Windows"
+ tags:
+ - config
+
+- name: AutoPSK | read existing TLS PSK file
+ ansible.builtin.slurp:
+ src: "{{ zabbix_agent_tlspskfile }}"
+ register: zabbix_agent_tlspsk_base64
+ become: true
+ when:
+ - zabbix_agent_tlspskcheck.stat.exists
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Save Existing PSK Secret
+ ansible.builtin.set_fact:
+ zabbix_agent_tlspsk_read: "{{ zabbix_agent_tlspsk_base64['content'] | b64decode | trim }}"
+ when: zabbix_agent_tlspskcheck.stat.exists
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Use Existing PSK Secret
+ ansible.builtin.set_fact:
+ zabbix_agent_tlspsk_secret: "{{ zabbix_agent_tlspsk_read }}"
+ when:
+ - zabbix_agent_tlspskcheck.stat.exists
+ - zabbix_agent_tlspsk_read|length >= 32
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Generate New PSK Secret
+ ansible.builtin.set_fact:
+ zabbix_agent_tlspsk_secret: "{{ lookup('password', '/dev/null chars=hexdigits length=64') }}"
+ when:
+ - zabbix_agent_tlspsk_secret is undefined
+ - zabbix_agent_tlspsk_auto | bool
+ no_log: "{{ ansible_verbosity < 3 }}"
+ tags:
+ - config
+
+- name: AutoPSK | Template PSK Secret
+ block:
+ - name: Create Directory for PSK File | Windows
+ ansible.windows.win_file:
+ path: "{{ zabbix_agent_tlspskfile | win_dirname }}"
+ state: directory
+ when:
+ - ansible_os_family == "Windows"
+
+ - name: AutoPSK | Template PSK Secret | Windows
+ ansible.windows.win_copy:
+ dest: "{{ zabbix_agent_tlspskfile }}"
+ content: "{{ zabbix_agent_tlspsk_secret }}"
+ when:
+ - ansible_os_family == "Windows"
+ notify:
+ - restart win zabbix agent
+
+ - name: Create Directory for PSK File | Linux
+ ansible.builtin.file:
+ path: "{{ zabbix_agent_tlspskfile | dirname }}"
+ mode: 0755
+ state: directory
+ become: true
+ when:
+ - ansible_os_family != "Windows"
+
+ - name: AutoPSK | Template PSK Secret | Linux
+ ansible.builtin.copy:
+ dest: "{{ zabbix_agent_tlspskfile }}"
+ content: "{{ zabbix_agent_tlspsk_secret }}"
+ owner: zabbix
+ group: zabbix
+ mode: 0400
+ become: true
+ when:
+ - ansible_os_family != "Windows"
+ notify:
+ - restart zabbix-agent
+ - restart mac zabbix agent
+ when:
+ - zabbix_agent_tlspskfile is defined
+ - zabbix_agent_tlspsk_secret is defined
+ tags:
+ - config
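
For reference, with the new psk_secret.yml flow above, enabling automatic PSK handling from a playbook only needs the auto flag; the tasks then generate, persist and reuse the secret. A minimal sketch, where the host group and server address are assumptions:

- hosts: zabbix_agents
  roles:
    - role: community.zabbix.zabbix_agent
      vars:
        zabbix_agent_tlspsk_auto: true
        zabbix_agent_server: 192.0.2.10
        zabbix_agent_serveractive: 192.0.2.10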
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/selinux.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/selinux.yml
index 2b11d1a47..21e176db6 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/selinux.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/selinux.yml
@@ -29,7 +29,7 @@
until: zabbix_agent_policycoreutils_installed is succeeded
when:
- ansible_os_family == "RedHat"
- - (zabbix_agent_distribution_major_version == "6" or zabbix_agent_distribution_major_version == "7")
+ - ansible_distribution_major_version == "7"
become: true
tags:
- install
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto.yml
deleted file mode 100644
index ad7d49aa3..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-- ansible.builtin.include_tasks: tlspsk_auto_linux.yml
- when: (ansible_os_family != "Windows") or (zabbix_agent_docker | bool)
-
-- ansible.builtin.include_tasks: tlspsk_auto_windows.yml
- when: ansible_os_family == "Windows"
-
-- name: AutoPSK | Default tlsaccept and tlsconnect to enforce PSK
- ansible.builtin.set_fact:
- zabbix_agent_tlsaccept: psk
- zabbix_agent_tlsconnect: psk
- when: zabbix_api_create_hosts
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2.yml
deleted file mode 100644
index 6e5f8dc4d..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2.yml
+++ /dev/null
@@ -1,14 +0,0 @@
----
-- include_tasks: tlspsk_auto_agent2_linux.yml
- when: (ansible_os_family != "Windows") or (zabbix_agent_docker | bool)
-
-- include_tasks: tlspsk_auto_agent2_windows.yml
- when: ansible_os_family == "Windows"
-
-- name: AutoPSK | Default tlsaccept and tlsconnect to enforce PSK
- ansible.builtin.set_fact:
- zabbix_agent2_tlsaccept: psk
- zabbix_agent2_tlsconnect: psk
- when: zabbix_api_create_hosts
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_common.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_common.yml
deleted file mode 100644
index 3f6e0d2cd..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_common.yml
+++ /dev/null
@@ -1,53 +0,0 @@
----
-# Process PSK Secret
-- name: AutoPSK | Save existing TLS PSK secret
- ansible.builtin.set_fact:
- zabbix_agent2_tlspsk_read: "{{ zabbix_agent2_tlspsk_base64['content'] | b64decode | trim }}"
- when: zabbix_agent2_tlspskcheck.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Use existing TLS PSK secret
- ansible.builtin.set_fact:
- zabbix_agent2_tlspsk_secret: "{{ zabbix_agent2_tlspsk_read }}"
- when:
- - zabbix_agent2_tlspskcheck.stat.exists
- - zabbix_agent2_tlspsk_read|length >= 32
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Generate new TLS PSK secret
- ansible.builtin.set_fact:
- zabbix_agent2_tlspsk_secret: "{{ lookup('password', '/dev/null chars=hexdigits length=64') }}"
- when:
- - not zabbix_agent2_tlspskcheck.stat.exists
- - (zabbix_agent2_tlspsk_read is not defined) or (zabbix_agent2_tlspsk_read|length < 32)
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-# Process PSK Identity
-- name: AutoPSK | Use existing TLS PSK identity
- ansible.builtin.set_fact:
- zabbix_agent2_tlspskidentity: "{{ zabbix_agent2_tlspskidentity_base64['content'] | b64decode | trim }}"
- when:
- - zabbix_agent2_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Generate new TLS PSK identity
- ansible.builtin.set_fact:
- zabbix_agent2_tlspskidentity: >-
- {{
- zabbix_agent_visible_hostname
- | default(((zabbix_agent2 == True) | ternary(zabbix_agent2_hostname, zabbix_agent_hostname)))
- + '_'
- + lookup('password', '/dev/null chars=hexdigits length=4')
- }}
- when: not zabbix_agent2_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_linux.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_linux.yml
deleted file mode 100644
index aaff36128..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_linux.yml
+++ /dev/null
@@ -1,80 +0,0 @@
----
-- name: AutoPSK | Set default path variables (Linux)
- ansible.builtin.set_fact:
- zabbix_agent2_tlspskfile: "/etc/zabbix/tls_psk_auto.secret"
- zabbix_agent2_tlspskidentity_file: "/etc/zabbix/tls_psk_auto.identity"
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK file (Linux)
- ansible.builtin.stat:
- path: "{{ zabbix_agent2_tlspskfile }}"
- register: zabbix_agent2_tlspskcheck
- become: true
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK identity (Linux)
- ansible.builtin.stat:
- path: "{{ zabbix_agent2_tlspskidentity_file }}"
- register: zabbix_agent2_tlspskidentity_check
- become: true
- tags:
- - config
-
-- name: AutoPSK | read existing TLS PSK file (Linux)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent2_tlspskfile }}"
- register: zabbix_agent2_tlspsk_base64
- become: true
- when:
- - zabbix_agent2_tlspskcheck.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Read existing TLS PSK identity file (Linux)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent2_tlspskidentity_file }}"
- register: zabbix_agent2_tlspskidentity_base64
- become: true
- when: zabbix_agent2_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- include_tasks: tlspsk_auto_agent2_common.yml
-
-- name: AutoPSK | Template TLS PSK identity in file (Linux)
- ansible.builtin.copy:
- dest: "{{ zabbix_agent2_tlspskidentity_file }}"
- content: "{{ zabbix_agent2_tlspskidentity }}"
- owner: zabbix
- group: zabbix
- mode: 0400
- become: true
- when:
- - zabbix_agent2_tlspskidentity_file is defined
- - zabbix_agent2_tlspskidentity is defined
- notify:
- - restart zabbix-agent
- - restart mac zabbix agent
- tags:
- - config
-
-- name: AutoPSK | Template TLS PSK secret in file (Linux)
- ansible.builtin.copy:
- dest: "{{ zabbix_agent2_tlspskfile }}"
- content: "{{ zabbix_agent2_tlspsk_secret }}"
- owner: zabbix
- group: zabbix
- mode: 0400
- become: true
- when:
- - zabbix_agent2_tlspskfile is defined
- - zabbix_agent2_tlspsk_secret is defined
- notify:
- - restart zabbix-agent
- - restart mac zabbix agent
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_windows.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_windows.yml
deleted file mode 100644
index 3e1529e6b..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_agent2_windows.yml
+++ /dev/null
@@ -1,66 +0,0 @@
----
-- name: AutoPSK | Set default path variables for Windows
- ansible.builtin.set_fact:
- zabbix_agent2_tlspskfile: "{{ zabbix_win_install_dir }}\\tls_psk_auto.secret.txt"
- zabbix_agent2_tlspskidentity_file: "{{ zabbix_win_install_dir }}\\tls_psk_auto.identity.txt"
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK file (Windows)
- ansible.windows.win_stat:
- path: "{{ zabbix_agent2_tlspskfile }}"
- register: zabbix_agent2_tlspskcheck
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK identity (Windows)
- ansible.windows.win_stat:
- path: "{{ zabbix_agent2_tlspskidentity_file }}"
- register: zabbix_agent2_tlspskidentity_check
- tags:
- - config
-
-- name: AutoPSK | read existing TLS PSK file (Windows)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent2_tlspskfile }}"
- register: zabbix_agent2_tlspsk_base64
- when:
- - zabbix_agent2_tlspskcheck.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Read existing TLS PSK identity file (Windows)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent2_tlspskidentity_file }}"
- register: zabbix_agent2_tlspskidentity_base64
- when: zabbix_agent2_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- ansible.builtin.include_tasks: tlspsk_auto_agent2_common.yml
-
-- name: Windows | AutoPSK | Template TLS PSK identity in file (Windows)
- ansible.windows.win_copy:
- dest: "{{ zabbix_agent2_tlspskidentity_file }}"
- content: "{{ zabbix_agent2_tlspskidentity }}"
- when:
- - zabbix_agent2_tlspskidentity_file is defined
- - zabbix_agent2_tlspskidentity is defined
- notify:
- - restart win zabbix agent
- tags:
- - config
-
-- name: AutoPSK | Template TLS PSK secret in file (Windows)
- ansible.windows.win_copy:
- dest: "{{ zabbix_agent2_tlspskfile }}"
- content: "{{ zabbix_agent2_tlspsk_secret }}"
- when:
- - zabbix_agent2_tlspskfile is defined
- - zabbix_agent2_tlspsk_secret is defined
- notify:
- - restart win zabbix agent
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_common.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_common.yml
deleted file mode 100644
index 05ef24d0e..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_common.yml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-# Process PSK Secret
-- name: AutoPSK | Save existing TLS PSK secret
- ansible.builtin.set_fact:
- zabbix_agent_tlspsk_read: "{{ zabbix_agent_tlspsk_base64['content'] | b64decode | trim }}"
- when: zabbix_agent_tlspskcheck.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Use existing TLS PSK secret
- ansible.builtin.set_fact:
- zabbix_agent_tlspsk_secret: "{{ zabbix_agent_tlspsk_read }}"
- when:
- - zabbix_agent_tlspskcheck.stat.exists
- - zabbix_agent_tlspsk_read|length >= 32
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Generate new TLS PSK secret
- ansible.builtin.set_fact:
- zabbix_agent_tlspsk_secret: "{{ lookup('password', '/dev/null chars=hexdigits length=64') }}"
- when:
- - (not zabbix_agent_tlspskcheck.stat.exists) or (zabbix_agent_tlspsk_read|length < 32)
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-# Process PSK Identity
-- name: AutoPSK | Use existing TLS PSK identity
- ansible.builtin.set_fact:
- zabbix_agent_tlspskidentity: "{{ zabbix_agent_tlspskidentity_base64['content'] | b64decode | trim }}"
- when:
- - zabbix_agent_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Generate new TLS PSK identity
- ansible.builtin.set_fact:
- zabbix_agent_tlspskidentity: >-
- {{
- zabbix_agent_visible_hostname
- | default(((zabbix_agent2 != True) | ternary(zabbix_agent_hostname, zabbix_agent_hostname)))
- + '_'
- + lookup('password', '/dev/null chars=hexdigits length=4')
- }}
- when: not zabbix_agent_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_linux.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_linux.yml
deleted file mode 100644
index 8cc711fcb..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_linux.yml
+++ /dev/null
@@ -1,80 +0,0 @@
----
-- name: AutoPSK | Set default path variables (Linux)
- ansible.builtin.set_fact:
- zabbix_agent_tlspskfile: "/etc/zabbix/tls_psk_auto.secret"
- zabbix_agent_tlspskidentity_file: "/etc/zabbix/tls_psk_auto.identity"
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK file (Linux)
- ansible.builtin.stat:
- path: "{{ zabbix_agent_tlspskfile }}"
- register: zabbix_agent_tlspskcheck
- become: true
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK identity (Linux)
- ansible.builtin.stat:
- path: "{{ zabbix_agent_tlspskidentity_file }}"
- register: zabbix_agent_tlspskidentity_check
- become: true
- tags:
- - config
-
-- name: AutoPSK | read existing TLS PSK file (Linux)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent_tlspskfile }}"
- register: zabbix_agent_tlspsk_base64
- become: true
- when:
- - zabbix_agent_tlspskcheck.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Read existing TLS PSK identity file (Linux)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent_tlspskidentity_file }}"
- register: zabbix_agent_tlspskidentity_base64
- become: true
- when: zabbix_agent_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- include_tasks: tlspsk_auto_common.yml
-
-- name: AutoPSK | Template TLS PSK identity in file (Linux)
- ansible.builtin.copy:
- dest: "{{ zabbix_agent_tlspskidentity_file }}"
- content: "{{ zabbix_agent_tlspskidentity }}"
- owner: zabbix
- group: zabbix
- mode: 0400
- become: true
- when:
- - zabbix_agent_tlspskidentity_file is defined
- - zabbix_agent_tlspskidentity is defined
- notify:
- - restart zabbix-agent
- - restart mac zabbix agent
- tags:
- - config
-
-- name: AutoPSK | Template TLS PSK secret in file (Linux)
- ansible.builtin.copy:
- dest: "{{ zabbix_agent_tlspskfile }}"
- content: "{{ zabbix_agent_tlspsk_secret }}"
- owner: zabbix
- group: zabbix
- mode: 0400
- become: true
- when:
- - zabbix_agent_tlspskfile is defined
- - zabbix_agent_tlspsk_secret is defined
- notify:
- - restart zabbix-agent
- - restart mac zabbix agent
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_windows.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_windows.yml
deleted file mode 100644
index b9289ac49..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/tasks/tlspsk_auto_windows.yml
+++ /dev/null
@@ -1,67 +0,0 @@
----
-- name: AutoPSK | Set default path variables for Windows
- ansible.builtin.set_fact:
- zabbix_agent_tlspskfile: "{{ zabbix_win_install_dir }}\\tls_psk_auto.secret.txt"
- zabbix_agent_tlspskidentity_file: "{{ zabbix_win_install_dir }}\\tls_psk_auto.identity.txt"
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK file (Windows)
- ansible.windows.win_stat:
- path: "{{ zabbix_agent_tlspskfile }}"
- register: zabbix_agent_tlspskcheck
- tags:
- - config
-
-- name: AutoPSK | Check for existing TLS PSK identity (Windows)
- ansible.windows.win_stat:
- path: "{{ zabbix_agent_tlspskidentity_file }}"
- register: zabbix_agent_tlspskidentity_check
- tags:
- - config
-
-- name: AutoPSK | read existing TLS PSK file (Windows)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent_tlspskfile }}"
- register: zabbix_agent_tlspsk_base64
- when:
- - zabbix_agent_tlspskcheck.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- name: AutoPSK | Read existing TLS PSK identity file (Windows)
- ansible.builtin.slurp:
- src: "{{ zabbix_agent_tlspskidentity_file }}"
- register: zabbix_agent_tlspskidentity_base64
- when: zabbix_agent_tlspskidentity_check.stat.exists
- no_log: "{{ ansible_verbosity < 3 }}"
- tags:
- - config
-
-- include_tasks: tlspsk_auto_common.yml
-
-- name: AutoPSK | Template TLS PSK identity in file (Windows)
- ansible.windows.win_copy:
- dest: "{{ zabbix_agent_tlspskidentity_file }}"
- content: "{{ zabbix_agent_tlspskidentity }}"
- when:
- - zabbix_agent_tlspskidentity_file is defined
- - zabbix_agent_tlspskidentity is defined
- notify:
- - restart win zabbix agent
- tags:
- - config
-
-- name: AutoPSK | Template TLS PSK secret in file (Windows)
- ansible.windows.win_copy:
- dest: "{{ zabbix_agent_tlspskfile }}"
- content: "{{ zabbix_agent_tlspsk_secret }}"
- when:
- - zabbix_agent_tlspskfile is defined
- - zabbix_agent_tlspsk_secret is defined
- - ansible_os_family == "Windows"
- notify:
- - restart win zabbix agent
- tags:
- - config
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/templates/agent.conf.j2 b/ansible_collections/community/zabbix/roles/zabbix_agent/templates/agent.conf.j2
new file mode 100644
index 000000000..7cf1d97e4
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/templates/agent.conf.j2
@@ -0,0 +1,142 @@
+{{ ansible_managed | comment }}
+# This is a configuration file for the Zabbix Agent process
+# To get more information about Zabbix, visit http://www.zabbix.com
+
+# This configuration file is "minimalized", which means all the original comments
+# are removed. The full documentation for your Zabbix Agent can be found here:
+# https://www.zabbix.com/documentation/{{ zabbix_agent_version }}/en/manual/appendix/config/
+
+{% if zabbix_agent_aliases is defined and zabbix_agent_aliases %}
+{% if zabbix_agent_aliases is string %}
+Alias={{ zabbix_agent_aliases }}
+{% else %}
+{% for item in zabbix_agent_aliases %}
+Alias={{ item }}
+{% endfor %}
+{% endif %}
+{% else %}
+# Alias=
+{% endif %}
+{% if zabbix_agent_allowkeys is defined and zabbix_agent_allowkeys %}
+{% if zabbix_agent_allowkeys is string %}
+AllowKey={{ zabbix_agent_allowkeys }}
+{% else %}
+{% for item in zabbix_agent_allowkeys %}
+AllowKey={{ item }}
+{% endfor %}
+{% endif %}
+{% else %}
+# AllowKey=
+{% endif %}
+{% if not zabbix_agent2 and ansible_os_family != "Windows" %}
+{{ (zabbix_agent_allowroot is defined and zabbix_agent_allowroot is not none) | ternary('', '# ') }}AllowRoot={{ zabbix_agent_allowroot | default('') }}
+{% endif %}
+{{ (zabbix_agent_buffersend is defined and zabbix_agent_buffersend is not none) | ternary('', '# ') }}BufferSend={{ zabbix_agent_buffersend | default('') }}
+{{ (zabbix_agent_buffersize is defined and zabbix_agent_buffersize is not none) | ternary('', '# ') }}BufferSize={{ zabbix_agent_buffersize | default('') }}
+{% if zabbix_agent2 %}
+{{ (zabbix_agent_controlsocket is defined and zabbix_agent_controlsocket is not none) | ternary('', '# ') }}ControlSocket={{ zabbix_agent_controlsocket | default('') }}
+{% endif %}
+{{ (zabbix_agent_debuglevel is defined and zabbix_agent_debuglevel is not none) | ternary('', '# ') }}DebugLevel={{ zabbix_agent_debuglevel | default('') }}
+{% if zabbix_agent_denykeys is defined and zabbix_agent_denykeys %}
+{% if zabbix_agent_denykeys is string %}
+DenyKey={{ zabbix_agent_denykeys }}
+{% else %}
+{% for item in zabbix_agent_denykeys %}
+DenyKey={{ item }}
+{% endfor %}
+{% endif %}
+{% else %}
+# DenyKey=
+{% endif %}
+{% if not zabbix_agent2 %}
+{{ (zabbix_agent_enableremotecommands is defined and zabbix_agent_enableremotecommands is not none) | ternary('', '# ') }}EnableRemoteCommands={{ zabbix_agent_enableremotecommands | default('') }}
+{% endif %}
+{% if zabbix_agent2 %}
+{{ (zabbix_agent_forceactivechecksonstart is defined and zabbix_agent_forceactivechecksonstart is not none) | ternary('', '# ') }}ForceActiveChecksOnStart={{ zabbix_agent_forceactivechecksonstart | default('') }}
+{% endif %}
+{% if zabbix_agent_version is version_compare('6.2', '>=') %}
+{{ (zabbix_agent_heartbeatfrequency is defined and zabbix_agent_heartbeatfrequency is not none) | ternary('', '# ') }}HeartbeatFrequency={{ zabbix_agent_heartbeatfrequency | default('') }}
+{% endif %}
+{{ (zabbix_agent_hostinterface is defined and zabbix_agent_hostinterface is not none) | ternary('', '# ') }}HostInterface={{ zabbix_agent_hostinterface | default('') }}
+{{ (zabbix_agent_hostinterfaceitem is defined and zabbix_agent_hostinterfaceitem is not none) | ternary('', '# ') }}HostInterfaceItem={{ zabbix_agent_hostinterfaceitem | default('') }}
+{{ (zabbix_agent_hostmetadata is defined and zabbix_agent_hostmetadata is not none) | ternary('', '# ') }}HostMetadata={{ zabbix_agent_hostmetadata | default('') }}
+{{ (zabbix_agent_hostmetadataitem is defined and zabbix_agent_hostmetadataitem is not none) | ternary('', '# ') }}HostMetadataItem={{ zabbix_agent_hostmetadataitem | default('') }}
+{{ (zabbix_agent_hostname is defined and zabbix_agent_hostname is not none) | ternary('', '# ') }}Hostname={{ zabbix_agent_hostname | default('') }}
+{{ (zabbix_agent_hostnameitem is defined and zabbix_agent_hostnameitem is not none) | ternary('', '# ') }}HostnameItem={{ zabbix_agent_hostnameitem | default('') }}
+{{ (zabbix_agent_include is defined and zabbix_agent_include is not none) | ternary('', '# ') }}Include={{ zabbix_agent_include | default('') }}
+{% if not zabbix_agent2 %}
+{{ (zabbix_agent_listenbacklog is defined and zabbix_agent_listenbacklog is not none) | ternary('', '# ') }}ListenBacklog={{ zabbix_agent_listenbacklog | default('') }}
+{% endif %}
+{{ (zabbix_agent_listenip is defined and zabbix_agent_listenip is not none) | ternary('', '# ') }}ListenIP={{ zabbix_agent_listenip | default('') }}
+{{ (zabbix_agent_listenport is defined and zabbix_agent_listenport is not none) | ternary('', '# ') }}ListenPort={{ zabbix_agent_listenport | default('') }}
+{% if not zabbix_agent2 and ansible_os_family != "Windows" %}
+{{ (zabbix_agent_loadmodule is defined and zabbix_agent_loadmodule is not none) | ternary('', '# ') }}LoadModule={{ zabbix_agent_loadmodule | default('') }}
+{{ (zabbix_agent_loadmodulepath is defined and zabbix_agent_loadmodulepath is not none) | ternary('', '# ') }}LoadModulePath={{ zabbix_agent_loadmodulepath | default('') }}
+{% endif %}
+{{ (zabbix_agent_logfile is defined and zabbix_agent_logfile is not none) | ternary('', '# ') }}LogFile={{ zabbix_agent_logfile | default('') }}
+{{ (zabbix_agent_logfilesize is defined and zabbix_agent_logfilesize is not none) | ternary('', '# ') }}LogFileSize={{ zabbix_agent_logfilesize | default('') }}
+{% if not zabbix_agent2 %}
+{{ (zabbix_agent_logremotecommands is defined and zabbix_agent_logremotecommands is not none) | ternary('', '# ') }}LogRemoteCommands={{ zabbix_agent_logremotecommands | default('') }}
+{% endif %}
+{{ (zabbix_agent_logtype is defined and zabbix_agent_logtype is not none) | ternary('', '# ') }}LogType={{ zabbix_agent_logtype | default('') }}
+{% if not zabbix_agent2 %}
+{{ (zabbix_agent_maxlinespersecond is defined and zabbix_agent_maxlinespersecond is not none) | ternary('', '# ') }}MaxLinesPerSecond={{ zabbix_agent_maxlinespersecond | default('') }}
+{% endif %}
+{% if not zabbix_agent2 and ansible_os_family == "Windows" %}
+{{ (zabbix_agent_perfcounter is defined and zabbix_agent_perfcounter is not none) | ternary('', '# ') }}PerfCounter={{ zabbix_agent_perfcounter | default('') }}
+{{ (zabbix_agent_perfcounteren is defined and zabbix_agent_perfcounteren is not none) | ternary('', '# ') }}PerfCounterEn={{ zabbix_agent_perfcounteren | default('') }}
+{% endif %}
+{% if zabbix_agent2 %}
+{{ (zabbix_agent_persistentbufferfile is defined and zabbix_agent_persistentbufferfile is not none) | ternary('', '# ') }}PersistentBufferFile={{ zabbix_agent_persistentbufferfile | default('') }}
+{{ (zabbix_agent_persistentbufferperiod is defined and zabbix_agent_persistentbufferperiod is not none) | ternary('', '# ') }}PersistentBufferPeriod={{ zabbix_agent_persistentbufferperiod | default('') }}
+{% endif %}
+{% if ansible_os_family != "Windows" %}
+{{ (zabbix_agent_pidfile is defined and zabbix_agent_pidfile is not none) | ternary('', '# ') }}PidFile={{ zabbix_agent_pidfile | default('') }}
+{% endif %}
+{% if zabbix_agent_plugins is defined and zabbix_agent_plugins %}
+{% for entry in zabbix_agent_plugins %}
+{% set my_name = entry['name'] %}
+{% for property in entry['options'] %}
+{% set param = property['parameter'] %}
+{% set value = property['value'] %}
+Plugins.{{ my_name }}.{{ param }}={{ value }}
+{% endfor %}
+{% endfor %}
+{% endif %}
+{{ (zabbix_agent_pluginsocket is defined and zabbix_agent_pluginsocket is not none) | ternary('', '# ') }}PluginSocket={{ zabbix_agent_pluginsocket | default('') }}
+{{ (zabbix_agent_plugintimeout is defined and zabbix_agent_plugintimeout is not none) | ternary('', '# ') }}PluginTimeout={{ zabbix_agent_plugintimeout | default('') }}
+{{ (zabbix_agent_refreshactivechecks is defined and zabbix_agent_refreshactivechecks is not none) | ternary('', '# ') }}RefreshActiveChecks={{ zabbix_agent_refreshactivechecks | default('') }}
+{{ (zabbix_agent_server is defined and zabbix_agent_server is not none) | ternary('', '# ') }}Server={{ zabbix_agent_server | default('') }}
+{{ (zabbix_agent_serveractive is defined and zabbix_agent_serveractive is not none) | ternary('', '# ') }}ServerActive={{ zabbix_agent_serveractive | default('') }}
+{{ (zabbix_agent_sourceip is defined and zabbix_agent_sourceip is not none) | ternary('', '# ') }}SourceIP={{ zabbix_agent_sourceip | default('') }}
+{% if not zabbix_agent2 %}
+{{ (zabbix_agent_startagents is defined and zabbix_agent_startagents is not none) | ternary('', '# ') }}StartAgents={{ zabbix_agent_startagents | default('') }}
+{% endif %}
+{% if zabbix_agent2 %}
+{{ (zabbix_agent_statusport is defined and zabbix_agent_statusport is not none) | ternary('', '# ') }}StatusPort={{ zabbix_agent_statusport | default('') }}
+{% endif %}
+{{ (zabbix_agent_timeout is defined and zabbix_agent_timeout is not none) | ternary('', '# ') }}Timeout={{ zabbix_agent_timeout | default('') }}
+{{ (zabbix_agent_tlsaccept is defined and zabbix_agent_tlsaccept is not none) | ternary('', '# ') }}TLSAccept={{ zabbix_agent_tlsaccept | default('') }}
+{{ (zabbix_agent_tlscafile is defined and zabbix_agent_tlscafile is not none) | ternary('', '# ') }}TLSCAFile={{ zabbix_agent_tlscafile | default('') }}
+{{ (zabbix_agent_tlscertfile is defined and zabbix_agent_tlscertfile is not none) | ternary('', '# ') }}TLSCertFile={{ zabbix_agent_tlscertfile | default('') }}
+{% if not zabbix_agent2 and ansible_os_family != "Windows" %}
+{{ (zabbix_agent_tlscipherall is defined and zabbix_agent_tlscipherall is not none) | ternary('', '# ') }}TLSCipherAll={{ zabbix_agent_tlscipherall | default('') }}
+{{ (zabbix_agent_tlscipherall13 is defined and zabbix_agent_tlscipherall13 is not none) | ternary('', '# ') }}TLSCipherAll13={{ zabbix_agent_tlscipherall13 | default('') }}
+{{ (zabbix_agent_tlsciphercert is defined and zabbix_agent_tlsciphercert is not none) | ternary('', '# ') }}TLSCipherCert={{ zabbix_agent_tlsciphercert | default('') }}
+{{ (zabbix_agent_tlsciphercert13 is defined and zabbix_agent_tlsciphercert13 is not none) | ternary('', '# ') }}TLSCipherCert13={{ zabbix_agent_tlsciphercert13 | default('') }}
+{{ (zabbix_agent_tlscipherpsk is defined and zabbix_agent_tlscipherpsk is not none) | ternary('', '# ') }}TLSCipherPSK={{ zabbix_agent_tlscipherpsk | default('') }}
+{{ (zabbix_agent_tlscipherpsk13 is defined and zabbix_agent_tlscipherpsk13 is not none) | ternary('', '# ') }}TLSCipherPSK13={{ zabbix_agent_tlscipherpsk13 | default('') }}
+{% endif %}
+{{ (zabbix_agent_tlsconnect is defined and zabbix_agent_tlsconnect is not none) | ternary('', '# ') }}TLSConnect={{ zabbix_agent_tlsconnect | default('') }}
+{{ (zabbix_agent_tlscrlfile is defined and zabbix_agent_tlscrlfile is not none) | ternary('', '# ') }}TLSCRLFile={{ zabbix_agent_tlscrlfile | default('') }}
+{{ (zabbix_agent_tlskeyfile is defined and zabbix_agent_tlskeyfile is not none) | ternary('', '# ') }}TLSKeyFile={{ zabbix_agent_tlskeyfile | default('') }}
+{{ (zabbix_agent_tlspskfile is defined and zabbix_agent_tlspskfile is not none) | ternary('', '# ') }}TLSPSKFile={{ zabbix_agent_tlspskfile | default('') }}
+{{ (zabbix_agent_tlspskidentity is defined and zabbix_agent_tlspskidentity is not none) | ternary('', '# ') }}TLSPSKIdentity={{ zabbix_agent_tlspskidentity | default('') }}
+{{ (zabbix_agent_tlsservercertissuer is defined and zabbix_agent_tlsservercertissuer is not none) | ternary('', '# ') }}TLSServerCertIssuer={{ zabbix_agent_tlsservercertissuer | default('') }}
+{{ (zabbix_agent_tlsservercertsubject is defined and zabbix_agent_tlsservercertsubject is not none) | ternary('', '# ') }}TLSServerCertSubject={{ zabbix_agent_tlsservercertsubject | default('') }}
+{{ (zabbix_agent_unsafeuserparameters is defined and zabbix_agent_unsafeuserparameters is not none) | ternary('', '# ') }}UnsafeUserParameters={{ zabbix_agent_unsafeuserparameters | default('') }}
+{% if not zabbix_agent2 and ansible_os_family != "Windows" %}
+{{ (zabbix_agent_runas_user is defined and zabbix_agent_runas_user is not none) | ternary('', '# ') }}User={{ zabbix_agent_runas_user | default('') }}
+{% endif %}
+{{ (zabbix_agent_userparamater is defined and zabbix_agent_userparamater is not none) | ternary('', '# ') }}UserParameter={{ zabbix_agent_userparamater | default('') }}
+{{ (zabbix_agent_userparamaterdir is defined and zabbix_agent_userparamaterdir is not none) | ternary('', '# ') }}UserParameterDir={{ zabbix_agent_userparamaterdir | default('') }}
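The "(var is defined and var is not none) | ternary('', '# ')" prefix used throughout this template writes a directive out commented whenever its variable is unset, so the rendered agent configuration only activates parameters that were explicitly configured. A minimal illustration with hypothetical host_vars (the values are invented for the example):

    # Hypothetical host_vars:
    zabbix_agent_server: 192.0.2.10
    # With these vars the Server and ListenPort lines above render roughly as:
    #   Server=192.0.2.10
    #   # ListenPort=
    # Parameters left undefined (or set to None) come out commented, i.e. inactive.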
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agent2.conf.j2 b/ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agent2.conf.j2
deleted file mode 100644
index ea60d032e..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agent2.conf.j2
+++ /dev/null
@@ -1,140 +0,0 @@
-{{ ansible_managed | comment }}
-# This is a configuration file for Zabbix Agent 2
-# To get more information about Zabbix, visit http://www.zabbix.com
-
-# This configuration file is "minimalized", which means all the original comments
-# are removed. The full documentation for your Zabbix Agent 2 can be found here:
-# https://www.zabbix.com/documentation/{{ zabbix_agent_version }}/en/manual/appendix/config/zabbix_agent2{{ "_win" if ansible_os_family == "Windows" else "" }}
-
-{% if ansible_os_family != "Windows" %}
-PidFile={{ zabbix_agent2_pidfile }}
-{% endif %}
-LogType={{ zabbix_agent2_logtype }}
-{% if ansible_os_family == "Windows" %}
-LogFile={{ zabbix_agent2_win_logfile }}
-{% else %}
-LogFile={{ zabbix_agent2_logfile }}
-{% endif %}
-LogFileSize={{ zabbix_agent2_logfilesize }}
-DebugLevel={{ zabbix_agent2_debuglevel }}
-{% if zabbix_agent2_sourceip is defined and zabbix_agent2_sourceip %}
-SourceIP={{ zabbix_agent2_sourceip }}
-{% endif %}
-Server={{ zabbix_agent2_server }}
-ListenPort={{ zabbix_agent2_listenport }}
-{% if zabbix_agent2_listenip is defined and zabbix_agent2_listenip !='0.0.0.0' and zabbix_agent2_listenip %}
-ListenIP={{ zabbix_agent2_listenip }}
-{% endif %}
-{% if zabbix_agent2_statusport is defined and zabbix_agent2_statusport %}
-StatusPort={{ zabbix_agent2_statusport }}
-{% endif %}
-ServerActive={{ zabbix_agent2_serveractive }}
-{% if zabbix_agent2_hostname is defined and zabbix_agent2_hostname %}
-Hostname={{ zabbix_agent2_hostname }}
-{% endif %}
-{% if zabbix_agent2_hostnameitem is defined and zabbix_agent2_hostnameitem %}
-HostnameItem={{ zabbix_agent2_hostnameitem }}
-{% endif %}
-{% if zabbix_agent2_hostmetadata is defined and zabbix_agent2_hostmetadata %}
-HostMetadata={{ zabbix_agent2_hostmetadata }}
-{% endif %}
-{% if zabbix_agent2_hostmetadataitem is defined and zabbix_agent2_hostmetadataitem %}
-HostMetadataItem={{ zabbix_agent2_hostmetadataitem }}
-{% endif %}
-{% if zabbix_agent2_hostinterface is defined and zabbix_agent2_hostinterface %}
-HostInterface={{ zabbix_agent2_hostinterface }}
-{% endif %}
-{% if zabbix_agent2_hostinterfaceitem is defined and zabbix_agent2_hostinterfaceitem %}
-HostInterfaceItem={{ zabbix_agent2_hostinterfaceitem }}
-{% endif %}
-{% if zabbix_agent2_allow_key is defined and zabbix_agent2_allow_key %}
-{% for item in zabbix_agent2_allow_key %}
-AllowKey={{ item }}
-{% endfor %}
-{% endif %}
-{% if zabbix_agent2_deny_key is defined and zabbix_agent2_deny_key %}
-{% for item in zabbix_agent2_deny_key %}
-DenyKey={{ item }}
-{% endfor %}
-{% endif %}
-RefreshActiveChecks={{ zabbix_agent2_refreshactivechecks }}
-BufferSend={{ zabbix_agent2_buffersend }}
-BufferSize={{ zabbix_agent2_buffersize }}
-{% if zabbix_agent2_enablepersistentbuffer is defined and zabbix_agent2_enablepersistentbuffer %}
-EnablePersistentBuffer={{ zabbix_agent2_enablepersistentbuffer }}
-{% endif %}
-{% if zabbix_agent2_persistentbufferperiod is defined and zabbix_agent2_persistentbufferperiod %}
-PersistentBufferPeriod={{ zabbix_agent2_persistentbufferperiod }}
-{% endif %}
-{% if zabbix_agent2_persistentbufferfile is defined and zabbix_agent2_persistentbufferfile %}
-PersistentBufferFile={{ zabbix_agent2_persistentbufferfile }}
-{% endif %}
-{% if zabbix_agent2_zabbix_alias is defined and zabbix_agent2_zabbix_alias %}
-{% if zabbix_agent2_zabbix_alias is string %}
-Alias={{ zabbix_agent2_zabbix_alias }}
-{% else %}
-{% for item in zabbix_agent2_zabbix_alias %}
-Alias={{ item }}
-{% endfor %}
-{% endif %}
-{% endif %}
-Timeout={{ zabbix_agent2_timeout }}
-{% if ansible_os_family == "Windows" %}
-Include={{ zabbix_agent_win_include }}
-{% else %}
-Include={{ zabbix_agent2_include }}/{{ zabbix_agent2_include_pattern }}
-{% endif %}
-{% if zabbix_agent2_additional_include is defined and zabbix_agent2_additional_include is iterable and zabbix_agent2_additional_include is not string %}
-{% for include in zabbix_agent2_additional_include %}
-Include={{ include }}
-{% endfor %}
-{% endif %}
-UnsafeUserParameters={{ zabbix_agent2_unsafeuserparameters }}
-{% if ansible_os_family != "Windows" %}
-ControlSocket={{ zabbix_agent2_controlsocket }}
-{% endif %}
-{% if zabbix_agent2_tlsconnect is defined and zabbix_agent2_tlsconnect %}
-TLSConnect={{ zabbix_agent2_tlsconnect }}
-{% endif %}
-{% if zabbix_agent2_tlsaccept is defined and zabbix_agent2_tlsaccept %}
-TLSAccept={{ zabbix_agent2_tlsaccept }}
-{% endif %}
-{% if zabbix_agent2_tlscafile is defined and zabbix_agent2_tlscafile %}
-TLSCAFile={{ zabbix_agent2_tlscafile }}
-{% endif %}
-{% if zabbix_agent2_tlscrlfile is defined and zabbix_agent2_tlscrlfile %}
-TLSCRLFile={{ zabbix_agent2_tlscrlfile }}
-{% endif %}
-{% if zabbix_agent2_tlsservercertissuer is defined and zabbix_agent2_tlsservercertissuer %}
-TLSServerCertIssuer={{ zabbix_agent2_tlsservercertissuer }}
-{% endif %}
-{% if zabbix_agent2_tlsservercertsubject is defined and zabbix_agent2_tlsservercertsubject %}
-TLSServerCertSubject={{ zabbix_agent2_tlsservercertsubject }}
-{% endif %}
-{% if zabbix_agent2_tlscertfile is defined and zabbix_agent2_tlscertfile %}
-TLSCertFile={{ zabbix_agent2_tlscertfile }}
-{% endif %}
-{% if zabbix_agent2_tlskeyfile is defined and zabbix_agent2_tlskeyfile %}
-TLSKeyFile={{ zabbix_agent2_tlskeyfile }}
-{% endif %}
-{% if zabbix_agent2_tlspskidentity is defined and zabbix_agent2_tlspskidentity %}
-TLSPSKIdentity={{ zabbix_agent2_tlspskidentity }}
-{% endif %}
-{% if zabbix_agent2_tlspskfile is defined and zabbix_agent2_tlspskfile %}
-TLSPSKFile={{ zabbix_agent2_tlspskfile }}
-{% endif %}
-{% if zabbix_agent2_plugins is defined and zabbix_agent2_plugins is iterable %}
-{% for entry in zabbix_agent2_plugins %}
-{% set my_name = entry['name'] %}
-{% for property in entry['options'] %}
-{% set param = property['parameter'] %}
-{% set value = property['value'] %}
-Plugins.{{ my_name }}.{{ param }}={{ value }}
-{% endfor %}
-{% endfor %}
-{% endif %}
-{% if zabbix_agent_version is version('6.0', '>=') %}
-{% if zabbix_agent2_listenbacklog is defined and zabbix_agent2_listenbacklog %}
-ListenBacklog={{ zabbix_agent2_listenbacklog }}
-{% endif %}
-{% endif %}
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agentd.conf.j2 b/ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agentd.conf.j2
deleted file mode 100644
index 24af45bc3..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/templates/zabbix_agentd.conf.j2
+++ /dev/null
@@ -1,149 +0,0 @@
-{{ ansible_managed | comment }}
-# This is a configuration file for Zabbix Agent
-# To get more information about Zabbix, visit http://www.zabbix.com
-
-# This configuration file is "minimalized", which means all the original comments
-# are removed. The full documentation for your Zabbix Agent can be found here:
-# https://www.zabbix.com/documentation/{{ zabbix_agent_version }}/en/manual/appendix/config/zabbix_agentd{{ "_win" if ansible_os_family == "Windows" else "" }}
-
-{% if ansible_os_family != "Windows" %}
-PidFile={{ zabbix_agent_pidfile }}
-{% endif %}
-{% if zabbix_agent_version is version('3.0', '>=') %}
-LogType={{ zabbix_agent_logtype }}
-{% endif %}
-{% if ansible_os_family == "Windows" %}
-LogFile={{ zabbix_agent_win_logfile }}
-{% else %}
-LogFile={{ zabbix_agent_logfile }}
-{% endif %}
-LogFileSize={{ zabbix_agent_logfilesize }}
-DebugLevel={{ zabbix_agent_debuglevel }}
-{% if zabbix_agent_sourceip is defined and zabbix_agent_sourceip %}
-SourceIP={{ zabbix_agent_sourceip }}
-{% endif %}
-{% if zabbix_agent_version is version('6.0', '<=') %}
-EnableRemoteCommands={{ zabbix_agent_enableremotecommands }}
-{% else %}
-{% if zabbix_agent_allowkeys is defined and zabbix_agent_allowkeys %}
-AllowKey={{ zabbix_agent_allowkeys }}
-{% endif %}
-{% if zabbix_agent_denykeys is defined and zabbix_agent_denykeys %}
-DenyKey={{ zabbix_agent_denykeys }}
-{% endif %}
-{% endif %}
-LogRemoteCommands={{ zabbix_agent_logremotecommands }}
-Server={{ zabbix_agent_server }}
-ListenPort={{ zabbix_agent_listenport }}
-{% if zabbix_agent_listenip is defined and zabbix_agent_listenip !='0.0.0.0' and zabbix_agent_listenip %}
-ListenIP={{ zabbix_agent_listenip }}
-{% endif %}
-StartAgents={{ zabbix_agent_startagents }}
-ServerActive={{ zabbix_agent_serveractive }}
-{% if zabbix_agent_hostname is defined and zabbix_agent_hostname %}
-Hostname={{ zabbix_agent_hostname }}
-{% endif %}
-{% if zabbix_agent_hostnameitem is defined and zabbix_agent_hostnameitem %}
-HostnameItem={{ zabbix_agent_hostnameitem }}
-{% endif %}
-{% if zabbix_agent_hostmetadata is defined and zabbix_agent_hostmetadata %}
-HostMetadata={{ zabbix_agent_hostmetadata }}
-{% endif %}
-{% if zabbix_agent_hostmetadataitem is defined and zabbix_agent_hostmetadataitem %}
-HostMetadataItem={{ zabbix_agent_hostmetadataitem }}
-{% endif %}
-{% if zabbix_agent_allow_key is defined and zabbix_agent_allow_key %}
-{% for item in zabbix_agent_allow_key %}
-AllowKey={{ item }}
-{% endfor %}
-{% endif %}
-{% if zabbix_agent_deny_key is defined and zabbix_agent_deny_key %}
-{% for item in zabbix_agent_deny_key %}
-DenyKey={{ item }}
-{% endfor %}
-{% endif %}
-RefreshActiveChecks={{ zabbix_agent_refreshactivechecks }}
-BufferSend={{ zabbix_agent_buffersend }}
-BufferSize={{ zabbix_agent_buffersize }}
-MaxLinesPerSecond={{ zabbix_agent_maxlinespersecond }}
-{% if zabbix_agent_version is version_compare('6.2', '>=') %}
-HeartbeatFrequency={{ zabbix_agent_heartbeatfrequency }}
-{% endif %}
-{% if zabbix_agent_zabbix_alias is defined and zabbix_agent_zabbix_alias %}
-{% if zabbix_agent_zabbix_alias is string %}
-Alias={{ zabbix_agent_zabbix_alias }}
-{% else %}
-{% for item in zabbix_agent_zabbix_alias %}
-Alias={{ item }}
-{% endfor %}
-{% endif %}
-{% endif %}
-Timeout={{ zabbix_agent_timeout }}
-{% if ansible_os_family != "Windows" %}
-AllowRoot={{ zabbix_agent_allowroot }}
-{% endif %}
-{% if zabbix_agent_runas_user is defined and zabbix_agent_runas_user %}
-User={{ zabbix_agent_runas_user }}
-{% endif %}
-{% if ansible_os_family == "Windows" %}
-Include={{ zabbix_agent_win_include }}
-{% else %}
-Include={{ zabbix_agent_include }}/{{ zabbix_agent_include_pattern }}
-{% endif %}
-{% if zabbix_agent_additional_include is defined and zabbix_agent_additional_include is iterable and zabbix_agent_additional_include is not string %}
-{% for include in zabbix_agent_additional_include %}
-Include={{ include }}
-{% endfor %}
-{% endif %}
-UnsafeUserParameters={{ zabbix_agent_unsafeuserparameters }}
-{% if zabbix_agent_version is version_compare('2.2', '>=') %}
-{% if ansible_os_family != "Windows" %}
-LoadModulePath={{ zabbix_agent_loadmodulepath }}
-{% endif %}
-{% endif %}
-{% if zabbix_agent_loadmodule is defined and zabbix_agent_loadmodule %}
-{% if zabbix_agent_loadmodule is string %}
-LoadModule={{ zabbix_agent_loadmodule }}
-{% else %}
-{% for module in zabbix_agent_loadmodule %}
-LoadModule={{ module }}
-{% endfor %}
-{% endif %}
-{% endif %}
-{% if zabbix_agent_version is version_compare('3.0', '>=') %}
-{% if zabbix_agent_tlsconnect is defined and zabbix_agent_tlsconnect %}
-TLSConnect={{ zabbix_agent_tlsconnect }}
-{% endif %}
-{% if zabbix_agent_tlsaccept is defined and zabbix_agent_tlsaccept %}
-TLSAccept={{ zabbix_agent_tlsaccept }}
-{% endif %}
-{% if zabbix_agent_tlscafile is defined and zabbix_agent_tlscafile %}
-TLSCAFile={{ zabbix_agent_tlscafile }}
-{% endif %}
-{% if zabbix_agent_tlscrlfile is defined and zabbix_agent_tlscrlfile %}
-TLSCRLFile={{ zabbix_agent_tlscrlfile }}
-{% endif %}
-{% if zabbix_agent_tlsservercertissuer is defined and zabbix_agent_tlsservercertissuer %}
-TLSServerCertIssuer={{ zabbix_agent_tlsservercertissuer }}
-{% endif %}
-{% if zabbix_agent_tlsservercertsubject is defined and zabbix_agent_tlsservercertsubject %}
-TLSServerCertSubject={{ zabbix_agent_tlsservercertsubject }}
-{% endif %}
-{% if zabbix_agent_tlscertfile is defined and zabbix_agent_tlscertfile %}
-TLSCertFile={{ zabbix_agent_tlscertfile }}
-{% endif %}
-{% if zabbix_agent_tlskeyfile is defined and zabbix_agent_tlskeyfile %}
-TLSKeyFile={{ zabbix_agent_tlskeyfile }}
-{% endif %}
-{% if zabbix_agent_tlspskidentity is defined and zabbix_agent_tlspskidentity %}
-TLSPSKIdentity={{ zabbix_agent_tlspskidentity }}
-{% endif %}
-{% if zabbix_agent_tlspskfile is defined and zabbix_agent_tlspskfile %}
-TLSPSKFile={{ zabbix_agent_tlspskfile }}
-{% endif %}
-{% endif %}
-{% if zabbix_agent_version is version('6.0', '>=') %}
-{% if zabbix_agent_listenbacklog is defined and zabbix_agent_listenbacklog %}
-ListenBacklog={{ zabbix_agent_listenbacklog }}
-{% endif %}
-{% endif %}
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/vars/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/Debian.yml
index 4a88411f1..0a9b14650 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/vars/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/Debian.yml
@@ -2,9 +2,6 @@
# vars file for zabbix_agent (Debian)
zabbix_agent: zabbix-agent
-zabbix_agent_service: zabbix-agent
-zabbix_agent_conf: zabbix_agentd.conf
-zabbix_agent2_conf: zabbix_agent2.conf
zabbix_valid_agent_versions:
# Debian
@@ -28,6 +25,10 @@ zabbix_valid_agent_versions:
- 6.2
- 6.0
# Ubuntu
+ "24":
+ - 6.4
+ - 6.0
+
"22":
- 6.4
- 6.2
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/vars/RedHat.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/RedHat.yml
index 50f0b01ec..d4e6c6e3b 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_agent/vars/RedHat.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/RedHat.yml
@@ -2,9 +2,6 @@
# vars file for zabbix_agent (RedHat)
zabbix_agent: zabbix-agent
-zabbix_agent_service: zabbix-agent
-zabbix_agent_conf: zabbix_agentd.conf
-zabbix_agent2_conf: zabbix_agent2.conf
zabbix_valid_agent_versions:
"9":
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent2_vars.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent2_vars.yml
new file mode 100644
index 000000000..8c03fa23b
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent2_vars.yml
@@ -0,0 +1,9 @@
+_pidfile: /var/run/zabbix/zabbix_agent2.pid
+_logfile: /var/log/zabbix/zabbix_agent2.log
+_include: /etc/zabbix/zabbix_agent2.d
+_tls_subject: "{{ zabbix_agent_tlsservercertsubject | default(omit) }}" # FIXME this is not correct and should be removed with 2.0.0, here only to prevent regression
+_win_package: zabbix_agent2-{{ zabbix_version_long }}-windows-amd64-openssl-static.zip
+_win_download_link: "{{ zabbix_win_download_url }}/{{ zabbix_version_long | regex_search('^\\d+\\.\\d+') }}/{{ zabbix_version_long }}/{{ zabbix2_win_package }}"
+_win_logfile: "{{ zabbix_win_install_dir }}\\zabbix_agent2.log"
+_agent_service: zabbix-agent2
+_agent_package: zabbix-agent2
diff --git a/ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent_vars.yml b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent_vars.yml
new file mode 100644
index 000000000..c37a5ecc3
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_agent/vars/agent_vars.yml
@@ -0,0 +1,10 @@
+_pidfile: /var/run/zabbix/zabbix_agentd.pid
+_logfile: /var/log/zabbix/zabbix_agentd.log
+_include: /etc/zabbix/zabbix_agentd.d
+_tls_subject: "{{ zabbix_agent_tlsservercertsubject | default(omit) }}" # FIXME this is not correct and should be removed with 2.0.0, here only to prevent regression
+_win_package: zabbix_agent-{{ zabbix_version_long }}-windows-amd64-openssl.zip
+_win_download_link: "{{ zabbix_win_download_url }}/{{ zabbix_version_long | regex_search('^\\d+\\.\\d+') }}/{{ zabbix_version_long }}/{{ zabbix_win_package }}"
+_win_logfile: "{{ zabbix_win_install_dir }}\\zabbix_agentd.log"
+_agent_service: zabbix-agent
+_agent_package: zabbix-agent
+_sender_package: zabbix-sender
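The two new vars files above carry per-variant defaults behind underscore-prefixed names. A sketch of how such a split is typically consumed, assuming the same zabbix_agent2 toggle the template uses; the task wording and the set_fact mapping are illustrative, not the role's actual tasks:

    - name: Load variant-specific defaults (agent vs agent2)
      ansible.builtin.include_vars:
        file: "{{ zabbix_agent2 | bool | ternary('agent2_vars.yml', 'agent_vars.yml') }}"

    - name: Fall back to the private defaults when the public variables are unset
      ansible.builtin.set_fact:
        zabbix_agent_pidfile: "{{ zabbix_agent_pidfile | default(_pidfile) }}"
        zabbix_agent_logfile: "{{ zabbix_agent_logfile | default(_logfile) }}"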
diff --git a/ansible_collections/community/zabbix/roles/zabbix_javagateway/README.md b/ansible_collections/community/zabbix/roles/zabbix_javagateway/README.md
index 47092a6a0..0de7af814 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_javagateway/README.md
+++ b/ansible_collections/community/zabbix/roles/zabbix_javagateway/README.md
@@ -39,6 +39,7 @@ See the following list of supported Operating systems with the Zabbix releases.
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
| Red Hat Fam 7 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | V | V | V |
diff --git a/ansible_collections/community/zabbix/roles/zabbix_javagateway/tasks/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_javagateway/tasks/Debian.yml
index ad762aa3b..fd970ab75 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_javagateway/tasks/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_javagateway/tasks/Debian.yml
@@ -46,10 +46,15 @@
Components: {{ zabbix_repo_deb_component }}
Architectures: {{ 'amd64' if ansible_machine != 'aarch64' else 'arm64'}}
Signed-By: {{ zabbix_gpg_key }}
+ register: zabbix_repo
become: true
tags:
- install
+- name: "Debian | Update apt cache if repo was added"
+ ansible.builtin.apt: update_cache=yes
+ when: zabbix_repo is changed
+
- name: "Debian | Installing zabbix-java-gateway"
ansible.builtin.apt:
pkg: zabbix-java-gateway
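Registering the repository task as zabbix_repo and refreshing the apt cache only "when: zabbix_repo is changed" avoids an unconditional cache update on every run. The same pattern in isolation (repository name, path, and content variable are placeholders):

    - name: Add example repository definition
      ansible.builtin.copy:
        dest: /etc/apt/sources.list.d/example.sources
        content: "{{ example_repo_definition }}"
        mode: "0644"
      register: example_repo

    - name: Update apt cache only if the repository file changed
      ansible.builtin.apt:
        update_cache: true
      when: example_repo is changed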
diff --git a/ansible_collections/community/zabbix/roles/zabbix_javagateway/vars/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_javagateway/vars/Debian.yml
index 7c36d2d3a..0ea96ac7e 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_javagateway/vars/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_javagateway/vars/Debian.yml
@@ -12,6 +12,9 @@ zabbix_valid_javagateway_versions:
- 6.2
- 6.0
# Ubuntu
+ "24":
+ - 6.4
+ - 6.0
"22":
- 6.4
- 6.2
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/README.md b/ansible_collections/community/zabbix/roles/zabbix_proxy/README.md
index ee558c8b7..703ec6059 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/README.md
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/README.md
@@ -82,6 +82,7 @@ See the following list of supported Operating systems with the Zabbix releases.
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
| Red Hat Fam 7 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | V | V | V |
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/defaults/main.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/defaults/main.yml
index b2b74ebec..635c40f40 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/defaults/main.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/defaults/main.yml
@@ -43,8 +43,6 @@ zabbix_repo_yum_schema: https
zabbix_repo_yum_gpgcheck: 0
zabbix_repo_deb_url: "http://repo.zabbix.com/zabbix/{{ zabbix_proxy_version }}/{{ ansible_facts.lsb.id | default(ansible_facts['distribution']) | lower }}{% if ansible_facts['architecture'] == 'aarch64' and ansible_facts.lsb.id | default(ansible_facts['distribution']) in ['Debian', 'Ubuntu'] %}-arm64{% endif %}"
zabbix_repo_deb_component: main
-zabbix_proxy_disable_repo:
- - epel
zabbix_repo_yum:
- name: zabbix
description: Zabbix Official Repository - $basearch
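Dropping the zabbix_proxy_disable_repo default means EPEL is no longer excluded automatically; the consolidated package task later in this change still honours the variable, so the previous behaviour can be restored per host. Illustrative host_vars:

    zabbix_proxy_disable_repo:
      - epel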
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/Debian.yml
index bef68b27a..94b56890c 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/Debian.yml
@@ -2,7 +2,6 @@
- name: "Debian | Set short version name"
ansible.builtin.set_fact:
zabbix_short_version: "{{ zabbix_proxy_version | regex_replace('\\.', '') }}"
- zabbix_python_prefix: "python{% if ansible_python_version is version('3', '>=') %}3{% endif %}"
zabbix_underscore_version: "{{ zabbix_proxy_version | regex_replace('\\.', '_') }}"
tags:
- always
@@ -102,108 +101,3 @@
become: true
tags:
- install
-
-- name: "Debian | Installing zabbix-proxy-{{ zabbix_proxy_database }}"
- ansible.builtin.apt:
- pkg: "zabbix-proxy-{{ zabbix_proxy_database }}"
- update_cache: true
- cache_valid_time: 0
- force: true
- state: "{{ zabbix_proxy_package_state }}"
- default_release: "{{ ansible_distribution_release }}"
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: is_zabbix_proxy_package_installed
- until: is_zabbix_proxy_package_installed is succeeded
- become: true
- tags:
- - install
-
-- name: "Debian | Installing zabbix-sql-scripts"
- ansible.builtin.apt:
- pkg: zabbix-sql-scripts
- state: "{{ zabbix_proxy_package_state }}"
- update_cache: true
- cache_valid_time: 0
- default_release: "{{ ansible_distribution_release }}"
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_proxy_package_sql_installed
- until: zabbix_proxy_package_sql_installed is succeeded
- when:
- - zabbix_proxy_version is version('6.0', '>=')
- become: true
- tags:
- - install
-
-- name: "Debian | Install Ansible module dependencies"
- ansible.builtin.apt:
- name: "{{ zabbix_python_prefix }}-psycopg2"
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_proxy_dependencies_installed
- until: zabbix_proxy_dependencies_installed is succeeded
- become: true
- when:
- - zabbix_proxy_database_creation
- tags:
- - install
- - dependencies
-
-- name: "Debian | Install Mysql Client package"
- ansible.builtin.apt:
- name: "{{ mysql_client_pkgs[ansible_distribution_major_version] }}"
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_proxy_dependencies_installed
- until: zabbix_proxy_dependencies_installed is succeeded
- become: true
- when:
- - zabbix_proxy_database == 'mysql'
- - zabbix_proxy_install_database_client
- tags:
- - install
- - dependencies
- - database
-
-- name: "Debian | Install PostgreSQL Client package"
- ansible.builtin.apt:
- name: postgresql-client
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: are_zabbix_proxy_dependency_packages_installed
- until: are_zabbix_proxy_dependency_packages_installed is succeeded
- become: true
- when:
- - zabbix_proxy_database_creation or zabbix_proxy_database_sqlload
- - zabbix_proxy_database == 'pgsql'
- - zabbix_proxy_install_database_client
- tags:
- - install
- - dependencies
- - database
-
-- name: "Debian | Install sqlite3"
- ansible.builtin.apt:
- name: sqlite3
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: are_zabbix_proxy_dependency_packages_installed
- until: are_zabbix_proxy_dependency_packages_installed is succeeded
- become: true
- when:
- - zabbix_proxy_database == 'sqlite3'
- tags:
- - install
- - dependencies
- - database
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/RedHat.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/RedHat.yml
index f35b3c7b3..a51baee63 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/RedHat.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/RedHat.yml
@@ -7,19 +7,6 @@
tags:
- always
-- name: "RedHat | Define package without version"
- ansible.builtin.set_fact:
- zabbix_proxy_package: "zabbix-proxy-{{ zabbix_proxy_database }}"
- cacheable: true
- tags:
- - always
-
-- name: "RedHat | Set facts for Zabbix"
- ansible.builtin.set_fact:
- datafiles_path: "/usr/share/doc/zabbix-sql-scripts/{{ zabbix_proxy_db_long }}"
- tags:
- - always
-
- name: "RedHat | Make sure old file is absent"
ansible.builtin.file:
path: /etc/yum.repos.d/zabbix-supported.repo
@@ -47,99 +34,10 @@
tags:
- install
-- name: Install packages for Zabbix Repository
- block:
- - name: "RedHat | Installing zabbix-proxy-{{ zabbix_proxy_database }}"
- ansible.builtin.yum:
- pkg: "{{ zabbix_proxy_package }}-{{ zabbix_proxy_version }}.{{ zabbix_proxy_version_minor }}"
- state: "{{ zabbix_proxy_package_state }}"
- disablerepo: "{{ zabbix_proxy_disable_repo | default(omit) }}"
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- become: true
- register: is_zabbix_proxy_package_installed
- until: is_zabbix_proxy_package_installed is succeeded
-
- - name: "RedHat | Installing zabbix-sql-scripts"
- ansible.builtin.yum:
- pkg: "zabbix-sql-scripts-{{ zabbix_proxy_version }}.{{ zabbix_proxy_version_minor }}"
- state: "{{ zabbix_proxy_package_state }}"
- disablerepo: "{{ zabbix_proxy_disable_repo | default(omit) }}"
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_proxy_sql_package_installed
- until: zabbix_proxy_sql_package_installed is succeeded
- become: true
- tags:
- - install
-
-- name: "RedHat | Install Ansible PostgreSQL Client package"
+- name: "RedHat | Add EPEL Repo (Centos 7 Only)"
+ when: ansible_facts['distribution_major_version'] == '7'
ansible.builtin.yum:
- name: "{{ pgsql_depenencies[ansible_distribution_major_version] }}"
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- become: true
- register: are_zabbix_proxy_pgsql_packages_installed
- until: are_zabbix_proxy_pgsql_packages_installed is succeeded
- when:
- - zabbix_proxy_database_creation or zabbix_proxy_database_sqlload
- - zabbix_proxy_database == 'pgsql'
- tags:
- - install
- - database
- - dependencies
-
-- name: "RedHat | Install Mysql Client Package"
- block:
- - name: "RedHat | Add Mysql Repo (Centos 7 Only)"
- ansible.builtin.yum_repository:
- name: mariadb
- description: MariaDB 10.8 CentOS repository list
- file: mariadb
- baseurl: "https://mirror.rackspace.com/mariadb/yum/10.11/centos{{ ansible_distribution_major_version }}-amd64"
- gpgcheck: no
- when: ansible_distribution_major_version == '7'
-
- - name: "RedHat | Install Mysql Client package"
- ansible.builtin.yum:
- name: "{{ mysql_client_pkgs[ansible_distribution_major_version] }}"
- state: installed
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- become: true
- register: are_zabbix_proxy_mysql_packages_installed
- until: are_zabbix_proxy_mysql_packages_installed is succeeded
- when:
- - zabbix_proxy_database_creation or zabbix_proxy_database_sqlload
- - zabbix_proxy_install_database_client
- - zabbix_proxy_database == 'mysql'
- tags:
- - install
- - database
- - dependencies
-
-- name: "RedHat | Install sqlite3"
- ansible.builtin.yum:
- name:
- - sqlite
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_proxy_sqlite_packages_installed
- until: zabbix_proxy_sqlite_packages_installed is succeeded
- become: true
- when:
- - zabbix_proxy_database == 'sqlite3'
- tags:
- - install
- - database
- - dependencies
+ name: epel-release
- name: "Configure SELinux when enabled"
ansible.builtin.include_tasks: selinux.yml
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-mysql.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-mysql.yml
new file mode 100644
index 000000000..e3b525a83
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-mysql.yml
@@ -0,0 +1,144 @@
+---
+# task file for mysql
+- name: "Install MySQL dependencies"
+ when: zabbix_proxy_database_creation or zabbix_proxy_database_sqlload
+ ansible.builtin.package:
+ name: "{{ _zabbix_proxy_mysql_dependencies[ ansible_facts['distribution_major_version'] ] | select | list }}"
+ environment:
+ http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
+ https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
+ become: true
+ register: _zabbix_proxy_dependencies_installed
+ until: _zabbix_proxy_dependencies_installed is succeeded
+ tags:
+ - install
+ - database
+ - dependencies
+
+# NOTE: Upgrading system-packages with pip is generally a bad idea, but
+# these packaged older versions seem to have a problem with MySQL 8 and above
+- name: Upgrade pymysql
+ when:
+ - ansible_facts['distribution'] in ['CentOS', 'Debian', 'Ubuntu']
+ - ansible_facts['distribution_release'] in ['Core', 'buster', 'bullseye', 'bionic', 'focal']
+ ansible.builtin.pip:
+ name: "pymysql>=0.10.0,<0.11.0"
+ state: latest
+
+- name: "MySQL Database prep"
+ when: zabbix_proxy_database_creation | bool
+ delegate_to: "{{ zabbix_proxy_real_dbhost | default(zabbix_proxy_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname)) }}"
+ vars:
+ delegated_dbhost: "{{ (zabbix_proxy_dbhost == 'localhost') | ternary(inventory_hostname, zabbix_proxy_dbhost) }}"
+ tags:
+ - database
+ - skip_ansible_lint
+ block:
+ - name: "MySQL | Create database"
+ community.mysql.mysql_db:
+ login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
+ login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
+ login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
+ name: "{{ zabbix_proxy_dbname }}"
+ encoding: "{{ zabbix_proxy_dbencoding }}"
+ collation: "{{ zabbix_proxy_dbcollation }}"
+ state: present
+ register: zabbix_database_created
+
+ - name: "MySQL | Create database user"
+ community.mysql.mysql_user:
+ login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
+ login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
+ login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
+ name: "{{ zabbix_proxy_dbuser }}"
+ password: "{{ zabbix_proxy_dbpassword }}"
+ priv: "{{ zabbix_proxy_dbname }}.*:ALL"
+ host: "{{ zabbix_proxy_privileged_host }}"
+ plugin: "{{ 'mysql_native_password' if (ansible_os_family == 'RedHat' and ansible_distribution_major_version == '7') else omit }}"
+ state: present
+
+- name: "MySQL verify or create schema"
+ when: zabbix_proxy_database_sqlload | bool
+ tags:
+ - database
+ block:
+ - name: "MySQL | Get current database version"
+ community.mysql.mysql_query:
+ login_user: "{{ zabbix_proxy_dbuser }}"
+ login_password: "{{ zabbix_proxy_dbpassword }}"
+ login_host: "{{ zabbix_proxy_dbhost }}"
+ login_port: "{{ zabbix_proxy_dbport }}"
+ login_db: "{{ zabbix_proxy_dbname }}"
+ query: 'SELECT mandatory FROM dbversion'
+ rescue:
+ - name: "MySQL | Get and set schema import overrides"
+ delegate_to: "{{ zabbix_proxy_real_dbhost | default(zabbix_proxy_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname)) }}"
+ vars:
+ delegated_dbhost: "{{ (zabbix_proxy_dbhost == 'localhost') | ternary(inventory_hostname, zabbix_proxy_dbhost) }}"
+ block:
+ - name: "MySQL | Get current value for variables"
+ community.mysql.mysql_variables:
+ variable: "{{ name }}"
+ login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
+ login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
+ login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
+ loop:
+ - innodb_default_row_format
+ - log_bin_trust_function_creators
+ loop_control:
+ loop_var: name
+ register: _mysql_variable_defaults
+
+ - name: "MySQL | Set variable overrides for schema import"
+ when: item.msg != _mysql_schema_import_overrides[item.name]
+ community.mysql.mysql_variables:
+ variable: "{{ item.name }}"
+ value: "{{ _mysql_schema_import_overrides[item.name] }}"
+ login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
+ login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
+ login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
+ login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
+ loop: "{{ _mysql_variable_defaults.results }}"
+ loop_control:
+ label: "{{ item.name }}: {{ _mysql_schema_import_overrides[item.name] }}"
+ vars:
+ _mysql_schema_import_overrides:
+ innodb_default_row_format: "dynamic"
+ log_bin_trust_function_creators: "ON"
+
+ - name: "MySQL | Import schema"
+ community.mysql.mysql_db:
+ login_user: "{{ zabbix_proxy_dbuser }}"
+ login_password: "{{ zabbix_proxy_dbpassword }}"
+ login_host: "{{ zabbix_proxy_dbhost }}"
+ login_port: "{{ zabbix_proxy_dbport }}"
+ name: "{{ zabbix_proxy_dbname }}"
+ encoding: "{{ zabbix_proxy_dbencoding }}"
+ collation: "{{ zabbix_proxy_dbcollation }}"
+ state: import
+ target: /usr/share/zabbix-sql-scripts/mysql/proxy.sql
+
+ always:
+ - name: "MySQL | Revert variable overrides for schema import"
+ when: _mysql_variable_defaults is defined
+ delegate_to: "{{ zabbix_proxy_real_dbhost | default(zabbix_proxy_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname)) }}"
+ vars:
+ delegated_dbhost: "{{ (zabbix_proxy_dbhost == 'localhost') | ternary(inventory_hostname, zabbix_proxy_dbhost) }}"
+ community.mysql.mysql_variables:
+ variable: "{{ item.name }}"
+ value: "{{ item.msg }}"
+ login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
+ login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
+ login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
+ loop: "{{ _mysql_variable_defaults.results | default([]) }}"
+ loop_control:
+ label: "{{ item.name }}: {{ item.msg }}"
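The block above probes the schema with "SELECT mandatory FROM dbversion" and only drops into the rescue section, which temporarily overrides innodb_default_row_format and log_bin_trust_function_creators and imports proxy.sql, when that probe fails; the always section then restores the captured variable values. An equivalent, simplified formulation of the probe-then-import idea using register/ignore_errors instead of block/rescue (database name and credentials are placeholders):

    - name: Check whether the schema is already present
      community.mysql.mysql_query:
        login_db: exampledb
        query: "SELECT mandatory FROM dbversion"
      register: schema_probe
      ignore_errors: true

    - name: Import the schema only when the probe failed
      community.mysql.mysql_db:
        name: exampledb
        state: import
        target: /usr/share/zabbix-sql-scripts/mysql/proxy.sql
      when: schema_probe is failed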
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-pgsql.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-pgsql.yml
new file mode 100644
index 000000000..fc2c675ab
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-pgsql.yml
@@ -0,0 +1,73 @@
+---
+# task file for postgresql
+- name: "Install PostgreSQL dependencies"
+ when: zabbix_proxy_database_creation or zabbix_proxy_database_sqlload
+ ansible.builtin.package:
+ name: "{{ _zabbix_proxy_pgsql_dependencies | select | list }}"
+ environment:
+ http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
+ https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
+ become: true
+ register: _zabbix_proxy_pgsql_packages_installed
+ until: _zabbix_proxy_pgsql_packages_installed is succeeded
+ tags:
+ - install
+ - database
+ - dependencies
+
+- name: "PostgreSQL Database prep"
+ when: zabbix_proxy_database_creation | bool
+ become: "{{ zabbix_proxy_dbhost_run_install }}"
+ become_user: postgres
+ delegate_to: "{{ zabbix_proxy_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname) }}"
+ vars:
+ delegated_dbhost: "{{ (zabbix_proxy_dbhost == 'localhost') | ternary(inventory_hostname, zabbix_proxy_dbhost) }}"
+ tags:
+ - database
+ block:
+ - name: "PostgreSQL | Create database"
+ community.postgresql.postgresql_db:
+ login_user: "{{ zabbix_proxy_pgsql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_pgsql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_proxy_pgsql_login_host | default(omit) }}"
+ port: "{{ zabbix_proxy_dbport }}"
+ login_unix_socket: "{{ zabbix_proxy_pgsql_login_unix_socket | default(omit) }}"
+ name: "{{ zabbix_proxy_dbname }}"
+ state: present
+
+ - name: "PostgreSQL | Create database user"
+ community.postgresql.postgresql_user:
+ login_user: "{{ zabbix_proxy_pgsql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_proxy_pgsql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_proxy_pgsql_login_host | default(omit) }}"
+ port: "{{ zabbix_proxy_dbport }}"
+ name: "{{ zabbix_proxy_dbuser }}"
+ password: "{{ ('md5' + (zabbix_proxy_dbpassword + zabbix_proxy_dbuser)|hash('md5')) if zabbix_proxy_dbpassword_hash_method == 'md5' else zabbix_proxy_dbpassword }}"
+ db: "{{ zabbix_proxy_dbname }}"
+ priv: ALL
+ state: present
+ encrypted: true
+
+- name: "PostgreSQL verify or create schema"
+ when: zabbix_proxy_database_sqlload | bool
+ tags:
+ - database
+ block:
+ - name: "PostgreSQL | Get current database version"
+ community.postgresql.postgresql_query:
+ login_user: "{{ zabbix_proxy_dbuser }}"
+ login_password: "{{ zabbix_proxy_dbpassword }}"
+ login_host: "{{ zabbix_proxy_dbhost }}"
+ port: "{{ zabbix_proxy_dbport }}"
+ db: "{{ zabbix_proxy_dbname }}"
+ query: 'SELECT mandatory FROM dbversion'
+ rescue:
+ - name: "PostgreSQL | Import schema"
+ community.postgresql.postgresql_db:
+ login_user: "{{ zabbix_proxy_dbuser }}"
+ login_password: "{{ zabbix_proxy_dbpassword }}"
+ login_host: "{{ zabbix_proxy_dbhost }}"
+ port: "{{ zabbix_proxy_dbport }}"
+ db: "{{ zabbix_proxy_dbname }}"
+ state: restore
+ target: /usr/share/zabbix-sql-scripts/postgresql/proxy.sql
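When zabbix_proxy_dbpassword_hash_method is 'md5', the user task above stores PostgreSQL's legacy md5 format: the literal prefix "md5" followed by the md5 digest of the password concatenated with the username. A worked example with invented values:

    - name: Show how the stored md5 password string is built
      ansible.builtin.debug:
        msg: "{{ 'md5' + (('s3cret' + 'zabbix_proxy') | hash('md5')) }}"
      # Prints "md5" followed by the 32-hex-character digest of "s3cretzabbix_proxy".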
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-sqlite3.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-sqlite3.yml
new file mode 100644
index 000000000..ae8f8602b
--- /dev/null
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/initialize-sqlite3.yml
@@ -0,0 +1,62 @@
+---
+# task file for sqlite3
+- name: "Install SQLite3 dependencies"
+ when: zabbix_proxy_database_creation or zabbix_proxy_database_sqlload
+ ansible.builtin.package:
+ name: "{{ _zabbix_proxy_sqlite3_dependencies }}"
+ environment:
+ http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
+ https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
+ become: true
+ register: _zabbix_proxy_sqlite3_packages_installed
+ until: _zabbix_proxy_sqlite3_packages_installed is succeeded
+ tags:
+ - install
+ - database
+ - dependencies
+
+- name: "SQLite3 | Set path for default dbname"
+ when: zabbix_proxy_dbname == "zabbix_proxy"
+ ansible.builtin.set_fact:
+ zabbix_proxy_dbname: /var/lib/zabbix/zabbix_proxy.db
+ tags:
+ - database
+
+- name: "SQLite3 Database prep"
+ when: zabbix_proxy_database_creation | bool
+ become: true
+ tags:
+ - database
+ block:
+ - name: "SQLite3 | Create path directories"
+ ansible.builtin.file:
+ state: directory
+ name: "{{ zabbix_proxy_dbname | dirname }}"
+ mode: "0744"
+ owner: "{{ zabbix_os_user }}"
+ group: "{{ zabbix_os_user }}"
+ seuser: system_u
+ serole: object_r
+ setype: zabbix_var_lib_t
+
+ - name: "SQLite3 | Import schema"
+ when: zabbix_proxy_database_sqlload
+ become_user: "{{ zabbix_os_user }}"
+ ansible.builtin.shell: |
+ set -euxo pipefail
+ sqlite3 {{ zabbix_proxy_dbname }} < /usr/share/zabbix-sql-scripts/sqlite3/proxy.sql
+ args:
+ creates: "{{ zabbix_proxy_dbname }}"
+ executable: /bin/bash
+
+ - name: "Sqlite3 | Fix zabbix db file permission (SELinux)"
+ when: ansible_selinux.status == "enabled"
+ ansible.builtin.file:
+ state: file
+ path: "{{ zabbix_proxy_dbname }}"
+ mode: "0600"
+ owner: "{{ zabbix_os_user }}"
+ group: "{{ zabbix_os_user }}"
+ seuser: system_u
+ serole: object_r
+ setype: zabbix_var_lib_t
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/main.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/main.yml
index 1e8831c35..3043bdaef 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/main.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/main.yml
@@ -35,13 +35,6 @@
zabbix_api_server_port: "{{ '443' if zabbix_api_use_ssl|bool else '80' }}"
when: zabbix_api_server_port is undefined
-- name: Set Path to SQL File
- ansible.builtin.set_fact:
- datafile_path: "{{ db_file_path[zabbix_short_version] }}"
- tags:
- - install
- - config
-
- name: "Set default ip address for zabbix_proxy_ip"
ansible.builtin.set_fact:
zabbix_proxy_ip: "{{ hostvars[inventory_hostname]['ansible_default_ipv4'].address }}"
@@ -56,54 +49,45 @@
- name: "Complete OS Specific Tasks"
ansible.builtin.include_tasks: "{{ ansible_os_family }}.yml"
-- name: "Get the file for database schema"
- ansible.builtin.shell: ls -1 {{ db_file_path[zabbix_short_version] }}
- changed_when: false
+- name: "Install zabbix-proxy packages"
+ ansible.builtin.package:
+ name: "{{ _zabbix_proxy_packages }}"
+ state: "{{ zabbix_proxy_package_state }}"
+ update_cache: true
+ disablerepo: "{{ zabbix_proxy_disable_repo | default(_zabbix_proxy_disable_repo | default(omit)) }}"
+ environment:
+ http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
+ https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
+ register: _zabbix_proxy_package_installed
+ until: _zabbix_proxy_package_installed is succeeded
become: true
- when:
- - zabbix_proxy_database_sqlload
- register: ls_output_schema
tags:
- - database
+ - install
-- name: "Installing the database"
- ansible.builtin.include_tasks: "{{ zabbix_proxy_db_long }}.yml"
+- name: "Initialize the database"
+ ansible.builtin.include_tasks: "initialize-{{ zabbix_proxy_database }}.yml"
-- name: "Create include dir zabbix-proxy"
+- name: "Create directories"
ansible.builtin.file:
- path: "{{ zabbix_proxy_include }}"
- owner: "{{ zabbix_os_user }}"
- group: "{{ zabbix_os_user }}"
- mode: "{{ zabbix_proxy_include_mode }}"
state: directory
- become: true
- tags:
- - install
- - config
-
-- name: "Create module dir zabbix-proxy"
- ansible.builtin.file:
- path: "{{ zabbix_proxy_loadmodulepath }}"
+ path: "{{ item.path }}"
owner: "{{ zabbix_os_user }}"
group: "{{ zabbix_os_user }}"
- state: directory
- mode: "0755"
+ mode: "{{ item.mode | default('0755') }}"
become: true
+ loop:
+ - path: "{{ zabbix_proxy_include }}"
+ mode: "{{ zabbix_proxy_include_mode }}"
+ - path: "{{ zabbix_proxy_loadmodulepath }}"
+ - required: "{{ zabbix_proxy_tlspskfile is defined }}"
+ path: "{{ zabbix_proxy_tlspskfile | default('/path/to/zabbix_proxy_tlspskfile/zabbix_proxy.psk') | dirname }}"
+ loop_control:
+ label: "{{ item.path }}"
+ when: item.required | default(true)
tags:
- install
- config
-- name: "Create directory for PSK file if not exist."
- ansible.builtin.file:
- path: "{{ zabbix_proxy_tlspskfile | dirname }}"
- mode: 0755
- state: directory
- become: true
- when:
- - zabbix_proxy_tlspskfile is defined
- tags:
- - config
-
- name: "Place TLS PSK File"
ansible.builtin.copy:
dest: "{{ zabbix_proxy_tlspskfile }}"
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/mysql.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/mysql.yml
deleted file mode 100644
index dde847a53..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/mysql.yml
+++ /dev/null
@@ -1,172 +0,0 @@
----
-# task file for mysql
-- name: "MySQL | Set the correct delegated_dbhost (to support MySQL db deployment on a remote dbhost)"
- ansible.builtin.set_fact:
- delegated_dbhost: "{{ zabbix_proxy_dbhost if (zabbix_proxy_dbhost != 'localhost') else inventory_hostname }}"
- when:
- - zabbix_proxy_dbhost_run_install
- tags:
- - database
-
-- name: "MySQL | Set the correct delegated_dbhost (to support MySQL db deployment on a remote dbhost)"
- ansible.builtin.set_fact:
- delegated_dbhost: "{{ inventory_hostname }}"
- when:
- - not zabbix_proxy_dbhost_run_install
- tags:
- - database
-
-- name: "MySQL | Override delegated_dbhost with real dbhost when dbhost is behind loadbalancer"
- ansible.builtin.set_fact:
- delegated_dbhost: "{{ zabbix_proxy_real_dbhost }}"
- when: zabbix_proxy_real_dbhost | default(false)
- tags:
- - database
-
-- name: PyMySQL
- ansible.builtin.pip:
- name: PyMySQL
- register: installation_dependencies
- until: installation_dependencies is succeeded
- tags:
- - database
-
-- name: "MySQL | Create database"
- community.mysql.mysql_db:
- name: "{{ zabbix_proxy_dbname }}"
- encoding: "{{ zabbix_proxy_dbencoding }}"
- collation: "{{ zabbix_proxy_dbcollation }}"
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- state: present
- when: zabbix_proxy_database_creation
- register: zabbix_database_created
- delegate_to: "{{ delegated_dbhost }}"
- tags:
- - database
- - skip_ansible_lint
-
-- name: "MySQL | Create database user"
- community.mysql.mysql_user:
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- name: "{{ zabbix_proxy_dbuser }}"
- password: "{{ zabbix_proxy_dbpassword }}"
- priv: "{{ zabbix_proxy_dbname }}.*:ALL"
- host: "{{ zabbix_proxy_privileged_host }}"
- plugin: "{{ 'mysql_native_password' if (ansible_os_family == 'RedHat' and ansible_distribution_major_version == '7') else omit }}"
- state: present
- when: zabbix_proxy_database_creation
- delegate_to: "{{ delegated_dbhost }}"
- tags:
- - database
-
-- name: "MySQL | Check if we have done files"
- ansible.builtin.stat:
- path: /etc/zabbix/schema.done
- register: done_file
- become: true
- when:
- - zabbix_proxy_database_sqlload
- tags:
- - database
-
-- name: "MySQL | Get version_comment"
- community.mysql.mysql_variables:
- variable: version
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- delegate_to: "{{ delegated_dbhost }}"
- register: install_mysql_version
- tags:
- - database
-
-- name: "MySQL | Get current value for innodb_default_row_format"
- community.mysql.mysql_variables:
- variable: innodb_default_row_format
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- delegate_to: "{{ delegated_dbhost }}"
- register: mysql_innodb_default_row_format
- when:
- - install_mysql_version.msg is version('5.6', '>=')
- tags:
- - database
-
-- name: "MySQL | Set innodb_default_row_format to dynamic"
- community.mysql.mysql_variables:
- variable: innodb_default_row_format
- value: dynamic
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- when:
- - zabbix_proxy_database_sqlload | bool
- - not done_file.stat.exists
- - install_mysql_version.msg is version('5.6', '>=')
- - mysql_innodb_default_row_format.msg != 'dynamic'
- delegate_to: "{{ delegated_dbhost }}"
- tags:
- - database
-
-- name: "MySQL | Create database and import file"
- community.mysql.mysql_db:
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- name: "{{ zabbix_proxy_dbname }}"
- encoding: "{{ zabbix_proxy_dbencoding }}"
- collation: "{{ zabbix_proxy_dbcollation }}"
- state: import
- target: "{{ ls_output_schema.stdout }}"
- when:
- - zabbix_proxy_database_sqlload
- - not done_file.stat.exists
- delegate_to: "{{ delegated_dbhost }}"
- tags:
- - database
-
-- name: "MySQL | Revert innodb_default_row_format to previous value"
- community.mysql.mysql_variables:
- variable: innodb_default_row_format
- value: "{{ mysql_innodb_default_row_format.msg }}"
- login_host: "{{ zabbix_proxy_mysql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_mysql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_mysql_login_password | default(omit) }}"
- login_port: "{{ zabbix_proxy_mysql_login_port | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_mysql_login_unix_socket | default(omit) }}"
- when:
- - zabbix_proxy_database_sqlload | bool
- - not done_file.stat.exists
- - mysql_innodb_default_row_format.msg != 'dynamic'
- delegate_to: "{{ delegated_dbhost }}"
- tags:
- - database
-
-- name: "MySQL | Create done file"
- ansible.builtin.file:
- path: /etc/zabbix/schema.done
- state: touch
- mode: "0644"
- become: true
- when:
- - zabbix_proxy_database_sqlload
- - not done_file.stat.exists
- tags:
- - database
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/postgresql.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/postgresql.yml
deleted file mode 100644
index e71af9aba..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/postgresql.yml
+++ /dev/null
@@ -1,96 +0,0 @@
----
-# task file for postgresql
-
-- name: "PostgreSQL | Set the correct delegated_dbhost (to support postgres db deployment on a remote dbhost)"
- ansible.builtin.set_fact:
- delegated_dbhost: "{{ zabbix_proxy_dbhost if (zabbix_proxy_dbhost != 'localhost') else inventory_hostname }}"
- when:
- - zabbix_proxy_dbhost_run_install
- tags:
- - database
-
-- name: "PostgreSQL | Set the correct delegated_dbhost (to support postgres db deployment on a remote dbhost)"
- ansible.builtin.set_fact:
- delegated_dbhost: "{{ inventory_hostname }}"
- when:
- - not zabbix_proxy_dbhost_run_install
- tags:
- - database
-
-- name: "PostgreSQL | Delegated"
- block:
- - name: "PostgreSQL | Delegated | Create database"
- community.postgresql.postgresql_db:
- name: "{{ zabbix_proxy_dbname }}"
- port: "{{ zabbix_proxy_dbport }}"
- state: present
-
- - name: "PostgreSQL | Delegated | Create database user"
- community.postgresql.postgresql_user:
- db: "{{ zabbix_proxy_dbname }}"
- name: "{{ zabbix_proxy_dbuser }}"
- password: "{{ ('md5' + (zabbix_proxy_dbpassword + zabbix_proxy_dbuser)|hash('md5')) if zabbix_proxy_dbpassword_hash_method == 'md5' else zabbix_proxy_dbpassword }}"
- port: "{{ zabbix_proxy_dbport }}"
- priv: ALL
- state: present
- encrypted: true
- become: true
- become_user: postgres
- delegate_to: "{{ delegated_dbhost }}"
- when:
- - zabbix_proxy_database_creation
- - zabbix_proxy_pgsql_login_host is not defined
- tags:
- - database
-
-- name: "PostgreSQL | Remote"
- block:
- - name: "PostgreSQL | Remote | Create database"
- community.postgresql.postgresql_db:
- login_host: "{{ zabbix_proxy_pgsql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_pgsql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_pgsql_login_password | default(omit) }}"
- login_unix_socket: "{{ zabbix_proxy_pgsql_login_unix_socket | default(omit) }}"
- name: "{{ zabbix_proxy_dbname }}"
- port: "{{ zabbix_proxy_dbport }}"
- state: present
- - name: "PostgreSQL | Remote | Create database user"
- community.postgresql.postgresql_user:
- login_host: "{{ zabbix_proxy_pgsql_login_host | default(omit) }}"
- login_user: "{{ zabbix_proxy_pgsql_login_user | default(omit) }}"
- login_password: "{{ zabbix_proxy_pgsql_login_password | default(omit) }}"
- db: "{{ zabbix_proxy_dbname }}"
- name: "{{ zabbix_proxy_dbuser }}"
- password: "{{ ('md5' + (zabbix_proxy_dbpassword + zabbix_proxy_dbuser)|hash('md5')) if zabbix_proxy_dbpassword_hash_method == 'md5' else zabbix_proxy_dbpassword }}"
- port: "{{ zabbix_proxy_dbport }}"
- priv: ALL
- state: present
- encrypted: true
- when:
- - zabbix_proxy_database_creation
- - zabbix_proxy_pgsql_login_host is defined
- tags:
- - database
-
-- name: "PostgreSQL | Handle Compressed Schema File"
- ansible.builtin.set_fact:
- zabbix_proxy_cat_cmd: zcat
- when: "'.gz' in ls_output_schema.stdout"
- tags:
- - database
-
-- name: "PostgreSQL | Importing schema file"
- ansible.builtin.shell: |
- set -euxo pipefail
- {{ zabbix_proxy_cat_cmd }} {{ ls_output_schema.stdout }} | psql -h '{{ zabbix_proxy_dbhost }}' -U '{{ zabbix_proxy_dbuser }}' -d '{{ zabbix_proxy_dbname }}'
- touch /etc/zabbix/schema.done
- args:
- creates: /etc/zabbix/schema.done
- executable: /bin/bash
- environment:
- PGPASSWORD: "{{ zabbix_proxy_dbpassword }}"
- become: true
- when:
- - zabbix_proxy_database_creation
- tags:
- - database
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/sqlite3.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/sqlite3.yml
deleted file mode 100644
index 3d74b73e7..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/tasks/sqlite3.yml
+++ /dev/null
@@ -1,63 +0,0 @@
----
-# task file for sqlite3
-
-- name: "Sqlite3 | Default Database Path"
- ansible.builtin.set_fact:
- zabbix_proxy_dbname: /var/lib/zabbix/zabbix_proxy.db
- when:
- - zabbix_proxy_dbname == "zabbix_proxy"
- tags:
- - database
-
-- name: "Sqlite3 | Create database"
- ansible.builtin.file:
- name: "{{ zabbix_proxy_dbname | dirname }}"
- mode: 0744
- owner: "{{ zabbix_os_user }}"
- group: "{{ zabbix_os_user }}"
- seuser: system_u
- serole: object_r
- setype: zabbix_var_lib_t
- state: directory
- become: true
- when:
- - zabbix_proxy_database_creation
- tags:
- - database
-
-- name: "Sqlite3 | Handle Compressed Schema File"
- ansible.builtin.set_fact:
- zabbix_proxy_cat_cmd: zcat
- when: "'.gz' in ls_output_schema.stdout"
- tags:
- - database
-
-- name: "Sqlite3 | Importing schema file"
- become: true
- become_user: "{{ zabbix_os_user }}"
- ansible.builtin.shell: |
- set -euxo pipefail
- {{ zabbix_proxy_cat_cmd }} {{ ls_output_schema.stdout }} | sqlite3 {{ zabbix_proxy_dbname }}
- args:
- creates: "{{ zabbix_proxy_dbname }}"
- executable: /bin/bash
- environment:
- PGPASSWORD: "{{ zabbix_proxy_dbpassword }}"
- when:
- - zabbix_proxy_database_creation
- tags:
- - database
-
-- name: "Sqlite3 | Fix zabbix db file permission (SELinux)"
- ansible.builtin.file:
- path: "{{ zabbix_proxy_dbname }}"
- state: file
- seuser: system_u
- serole: object_r
- setype: zabbix_var_lib_t
- become: true
- when:
- - ansible_selinux.status == "enabled"
- - zabbix_proxy_database_creation
- tags:
- - database
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/templates/zabbix_proxy.conf.j2 b/ansible_collections/community/zabbix/roles/zabbix_proxy/templates/zabbix_proxy.conf.j2
index 60ae3f0a5..3d585cbab 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/templates/zabbix_proxy.conf.j2
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/templates/zabbix_proxy.conf.j2
@@ -7,9 +7,7 @@
# https://www.zabbix.com/documentation/{{ zabbix_proxy_version }}/en/manual/appendix/config/zabbix_proxy
{{ (zabbix_proxy_allowroot is defined and zabbix_proxy_allowroot is not none) | ternary('','# ') }}AllowRoot={{ zabbix_proxy_allowroot | default('') }}
-{% if zabbix_proxy_version is version('6.0', '>=') %}
{{ (zabbix_proxy_allowunsupporteddbversions is defined and zabbix_proxy_allowunsupporteddbversions is not none) | ternary('','# ') }}AllowUnsupportedDBVersions={{ zabbix_proxy_allowunsupporteddbversions | default('') }}
-{% endif %}
{{ (zabbix_proxy_cachesize is defined and zabbix_proxy_cachesize is not none) | ternary('','# ') }}CacheSize={{ zabbix_proxy_cachesize | default('') }}
{{ (zabbix_proxy_configfrequency is defined and zabbix_proxy_configfrequency is not none) | ternary('','# ') }}ConfigFrequency={{ zabbix_proxy_configfrequency | default('') }}
{{ (zabbix_proxy_datasenderfrequency is defined and zabbix_proxy_datasenderfrequency is not none) | ternary('','# ') }}DataSenderFrequency={{ zabbix_proxy_datasenderfrequency | default('') }}
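Editor's note: every directive line in this template relies on the same ternary pattern, which comments the directive out whenever its variable is unset. A minimal hedged sketch of that pattern, shown as a standalone debug task (the CacheSize value below is a made-up example, not a role default):

# Hedged sketch: the comment-out-when-unset pattern used by zabbix_proxy.conf.j2.
- name: Render a proxy config line the same way the template does
  ansible.builtin.debug:
    msg: "{{ (zabbix_proxy_cachesize is defined and zabbix_proxy_cachesize is not none) | ternary('', '# ') }}CacheSize={{ zabbix_proxy_cachesize | default('') }}"
  vars:
    zabbix_proxy_cachesize: 256M   # prints "CacheSize=256M"; when unset it prints "# CacheSize="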
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/Debian.yml
index 1362e557e..af274952b 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/Debian.yml
@@ -11,6 +11,9 @@ zabbix_valid_proxy_versions:
- 6.4
- 6.2
- 6.0
+ "24":
+ - 6.4
+ - 6.0
"22":
- 6.4
- 6.2
@@ -24,27 +27,37 @@ zabbix_valid_proxy_versions:
- 6.2
- 6.0
-mysql_client_pkgs:
+_zabbix_proxy_pgsql_dependencies:
+ - "{{ zabbix_proxy_install_database_client | ternary('postgresql-client', '') }}"
+ - python3-psycopg2
+
+_zabbix_proxy_mysql_dependencies:
# Debian
"12":
- default-mysql-client
- - "{{ zabbix_python_prefix }}-mysqldb"
+ - python3-pymysql
"11":
- default-mysql-client
- - "{{ zabbix_python_prefix }}-mysqldb"
+ - python3-pymysql
"10":
- mariadb-client
- - "{{ zabbix_python_prefix }}-mysqldb"
+ - python3-pymysql
# Ubuntu
- "22":
+ "24":
- default-mysql-client
- "{{ zabbix_python_prefix }}-mysqldb"
+ "22":
+ - default-mysql-client
+ - python3-pymysql
"20":
- default-mysql-client
- - "{{ zabbix_python_prefix }}-mysqldb"
+ - python3-pymysql
"18":
- default-mysql-client
- - "{{ zabbix_python_prefix }}-mysqldb"
+ - python3-pymysql
+
+_zabbix_proxy_sqlite3_dependencies:
+ - sqlite3
mysql_plugin:
"18": mysql_native_password
@@ -54,3 +67,7 @@ debian_keyring_path: /etc/apt/keyrings/
zabbix_gpg_key: "{{ debian_keyring_path }}zabbix-repo.asc"
_zabbix_proxy_fping6location: /usr/bin/fping6
_zabbix_proxy_fpinglocation: /usr/bin/fping
+
+_zabbix_proxy_packages:
+ - "zabbix-proxy-{{ zabbix_proxy_database }}"
+ - "zabbix-sql-scripts"
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/RedHat.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/RedHat.yml
index e8ee7e2ae..6a22ec36d 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/RedHat.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/RedHat.yml
@@ -12,27 +12,24 @@ zabbix_valid_proxy_versions:
- 6.2
- 6.0
-pgsql_depenencies:
- "9":
- - python3-psycopg2
- - postgresql
- "8":
- - python3-psycopg2
- - postgresql
- "7":
- - python-psycopg2
- - postgresql
+_zabbix_proxy_pgsql_dependencies:
+ - "{{ zabbix_proxy_install_database_client | ternary('postgresql', '') }}"
+ - python3-psycopg2
-mysql_client_pkgs:
+_zabbix_proxy_mysql_dependencies:
"9":
- - mysql
+ - "{{ zabbix_proxy_install_database_client | ternary('mysql', '') }}"
- python3-PyMySQL
"8":
- - mysql
+ - "{{ zabbix_proxy_install_database_client | ternary('mysql', '') }}"
- python3-PyMySQL
"7":
- - MariaDB-client
- - MySQL-python
+ - "{{ zabbix_proxy_install_database_client | ternary('mariadb', '') }}"
+ - python3-PyMySQL
+ - python36-cryptography
+
+_zabbix_proxy_sqlite3_dependencies:
+ - sqlite
selinux_pkgs:
"9":
@@ -53,3 +50,10 @@ mysql_plugin:
_zabbix_proxy_fping6location: /usr/sbin/fping6
_zabbix_proxy_fpinglocation: /usr/sbin/fping
+
+_zabbix_proxy_packages:
+ - "zabbix-proxy-{{ zabbix_proxy_database }}-{{ zabbix_proxy_version }}.{{ zabbix_proxy_version_minor }}"
+ - "zabbix-sql-scripts-{{ zabbix_proxy_version }}.{{ zabbix_proxy_version_minor }}"
+
+_zabbix_proxy_disable_repo:
+ - epel
diff --git a/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/main.yml b/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/main.yml
deleted file mode 100644
index 90779c270..000000000
--- a/ansible_collections/community/zabbix/roles/zabbix_proxy/vars/main.yml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-# vars file for zabbix_proxy
-db_file_path:
- "62": "/usr/share/zabbix-sql-scripts/{{ zabbix_proxy_db_long }}/proxy.sql"
- "64": "/usr/share/zabbix-sql-scripts/{{ zabbix_proxy_db_long }}/proxy.sql"
- "60": "/usr/share/zabbix-sql-scripts/{{ zabbix_proxy_db_long }}/proxy.sql"
- "50": "/usr/share/doc/zabbix-proxy-{{ zabbix_proxy_database }}*/schema.sql.gz"
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/README.md b/ansible_collections/community/zabbix/roles/zabbix_server/README.md
index 9557281c3..20f462c48 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/README.md
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/README.md
@@ -79,6 +79,7 @@ See the following list of supported Operating systems with the Zabbix releases:
|---------------------|-----|-----|-----|
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | | | V |
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/defaults/main.yml b/ansible_collections/community/zabbix/roles/zabbix_server/defaults/main.yml
index 933e0339a..b3c669ba9 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/defaults/main.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/defaults/main.yml
@@ -8,25 +8,27 @@ zabbix_server_manage_service: true
# Database
zabbix_server_database_sqlload: true
zabbix_server_database_timescaledb: false
-zabbix_server_real_dbhost:
+#zabbix_server_real_dbhost:
zabbix_server_dbhost: localhost
zabbix_server_dbname: zabbix-server
zabbix_server_privileged_host: localhost
zabbix_server_dbencoding: utf8
zabbix_server_dbcollation: utf8_bin
-zabbix_server_dbschema:
+#zabbix_server_dbschema:
zabbix_server_dbuser: zabbix-server
zabbix_server_dbpassword: zabbix-server
zabbix_server_dbpassword_hash_method: md5
-zabbix_server_dbsocket:
-zabbix_server_dbport: 5432
+#zabbix_server_dbsocket:
+_zabbix_server_database_default_port:
+ mysql: 3306
+ pgsql: 5432
+zabbix_server_dbport: "{{ _zabbix_server_database_default_port[zabbix_server_database] }}"
zabbix_server_dbhost_run_install: true
zabbix_server_database: pgsql
zabbix_server_database_creation: true
zabbix_server_install_database_client: true
# SELinux specific
-zabbix_server_selinux: false
selinux_allow_zabbix_can_network: false
selinux_allow_zabbix_can_http: false
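Editor's note: the database port default above is resolved through a dictionary keyed by the selected backend. A minimal hedged sketch of the same lookup pattern, with the variable names taken from the hunk and the port values assumed to match the usual MySQL/PostgreSQL defaults:

# Hedged sketch: dictionary-keyed default, as used for zabbix_server_dbport above.
_zabbix_server_database_default_port:
  mysql: 3306
  pgsql: 5432
zabbix_server_database: pgsql
# Resolves to 5432 for pgsql and 3306 for mysql; a user-supplied
# zabbix_server_dbport still wins, since role defaults have the
# lowest variable precedence.
zabbix_server_dbport: "{{ _zabbix_server_database_default_port[zabbix_server_database] }}"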
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/RedHat.yml b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/RedHat.yml
index 77fb7cd8a..7fdb5bb72 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/RedHat.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/RedHat.yml
@@ -36,5 +36,4 @@
- name: "RedHat | Configure SELinux when enabled"
ansible.builtin.include_tasks: selinux.yml
- when:
- - zabbix_server_selinux | bool
+ when: ansible_facts.selinux.status | default('disabled') == 'enabled'
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-mysql.yml b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-mysql.yml
index c3fd67c6d..b16b475c9 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-mysql.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-mysql.yml
@@ -27,6 +27,7 @@
- name: "MySQL Database prep"
when: zabbix_server_database_creation
+ become: "{{ zabbix_server_dbhost_run_install }}"
delegate_to: "{{ zabbix_server_real_dbhost | default(zabbix_server_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname)) }}"
vars:
delegated_dbhost: "{{ (zabbix_server_dbhost == 'localhost') | ternary(inventory_hostname, zabbix_server_dbhost) }}"
@@ -81,7 +82,7 @@
query: 'SELECT mandatory FROM dbversion'
rescue:
- name: "MySQL | Get and set schema import overrides"
- delegate_to: "{{ zabbix_server_real_dbhost | default(zabbix_server_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname)) }}"
+ delegate_to: "{{ zabbix_server_real_dbhost | default(zabbix_server_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname), true) }}"
block:
- name: "MySQL | Get current value for variables"
community.mysql.mysql_variables:
@@ -141,7 +142,7 @@
always:
- name: "MySQL | Revert variable overrides for schema import"
- delegate_to: "{{ zabbix_server_real_dbhost | default(zabbix_server_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname)) }}"
+ delegate_to: "{{ zabbix_server_real_dbhost | default(zabbix_server_dbhost_run_install | ternary(delegated_dbhost, inventory_hostname), true) }}"
community.mysql.mysql_variables:
variable: "{{ item.name }}"
value: "{{ item.msg }}"
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-pgsql.yml b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-pgsql.yml
index 65bd0beec..6f40c66f2 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-pgsql.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/initialize-pgsql.yml
@@ -32,7 +32,6 @@
port: "{{ zabbix_server_dbport }}"
login_unix_socket: "{{ zabbix_server_pgsql_login_unix_socket | default(omit) }}"
name: "{{ zabbix_server_dbname }}"
- state: present
- name: "PostgreSQL | Create database user"
community.postgresql.postgresql_user:
@@ -43,10 +42,19 @@
login_unix_socket: "{{ zabbix_server_pgsql_login_unix_socket | default(omit) }}"
name: "{{ zabbix_server_dbuser }}"
password: "{{ ('md5' + (zabbix_server_dbpassword + zabbix_server_dbuser)|hash('md5')) if zabbix_server_dbpassword_hash_method == 'md5' else zabbix_server_dbpassword }}"
+
+ - name: "PostgreSQL | Set database/user permissions"
+ community.postgresql.postgresql_privs:
+ login_user: "{{ zabbix_server_pgsql_login_user | default(omit) }}"
+ login_password: "{{ zabbix_server_pgsql_login_password | default(omit) }}"
+ login_host: "{{ zabbix_server_pgsql_login_host | default(omit) }}"
+ port: "{{ zabbix_server_dbport }}"
+ login_unix_socket: "{{ zabbix_server_pgsql_login_unix_socket | default(omit) }}"
db: "{{ zabbix_server_dbname }}"
- priv: ALL
- state: present
- encrypted: true
+ privs: ALL
+ type: schema
+ objs: public
+ role: "{{ zabbix_server_dbuser }}"
- name: "PostgreSQL | Create timescaledb extension"
when: zabbix_server_database_timescaledb
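Editor's note: the new postgresql_privs task grants the application role rights on the public schema explicitly, which matters on newer PostgreSQL releases where non-owners no longer get CREATE on public by default. A hedged sketch of the same grant in isolation (database and role names are assumed, connection parameters omitted):

# Hedged sketch: schema-level grant with community.postgresql.postgresql_privs.
- name: Grant the application role full rights on the public schema
  community.postgresql.postgresql_privs:
    db: zabbix-server        # assumed database name
    type: schema
    objs: public
    privs: ALL
    role: zabbix-server      # assumed role name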
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/selinux.yml b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/selinux.yml
index cd13dbbfd..e30480cd2 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/tasks/selinux.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/tasks/selinux.yml
@@ -1,125 +1,33 @@
---
-# straight to getenforce binary , workaround for missing python_selinux library
-- name: "SELinux | Get getenforce binary"
- ansible.builtin.stat:
- path: /usr/sbin/getenforce
- register: getenforce_bin
- become: true
- tags:
- - always
-
-- name: "SELinux | Collect getenforce output"
- ansible.builtin.command: getenforce
- register: sestatus
- when: "getenforce_bin.stat.exists"
- changed_when: false
- become: true
- check_mode: false
- tags:
- - always
-
-- name: "Set zabbix_server_selinux to true if getenforce returns Enforcing or Permissive"
- ansible.builtin.set_fact:
- zabbix_server_selinux: "{{ true }}"
- when: 'getenforce_bin.stat.exists and ("Enforcing" in sestatus.stdout or "Permissive" in sestatus.stdout)'
- tags:
- - config
-
-- name: "SELinux | RedHat | Install related SELinux package"
- ansible.builtin.yum:
+- name: "SELinux | Install related SELinux packages"
+ ansible.builtin.package:
name:
- - libsemanage-python
- - policycoreutils
- checkpolicy
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_server_dependencies_installed
- until: zabbix_server_dependencies_installed is succeeded
- become: true
- when:
- - ansible_os_family == "RedHat"
- - selinux_allow_zabbix_can_network
- - ansible_distribution_major_version == "7" or ansible_distribution_major_version == "6"
- tags:
- - install
-
-- name: "SELinux | RedHat | Install related SELinux package on RHEL8"
- ansible.builtin.yum:
- name:
+ - policycoreutils
- python3-libsemanage
- state: present
+ - python3-policycoreutils
environment:
http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
register: zabbix_server_dependencies_installed
until: zabbix_server_dependencies_installed is succeeded
become: true
- when:
- - ansible_os_family == "RedHat"
- - selinux_allow_zabbix_can_network
- - ansible_distribution_major_version == "8"
tags:
- install
-- name: "SELinux | RedHat | Enable httpd_can_connect_zabbix SELinux boolean"
- ansible.posix.seboolean:
- name: httpd_can_connect_zabbix
- state: true
- persistent: true
- become: true
- when:
- - selinux_allow_zabbix_can_http
- tags:
- - config
-
-- name: "SELinux | RedHat | Enable zabbix_can_network SELinux boolean"
+- name: "SELinux | Set booleans"
ansible.posix.seboolean:
- name: zabbix_can_network
- state: true
+ name: "{{ item.name }}"
+ state: "{{ item.state }}"
persistent: true
become: true
- when:
- - selinux_allow_zabbix_can_network
tags:
- config
-
-- name: "SELinux | RedHat | Install related SELinux package to fix issues"
- ansible.builtin.yum:
- name:
- - policycoreutils-python
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_server_dependencies_installed
- until: zabbix_server_dependencies_installed is succeeded
- become: true
- when:
- - ansible_os_family == "RedHat"
- - ansible_distribution_major_version == "7" or ansible_distribution_major_version == "6"
- tags:
- - install
-
-- name: "SELinux | RedHat | Install related SELinux package to fix issues on RHEL8"
- ansible.builtin.yum:
- name:
- - policycoreutils
- - checkpolicy
- - python3-libsemanage
- state: present
- environment:
- http_proxy: "{{ zabbix_http_proxy | default(None) | default(omit) }}"
- https_proxy: "{{ zabbix_https_proxy | default(None) | default(omit) }}"
- register: zabbix_server_dependencies_installed
- until: zabbix_server_dependencies_installed is succeeded
- become: true
- when:
- - ansible_os_family == "RedHat"
- - ansible_distribution_major_version == "8"
- tags:
- - install
+ loop:
+ - name: httpd_can_connect_zabbix
+ state: "{{ selinux_allow_zabbix_can_http }}"
+ - name: zabbix_can_network
+ state: "{{ selinux_allow_zabbix_can_network }}"
- name: "SELinux | RedHat | Add SEmodule to fix SELinux issue: zabbix_server_alerter.sock"
ansible.builtin.script:
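Editor's note: the consolidated seboolean task above drives both SELinux booleans from the role flags through a single loop. A compact hedged sketch of that loop pattern:

# Hedged sketch: setting SELinux booleans from a list of name/state pairs.
- name: Set SELinux booleans from role flags
  ansible.posix.seboolean:
    name: "{{ item.name }}"
    state: "{{ item.state }}"
    persistent: true
  become: true
  loop:
    - { name: httpd_can_connect_zabbix, state: "{{ selinux_allow_zabbix_can_http | bool }}" }
    - { name: zabbix_can_network, state: "{{ selinux_allow_zabbix_can_network | bool }}" }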
diff --git a/ansible_collections/community/zabbix/roles/zabbix_server/vars/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_server/vars/Debian.yml
index 75f3751c2..277fa8580 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_server/vars/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_server/vars/Debian.yml
@@ -17,6 +17,9 @@ zabbix_valid_server_versions:
"10":
- 6.0
# Ubuntu
+ "24":
+ - 6.4
+ - 6.0
"22":
- 6.4
- 6.2
diff --git a/ansible_collections/community/zabbix/roles/zabbix_web/README.md b/ansible_collections/community/zabbix/roles/zabbix_web/README.md
index aac6f9dc2..c1875f208 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_web/README.md
+++ b/ansible_collections/community/zabbix/roles/zabbix_web/README.md
@@ -59,6 +59,7 @@ See the following list of supported Operating Systems with the Zabbix releases.
|---------------------|-----|-----|-----|
| Red Hat Fam 9 | V | V | V |
| Red Hat Fam 8 | V | V | V |
+| Ubuntu 24.04 noble | V | | V |
| Ubuntu 22.04 jammy | V | V | V |
| Ubuntu 20.04 focal | V | V | V |
| Ubuntu 18.04 bionic | | | V |
diff --git a/ansible_collections/community/zabbix/roles/zabbix_web/templates/php-fpm.conf.j2 b/ansible_collections/community/zabbix/roles/zabbix_web/templates/php-fpm.conf.j2
index e6b02cc9e..3dd337e4d 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_web/templates/php-fpm.conf.j2
+++ b/ansible_collections/community/zabbix/roles/zabbix_web/templates/php-fpm.conf.j2
@@ -8,7 +8,7 @@ listen.acl_users = {{ zabbix_php_fpm_conf_user if zabbix_php_fpm_conf_user is de
{% endif %}
listen.owner = {{ zabbix_php_fpm_conf_user if zabbix_php_fpm_conf_user is defined else zabbix_web_user }}
listen.group = {{ _nginx_group if zabbix_web_http_server=='nginx' else _apache_group }}
-listen.mode = 0644
+listen.mode = 0660
listen.allowed_clients = 127.0.0.1
pm = dynamic
diff --git a/ansible_collections/community/zabbix/roles/zabbix_web/vars/Debian.yml b/ansible_collections/community/zabbix/roles/zabbix_web/vars/Debian.yml
index f49b27155..643b226f5 100644
--- a/ansible_collections/community/zabbix/roles/zabbix_web/vars/Debian.yml
+++ b/ansible_collections/community/zabbix/roles/zabbix_web/vars/Debian.yml
@@ -35,6 +35,9 @@ zabbix_valid_web_versions:
"10":
- 6.0
# Ubuntu
+ "24":
+ - 6.4
+ - 6.0
"22":
- 6.4
- 6.2
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/meta/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/meta/main.yml
new file mode 100644
index 000000000..acdb704c8
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/meta/main.yml
@@ -0,0 +1,3 @@
+---
+dependencies:
+ - setup_zabbix
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/main.yml
new file mode 100644
index 000000000..8642afe46
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/main.yml
@@ -0,0 +1,29 @@
+---
+- name: test - do not run tests for Zabbix < 6.4
+ meta: end_play
+ when: zabbix_version is version('6.4', '<')
+
+- block:
+ # setup stuff
+ - include_tasks: zabbix_setup.yml
+
+ # zabbix_discoveryrule module tests
+ - include_tasks: zabbix_tests.yml
+
+ # tear down stuff set up earlier
+ - include_tasks: zabbix_teardown.yml
+
+ always:
+ - name: "cleanup discoveryrule if tests failed"
+ community.zabbix.zabbix_discoveryrule:
+ host_name: ExampleHost
+ name: ExampleRule
+ state: absent
+ ignore_errors: true
+
+ - name: "cleanup discoveryrule if tests failed"
+ community.zabbix.zabbix_discoveryrule:
+ template_name: ExampleTemplate
+ name: ExampleRule
+ state: absent
+ ignore_errors: true
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_setup.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_setup.yml
new file mode 100644
index 000000000..4daf66abf
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_setup.yml
@@ -0,0 +1,25 @@
+---
+
+- name: Create test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ template_groups:
+ - Templates
+
+- name: Create test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ host_groups:
+ - Linux servers
+ - Zabbix servers
+ link_templates:
+ - ExampleTemplate
+ status: enabled
+ state: present
+ interfaces:
+ - type: 1
+ main: 1
+ useip: 1
+ ip: 10.1.1.1
+ dns: ""
+ port: "10050"
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_teardown.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_teardown.yml
new file mode 100644
index 000000000..4a2ec158f
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_teardown.yml
@@ -0,0 +1,9 @@
+- name: remove test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ state: absent
+
+- name: remove test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ state: absent
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_tests.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_tests.yml
new file mode 100644
index 000000000..ffed13e05
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_discoveryrule/tasks/zabbix_tests.yml
@@ -0,0 +1,147 @@
+---
+
+- name: test - create new Zabbix discoveryrule on host with many options set
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostrule_new
+
+- name: assert that rule was created
+ ansible.builtin.assert:
+ that: zbxhostrule_new is changed
+
+- name: test - create same Zabbix discoveryrule once again
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostrule_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhostrule_existing is changed
+
+- name: test - update existing zabbix discoveryrule
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ host_name: ExampleHost
+ params:
+ interval: 2m
+ state: present
+ register: zbxhostrule_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxhostrule_changed is changed
+
+- name: test - attempt to delete previously created zabbix discoveryrule
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostrule_existing_delete
+
+- name: assert that rule was deleted
+ ansible.builtin.assert:
+ that: zbxhostrule_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix discoveryrule
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostrule_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhostrule_missing_delete is changed
+
+- name: test - create new Zabbix discoveryrule on template with many options set
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtemprule_new
+
+- name: assert that rule was created
+ ansible.builtin.assert:
+ that: zbxtemprule_new is changed
+
+- name: test - create same Zabbix discoveryrule once again
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtemprule_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtemprule_existing is changed
+
+- name: test - update existing zabbix discoveryrule
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ template_name: ExampleTemplate
+ params:
+ interval: 2m
+ state: present
+ register: zbxtemprule_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxtemprule_changed is changed
+
+- name: test - attempt to delete previously created zabbix discoveryrule
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtemprule_existing_delete
+
+- name: assert that rule was deleted
+ ansible.builtin.assert:
+ that: zbxtemprule_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix discoveryrule
+ community.zabbix.zabbix_discoveryrule:
+ name: TestRule
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtemprule_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtemprule_missing_delete is changed
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/meta/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/meta/main.yml
new file mode 100644
index 000000000..acdb704c8
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/meta/main.yml
@@ -0,0 +1,3 @@
+---
+dependencies:
+ - setup_zabbix
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/main.yml
new file mode 100644
index 000000000..131e87ac9
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/main.yml
@@ -0,0 +1,13 @@
+---
+- name: test - do not run tests for Zabbix < 6.4
+ meta: end_play
+ when: zabbix_version is version('6.4', '<')
+
+# setup stuff
+- include_tasks: zabbix_setup.yml
+
+# zabbix_item module tests
+- include_tasks: zabbix_tests.yml
+
+# tear down stuff set up earlier
+- include_tasks: zabbix_teardown.yml
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_setup.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_setup.yml
new file mode 100644
index 000000000..c02a08595
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_setup.yml
@@ -0,0 +1,61 @@
+---
+
+- name: Create example template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ template_groups:
+ - Templates
+
+- name: Create example host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ host_groups:
+ - Linux servers
+ - Zabbix servers
+ link_templates:
+ - ExampleTemplate
+ status: enabled
+ state: present
+ interfaces:
+ - type: 1
+ main: 1
+ useip: 1
+ ip: 10.1.1.1
+ dns: ""
+ port: "10050"
+
+- name: create ping item
+ community.zabbix.zabbix_item:
+ name: ping
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: agent.ping
+ value_type: numeric_unsigned
+ interval: 20s
+ state: present
+
+- name: create ping trigger
+ community.zabbix.zabbix_trigger:
+ name: ping
+ template_name: ExampleTemplate
+ params:
+ severity: warning
+ expression: 'nodata(/ExampleTemplate/agent.ping,1m)=1'
+ manual_close: True
+ state: present
+
+- name: Wait to ensure triggers are firing
+ ansible.builtin.wait_for:
+ timeout: 120
+
+- name: get events for host
+ community.zabbix.zabbix_host_events_info:
+ host_identifier: ExampleHost
+ host_id_type: hostname
+ trigger_severity: warning
+ register: zabbix_host_events
+
+- name: get eventid
+ ansible.builtin.set_fact:
+ zabbix_eventid: "{{ zabbix_host_events.triggers_problem[0].last_event.eventid }}"
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_teardown.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_teardown.yml
new file mode 100644
index 000000000..2df4dd571
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_teardown.yml
@@ -0,0 +1,9 @@
+- name: remove example host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ state: absent
+
+- name: remove example template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ state: absent
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_tests.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_tests.yml
new file mode 100644
index 000000000..48d63b47d
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_host_events_update/tasks/zabbix_tests.yml
@@ -0,0 +1,59 @@
+---
+
+- name: test - acknowledge event
+ community.zabbix.zabbix_host_events_update:
+ params:
+ eventids: "{{ zabbix_eventid }}"
+ action: ack
+ msg: "event acknowledged"
+ register: zbxevent_ack
+
+- name: assert that event was changed
+ ansible.builtin.assert:
+ that: zbxevent_ack is changed
+
+- name: get events updated status
+ community.zabbix.zabbix_host_events_info:
+ host_identifier: ExampleHost
+ host_id_type: hostname
+ trigger_severity: warning
+ register: zabbix_host_events_ack
+
+- name: assert that event was acknowledged
+ ansible.builtin.assert:
+ that: zabbix_host_events_ack.triggers_problem[0].last_event.acknowledged == "1"
+
+- name: test - change severity and unacknowledge
+ community.zabbix.zabbix_host_events_update:
+ params:
+ eventids: "{{ zabbix_eventid }}"
+ actions: ['severity', 'unack']
+ severity: high
+ register: zbxevent_sev
+
+- name: assert that event was changed
+ ansible.builtin.assert:
+ that: zbxevent_sev is changed
+
+- name: get events updated status
+ community.zabbix.zabbix_host_events_info:
+ host_identifier: ExampleHost
+ host_id_type: hostname
+ trigger_severity: warning
+ register: zabbix_host_events_unack
+
+- name: assert that event was unacknowledged
+ ansible.builtin.assert:
+ that: zabbix_host_events_unack.triggers_problem[0].last_event.acknowledged == "0"
+
+- name: test - change severity to same
+ community.zabbix.zabbix_host_events_update:
+ params:
+ eventids: "{{ zabbix_eventid }}"
+ action: severity
+ severity: high
+ register: zbxevent_sev_existing
+
+- name: assert that event was not changed
+ ansible.builtin.assert:
+ that: zbxevent_sev_existing is not changed
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/meta/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/meta/main.yml
new file mode 100644
index 000000000..acdb704c8
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/meta/main.yml
@@ -0,0 +1,3 @@
+---
+dependencies:
+ - setup_zabbix
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/main.yml
new file mode 100644
index 000000000..531720193
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/main.yml
@@ -0,0 +1,28 @@
+---
+- name: test - do not run tests for Zabbix < 6.4
+ meta: end_play
+ when: zabbix_version is version('6.4', '<')
+
+- block:
+ # setup stuff
+ - include_tasks: zabbix_setup.yml
+
+ # zabbix_item module tests
+ - include_tasks: zabbix_tests.yml
+
+ # tear down stuff set up earlier
+ - include_tasks: zabbix_teardown.yml
+ always:
+ - name: "cleanup host item if tests failed"
+ community.zabbix.zabbix_item:
+ host_name: ExampleHost
+ name: TestItem
+ state: absent
+ ignore_errors: true
+
+ - name: "cleanup template item if tests failed"
+ community.zabbix.zabbix_item:
+ template_name: ExampleTemplate
+ name: TestItem
+ state: absent
+ ignore_errors: true
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_setup.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_setup.yml
new file mode 100644
index 000000000..4daf66abf
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_setup.yml
@@ -0,0 +1,25 @@
+---
+
+- name: Create test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ template_groups:
+ - Templates
+
+- name: Create test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ host_groups:
+ - Linux servers
+ - Zabbix servers
+ link_templates:
+ - ExampleTemplate
+ status: enabled
+ state: present
+ interfaces:
+ - type: 1
+ main: 1
+ useip: 1
+ ip: 10.1.1.1
+ dns: ""
+ port: "10050"
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_teardown.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_teardown.yml
new file mode 100644
index 000000000..4a2ec158f
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_teardown.yml
@@ -0,0 +1,9 @@
+- name: remove test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ state: absent
+
+- name: remove test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ state: absent
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_tests.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_tests.yml
new file mode 100644
index 000000000..86767a716
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_item/tasks/zabbix_tests.yml
@@ -0,0 +1,246 @@
+---
+
+- name: test - create new Zabbix item on host with many options set
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: vfs.fs.get
+ value_type: numeric_float
+ units: '%'
+ interval: 1m
+ preprocessing:
+ - type: jsonpath
+ params: '$[?(@.fstype == "ext4")]'
+ error_handler: zabbix_server
+ - type: jsonpath
+ params: "$[*].['bytes', 'inodes'].pused.max()"
+ error_handler: zabbix_server
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxhostitem_new is changed
+
+- name: test - create same Zabbix item once again
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: vfs.fs.get
+ value_type: numeric_float
+ units: '%'
+ interval: 1m
+ preprocessing:
+ - type: jsonpath
+ params: '$[?(@.fstype == "ext4")]'
+ error_handler: zabbix_server
+ - type: jsonpath
+ params: "$[*].['bytes', 'inodes'].pused.max()"
+ error_handler: zabbix_server
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostitem_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhostitem_existing is changed
+
+- name: test - update existing zabbix item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ params:
+ interval: 2m
+ state: present
+ register: zbxhostitem_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxhostitem_changed is changed
+
+- name: test - attempt to delete previously created zabbix item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostitem_existing_delete
+
+- name: assert that item was deleted
+ ansible.builtin.assert:
+ that: zbxhostitem_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostitem_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhostitem_missing_delete is changed
+
+- name: test - create new Zabbix master item on host
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: vfs.fs.get
+ value_type: text
+ interval: 1m
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostmstitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxhostmstitem_new is changed
+
+- name: create dependent item
+ community.zabbix.zabbix_item:
+ name: TestDependItem
+ host_name: ExampleHost
+ params:
+ type: dependent_item
+ key: vfs.fs.pused
+ value_type: numeric_float
+ units: '%'
+ master_item:
+ item_name: TestItem
+ host_name: ExampleHost
+ preprocessing:
+ - type: jsonpath
+ params: '$[?(@.fstype == "ext4")]'
+ error_handler: zabbix_server
+ - type: jsonpath
+ params: "$[*].['bytes', 'inodes'].pused.max()"
+ error_handler: zabbix_server
+ state: present
+ register: zbxhostdependitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxhostdependitem_new is changed
+
+- name: test - attempt to delete previously created zabbix master item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostmstitem_existing_delete
+
+- name: assert that item was deleted
+ ansible.builtin.assert:
+ that: zbxhostmstitem_existing_delete is changed
+
+- name: test - attempt to delete dependent item
+ community.zabbix.zabbix_item:
+ name: TestDependItem
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostdependitem_delete
+
+- name: assert that the item was removed along with its master
+ ansible.builtin.assert:
+ that: not zbxhostdependitem_delete is changed
+
+- name: test - create new Zabbix item on template with many options set
+ community.zabbix.zabbix_item:
+ name: TestItem
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: vfs.fs.get
+ value_type: numeric_float
+ units: '%'
+ interval: 1m
+ preprocessing:
+ - type: jsonpath
+ params: '$[?(@.fstype == "ext4")]'
+ error_handler: zabbix_server
+ - type: jsonpath
+ params: "$[*].['bytes', 'inodes'].pused.max()"
+ error_handler: zabbix_server
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtempitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxtempitem_new is changed
+
+- name: test - create same Zabbix item once again
+ community.zabbix.zabbix_item:
+ name: TestItem
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: vfs.fs.get
+ value_type: numeric_float
+ units: '%'
+ interval: 1m
+ preprocessing:
+ - type: jsonpath
+ params: '$[?(@.fstype == "ext4")]'
+ error_handler: zabbix_server
+ - type: jsonpath
+ params: "$[*].['bytes', 'inodes'].pused.max()"
+ error_handler: zabbix_server
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtempitem_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtempitem_existing is changed
+
+- name: test - update existing zabbix item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ template_name: ExampleTemplate
+ params:
+ interval: 2m
+ state: present
+ register: zbxtempitem_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxtempitem_changed is changed
+
+- name: test - attempt to delete previously created zabbix item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtempitem_existing_delete
+
+- name: assert that item was deleted
+ ansible.builtin.assert:
+ that: zbxtempitem_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix item
+ community.zabbix.zabbix_item:
+ name: TestItem
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtempitem_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtempitem_missing_delete is changed
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/meta/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/meta/main.yml
new file mode 100644
index 000000000..acdb704c8
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/meta/main.yml
@@ -0,0 +1,3 @@
+---
+dependencies:
+ - setup_zabbix
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/main.yml
new file mode 100644
index 000000000..ed6ca6d62
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/main.yml
@@ -0,0 +1,30 @@
+---
+- name: test - do not run tests for Zabbix < 6.4
+ meta: end_play
+ when: zabbix_version is version('6.4', '<')
+
+- block:
+ # setup stuff
+ - include_tasks: zabbix_setup.yml
+
+ # zabbix_itemprototype module tests
+ - include_tasks: zabbix_tests.yml
+
+ # tear down stuff set up earlier
+ - include_tasks: zabbix_teardown.yml
+ always:
+ - name: "cleanup host item if tests failed"
+ community.zabbix.zabbix_itemprototype:
+ host_name: ExampleHost
+ discoveryrule_name: ExampleHostRule
+ name: TestItem
+ state: absent
+ ignore_errors: true
+
+ - name: "cleanup template item if tests failed"
+ community.zabbix.zabbix_itemprototype:
+ template_name: ExampleTemplate
+ discoveryrule_name: ExampleTemplateRule
+ name: TestItem
+ state: absent
+ ignore_errors: true
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_setup.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_setup.yml
new file mode 100644
index 000000000..62d4d5e92
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_setup.yml
@@ -0,0 +1,36 @@
+---
+
+- name: Create test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ template_groups:
+ - Templates
+
+- name: Create test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ host_groups:
+ - Linux servers
+ - Zabbix servers
+ link_templates:
+ - ExampleTemplate
+ status: enabled
+ state: present
+ interfaces:
+ - type: 1
+ main: 1
+ useip: 1
+ ip: 10.1.1.1
+ dns: ""
+ port: "10050"
+
+- name: Create new Zabbix discoveryrule on host
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ state: present
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_teardown.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_teardown.yml
new file mode 100644
index 000000000..4a2ec158f
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_teardown.yml
@@ -0,0 +1,9 @@
+- name: remove test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ state: absent
+
+- name: remove test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ state: absent
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_tests.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_tests.yml
new file mode 100644
index 000000000..604ecf4d4
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_itemprototype/tasks/zabbix_tests.yml
@@ -0,0 +1,248 @@
+---
+
+- name: test - create new Zabbix item on host with many options set
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}'
+ value_type: numeric_unsigned
+ units: GB
+ interval: 1m
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxhostitem_new is changed
+
+- name: test - create same Zabbix item prototype once again
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}'
+ value_type: numeric_unsigned
+ units: GB
+ interval: 1m
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostitem_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhostitem_existing is changed
+
+- name: test - update existing zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ interval: 2m
+ state: present
+ register: zbxhostitem_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxhostitem_changed is changed
+
+- name: test - attempt to delete previously created zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostitem_existing_delete
+
+- name: assert that item was deleted
+ ansible.builtin.assert:
+ that: zbxhostitem_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostitem_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhostitem_missing_delete is changed
+
+- name: test - create new Zabbix master item on host
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}'
+ value_type: numeric_unsigned
+ units: B
+ interval: 1m
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhostmstitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxhostmstitem_new is changed
+
+- name: create dependent item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestDependItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: dependent_item
+ key: '{% raw %}vfs.fs.size.half[{#FSNAME}]{% endraw %}'
+ value_type: numeric_float
+ units: B
+ master_item:
+ item_name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ preprocessing:
+ - type: javascript
+ params: 'return value / 2;'
+ error_handler: zabbix_server
+ state: present
+ register: zbxhostdependitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxhostdependitem_new is changed
+
+- name: test - attempt to delete previously created zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostmstitem_existing_delete
+
+- name: assert that item was deleted
+ ansible.builtin.assert:
+ that: zbxhostmstitem_existing_delete is changed
+
+- name: test - attempt to delete dependent item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestDependItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+ register: zbxhostdependitem_delete
+
+- name: assert that the item was removed along with its master
+ ansible.builtin.assert:
+ that: not zbxhostdependitem_delete is changed
+
+- name: remove host rule
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+
+- name: Create new Zabbix discoveryrule on template
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ state: present
+
+- name: test - create new Zabbix item on template with many options set
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}'
+ value_type: numeric_unsigned
+ units: B
+ interval: 1m
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtempitem_new
+
+- name: assert that item was created
+ ansible.builtin.assert:
+ that: zbxtempitem_new is changed
+
+- name: test - create same Zabbix item prototype once again
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME},used]{% endraw %}'
+ value_type: numeric_unsigned
+ units: B
+ interval: 1m
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtempitem_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtempitem_existing is changed
+
+- name: test - update existing zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ params:
+ interval: 2m
+ state: present
+ register: zbxtempitem_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxtempitem_changed is changed
+
+- name: test - attempt to delete previously created zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtempitem_existing_delete
+
+- name: assert that item was deleted
+ ansible.builtin.assert:
+ that: zbxtempitem_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix item
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:TestItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtempitem_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtempitem_missing_delete is changed
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/meta/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/meta/main.yml
new file mode 100644
index 000000000..acdb704c8
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/meta/main.yml
@@ -0,0 +1,3 @@
+---
+dependencies:
+ - setup_zabbix
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/main.yml
new file mode 100644
index 000000000..c410c36da
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/main.yml
@@ -0,0 +1,28 @@
+---
+- name: test - do not run tests for Zabbix < 6.4
+ meta: end_play
+ when: zabbix_version is version('6.4', '<')
+
+- block:
+ # setup stuff
+ - include_tasks: zabbix_setup.yml
+
+ # zabbix_trigger module tests
+ - include_tasks: zabbix_tests.yml
+
+ # tear down stuff set up earlier
+ - include_tasks: zabbix_teardown.yml
+ always:
+ - name: "cleanup host item if tests failed"
+ community.zabbix.zabbix_item:
+ host_name: ExampleHost
+ name: ExampleHostItem
+ state: absent
+ ignore_errors: true
+
+ - name: "cleanup template item if tests failed"
+ community.zabbix.zabbix_item:
+ template_name: ExampleTemplate
+ name: ExampleTemplateItem
+ state: absent
+ ignore_errors: true
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_setup.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_setup.yml
new file mode 100644
index 000000000..de3c7ecf4
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_setup.yml
@@ -0,0 +1,47 @@
+---
+
+- name: Create test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ template_groups:
+ - Templates
+
+- name: Create test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ host_groups:
+ - Linux servers
+ - Zabbix servers
+ link_templates:
+ - ExampleTemplate
+ status: enabled
+ state: present
+ interfaces:
+ - type: 1
+ main: 1
+ useip: 1
+ ip: 10.1.1.1
+ dns: ""
+ port: "10050"
+
+- name: Create test host item
+ community.zabbix.zabbix_item:
+ name: ExampleHostItem
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: agent.ping
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
+
+- name: Create test template item
+ community.zabbix.zabbix_item:
+ name: ExampleTemplateItem
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: system.cpu.load[percpu,avg1]
+ value_type: numeric_float
+ interval: 1m
+ state: present
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_teardown.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_teardown.yml
new file mode 100644
index 000000000..305484842
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_teardown.yml
@@ -0,0 +1,21 @@
+- name: remove test host item
+ community.zabbix.zabbix_item:
+ name: ExampleHostItem
+ host_name: ExampleHost
+ state: absent
+
+- name: remove test template item
+ community.zabbix.zabbix_item:
+ name: ExampleTemplateItem
+ template_name: ExampleTemplate
+ state: absent
+
+- name: remove test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ state: absent
+
+- name: remove test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ state: absent
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_tests.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_tests.yml
new file mode 100644
index 000000000..c87bd28b6
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_trigger/tasks/zabbix_tests.yml
@@ -0,0 +1,159 @@
+---
+
+- name: test - create new Zabbix trigger on host with many options set
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ host_name: ExampleHost
+ description: test host trigger
+ params:
+ severity: warning
+ expression: 'nodata(/ExampleHost/agent.ping,2m)=1'
+ recovery_mode: recovery_expression
+ recovery_expression: 'nodata(/ExampleHost/agent.ping,2m)=0'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhosttrigger_new
+
+- name: assert that trigger was created
+ ansible.builtin.assert:
+ that: zbxhosttrigger_new is changed
+
+- name: test - create same Zabbix trigger once again
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ host_name: ExampleHost
+ description: test host trigger
+ params:
+ severity: warning
+ expression: 'nodata(/ExampleHost/agent.ping,2m)=1'
+ recovery_mode: recovery_expression
+ recovery_expression: 'nodata(/ExampleHost/agent.ping,2m)=0'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhosttrigger_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhosttrigger_existing is changed
+
+- name: test - update existing zabbix trigger
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ host_name: ExampleHost
+ params:
+ manual_close: False
+ state: present
+ register: zbxhosttrigger_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxhosttrigger_changed is changed
+
+- name: test - attempt to delete previously created zabbix trigger
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ host_name: ExampleHost
+ state: absent
+ register: zbxhosttrigger_existing_delete
+
+- name: assert that trigger was deleted
+ ansible.builtin.assert:
+ that: zbxhosttrigger_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix trigger
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ host_name: ExampleHost
+ state: absent
+ register: zbxhosttrigger_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhosttrigger_missing_delete is changed
+
+- name: test - create new Zabbix trigger on template with many options set
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ template_name: ExampleTemplate
+ description: test template trigger
+ params:
+ severity: warning
+ expression: 'count(/ExampleTemplate/system.cpu.load[percpu,avg1],#2,"gt","1.3")>1'
+ recovery_mode: recovery_expression
+ recovery_expression: 'count(/ExampleTemplate/system.cpu.load[percpu,avg1],#2,"lt","1.3")>1'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtemptrigger_new
+
+- name: assert that trigger was created
+ ansible.builtin.assert:
+ that: zbxtemptrigger_new is changed
+
+- name: test - create same Zabbix trigger once again
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ template_name: ExampleTemplate
+ description: test template trigger
+ params:
+ severity: warning
+ expression: 'count(/ExampleTemplate/system.cpu.load[percpu,avg1],#2,"gt","1.3")>1'
+ recovery_mode: recovery_expression
+ recovery_expression: 'count(/ExampleTemplate/system.cpu.load[percpu,avg1],#2,"lt","1.3")>1'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtemptrigger_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtemptrigger_existing is changed
+
+- name: test - update existing zabbix trigger
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ template_name: ExampleTemplate
+ params:
+ manual_close: False
+ state: present
+ register: zbxtemptrigger_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxtemptrigger_changed is changed
+
+- name: test - attempt to delete previously created zabbix trigger
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtemptrigger_existing_delete
+
+- name: assert that trigger was deleted
+ ansible.builtin.assert:
+ that: zbxtemptrigger_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix trigger
+ community.zabbix.zabbix_trigger:
+ name: TestTrigger
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtemptrigger_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtemptrigger_missing_delete is changed
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/meta/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/meta/main.yml
new file mode 100644
index 000000000..acdb704c8
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/meta/main.yml
@@ -0,0 +1,3 @@
+---
+dependencies:
+ - setup_zabbix
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/main.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/main.yml
new file mode 100644
index 000000000..05bcdb724
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/main.yml
@@ -0,0 +1,44 @@
+---
+- name: test - do not run tests for Zabbix < 6.4
+ meta: end_play
+ when: zabbix_version is version('6.4', '<')
+
+- block:
+ # setup stuff
+ - include_tasks: zabbix_setup.yml
+
+ # zabbix_triggerprototype module tests
+ - include_tasks: zabbix_tests.yml
+
+ # tear down stuff set up earlier
+ - include_tasks: zabbix_teardown.yml
+ always:
+ - name: "cleanup host item if tests failed"
+ community.zabbix.zabbix_itemprototype:
+ host_name: ExampleHost
+ discoveryrule_name: ExampleHostRule
+ name: '{% raw %}{#FSNAME}:ExampleItemPrototype{% endraw %}'
+ state: absent
+ ignore_errors: true
+
+ - name: "cleanup template item if tests failed"
+ community.zabbix.zabbix_itemprototype:
+ template_name: ExampleTemplate
+ discoveryrule_name: ExampleTemplateRule
+ name: '{% raw %}{#FSNAME}:ExampleItemPrototype{% endraw %}'
+ state: absent
+ ignore_errors: true
+
+ - name: cleanup discovery rule on host if tests failed
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+ ignore_errors: true
+
+ - name: cleanup discovery rule on template if tests failed
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ state: absent
+ ignore_errors: true
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_setup.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_setup.yml
new file mode 100644
index 000000000..b42134f3a
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_setup.yml
@@ -0,0 +1,48 @@
+---
+
+- name: Create test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ template_groups:
+ - Templates
+
+- name: Create test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ host_groups:
+ - Linux servers
+ - Zabbix servers
+ link_templates:
+ - ExampleTemplate
+ status: enabled
+ state: present
+ interfaces:
+ - type: 1
+ main: 1
+ useip: 1
+ ip: 10.1.1.1
+ dns: ""
+ port: "10050"
+
+- name: Create new Zabbix discoveryrule on host
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ state: present
+
+- name: Create host item prototype
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:ExampleItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleHostRule
+ host_name: ExampleHost
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME}, pused]{% endraw %}'
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_teardown.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_teardown.yml
new file mode 100644
index 000000000..ef841de56
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_teardown.yml
@@ -0,0 +1,15 @@
+- name: remove test template item
+ community.zabbix.zabbix_item:
+ name: ExampleTemplateItem
+ template_name: ExampleTemplate
+ state: absent
+
+- name: remove test host
+ community.zabbix.zabbix_host:
+ host_name: ExampleHost
+ state: absent
+
+- name: remove test template
+ community.zabbix.zabbix_template:
+ template_name: ExampleTemplate
+ state: absent
diff --git a/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_tests.yml b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_tests.yml
new file mode 100644
index 000000000..7975e20c1
--- /dev/null
+++ b/ansible_collections/community/zabbix/tests/integration/targets/test_zabbix_triggerprototype/tasks/zabbix_tests.yml
@@ -0,0 +1,180 @@
+---
+
+- name: test - create new Zabbix trigger on host with many options set
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ host_name: ExampleHost
+ description: test host trigger
+ params:
+ severity: warning
+ expression: '{% raw %}last(/ExampleHost/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhosttrigger_new
+
+- name: assert that trigger was created
+ ansible.builtin.assert:
+ that: zbxhosttrigger_new is changed
+
+- name: test - create same Zabbix trigger once again
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ host_name: ExampleHost
+ description: test host trigger
+ params:
+ severity: warning
+ expression: '{% raw %}last(/ExampleHost/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxhosttrigger_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhosttrigger_existing is changed
+
+- name: test - update existing zabbix trigger
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ host_name: ExampleHost
+ params:
+ manual_close: False
+ state: present
+ register: zbxhosttrigger_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxhosttrigger_changed is changed
+
+- name: test - attempt to delete previously created zabbix trigger
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ host_name: ExampleHost
+ state: absent
+ register: zbxhosttrigger_existing_delete
+
+- name: assert that trigger was deleted
+ ansible.builtin.assert:
+ that: zbxhosttrigger_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix trigger
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ host_name: ExampleHost
+ state: absent
+ register: zbxhosttrigger_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxhosttrigger_missing_delete is changed
+
+- name: remove host rule
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleHostRule
+ host_name: ExampleHost
+ state: absent
+
+- name: Create new Zabbix discoveryrule on template
+ community.zabbix.zabbix_discoveryrule:
+ name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: 'vfs.fs.discovery'
+ interval: 1m
+ enabled: True
+ state: present
+
+- name: Create template item prototype
+ community.zabbix.zabbix_itemprototype:
+ name: '{% raw %}{#FSNAME}:ExampleItemPrototype{% endraw %}'
+ discoveryrule_name: ExampleTemplateRule
+ template_name: ExampleTemplate
+ params:
+ type: zabbix_agent_active
+ key: '{% raw %}vfs.fs.size[{#FSNAME}, pused]{% endraw %}'
+ value_type: numeric_unsigned
+ interval: 1m
+ state: present
+
+- name: test - create new Zabbix trigger on template with many options set
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: ExampleTemplate
+ description: test template trigger
+ params:
+ severity: warning
+ expression: '{% raw %}last(/ExampleTemplate/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtemptrigger_new
+
+- name: assert that trigger was created
+ ansible.builtin.assert:
+ that: zbxtemptrigger_new is changed
+
+- name: test - create same Zabbix trigger once again
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: ExampleTemplate
+ description: test template trigger
+ params:
+ severity: warning
+ expression: '{% raw %}last(/ExampleTemplate/vfs.fs.size[{#FSNAME}, pused])>80{% endraw %}'
+ manual_close: True
+ enabled: True
+ tags:
+ - tag: tag
+ value: value
+ state: present
+ register: zbxtemptrigger_existing
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtemptrigger_existing is changed
+
+- name: test - update existing zabbix trigger
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: ExampleTemplate
+ params:
+ manual_close: False
+ state: present
+ register: zbxtemptrigger_changed
+
+- name: expect to succeed and that things changed
+ ansible.builtin.assert:
+ that: zbxtemptrigger_changed is changed
+
+- name: test - attempt to delete previously created zabbix trigger
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtemptrigger_existing_delete
+
+- name: assert that trigger was deleted
+ ansible.builtin.assert:
+ that: zbxtemptrigger_existing_delete is changed
+
+- name: test - attempt to delete non-existing zabbix trigger
+ community.zabbix.zabbix_triggerprototype:
+ name: '{% raw %}Free disk space is less than 20% on volume {#FSNAME}{% endraw %}'
+ template_name: ExampleTemplate
+ state: absent
+ register: zbxtemptrigger_missing_delete
+
+- name: assert that nothing has been changed
+ ansible.builtin.assert:
+ that: not zbxtemptrigger_missing_delete is changed
diff --git a/ansible_collections/containers/podman/CHANGELOG.rst b/ansible_collections/containers/podman/CHANGELOG.rst
index 84fa3621b..59c7cc123 100644
--- a/ansible_collections/containers/podman/CHANGELOG.rst
+++ b/ansible_collections/containers/podman/CHANGELOG.rst
@@ -5,6 +5,93 @@ Ansible Podman modules and plugins Release Notes
.. contents:: Topics
+v1.15.2
+=======
+
+Release Summary
+---------------
+
+Inline Containerfile option for podman_image
+
+Minor Changes
+-------------
+
+- Add option for inline Containerfile in podman_image
+
+v1.15.1
+=======
+
+Release Summary
+---------------
+
+Bugfixes for systemd generation
+
+Bugfixes
+--------
+
+- Fix idempotency for pods in case of systemd generation
+- Fix idempotency for systemd generations
+
+v1.15.0
+=======
+
+Release Summary
+---------------
+
+Podman search module and bugfixes for Podman image
+
+Major Changes
+-------------
+
+- Add podman search
+
+Bugfixes
+--------
+
+- Fix issue with pushing podman image to repo name and org
+
+New Modules
+-----------
+
+- containers.podman.podman_search - Search for remote images using podman
+
+v1.14.0
+=======
+
+Release Summary
+---------------
+
+Idempotency redesign and a lot of new options
+
+Major Changes
+-------------
+
+- Add mount and unmount for volumes
+- Add multiple subnets for networks
+- Add new options for podman_container
+- Add new options to pod module
+- Improve idempotency for networking in podman_container
+- Redesign idempotency for Podman Pod module
+
+Minor Changes
+-------------
+
+- Add autodiscovery for build context in podman_image
+- Add docs, tests and more examples for podman_pod
+- Add extra_args for podman_image push and pull
+- Add idempotency for mounts and volumes in podman_container
+- Add new functionality tests for podman_secret
+- Add path and env options for podman_secret
+- Add route, dns and ipam_driver to podman_network
+- Create podman secret when skip_existing=True and it does not exist
+
+Bugfixes
+--------
+
+- Fix idempotency for pod with 0.0.0.0
+- Fix transports issues in podman_image
+- fix(#747) set correct HealthCmd
+
v1.13.0
=======
diff --git a/ansible_collections/containers/podman/FILES.json b/ansible_collections/containers/podman/FILES.json
index ebc8fb138..f5015daa5 100644
--- a/ansible_collections/containers/podman/FILES.json
+++ b/ansible_collections/containers/podman/FILES.json
@@ -8,6 +8,20 @@
"format": 1
},
{
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0b3c0bf86d855ce3b4341bf6f72a5ea3e6300c09ca88d793ca3b149175eaa30c",
+ "format": 1
+ },
+ {
+ "name": "CHANGELOG.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5ca0e2ce515d6d4a95053896aac7e644c6135e7aac6618a053c5af3912d5646a",
+ "format": 1
+ },
+ {
"name": "docs",
"ftype": "dir",
"chksum_type": null,
@@ -15,136 +29,143 @@
"format": 1
},
{
- "name": "docs/_static",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/podman_play_module.html",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "37b854782e2f442df7939135fd0de7f1bd11b7df93fa86b4b46d1c810d9fec04",
"format": 1
},
{
- "name": "docs/_static/pygments.css",
+ "name": "docs/podman_prune_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f6d5ca74e8fbc721d70c8db260ab4ac75b69316daa3854a4614d9f3b2ff0631",
+ "chksum_sha256": "753b3dabeac1e53df2e42e1940df972dc406d97364317358f1491f5a202e1dbc",
"format": 1
},
{
- "name": "docs/_static/minus.png",
+ "name": "docs/podman_containers_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "47e7fc50db3699f1ca41ce9a2ffa202c00c5d1d5180c55f62ba859b1bd6cc008",
+ "chksum_sha256": "99e13ff1d795cf549f7a74168db55a56041ded5fe530edcea23f7fcc9788ed20",
"format": 1
},
{
- "name": "docs/_static/documentation_options.js",
+ "name": "docs/podman_tag_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e2718b75af040caf95a8915192026ec541cc7ad4c73b0160b7c4c2f4564d7d4f",
+ "chksum_sha256": "8d7f93ba23f7bcf02b6803333685bd5265e4fe72b1e76b2f0fe0dfd9deb6794b",
"format": 1
},
{
- "name": "docs/_static/language_data.js",
+ "name": "docs/podman_login_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95b1d44e1ba5a3155d89f8427a3b2c981158f956bf48b9cdf0030265e8b7a609",
+ "chksum_sha256": "9c3bb198ce3a5722598a3a5bc33aa7f66d7c06bd9768646c34f3df071aae33d6",
"format": 1
},
{
- "name": "docs/_static/underscore-1.13.1.js",
+ "name": "docs/podman_save_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc10f799cd0f6b65f95c4012445497e5ba3cb9f51964a9468940b27bde98b487",
+ "chksum_sha256": "c99fa28fa3a30963799fd9c44c4c106bfe52776053e7d3c4722cfa63e64e4f52",
"format": 1
},
{
- "name": "docs/_static/jquery-3.6.0.js",
+ "name": "docs/search.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1fe2bb5390a75e5d61e72c107cab528fc3c29a837d69aab7d200e1dbb5dcd239",
+ "chksum_sha256": "564edc064d26715602aa4c331924b56af296fe5a593f03b84dc0747c15e241c7",
"format": 1
},
{
- "name": "docs/_static/alabaster.css",
+ "name": "docs/podman_export_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13f494414f5bca3551442ddf4ad0e42e2403a8757304c1b9787b18ef539a2eac",
+ "chksum_sha256": "fb952b0a011de62c82f09264ef652563c6c8dce05d79e84b0509e0481dab514e",
"format": 1
},
{
- "name": "docs/_static/custom.css",
+ "name": "docs/podman_import_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39f23a6561786e3cb4e33e4a96562a1305a8b74c0d45dc215a64018692cd5d4c",
+ "chksum_sha256": "8ce0f00fcc1856676065ec1d3b6ce0ca6413e9606b3785e6786a2dbfedbb02d1",
"format": 1
},
{
- "name": "docs/_static/underscore-1.3.1.js",
+ "name": "docs/.nojekyll",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f808f0aa32fbe90fb9c9c846917faff3fdd4e236c284b76c02dd33753dc90177",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/_static/underscore.js",
+ "name": "docs/buildah_connection.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "218fb1c1fc72e9af6b866f430be2a67fa376392b4db2f4dbf32772671b6ae55c",
+ "chksum_sha256": "8a07dd0bd34f2076d2c0f20839eeca6fe9aec2af329e4c9bd0d874eaec5ff685",
"format": 1
},
{
- "name": "docs/_static/file.png",
+ "name": "docs/index.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c4bc9a16aebf38c4b950f59b8e501ca36495328cb9eb622218bce9064a35e3e",
+ "chksum_sha256": "fea6b00dae1703b250bd9b6d4c512b90adb0a6cd935a9a0d4167ba653e2dc5f5",
"format": 1
},
{
- "name": "docs/_static/jquery-3.5.1.js",
+ "name": "docs/environment_variables.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "416a3b2c3bf16d64f6b5b6d0f7b079df2267614dd6847fc2f3271b4409233c37",
+ "chksum_sha256": "879bfbde687917cbafe3b2d1ede5979c327aee400372a741ceff8545162a4097",
"format": 1
},
{
- "name": "docs/_static/_sphinx_javascript_frameworks_compat.js",
+ "name": "docs/podman_container_exec_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f1e30341b31300bdc3af29ee4a64e0f40ed15492345387bb47542d9b9b4813c",
+ "chksum_sha256": "789c9263eebe83eb2671b0c5a1abb1fb551b0deb8542a671b106b755b47fce01",
"format": 1
},
{
- "name": "docs/_static/basic.css",
+ "name": "docs/podman_volume_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9980fa28cbdcd8d6b437edcb1a911e0e2a788d7fa0dfe6ea25a302d824a3094d",
+ "chksum_sha256": "2c69f055bb49ddb2ba3e0b2027baf10510ef61f08d0c8dcf69d632fe5a26f815",
"format": 1
},
{
- "name": "docs/_static/doctools.js",
+ "name": "docs/podman_secret_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d62b81f63b0418a39a8f5a323203d88ddafc8c5226f86d311970025d86d7b6c",
+ "chksum_sha256": "f83fbfefa21682bc4c4b3f312fbbc5e181f74fd3bb9ad484bf025d042b4f47e1",
"format": 1
},
{
- "name": "docs/_static/plus.png",
+ "name": "docs/podman_search_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54115199b96a130cba02147c47c0deb43dcc9b9f08b5162bba8642b34980ac63",
+ "chksum_sha256": "fea1d458300947d1b73f1fa14381939d69e2422825bd79ef63d00be4bc98aaef",
"format": 1
},
{
- "name": "docs/_static/jquery.js",
+ "name": "docs/_static",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/_static/jquery-3.6.0.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff1523fb7389539c84c65aba19260648793bb4f5e29329d2ee8804bc37a3fe6e",
+ "chksum_sha256": "1fe2bb5390a75e5d61e72c107cab528fc3c29a837d69aab7d200e1dbb5dcd239",
"format": 1
},
{
- "name": "docs/_static/antsibull-minimal.css",
+ "name": "docs/_static/_sphinx_javascript_frameworks_compat.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd914facd58095fed7317de336bdf32ac00ff3de539432463df397d2ae15a696",
+ "chksum_sha256": "2f1e30341b31300bdc3af29ee4a64e0f40ed15492345387bb47542d9b9b4813c",
"format": 1
},
{
@@ -155,262 +176,283 @@
"format": 1
},
{
- "name": "docs/_static/searchtools.js",
+ "name": "docs/_static/underscore.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b9facbcb36e5e15e409e2db42439fb8cbc1e43275e337a645bb3c76c3191b93",
+ "chksum_sha256": "218fb1c1fc72e9af6b866f430be2a67fa376392b4db2f4dbf32772671b6ae55c",
"format": 1
},
{
- "name": "docs/podman_secret_info_module.html",
+ "name": "docs/_static/basic.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "afbe8b5e852d8ac13eda2acb5a4424bbe50e04775b182de9c73acdbd4e4572c3",
+ "chksum_sha256": "9980fa28cbdcd8d6b437edcb1a911e0e2a788d7fa0dfe6ea25a302d824a3094d",
"format": 1
},
{
- "name": "docs/podman_prune_module.html",
+ "name": "docs/_static/pygments.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52583bf414f29d8a289f7844bb87e870e438eb4c680fe6efe3b84ac9b24919cf",
+ "chksum_sha256": "6f6d5ca74e8fbc721d70c8db260ab4ac75b69316daa3854a4614d9f3b2ff0631",
"format": 1
},
{
- "name": "docs/podman_container_exec_module.html",
+ "name": "docs/_static/custom.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c7a400e7e16df1d1904f5dad17d5358e90a95e8ae611072a7cc4b2fd19b0f70",
+ "chksum_sha256": "39f23a6561786e3cb4e33e4a96562a1305a8b74c0d45dc215a64018692cd5d4c",
"format": 1
},
{
- "name": "docs/podman_unshare_become.html",
+ "name": "docs/_static/underscore-1.3.1.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6caccafc43ebbbeb0c043d2e8a2fa2c8736166a0f94a117514eed3be946685ec",
+ "chksum_sha256": "f808f0aa32fbe90fb9c9c846917faff3fdd4e236c284b76c02dd33753dc90177",
"format": 1
},
{
- "name": "docs/objects.inv",
+ "name": "docs/_static/doctools.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4e8f308bf4441db6759e6d8a1b54535cf24e77f1ecee4d66caf6b5582f79efa7",
+ "chksum_sha256": "3d62b81f63b0418a39a8f5a323203d88ddafc8c5226f86d311970025d86d7b6c",
"format": 1
},
{
- "name": "docs/podman_image_info_module.html",
+ "name": "docs/_static/minus.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8b61d10e778183647fd7b720d030addce99763e759ad1eb7daa14ece8b9913b",
+ "chksum_sha256": "47e7fc50db3699f1ca41ce9a2ffa202c00c5d1d5180c55f62ba859b1bd6cc008",
"format": 1
},
{
- "name": "docs/podman_secret_module.html",
+ "name": "docs/_static/jquery-3.5.1.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62e01ebdfbc9e2a163c747e013c5aa3ec7219b3bb260147a929080d445f0dbbd",
+ "chksum_sha256": "416a3b2c3bf16d64f6b5b6d0f7b079df2267614dd6847fc2f3271b4409233c37",
"format": 1
},
{
- "name": "docs/podman_network_info_module.html",
+ "name": "docs/_static/documentation_options.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5763fa0a99790ef332056f61dc499b455de1a37349d006289427922aae21395",
+ "chksum_sha256": "e2718b75af040caf95a8915192026ec541cc7ad4c73b0160b7c4c2f4564d7d4f",
"format": 1
},
{
- "name": "docs/podman_image_module.html",
+ "name": "docs/_static/language_data.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b6b1b68675c355b8306fae41e7a399d1c203028b18aab10e2515c710f4a9a13",
+ "chksum_sha256": "95b1d44e1ba5a3155d89f8427a3b2c981158f956bf48b9cdf0030265e8b7a609",
"format": 1
},
{
- "name": "docs/podman_volume_module.html",
+ "name": "docs/_static/jquery.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9eb4b8acf3aecbd24f06e73c0b9d52370377032f5884327f4bb7de0483d2db2f",
+ "chksum_sha256": "ff1523fb7389539c84c65aba19260648793bb4f5e29329d2ee8804bc37a3fe6e",
"format": 1
},
{
- "name": "docs/podman_load_module.html",
+ "name": "docs/_static/underscore-1.13.1.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4532a90f7cba943ba79959f0cd05af85c790bdd51f30888de95861e60ab77bd0",
+ "chksum_sha256": "cc10f799cd0f6b65f95c4012445497e5ba3cb9f51964a9468940b27bde98b487",
"format": 1
},
{
- "name": "docs/podman_runlabel_module.html",
+ "name": "docs/_static/plus.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4089cfd62f3250303175edaac0e1b9ed017e7dcfa8ebd11c334862e5e4be6324",
+ "chksum_sha256": "54115199b96a130cba02147c47c0deb43dcc9b9f08b5162bba8642b34980ac63",
"format": 1
},
{
- "name": "docs/podman_play_module.html",
+ "name": "docs/_static/file.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef65885a17acc3bfcabad46054472594f0fb7cc3968e8d25746bff6bf99722da",
+ "chksum_sha256": "5c4bc9a16aebf38c4b950f59b8e501ca36495328cb9eb622218bce9064a35e3e",
"format": 1
},
{
- "name": "docs/podman_container_module.html",
+ "name": "docs/_static/searchtools.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "538fd60bc9d58b266c163ed7ac7bcf3305dc787935222f7f2c4c9e112a4cf30b",
+ "chksum_sha256": "5b9facbcb36e5e15e409e2db42439fb8cbc1e43275e337a645bb3c76c3191b93",
"format": 1
},
{
- "name": "docs/podman_containers_module.html",
+ "name": "docs/_static/antsibull-minimal.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c929fa197c9e07d10d05101e6472dcdef409ced23a50da5918ec4841a4e50d08",
+ "chksum_sha256": "dd914facd58095fed7317de336bdf32ac00ff3de539432463df397d2ae15a696",
"format": 1
},
{
- "name": "docs/index.html",
+ "name": "docs/_static/alabaster.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a05292a1b3cd446a6ea3667d7519b521dc50c7efc04fd3b1af4d42dc219fe37d",
+ "chksum_sha256": "13f494414f5bca3551442ddf4ad0e42e2403a8757304c1b9787b18ef539a2eac",
"format": 1
},
{
- "name": "docs/podman_logout_module.html",
+ "name": "docs/objects.inv",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d81428e5df90da2490fd620e3af3b1a1ddd347b086cee77dff057a580267002",
+ "chksum_sha256": "15cfb3114360d1921c6672d367731f86ad4c3fb0e5f87fc39eace64bdd129bff",
"format": 1
},
{
- "name": "docs/podman_save_module.html",
+ "name": "docs/podman_network_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5cf2070ca153d3bf60ea9e0440ffb1b72b847e32565a3fd8f953ca7de24a9586",
+ "chksum_sha256": "7c727e9c5434d145285aa871e47e8ca43e1cbe0a6a19c3f19f05fc6290e3d2bb",
"format": 1
},
{
- "name": "docs/genindex.html",
+ "name": "docs/podman_container_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "267654c6dfb8d96914725b0bd774e09c72dfdcfb065ae073f9fc732b01b543d0",
+ "chksum_sha256": "b27a42b534e504a4c46f59406f9301696a9c030170770f1c50589bd4a13f7a99",
"format": 1
},
{
- "name": "docs/podman_network_module.html",
+ "name": "docs/podman_volume_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "367a0a1343aa808bb256561200464764dc2b165271d028b1508d95b6b7251410",
+ "chksum_sha256": "91eb2bf2c0703276d058ee4d6a5216d59390f6ef6d92bb30bec545f0b38827e8",
"format": 1
},
{
- "name": "docs/buildah_connection.html",
+ "name": "docs/podman_network_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2f915dd35461f1a8d9806999382c0976f9b1fe03e2fe7b3e1973e20a46157b1",
+ "chksum_sha256": "a6ab63e31ee548cfc0bf8523188b74ba331e8571f1306c27efb0d7eced311ea5",
"format": 1
},
{
"name": "docs/searchindex.js",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "668d68ce702cf98c3d9cbc91d08d64a1fda5060257ad8393f99b06fe0de1d9a7",
+ "chksum_sha256": "b8b751323801bbc009888980bb60145a262bfcd6ccb875b79101b64d92eb9741",
"format": 1
},
{
- "name": "docs/podman_container_info_module.html",
+ "name": "docs/podman_secret_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13dc267968457dd4b2271a4a03b497d80354ed3aadb843c4ce0a8829f7ae99e5",
+ "chksum_sha256": "18339efa195ab8bd4d43ecfb1f542297225b47cc1f7f2d7b84512f5856aa8f32",
"format": 1
},
{
- "name": "docs/podman_tag_module.html",
+ "name": "docs/podman_pod_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "176e3fedadbfe0c2bd801ac7b2ad9d90027b76d33a2cb28a2fef572e4e7846fc",
+ "chksum_sha256": "4459bf684e8f0fa3e6463005f31da4428cd749ca4f0dda727b9e8b8df6f7aecc",
"format": 1
},
{
- "name": "docs/environment_variables.html",
+ "name": "docs/podman_image_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "754ab0bc744fd2f640f433d8ec17c4520c35bf5985108501b479b3653fe3c0d0",
+ "chksum_sha256": "c8097aeb6c5f7a8ca4889635fdb53cb0835446368e2efe76cd7c5c2905781dfa",
"format": 1
},
{
- "name": "docs/podman_volume_info_module.html",
+ "name": "docs/podman_pod_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8a2a6fad503cd6adb9160e77bb2f31118bf0daa6d977fa03759e28eb6c3f131",
+ "chksum_sha256": "cedeb049f61d24452f879b314f9ec6418c4bc91058f2750346a2dd94021b9b33",
"format": 1
},
{
- "name": "docs/search.html",
+ "name": "docs/podman_container_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "586d63e6da7c3da64b93d96964e37b0355b62674868eccb23e3478abe568727e",
+ "chksum_sha256": "cb0a2b1a2c5802a025c2ae2e4607bd62f8b3db5389c6356906308dd771b7f4f3",
"format": 1
},
{
- "name": "docs/podman_login_info_module.html",
+ "name": "docs/podman_login_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a726afed1d890e8b184d3084ca0c0070313a067b90dfdbc542b55ecdeea1ca",
+ "chksum_sha256": "c9d0954f1bebc9840a9f93a63c766c31685b0163892a1f3b6f392d4a21d16c82",
"format": 1
},
{
- "name": "docs/podman_login_module.html",
+ "name": "docs/podman_generate_systemd_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c66423dcc7cc34aed5e2d110a4a874fbd6540cc6f1612e86c33406c919bcf79c",
+ "chksum_sha256": "ed3114ebe0718d51cde4b7891a26446848ad8dac47d8aa523969444a8810c109",
"format": 1
},
{
- "name": "docs/podman_pod_info_module.html",
+ "name": "docs/podman_load_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a28ac5a8b511831dfa23f72c1f9bb344ef8ba447b35fa5324abb75d01c48c229",
+ "chksum_sha256": "e9b058cd610fce57ba8e6a8249ea01d26bdd0840f0b439ffbb981a8bb0c398cc",
"format": 1
},
{
- "name": "docs/podman_export_module.html",
+ "name": "docs/podman_runlabel_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "678fc32756bc2c3bfe8666d9b32144bcee1371e213e1885abea262bee7da0b86",
+ "chksum_sha256": "b278009a712accc1d299bd5fd3facdeb97d730c21c4da86f81b3849b8df6173b",
"format": 1
},
{
- "name": "docs/podman_import_module.html",
+ "name": "docs/podman_logout_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "efe1e379667f9f72c2d6b68dac89fce6a21b1c2bed58e6159a8da49fa343c7c0",
+ "chksum_sha256": "b268659490b3f6ed9d81c2702900932fa1ffe668713b40f37de424f26400c33a",
"format": 1
},
{
- "name": "docs/podman_generate_systemd_module.html",
+ "name": "docs/genindex.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a39b726dd24891ea2baed0fa82f0d6441a8de98ec924a34f1429d8f3b5c0db90",
+ "chksum_sha256": "07c7c57566f1a019de33dc9d9cc748814bacdde600e87776e86f84c43d0df78c",
"format": 1
},
{
- "name": "docs/podman_pod_module.html",
+ "name": "docs/podman_connection.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8df83296245cffb0523875e8045091ef06d9052f1c0193d48c4e4f6fb43ff3fb",
+ "chksum_sha256": "d7fd9142e3e6e3d968ab802c054f71039b28f6ade5ae9f63ec2134aa18a5abaa",
"format": 1
},
{
- "name": "docs/.nojekyll",
+ "name": "docs/podman_image_info_module.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ebd8ebec750431a9e266d29e3ec3df339f0fdc78ac18f7bf115cac5206a0cf4d",
"format": 1
},
{
- "name": "docs/podman_connection.html",
+ "name": "docs/podman_unshare_become.html",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "163a4dd0ca2c99c1d58fe2e2400f9c4b3b431feb5913408185a6d175ad5f23e7",
+ "chksum_sha256": "c605d51a6585bea6690be038bc9b60f23da5c1622a4a5612f2afadb189a30e1e",
+ "format": 1
+ },
+ {
+ "name": "changelogs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72cc16b684890f4595ab75ffdd6f80f5ae19bc84cbb2d82606bc840eb7842209",
+ "format": 1
+ },
+ {
+ "name": "changelogs/changelog.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e8b8d66f8f6e186308dde34bfd8c70f85887e338ace88337fa02c3b95826af6",
"format": 1
},
{
@@ -421,31 +463,31 @@
"format": 1
},
{
- "name": "ansible-collection-containers-podman.spec",
+ "name": "CODE-OF-CONDUCT.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78cf0e2fef0e96de69fd18ab38b358a03a22a5ce2e0ecafbd149407d6bcafb47",
+ "chksum_sha256": "5b42ff686c8d61fc9879d2512a9fa01f8810a7274318e7952ad3322eeea02f11",
"format": 1
},
{
- "name": "setup.py",
+ "name": "COPYING",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85ef52c56819164efc5178d25aa03a432cdb8431ad0c99255802ce19a1719602",
+ "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
"format": 1
},
{
- "name": "README.md",
+ "name": "galaxy.yml.in",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b3c0bf86d855ce3b4341bf6f72a5ea3e6300c09ca88d793ca3b149175eaa30c",
+ "chksum_sha256": "ba21f50b97b7f801dd811f96d4941327fccab34b13311e9bfcc5facfeec16999",
"format": 1
},
{
- "name": "CODE-OF-CONDUCT.md",
+ "name": "ansible-collection-containers-podman.spec",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b42ff686c8d61fc9879d2512a9fa01f8810a7274318e7952ad3322eeea02f11",
+ "chksum_sha256": "78cf0e2fef0e96de69fd18ab38b358a03a22a5ce2e0ecafbd149407d6bcafb47",
"format": 1
},
{
@@ -456,10 +498,38 @@
"format": 1
},
{
- "name": "COPYING",
+ "name": "meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "chksum_sha256": "f07aa47b7583a6a8420798081029b8cdfb09cbf5564acff6f49b6c3237102618",
+ "format": 1
+ },
+ {
+ "name": "setup.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "df77e31936daa4f2509685b9442e8518931651064d466d9f6e0b597ebf92d6cd",
+ "format": 1
+ },
+ {
+ "name": "setup.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "85ef52c56819164efc5178d25aa03a432cdb8431ad0c99255802ce19a1719602",
+ "format": 1
+ },
+ {
+ "name": "test-requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9b72b7289eba420f51eb614c54f609181bb26e96caef2abf06c02339a77d2e08",
"format": 1
},
{
@@ -477,178 +547,178 @@
"format": 1
},
{
- "name": "plugins/connection/buildah.py",
+ "name": "plugins/connection/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dd2825640c5a1d888fe78dcde28734278e8329dc69af136c70b1bb0fb308208",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/connection/__init__.py",
+ "name": "plugins/connection/podman.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fd66ec667402f515bab568f0b6b0f12bde1d122ac58ba86d722c97b12364b3ce",
"format": 1
},
{
- "name": "plugins/connection/podman.py",
+ "name": "plugins/connection/buildah.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd66ec667402f515bab568f0b6b0f12bde1d122ac58ba86d722c97b12364b3ce",
+ "chksum_sha256": "2dd2825640c5a1d888fe78dcde28734278e8329dc69af136c70b1bb0fb308208",
"format": 1
},
{
- "name": "plugins/modules",
+ "name": "plugins/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/podman_generate_systemd.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "262b7b1263967adfa45d98192ac38169e51026db3b60eac7b365dc81bbd7b784",
+ "name": "plugins/module_utils/podman",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/podman_login_info.py",
+ "name": "plugins/module_utils/podman/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8f615071a379d21b8f012a60c24036c4812239faf8e6aea2be6e5ca21bbcc4f",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/podman_export.py",
+ "name": "plugins/module_utils/podman/podman_pod_lib.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48a73c06e5fe4b1f1d4cd70262d32559a13e73eda7012ea5c074fdbabbc2470a",
+ "chksum_sha256": "2ef09fa47a01c25fd211af52b995af2890af2d8936cc48c92a6504c14f7110f6",
"format": 1
},
{
- "name": "plugins/modules/podman_network_info.py",
+ "name": "plugins/module_utils/podman/quadlet.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9beb6e0c733ca304f38f95116dfd89569df21a68428039ef59ec9650314c5222",
+ "chksum_sha256": "303760220f493bef6b31dd3c04037c8cd85983c02e467e7602d48907b538e6db",
"format": 1
},
{
- "name": "plugins/modules/podman_image_info.py",
+ "name": "plugins/module_utils/podman/common.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30d4db967b76884039d8ccaaf99f098e86b85efd9bef6bc0ceea4b8292905e6b",
+ "chksum_sha256": "bdc5d5dea059a7973e77768d29bd985be5ca111e906daf0634b190a4cc25019b",
"format": 1
},
{
- "name": "plugins/modules/podman_container_info.py",
+ "name": "plugins/module_utils/podman/podman_container_lib.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0ccb2118efb225624f13e01c499eec274d0d864fd151cca94e0128e994d4b5e",
+ "chksum_sha256": "cf8a413c68acb6f67119215646d47c17f29d0f1797d0140ef5d3b1c1c6440919",
"format": 1
},
{
- "name": "plugins/modules/podman_pod.py",
+ "name": "plugins/module_utils/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32d5458b844ee1798d4abdc5384dc9b1056bd3a3585e6e38b48bfd3bd0ea43f5",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/podman_container.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ada302bbc6c4378e586edc7f0e2c0d9e78b161f325b7d886bfa9ee4deeb41882",
+ "name": "plugins/become",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/podman_prune.py",
+ "name": "plugins/become/podman_unshare.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "214aa5244363a5d1e8423c82a770e5c02f7dfc331b8756607a567995e6d6ec50",
+ "chksum_sha256": "2dab878a68742976acd46a1310a37e9e0777b1953f86ee31dd9e0a29e9dfc3f3",
"format": 1
},
{
- "name": "plugins/modules/podman_secret.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6b19a2f4453349e74339f952ca4d86512b55e792c3ed8d991ec7a681ec1de850",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/podman_tag.py",
+ "name": "plugins/modules/podman_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f748d6640b2cd18ac2caaf0a9d74917c1f139649df48dd514a11d8a1e7abc7cf",
+ "chksum_sha256": "6b283630c55a99cd1092641455e47355ddd7eee92003e6749452c9637e57c53d",
"format": 1
},
{
- "name": "plugins/modules/__init__.py",
+ "name": "plugins/modules/podman_image.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "b9cdd5378b676ee4395117d2835b59ddf1b97c33192e0eed36bd550162df47e9",
"format": 1
},
{
- "name": "plugins/modules/podman_logout.py",
+ "name": "plugins/modules/podman_network_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7cc7da9cf123ffb9180e98f2fe5704f628831d44014efd25c48cedd1c28a9b7c",
+ "chksum_sha256": "9beb6e0c733ca304f38f95116dfd89569df21a68428039ef59ec9650314c5222",
"format": 1
},
{
- "name": "plugins/modules/podman_load.py",
+ "name": "plugins/modules/podman_secret_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96694bda39431f44186d599db567446d72debdf52a4a5c51f5fd66fa515229fc",
+ "chksum_sha256": "d6254c8325c6ff5fffb632062f7401d8a1a94d5f1786691e88bee07fe26734f9",
"format": 1
},
{
- "name": "plugins/modules/podman_save.py",
+ "name": "plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ddb55cc96d2a174a1f2e61fcd22a435b11394d53199bd2afeca374139a43be2c",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/podman_runlabel.py",
+ "name": "plugins/modules/podman_save.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e5ca7f03d9703edab8219fac8d24bdecd88a1da007ace8985bb12e6ddd7c4da",
+ "chksum_sha256": "ddb55cc96d2a174a1f2e61fcd22a435b11394d53199bd2afeca374139a43be2c",
"format": 1
},
{
- "name": "plugins/modules/podman_pod_info.py",
+ "name": "plugins/modules/podman_container.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dae42675e5739995587997d9d2475219339f222053a99a121f28270444f717b9",
+ "chksum_sha256": "27f57a836884bcbabb4be6c6310ee369c2a2c1b072adb42df5305fbaabac6265",
"format": 1
},
{
- "name": "plugins/modules/podman_container_exec.py",
+ "name": "plugins/modules/podman_container_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "936c9c3ff873c18d2283dd352e6dbaaf46c228678db474dd88eb9df1e824d833",
+ "chksum_sha256": "c0ccb2118efb225624f13e01c499eec274d0d864fd151cca94e0128e994d4b5e",
"format": 1
},
{
- "name": "plugins/modules/podman_login.py",
+ "name": "plugins/modules/podman_search.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1414953f8525bcfa294fb57d70565a02ede619e29ef256d88c34bdb6b2712fb3",
+ "chksum_sha256": "3726a722b4d8a583b96d80e7ac2506dc1e4a5e73e41be06c0053257d1fc0273d",
"format": 1
},
{
- "name": "plugins/modules/podman_play.py",
+ "name": "plugins/modules/podman_prune.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1fef01d23b5083bd5687f63d23e3e692fe0230f73e9a59da523fd8ce8be3509d",
+ "chksum_sha256": "214aa5244363a5d1e8423c82a770e5c02f7dfc331b8756607a567995e6d6ec50",
"format": 1
},
{
- "name": "plugins/modules/podman_containers.py",
+ "name": "plugins/modules/podman_secret.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "166b573ae344ccf93b2e33703fc8d8eeb1cd5e251eba40a95ba037cad4716b7a",
+ "chksum_sha256": "c76daac7a399f6a0881fdfdf1370981ebf313c1065707bb3492d6a092a1e61e1",
"format": 1
},
{
@@ -659,318 +729,283 @@
"format": 1
},
{
- "name": "plugins/modules/podman_volume_info.py",
+ "name": "plugins/modules/podman_login.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb8cf9d38d438d24589d547458f4c0ba8341f687eb8a1ded30de1fc0b8933c83",
+ "chksum_sha256": "1414953f8525bcfa294fb57d70565a02ede619e29ef256d88c34bdb6b2712fb3",
"format": 1
},
{
- "name": "plugins/modules/podman_secret_info.py",
+ "name": "plugins/modules/podman_export.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d6254c8325c6ff5fffb632062f7401d8a1a94d5f1786691e88bee07fe26734f9",
+ "chksum_sha256": "48a73c06e5fe4b1f1d4cd70262d32559a13e73eda7012ea5c074fdbabbc2470a",
"format": 1
},
{
- "name": "plugins/modules/podman_network.py",
+ "name": "plugins/modules/podman_tag.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28e23bfd5e1a689ec64ecd78dc5c2ade91064cc8db4523e2a3c84b3f0a527f09",
+ "chksum_sha256": "f748d6640b2cd18ac2caaf0a9d74917c1f139649df48dd514a11d8a1e7abc7cf",
"format": 1
},
{
- "name": "plugins/modules/podman_volume.py",
+ "name": "plugins/modules/podman_login_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b6807a2c1cbb53a163eb761017d9cc4af0e22a7b151c78e73be0d1cdf6822d61",
+ "chksum_sha256": "e8f615071a379d21b8f012a60c24036c4812239faf8e6aea2be6e5ca21bbcc4f",
"format": 1
},
{
- "name": "plugins/modules/podman_image.py",
+ "name": "plugins/modules/podman_logout.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61096d944871085194097913b73ad99565cb445245366fa19067f3a279e6fe56",
- "format": 1
- },
- {
- "name": "plugins/module_utils",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "plugins/module_utils/podman",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "7cc7da9cf123ffb9180e98f2fe5704f628831d44014efd25c48cedd1c28a9b7c",
"format": 1
},
{
- "name": "plugins/module_utils/podman/podman_pod_lib.py",
+ "name": "plugins/modules/podman_container_exec.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c5f4b77c737bc856aef858fe0515bcc2351dec0b1beacff3772a441a280d2fe",
+ "chksum_sha256": "936c9c3ff873c18d2283dd352e6dbaaf46c228678db474dd88eb9df1e824d833",
"format": 1
},
{
- "name": "plugins/module_utils/podman/quadlet.py",
+ "name": "plugins/modules/podman_containers.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5574a455e924ea0eb4d8dd50cb0e04d86ee0ad3507826626be66bec766863c60",
+ "chksum_sha256": "166b573ae344ccf93b2e33703fc8d8eeb1cd5e251eba40a95ba037cad4716b7a",
"format": 1
},
{
- "name": "plugins/module_utils/podman/common.py",
+ "name": "plugins/modules/podman_generate_systemd.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0faf94fa1606dc1a67c73def618c0e4c366ef60ff26723d1613c1f157cc5bb32",
+ "chksum_sha256": "262b7b1263967adfa45d98192ac38169e51026db3b60eac7b365dc81bbd7b784",
"format": 1
},
{
- "name": "plugins/module_utils/podman/__init__.py",
+ "name": "plugins/modules/podman_load.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "96694bda39431f44186d599db567446d72debdf52a4a5c51f5fd66fa515229fc",
"format": 1
},
{
- "name": "plugins/module_utils/podman/podman_container_lib.py",
+ "name": "plugins/modules/podman_volume_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a80171364990a53ad52201232c9bee988d2f02a91f0883e862c533a06b5730a",
+ "chksum_sha256": "eb8cf9d38d438d24589d547458f4c0ba8341f687eb8a1ded30de1fc0b8933c83",
"format": 1
},
{
- "name": "plugins/module_utils/__init__.py",
+ "name": "plugins/modules/podman_pod_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "dae42675e5739995587997d9d2475219339f222053a99a121f28270444f717b9",
"format": 1
},
{
- "name": "plugins/become",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/podman_runlabel.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3e5ca7f03d9703edab8219fac8d24bdecd88a1da007ace8985bb12e6ddd7c4da",
"format": 1
},
{
- "name": "plugins/become/podman_unshare.py",
+ "name": "plugins/modules/podman_play.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dab878a68742976acd46a1310a37e9e0777b1953f86ee31dd9e0a29e9dfc3f3",
+ "chksum_sha256": "1fef01d23b5083bd5687f63d23e3e692fe0230f73e9a59da523fd8ce8be3509d",
"format": 1
},
{
- "name": "meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/podman_image_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "30d4db967b76884039d8ccaaf99f098e86b85efd9bef6bc0ceea4b8292905e6b",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "plugins/modules/podman_pod.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f07aa47b7583a6a8420798081029b8cdfb09cbf5564acff6f49b6c3237102618",
+ "chksum_sha256": "9f0612814899a80c4797f81bb66e08efae400b77e4aac4bb212f3b204d4586a3",
"format": 1
},
{
- "name": "setup.cfg",
+ "name": "plugins/modules/podman_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df77e31936daa4f2509685b9442e8518931651064d466d9f6e0b597ebf92d6cd",
+ "chksum_sha256": "79e029a8c9ebf1985f1a371351a10df60a70c36448ef7bb38cbdf1cb18e09124",
"format": 1
},
{
- "name": "changelogs",
+ "name": "tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/config.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "72cc16b684890f4595ab75ffdd6f80f5ae19bc84cbb2d82606bc840eb7842209",
+ "name": "tests/integration",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7a25ebd33b1475eaf97acf3e5ba1b50461aabd200f501a6130f0aae1cc2e2e74",
+ "name": "tests/integration/targets",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "galaxy.yml.in",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ba21f50b97b7f801dd811f96d4941327fccab34b13311e9bfcc5facfeec16999",
+ "name": "tests/integration/targets/podman_load",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "CHANGELOG.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "37440385853bd43f5b71ba017515bd1c7c68ac45c0429cfaae320b3661c75de1",
+ "name": "tests/integration/targets/podman_load/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "test-requirements.txt",
+ "name": "tests/integration/targets/podman_load/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b72b7289eba420f51eb614c54f609181bb26e96caef2abf06c02339a77d2e08",
+ "chksum_sha256": "078d5ae2eb9bf74046e34c6e7c003e87bb6e08f1a6fd49fcf5d6bb2174bae393",
"format": 1
},
{
- "name": "tests",
+ "name": "tests/integration/targets/podman_play",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity",
+ "name": "tests/integration/targets/podman_play/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.12.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "name": "tests/integration/targets/podman_play/tasks/files",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.9.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/play1.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "1031804932a82f741e8cddaa397e73554e4784a790a3d4493d1b5c8f5f6bbf7e",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.15.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/play-root1.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "049f1ad67c57e019423f28c77ca8cec13e714ae8727fedfde71792ec56131344",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.13.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/kube-buil-test.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "c813ffc1f16f8324acbbaad0cf5ad7ee00e0ebd47f314b9ad74f24fd9e2177b0",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.17.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/play3.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "c92c31b3ad0cd9992a6a9e391d07d69b2b9f3cc39b410e1cbc710bda40323b3d",
"format": 1
},
{
- "name": "tests/sanity/requirements.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/play-root3.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "471c16a6346df1b74757306a436cbab143fe65c30e6f9cda1f5c7179d6012b73",
+ "chksum_sha256": "3e26cb69976a1b7d1f79a5dafc74e368d9297bb63b29c1c213d5bbddc0e62bf3",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.11.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/play-pod.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "28fd560a9ff59811770316563e3320dbd77ed04c2c2dd9d698c54870fbcaafb4",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.10.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/envdata.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "d5ebdb092f5473ae5174dfbac83b6623343326f308d02eea8fb58fbb7b42d264",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.18.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/multi-yaml.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "984f73601cc75a5052b22b3d9a670ea93e7cc0c4f3fe2fb89f4f341392fb8af6",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.16.txt",
+ "name": "tests/integration/targets/podman_play/tasks/files/Containerfile",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "c0268928bf1b6b6307404eff5d25e292b78239d330dc585c87d810db40307609",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.14.txt",
+ "name": "tests/integration/targets/podman_play/tasks/root-play.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "chksum_sha256": "65c0fccbaf7c464b539f895addc0cf3e7536f9cb8b9f7d8e28725a90d345326b",
"format": 1
},
{
- "name": "tests/.gitignore",
+ "name": "tests/integration/targets/podman_play/tasks/play-with-build.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5726d3ec9335a09c124469eca039523847a6b0f08a083efaefd002b83326600",
- "format": 1
- },
- {
- "name": "tests/unit",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/unit/plugins",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/unit/plugins/modules",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "dbfd032b6d71ce241ee643cfcaa09997efc32ef57a0f4494982f43caad353678",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_container_lib.py",
+ "name": "tests/integration/targets/podman_play/tasks/play-with-options.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08beb5d507944be15d2c0e01081fa99c43d522e813d5ecde2b038d71132e2d7d",
+ "chksum_sha256": "fcf6f0e7a8768422425a667d29e28c831e4f284102ab3b62d8c7eb4b0fca6083",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_common.py",
+ "name": "tests/integration/targets/podman_play/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79b1c212210b13bb60b8d97b140351ce088707f3574b8fc61b076e827e050891",
+ "chksum_sha256": "164bc510cd1be9cb89f2d4845154d7f5854149ca347cdd4f2a115e70316c10f2",
"format": 1
},
{
- "name": "tests/integration",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/podman_play/tasks/play-multi-yaml.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "099247fe1f2666caf7035895336008b05e7e773353747c18015d36d795d9afaa",
"format": 1
},
{
- "name": "tests/integration/targets",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
@@ -988,17 +1023,17 @@
"format": 1
},
{
- "name": "tests/integration/targets/podman_pod/tasks/main.yml",
+ "name": "tests/integration/targets/podman_pod/tasks/resource-limit.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c065036caae1e7933a92eead273136e4ebc1340257766746ad8d4954269bc10",
+ "chksum_sha256": "e18d34a46646071490b1deed714332f62bd5e1493ebdefb4891126cb3221ff1d",
"format": 1
},
{
"name": "tests/integration/targets/podman_pod/tasks/root-pod.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32a941c1ab343de12be5b6b942885697a47a90e70b4ba8b8f6b41830fd748be7",
+ "chksum_sha256": "d05ccfd693dfcfdff20c1e073d771c05e7f0d2ff026902c23abbc5da0c112e60",
"format": 1
},
{
@@ -1009,584 +1044,507 @@
"format": 1
},
{
- "name": "tests/integration/targets/podman_pod/tasks/net-pod.yml",
+ "name": "tests/integration/targets/podman_pod/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0f34f0ea7dc20a24b1a0bd29ef7b71fc5b145fd6ee364cea413c01cc858c30d",
+ "chksum_sha256": "ad501d98456446f3316f11a4f18aaa9501da1f90a08aa2c795424600487b9c40",
"format": 1
},
{
- "name": "tests/integration/targets/podman_pod/tasks/resource-limit.yml",
+ "name": "tests/integration/targets/podman_pod/tasks/idempotency.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c401b840f46b7d047ffff490a03eb414be614b23f41ac053bf85b2b9e2eaef44",
+ "chksum_sha256": "f02a7b07d8e3666e73781bfc9f9e3f26a0eda27a748c966b750ed8bd1ae0e88a",
"format": 1
},
{
- "name": "tests/integration/targets/podman_containers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/podman_pod/tasks/net-pod.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a0f34f0ea7dc20a24b1a0bd29ef7b71fc5b145fd6ee364cea413c01cc858c30d",
"format": 1
},
{
- "name": "tests/integration/targets/podman_containers/tasks",
+ "name": "tests/integration/targets/podman_network",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_containers/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f0693a6b04d4dc0d91ff26d5c155f0dde26d8b54d38bad24379b4b45115572ba",
+ "name": "tests/integration/targets/podman_network/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_containers/tasks/root-multi.yml",
+ "name": "tests/integration/targets/podman_network/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99e92a4d72c87817aa74687d7a9fc99da5adbeeaffb04fa8da55cac88d2082ad",
+ "chksum_sha256": "3d3ee9ff231e105ebbfc37678d8519cd1a90046dd41116f834d9544b86eca503",
"format": 1
},
{
- "name": "tests/integration/targets/podman_import",
+ "name": "tests/integration/targets/podman_secret_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_import/tasks",
+ "name": "tests/integration/targets/podman_secret_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_import/tasks/main.yml",
+ "name": "tests/integration/targets/podman_secret_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aee5d8c4cdcd8ca56109a6fcc93be0dcce03ebc65cf6c8d606b1e195ad7bc37a",
+ "chksum_sha256": "3181d366cf3f25687ff6cceaf0677803bedc1d64e6b766fa66d5882c89240674",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_exec",
+ "name": "tests/integration/targets/podman_save",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_exec/tasks",
+ "name": "tests/integration/targets/podman_save/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_exec/tasks/main.yml",
+ "name": "tests/integration/targets/podman_save/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e0ddeed9ec3f80a8dbe98de6b8ca92d6906a8ed8b4e732b57d7aa56ea603766",
+ "chksum_sha256": "18dd5ac7950abeb6dcfdb97f4b9b6e00aeacd1be3b41dae762cca6e237bbbf93",
"format": 1
},
{
- "name": "tests/integration/targets/podman_export",
+ "name": "tests/integration/targets/podman_image_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_export/tasks",
+ "name": "tests/integration/targets/podman_image_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_export/tasks/main.yml",
+ "name": "tests/integration/targets/podman_image_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "608c9d218d3e66b53ad1b5f2b3d6c295ca109edf2d82b08eccc3d8840278f4ab",
+ "chksum_sha256": "2e2b22b24c5afbaf5f7565c37a37cd0294bb0be58d758a75fca3cf7828f283ac",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container",
+ "name": "tests/integration/targets/podman_prune",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_container/tasks",
+ "name": "tests/integration/targets/podman_prune/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_container/tasks/main.yml",
+ "name": "tests/integration/targets/podman_prune/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "75e7c7c41efe6771b55a034dbeeb25b2ecfb16df84db581e3ba58a16f8192fbb",
+ "chksum_sha256": "ea735326ddf2a4077cab00b44e2c064c686d90d5d55e0c373a02c045aaabddf4",
"format": 1
},
{
- "name": "tests/integration/targets/podman_image",
+ "name": "tests/integration/targets/podman_runlabel",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_image/tasks",
+ "name": "tests/integration/targets/podman_runlabel/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_image/tasks/main.yml",
+ "name": "tests/integration/targets/podman_runlabel/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d8f32e61e6e8357c8a08b3ec698a9a599b10b9dfdb0a03cef829d22b5eb177d",
+ "chksum_sha256": "0beafe98862363c62951e9c1a1398446a78b4e00aee1c91a7343e38eb5380fb0",
"format": 1
},
{
- "name": "tests/integration/targets/podman_image/files",
+ "name": "tests/integration/targets/podman_runlabel/files",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_image/files/Containerfile",
+ "name": "tests/integration/targets/podman_runlabel/files/Dockerfile",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec6bc9968ca494ec22fcb0bed27fc12ddf2241fea1fe58d0d8a4f0b33487b506",
+ "chksum_sha256": "4afccdec25a964d39944882c25dbb0df6f0f844cfcfb72bbd89bb505167d1241",
"format": 1
},
{
- "name": "tests/integration/targets/connection",
+ "name": "tests/integration/targets/podman_runlabel/files/testinstall.sh",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6450871e198c27b65e85883b3ca93b78f63261667b2332689d7f5b01fd293077",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/podman_secret",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/connection/create-nonroot-user.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7af8e41655a36f526418cadf93e37da7117cc67761e34ab9c41b68ac8c00a268",
+ "name": "tests/integration/targets/podman_secret/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/connection/test_connection.yml",
+ "name": "tests/integration/targets/podman_secret/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b9bd8af9cbdf99e7d47189d08b57d0eb5030fde078c88a3387d2af701ae85a8",
+ "chksum_sha256": "2c52c682319e7f943068ed557d1e60962ec793e1883c0cdbbc03df3e5eedb4e0",
"format": 1
},
{
- "name": "tests/integration/targets/podman_volume",
+ "name": "tests/integration/targets/podman_import",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_volume/tasks",
+ "name": "tests/integration/targets/podman_import/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_volume/tasks/main.yml",
+ "name": "tests/integration/targets/podman_import/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "02a99c50e7aebf79d8a7d254f9c2c25f03bfa7f940b3593907d0b26bd018880e",
+ "chksum_sha256": "aee5d8c4cdcd8ca56109a6fcc93be0dcce03ebc65cf6c8d606b1e195ad7bc37a",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_info",
+ "name": "tests/integration/targets/podman_generate_systemd",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_info/tasks",
+ "name": "tests/integration/targets/podman_generate_systemd/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_info/tasks/main.yml",
+ "name": "tests/integration/targets/podman_generate_systemd/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f753b105c6b8ca293911422dc3372b82e98ba5b3f7065cac0a20976933a0c03f",
+ "chksum_sha256": "e8a7e30fe0a898ba81b62400058b9bab61da64e588148084ddb14911943d2b8b",
"format": 1
},
{
- "name": "tests/integration/targets/podman_volume_info",
+ "name": "tests/integration/targets/connection",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_volume_info/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/connection/test_connection.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3b9bd8af9cbdf99e7d47189d08b57d0eb5030fde078c88a3387d2af701ae85a8",
"format": 1
},
{
- "name": "tests/integration/targets/podman_volume_info/tasks/main.yml",
+ "name": "tests/integration/targets/connection/create-nonroot-user.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d951a17810a5d38195f5b6b9f2b61997be78c33f425b3d6c8424250d658d4115",
+ "chksum_sha256": "7af8e41655a36f526418cadf93e37da7117cc67761e34ab9c41b68ac8c00a268",
"format": 1
},
{
- "name": "tests/integration/targets/podman_runlabel",
+ "name": "tests/integration/targets/podman_search",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_runlabel/tasks",
+ "name": "tests/integration/targets/podman_search/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_runlabel/tasks/main.yml",
+ "name": "tests/integration/targets/podman_search/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0beafe98862363c62951e9c1a1398446a78b4e00aee1c91a7343e38eb5380fb0",
+ "chksum_sha256": "b1a8962f4a6a36c9c67325449fb05ac7bb71bb016e2612657d3a498295e08a5b",
"format": 1
},
{
- "name": "tests/integration/targets/podman_runlabel/files",
+ "name": "tests/integration/targets/connection_buildah",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_runlabel/files/testinstall.sh",
+ "name": "tests/integration/targets/connection_buildah/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6450871e198c27b65e85883b3ca93b78f63261667b2332689d7f5b01fd293077",
+ "chksum_sha256": "b4d64b01f03eb7aa81f3fbe7202c816188bc8432fedccb5251feeb330f7c16e4",
"format": 1
},
{
- "name": "tests/integration/targets/podman_runlabel/files/Dockerfile",
+ "name": "tests/integration/targets/connection_buildah/test_connection.inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4afccdec25a964d39944882c25dbb0df6f0f844cfcfb72bbd89bb505167d1241",
+ "chksum_sha256": "5012fc0a2af812b87b675f31cd605ecf8d80fe32667b6ed20ea51fe0c8685742",
"format": 1
},
{
- "name": "tests/integration/targets/podman_image_info",
+ "name": "tests/integration/targets/podman_login",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_image_info/tasks",
+ "name": "tests/integration/targets/podman_login/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_image_info/tasks/main.yml",
+ "name": "tests/integration/targets/podman_login/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e2b22b24c5afbaf5f7565c37a37cd0294bb0be58d758a75fca3cf7828f283ac",
+ "chksum_sha256": "5156c546ca2b45f035ae5b28a7220e06c4fb8051f93a6077e0308b06cafa15b3",
"format": 1
},
{
- "name": "tests/integration/targets/podman_load",
+ "name": "tests/integration/targets/podman_tag",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_load/tasks",
+ "name": "tests/integration/targets/podman_tag/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_load/tasks/main.yml",
+ "name": "tests/integration/targets/podman_tag/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "078d5ae2eb9bf74046e34c6e7c003e87bb6e08f1a6fd49fcf5d6bb2174bae393",
+ "chksum_sha256": "2d0f2ae65dac8386961528eb33d02de73fe60026cb5566ffa68b4467b7c45678",
"format": 1
},
{
- "name": "tests/integration/targets/podman_play",
+ "name": "tests/integration/targets/podman_volume",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_play/tasks",
+ "name": "tests/integration/targets/podman_volume/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_play/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "164bc510cd1be9cb89f2d4845154d7f5854149ca347cdd4f2a115e70316c10f2",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/play-with-build.yml",
+ "name": "tests/integration/targets/podman_volume/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dbfd032b6d71ce241ee643cfcaa09997efc32ef57a0f4494982f43caad353678",
+ "chksum_sha256": "0bb81579b5e53043c413e615d9beb0a55f8606d89e6fa5cf7a1a63b598df38a2",
"format": 1
},
{
- "name": "tests/integration/targets/podman_play/tasks/files",
+ "name": "tests/integration/targets/podman_image",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_play/tasks/files/play-root3.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3e26cb69976a1b7d1f79a5dafc74e368d9297bb63b29c1c213d5bbddc0e62bf3",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/play1.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1031804932a82f741e8cddaa397e73554e4784a790a3d4493d1b5c8f5f6bbf7e",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/Containerfile",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c0268928bf1b6b6307404eff5d25e292b78239d330dc585c87d810db40307609",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/play3.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c92c31b3ad0cd9992a6a9e391d07d69b2b9f3cc39b410e1cbc710bda40323b3d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/kube-buil-test.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c813ffc1f16f8324acbbaad0cf5ad7ee00e0ebd47f314b9ad74f24fd9e2177b0",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/play-pod.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "28fd560a9ff59811770316563e3320dbd77ed04c2c2dd9d698c54870fbcaafb4",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/envdata.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d5ebdb092f5473ae5174dfbac83b6623343326f308d02eea8fb58fbb7b42d264",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/multi-yaml.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "984f73601cc75a5052b22b3d9a670ea93e7cc0c4f3fe2fb89f4f341392fb8af6",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/files/play-root1.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "049f1ad67c57e019423f28c77ca8cec13e714ae8727fedfde71792ec56131344",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_play/tasks/play-with-options.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fcf6f0e7a8768422425a667d29e28c831e4f284102ab3b62d8c7eb4b0fca6083",
+ "name": "tests/integration/targets/podman_image/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_play/tasks/root-play.yml",
+ "name": "tests/integration/targets/podman_image/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65c0fccbaf7c464b539f895addc0cf3e7536f9cb8b9f7d8e28725a90d345326b",
+ "chksum_sha256": "c9910a4f7ed79f2cdbd13eb9b32427539a00f4c12849e3a6f0870351de97cce0",
"format": 1
},
{
- "name": "tests/integration/targets/podman_play/tasks/play-multi-yaml.yml",
+ "name": "tests/integration/targets/podman_image/tasks/idem_push.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "099247fe1f2666caf7035895336008b05e7e773353747c18015d36d795d9afaa",
- "format": 1
- },
- {
- "name": "tests/integration/targets/podman_logout",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "fc8551f226392dd186bbebefb3dc336ef5b0ce3d2ac2708ba6bb09a4706fee64",
"format": 1
},
{
- "name": "tests/integration/targets/podman_logout/tasks",
+ "name": "tests/integration/targets/podman_image/files",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_logout/tasks/main.yml",
+ "name": "tests/integration/targets/podman_image/files/Containerfile",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e1c3fcd8cd0d917e7faad2dc447381ec2ccff2bc3a57b02060cd965ffbb45c1",
+ "chksum_sha256": "ec6bc9968ca494ec22fcb0bed27fc12ddf2241fea1fe58d0d8a4f0b33487b506",
"format": 1
},
{
- "name": "tests/integration/targets/connection_buildah",
+ "name": "tests/integration/targets/connection_podman",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/connection_buildah/test_connection.inventory",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5012fc0a2af812b87b675f31cd605ecf8d80fe32667b6ed20ea51fe0c8685742",
- "format": 1
- },
- {
- "name": "tests/integration/targets/connection_buildah/runme.sh",
+ "name": "tests/integration/targets/connection_podman/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4d64b01f03eb7aa81f3fbe7202c816188bc8432fedccb5251feeb330f7c16e4",
+ "chksum_sha256": "06a213f3e9c159069f1fd055653438f8487d1b8939ee735a0025b151ef2a4626",
"format": 1
},
{
- "name": "tests/integration/targets/__init__.py",
+ "name": "tests/integration/targets/connection_podman/test_connection.inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "05ecc4306d853f3a6057303bbcc0750d7fbb86e044cd135faf01c35ad06b6d25",
"format": 1
},
{
- "name": "tests/integration/targets/podman_tag",
+ "name": "tests/integration/targets/podman_pod_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_tag/tasks",
+ "name": "tests/integration/targets/podman_pod_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_tag/tasks/main.yml",
+ "name": "tests/integration/targets/podman_pod_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d0f2ae65dac8386961528eb33d02de73fe60026cb5566ffa68b4467b7c45678",
+ "chksum_sha256": "f04166aa3908442835ca6e7f747429d54cdeb8bcbf272e3977de79f3ad653271",
"format": 1
},
{
- "name": "tests/integration/targets/podman_login_info",
+ "name": "tests/integration/targets/podman_volume_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_login_info/tasks",
+ "name": "tests/integration/targets/podman_volume_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_login_info/tasks/main.yml",
+ "name": "tests/integration/targets/podman_volume_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21ef50fdd0d92d39240b082c1f9ba3782478e615863d3e8d9a0960b175b8158e",
+ "chksum_sha256": "d951a17810a5d38195f5b6b9f2b61997be78c33f425b3d6c8424250d658d4115",
"format": 1
},
{
- "name": "tests/integration/targets/podman_network_info",
+ "name": "tests/integration/targets/podman_logout",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_network_info/tasks",
+ "name": "tests/integration/targets/podman_logout/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_network_info/tasks/main.yml",
+ "name": "tests/integration/targets/podman_logout/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d60cdf4f76199919395dd477e7feb32d8ebbb4b736d0a13052290a3fcb5e5e3",
+ "chksum_sha256": "2e1c3fcd8cd0d917e7faad2dc447381ec2ccff2bc3a57b02060cd965ffbb45c1",
"format": 1
},
{
- "name": "tests/integration/targets/podman_pod_info",
+ "name": "tests/integration/targets/podman_export",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_pod_info/tasks",
+ "name": "tests/integration/targets/podman_export/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_pod_info/tasks/main.yml",
+ "name": "tests/integration/targets/podman_export/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f04166aa3908442835ca6e7f747429d54cdeb8bcbf272e3977de79f3ad653271",
+ "chksum_sha256": "608c9d218d3e66b53ad1b5f2b3d6c295ca109edf2d82b08eccc3d8840278f4ab",
"format": 1
},
{
@@ -1604,24 +1562,24 @@
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/main.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa7ee1e6dad1b84726df6521d749c31263b9d6d2cb7c5ab38b04ddf8fadb58f9",
+ "chksum_sha256": "01a8f61e9177ce7fc22f776030737398365effde88cb567f916a23ff93f052d1",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_network_aliases.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/root-podman-network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04c6bf4fd56edcc1a49a47e98f448c89cf8e90e1d7d67db9997f0af94ef09dc6",
+ "chksum_sha256": "ab1cc0f347537ab4403395b2b2b43f815fe49c7ae35b88b498931fdcb33abd75",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_pods.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "407d946210fd0801b2626b36ca0921299c75d644ff508caeac1cc2fa49ef6c50",
+ "chksum_sha256": "53d129901494fa24a8075ade0576e2536ae3cbfc5d8fb8d116a10978594a6a49",
"format": 1
},
{
@@ -1632,80 +1590,87 @@
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_networks.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a0f1de00361a8381b06934149d6433afd307d010d7396be307f7d7f6f27a52e",
+ "chksum_sha256": "f41df0c66fb7820bc442c4e377471942973b8e90ce266a67fcb87173c73859e7",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "36f44ffaa058046e62d0e53be0f7edf8232d9145ec44a948c64c15d0c0c3cd23",
+ "chksum_sha256": "d0d89918194de3212ecfe1934bb34c21ec228048703470880007d2521fb83db9",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed7d2ee3e9e352efc0266c2264469b3f759f1630562794c98f5da83949506bec",
+ "chksum_sha256": "63719f6d21b5a56e4a7de0a0f6bb236bc3fb1a7a64cb6f0aa93d75be187a022b",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_pods.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53d129901494fa24a8075ade0576e2536ae3cbfc5d8fb8d116a10978594a6a49",
+ "chksum_sha256": "6ddc4448c079d01f96c5acd62b09a2a8d58906e4186b42cd39b4c50e847d6aac",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_networks.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23d729589e61ae790bd81b281f89b48b443b830cf3a6932eb0bdf4289289f677",
+ "chksum_sha256": "3a0f1de00361a8381b06934149d6433afd307d010d7396be307f7d7f6f27a52e",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_network_aliases.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc446d64740817d2b04de867ac5e17e3118a4191e332f1c4bc493a34bc58b298",
+ "chksum_sha256": "04c6bf4fd56edcc1a49a47e98f448c89cf8e90e1d7d67db9997f0af94ef09dc6",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_systemd.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "272808017955d5ef59448a1b9ce1bb98d18bce0b1e4b658d1fe6c5b4fcdc884b",
+ "chksum_sha256": "a2f2829caef8339de258439e928c1a2d8ea16421d0099da8dd6fdd036ba8c5e6",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/root-podman-network.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab1cc0f347537ab4403395b2b2b43f815fe49c7ae35b88b498931fdcb33abd75",
+ "chksum_sha256": "e1337d6faa4cfac0bd54de02973b2ac42f21fdfa89d578266a067a34d6a87840",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_bool_list_dict.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f7ccc04c97fb27dfdf4462cc73f74e244c76759ce0a5b4b7d785e6f8a50b7a56",
+ "chksum_sha256": "6bc8ca04d68777522b76a2d419efb593e20d3ebaab67635355a2395921b644fa",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "262e884e0b4a561fbc57726f13763e3b07a92786386c9a5c7ccd13270e38b475",
+ "chksum_sha256": "6d33125ca125b9e4f765246e2d47ee48e633e4916ea6383c1d41f3fe4ca5ce44",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml",
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_systemd.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e330c3f7359aaf7e6e3379db977b909e04bcec89b894f1f8f0d44dfebdecffd",
+ "chksum_sha256": "272808017955d5ef59448a1b9ce1bb98d18bce0b1e4b658d1fe6c5b4fcdc884b",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d224090cd361155825706088a76172956d222c51141781f7d51c26356c1b4958",
"format": 1
},
{
@@ -1723,185 +1688,276 @@
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/files/start.sh",
+ "name": "tests/integration/targets/podman_container_idempotency/files/Dockerfile",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb80ab18f5df29c7c77f8acea5285af4f0f4d1ddac161801d0ff26ac242c2d86",
+ "chksum_sha256": "002a8a2bf46c5da32a3583bc93c08347c6f7e16c51cd5abdf562d77df313da5c",
"format": 1
},
{
- "name": "tests/integration/targets/podman_container_idempotency/files/Dockerfile",
+ "name": "tests/integration/targets/podman_container_idempotency/files/start.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "002a8a2bf46c5da32a3583bc93c08347c6f7e16c51cd5abdf562d77df313da5c",
+ "chksum_sha256": "bb80ab18f5df29c7c77f8acea5285af4f0f4d1ddac161801d0ff26ac242c2d86",
"format": 1
},
{
- "name": "tests/integration/targets/podman_secret",
+ "name": "tests/integration/targets/podman_containers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_secret/tasks",
+ "name": "tests/integration/targets/podman_containers/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_secret/tasks/main.yml",
+ "name": "tests/integration/targets/podman_containers/tasks/root-multi.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9b9415b024f3f617149c74d980afccb22e7223dac8bb673465504507ca4e2aa",
+ "chksum_sha256": "99e92a4d72c87817aa74687d7a9fc99da5adbeeaffb04fa8da55cac88d2082ad",
"format": 1
},
{
- "name": "tests/integration/targets/podman_network",
+ "name": "tests/integration/targets/podman_containers/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f0693a6b04d4dc0d91ff26d5c155f0dde26d8b54d38bad24379b4b45115572ba",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/podman_container_exec",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_network/tasks",
+ "name": "tests/integration/targets/podman_container_exec/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_network/tasks/main.yml",
+ "name": "tests/integration/targets/podman_container_exec/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8415501667bf5315c747424b0ffbe15b3283803e26f52d78d7de13836bba1e90",
+ "chksum_sha256": "0e0ddeed9ec3f80a8dbe98de6b8ca92d6906a8ed8b4e732b57d7aa56ea603766",
"format": 1
},
{
- "name": "tests/integration/targets/podman_login",
+ "name": "tests/integration/targets/podman_login_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_login/tasks",
+ "name": "tests/integration/targets/podman_login_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_login/tasks/main.yml",
+ "name": "tests/integration/targets/podman_login_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5156c546ca2b45f035ae5b28a7220e06c4fb8051f93a6077e0308b06cafa15b3",
+ "chksum_sha256": "21ef50fdd0d92d39240b082c1f9ba3782478e615863d3e8d9a0960b175b8158e",
"format": 1
},
{
- "name": "tests/integration/targets/podman_save",
+ "name": "tests/integration/targets/podman_container_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_save/tasks",
+ "name": "tests/integration/targets/podman_container_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_save/tasks/main.yml",
+ "name": "tests/integration/targets/podman_container_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18dd5ac7950abeb6dcfdb97f4b9b6e00aeacd1be3b41dae762cca6e237bbbf93",
+ "chksum_sha256": "f753b105c6b8ca293911422dc3372b82e98ba5b3f7065cac0a20976933a0c03f",
"format": 1
},
{
- "name": "tests/integration/targets/podman_secret_info",
+ "name": "tests/integration/targets/podman_container",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_secret_info/tasks",
+ "name": "tests/integration/targets/podman_container/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_secret_info/tasks/main.yml",
+ "name": "tests/integration/targets/podman_container/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3181d366cf3f25687ff6cceaf0677803bedc1d64e6b766fa66d5882c89240674",
+ "chksum_sha256": "75e7c7c41efe6771b55a034dbeeb25b2ecfb16df84db581e3ba58a16f8192fbb",
"format": 1
},
{
- "name": "tests/integration/targets/podman_generate_systemd",
+ "name": "tests/integration/targets/podman_network_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_generate_systemd/tasks",
+ "name": "tests/integration/targets/podman_network_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_generate_systemd/tasks/main.yml",
+ "name": "tests/integration/targets/podman_network_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8a7e30fe0a898ba81b62400058b9bab61da64e588148084ddb14911943d2b8b",
+ "chksum_sha256": "5d60cdf4f76199919395dd477e7feb32d8ebbb4b736d0a13052290a3fcb5e5e3",
"format": 1
},
{
- "name": "tests/integration/targets/connection_podman",
+ "name": "tests/.gitignore",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b5726d3ec9335a09c124469eca039523847a6b0f08a083efaefd002b83326600",
+ "format": 1
+ },
+ {
+ "name": "tests/unit",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/connection_podman/test_connection.inventory",
+ "name": "tests/unit/plugins",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/test_container_lib.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05ecc4306d853f3a6057303bbcc0750d7fbb86e044cd135faf01c35ad06b6d25",
+ "chksum_sha256": "fe86470d10790cb360f699533fdd2155097240ac4c7530648a81c62642a92b6d",
"format": 1
},
{
- "name": "tests/integration/targets/connection_podman/runme.sh",
+ "name": "tests/unit/plugins/modules/test_common.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06a213f3e9c159069f1fd055653438f8487d1b8939ee735a0025b151ef2a4626",
+ "chksum_sha256": "79b1c212210b13bb60b8d97b140351ce088707f3574b8fc61b076e827e050891",
"format": 1
},
{
- "name": "tests/integration/targets/podman_prune",
+ "name": "tests/sanity",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/podman_prune/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/sanity/ignore-2.16.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
"format": 1
},
{
- "name": "tests/integration/targets/podman_prune/tasks/main.yml",
+ "name": "tests/sanity/ignore-2.12.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea735326ddf2a4077cab00b44e2c064c686d90d5d55e0c373a02c045aaabddf4",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.11.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.18.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.9.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.13.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.15.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.14.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.10.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.17.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "67fc6067f750369421d81d68b9f2d30afe4eb13f9d0963eaa1aa07a8c924556b",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "471c16a6346df1b74757306a436cbab143fe65c30e6f9cda1f5c7179d6012b73",
"format": 1
}
],
diff --git a/ansible_collections/containers/podman/MANIFEST.json b/ansible_collections/containers/podman/MANIFEST.json
index ac80755e7..15749ba5a 100644
--- a/ansible_collections/containers/podman/MANIFEST.json
+++ b/ansible_collections/containers/podman/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "containers",
"name": "podman",
- "version": "1.13.0",
+ "version": "1.15.2",
"authors": [
"Sagi Shnaidman <sshnaidm@redhat.com>",
"Ansible team"
@@ -28,7 +28,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7ca356d6a22a0996ed4045ff5711e5cf96cd60a62009e0da6f63fea100d538f",
+ "chksum_sha256": "edb1850aa75bf5e77e1c7bf9f3bcbd65ebb410ccbd8cfce2b7ee8ab5b36759af",
"format": 1
},
"format": 1
diff --git a/ansible_collections/containers/podman/changelogs/changelog.yaml b/ansible_collections/containers/podman/changelogs/changelog.yaml
index bb58cacf4..6e9c6ae4d 100644
--- a/ansible_collections/containers/podman/changelogs/changelog.yaml
+++ b/ansible_collections/containers/podman/changelogs/changelog.yaml
@@ -290,6 +290,55 @@ releases:
- Add quadlet support for Podman modules
release_summary: Quadlet support for Podman
release_date: '2024-04-22'
+ 1.14.0:
+ changes:
+ bugfixes:
+ - Fix idempotency for pod with 0.0.0.0
+ - Fix transports issues in podman_image
+ - fix(#747) set correct HealthCmd
+ major_changes:
+ - Add mount and unmount for volumes
+ - Add multiple subnets for networks
+ - Add new options for podman_container
+ - Add new options to pod module
+ - Improve idempotency for networking in podman_container
+ - Redesign idempotency for Podman Pod module
+ minor_changes:
+ - Add autodiscovery for build context in podman_image
+ - Add docs, tests and more examples for podman_pod
+ - Add extra_args for podman_image push and pull
+ - Add idempotency for mounts and volumes in podman_container
+ - Add new functionality tests for podman_secret
+ - Add path and env options for podman_secret
+ - Add route, dns and ipam_driver to podman_network
+ - Create podman secret when skip_existing=True and it does not exist
+ release_summary: Idempotency redesign and a lot of new options
+ release_date: '2024-05-30'
+ 1.15.0:
+ changes:
+ bugfixes:
+ - Fix issue with pushing podman image to repo name and org
+ major_changes:
+ - Add podman search
+ release_summary: Podman search module and bugfixes for Podman image
+ modules:
+ - description: Search for remote images using podman
+ name: podman_search
+ namespace: ''
+ release_date: '2024-06-03'
+ 1.15.1:
+ changes:
+ bugfixes:
+ - Fix idempotency for pods in case of systemd generation
+ - Fix idempotency for systemd generations
+ release_summary: Bugfixes for systemd generation
+ release_date: '2024-06-07'
+ 1.15.2:
+ changes:
+ minor_changes:
+ - Add option for inline Containerfile in podman_image
+ release_summary: Inline Containerfile option for podman_image
+ release_date: '2024-06-11'
1.2.0:
changes:
minor_changes:
diff --git a/ansible_collections/containers/podman/docs/buildah_connection.html b/ansible_collections/containers/podman/docs/buildah_connection.html
index 28b73f8bf..0da9548d6 100644
--- a/ansible_collections/containers/podman/docs/buildah_connection.html
+++ b/ansible_collections/containers/podman/docs/buildah_connection.html
@@ -37,7 +37,7 @@
<h1>containers.podman.buildah connection – Interact with an existing buildah container<a class="headerlink" href="#containers-podman-buildah-connection-interact-with-an-existing-buildah-container" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This connection plugin is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This connection plugin is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.</p>
@@ -156,6 +156,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/environment_variables.html b/ansible_collections/containers/podman/docs/environment_variables.html
index 2fc11f2c2..c0389b3d4 100644
--- a/ansible_collections/containers/podman/docs/environment_variables.html
+++ b/ansible_collections/containers/podman/docs/environment_variables.html
@@ -124,6 +124,7 @@ Environment variables used by the ansible-core configuration are documented in <
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/genindex.html b/ansible_collections/containers/podman/docs/genindex.html
index 312e433c8..e2700a3a2 100644
--- a/ansible_collections/containers/podman/docs/genindex.html
+++ b/ansible_collections/containers/podman/docs/genindex.html
@@ -132,6 +132,7 @@
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/index.html b/ansible_collections/containers/podman/docs/index.html
index 8880530d7..a489a75f7 100644
--- a/ansible_collections/containers/podman/docs/index.html
+++ b/ansible_collections/containers/podman/docs/index.html
@@ -34,7 +34,7 @@
<section id="containers-podman">
<span id="plugins-in-containers-podman"></span><h1>Containers.Podman<a class="headerlink" href="#containers-podman" title="Permalink to this heading">¶</a></h1>
-<p>Collection version 1.13.0</p>
+<p>Collection version 1.15.2</p>
<nav class="contents local" id="contents">
<ul class="simple">
<li><p><a class="reference internal" href="#description" id="id1">Description</a></p></li>
@@ -87,6 +87,7 @@
<li><p><a class="reference internal" href="podman_prune_module.html#ansible-collections-containers-podman-podman-prune-module"><span class="std std-ref">podman_prune module</span></a> – Allows to prune various podman objects</p></li>
<li><p><a class="reference internal" href="podman_runlabel_module.html#ansible-collections-containers-podman-podman-runlabel-module"><span class="std std-ref">podman_runlabel module</span></a> – Run given label from given image</p></li>
<li><p><a class="reference internal" href="podman_save_module.html#ansible-collections-containers-podman-podman-save-module"><span class="std std-ref">podman_save module</span></a> – Saves podman image to tar file</p></li>
+<li><p><a class="reference internal" href="podman_search_module.html#ansible-collections-containers-podman-podman-search-module"><span class="std std-ref">podman_search module</span></a> – Search for remote images using podman</p></li>
<li><p><a class="reference internal" href="podman_secret_module.html#ansible-collections-containers-podman-podman-secret-module"><span class="std std-ref">podman_secret module</span></a> – Manage podman secrets</p></li>
<li><p><a class="reference internal" href="podman_secret_info_module.html#ansible-collections-containers-podman-podman-secret-info-module"><span class="std std-ref">podman_secret_info module</span></a> – Gather info about podman secrets</p></li>
<li><p><a class="reference internal" href="podman_tag_module.html#ansible-collections-containers-podman-podman-tag-module"><span class="std std-ref">podman_tag module</span></a> – Add an additional name to a local image</p></li>
@@ -155,6 +156,7 @@
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/objects.inv b/ansible_collections/containers/podman/docs/objects.inv
index e51191bf4..3ae60a5a5 100644
--- a/ansible_collections/containers/podman/docs/objects.inv
+++ b/ansible_collections/containers/podman/docs/objects.inv
Binary files differ
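For context, the updated index above now lists the new podman_search module. A minimal task to try it could look like the sketch below; the term and limit option names are assumptions here, so verify them with ansible-doc containers.podman.podman_search before relying on them.

- name: Search for remote nginx images (sketch; option names assumed)
  containers.podman.podman_search:
    term: nginx          # assumed primary option holding the search string
    limit: 5             # assumed option capping the number of results
  register: search_result

- name: Show the raw search output
  ansible.builtin.debug:
    var: search_result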
diff --git a/ansible_collections/containers/podman/docs/podman_connection.html b/ansible_collections/containers/podman/docs/podman_connection.html
index d5a6f2b6f..f6875f2d8 100644
--- a/ansible_collections/containers/podman/docs/podman_connection.html
+++ b/ansible_collections/containers/podman/docs/podman_connection.html
@@ -36,7 +36,7 @@
<h1>containers.podman.podman connection – Interact with an existing podman container<a class="headerlink" href="#containers-podman-podman-connection-interact-with-an-existing-podman-container" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This connection plugin is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This connection plugin is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.</p>
@@ -188,6 +188,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
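Since the podman connection plugin documented above talks to an already running container, a typical way to wire it up is through inventory variables. This is a hedged sketch; the container name web1 is hypothetical, and with this plugin the inventory hostname is used as the target container.

# Inventory sketch (YAML): manage an existing container directly via podman
all:
  hosts:
    web1:                                     # hypothetical container name
      ansible_connection: containers.podman.podman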
diff --git a/ansible_collections/containers/podman/docs/podman_container_exec_module.html b/ansible_collections/containers/podman/docs/podman_container_exec_module.html
index f93c89afd..13ed78687 100644
--- a/ansible_collections/containers/podman/docs/podman_container_exec_module.html
+++ b/ansible_collections/containers/podman/docs/podman_container_exec_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_container_exec module – Executes a command in a running container.<a class="headerlink" href="#containers-podman-podman-container-exec-module-executes-a-command-in-a-running-container" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -307,6 +307,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
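A quick, hedged usage sketch for podman_container_exec as documented above; the name and command options are taken to be the container name and the command to run, and web1 is a hypothetical container.

- name: Run a one-off command inside a running container (sketch)
  containers.podman.podman_container_exec:
    name: web1                      # hypothetical container name
    command: cat /etc/os-release    # command executed inside the container
  register: exec_out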
diff --git a/ansible_collections/containers/podman/docs/podman_container_info_module.html b/ansible_collections/containers/podman/docs/podman_container_info_module.html
index 516979249..a32520bc7 100644
--- a/ansible_collections/containers/podman/docs/podman_container_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_container_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_container_info module – Gather facts about containers using podman<a class="headerlink" href="#containers-podman-podman-container-info-module-gather-facts-about-containers-using-podman" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -198,6 +198,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
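And a matching sketch for podman_container_info, which gathers inspect-style facts; the name option is a list of container names to query, and the names below are hypothetical.

- name: Gather facts about selected containers (sketch)
  containers.podman.podman_container_info:
    name:
      - web1        # hypothetical container names
      - db1
  register: container_facts

- name: Show the collected container facts
  ansible.builtin.debug:
    var: container_facts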
diff --git a/ansible_collections/containers/podman/docs/podman_container_module.html b/ansible_collections/containers/podman/docs/podman_container_module.html
index 6cb5f17ee..422aeca7a 100644
--- a/ansible_collections/containers/podman/docs/podman_container_module.html
+++ b/ansible_collections/containers/podman/docs/podman_container_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_container module – Manage podman containers<a class="headerlink" href="#containers-podman-podman-container-module-manage-podman-containers" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -85,6 +85,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-arch"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-arch"><strong>arch</strong></p>
+<a class="ansibleOptionLink" href="#parameter-arch" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set the architecture for the container. Overrides the architecture of the image to be pulled, which defaults to the host architecture. For example, arm.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-attach"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-attach"><strong>attach</strong></p>
<a class="ansibleOptionLink" href="#parameter-attach" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
@@ -97,28 +104,28 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-authfile"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-authfile"><strong>authfile</strong></p>
<a class="ansibleOptionLink" href="#parameter-authfile" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path of the authentication file. Default is ``${XDG_RUNTIME_DIR}/containers/auth.json`` (Not available for remote commands) You can also override the default path of the authentication file by setting the ``REGISTRY_AUTH_FILE`` environment variable. ``export REGISTRY_AUTH_FILE=path``</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-blkio_weight"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-blkio-weight"><strong>blkio_weight</strong></p>
<a class="ansibleOptionLink" href="#parameter-blkio_weight" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Block IO weight (relative weight) accepts a weight value between 10 and 1000</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-blkio_weight_device"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-blkio-weight-device"><strong>blkio_weight_device</strong></p>
<a class="ansibleOptionLink" href="#parameter-blkio_weight_device" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Block IO weight (relative device weight, format DEVICE_NAME[:]WEIGHT).</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cap_add"></div>
<div class="ansibleOptionAnchor" id="parameter-capabilities"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-capabilities"><span id="ansible-collections-containers-podman-podman-container-module-parameter-cap-add"></span><strong>cap_add</strong></p>
<a class="ansibleOptionLink" href="#parameter-cap_add" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: capabilities</span></p>
@@ -127,13 +134,20 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>List of capabilities to add to the container.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cap_drop"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cap-drop"><strong>cap_drop</strong></p>
<a class="ansibleOptionLink" href="#parameter-cap_drop" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>List of capabilities to drop from the container.</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-cgroup_conf"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cgroup-conf"><strong>cgroup_conf</strong></p>
+<a class="ansibleOptionLink" href="#parameter-cgroup_conf" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>When running on cgroup v2, specify the cgroup file to write to and its value.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cgroup_parent"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cgroup-parent"><strong>cgroup_parent</strong></p>
<a class="ansibleOptionLink" href="#parameter-cgroup_parent" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
@@ -156,90 +170,97 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-chrootdirs"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-chrootdirs"><strong>chrootdirs</strong></p>
+<a class="ansibleOptionLink" href="#parameter-chrootdirs" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Path to a directory inside the container that is treated as a chroot directory.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cidfile"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cidfile"><strong>cidfile</strong></p>
<a class="ansibleOptionLink" href="#parameter-cidfile" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Write the container ID to the file</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cmd_args"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cmd-args"><strong>cmd_args</strong></p>
<a class="ansibleOptionLink" href="#parameter-cmd_args" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Any additional command options you want to pass to podman command itself, for example <code class="docutils literal notranslate"><span class="pre">--log-level=debug</span></code> or <code class="docutils literal notranslate"><span class="pre">--syslog</span></code>. This is NOT command to run in container, but rather options for podman itself. For container command please use <em>command</em> option.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-command"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-command"><strong>command</strong></p>
<a class="ansibleOptionLink" href="#parameter-command" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">any</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Override command of container. Can be a string or a list.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-conmon_pidfile"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-conmon-pidfile"><strong>conmon_pidfile</strong></p>
<a class="ansibleOptionLink" href="#parameter-conmon_pidfile" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Write the pid of the conmon process to a file. conmon runs in a separate process than Podman, so this is necessary when using systemd to restart Podman containers.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpu_period"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpu-period"><strong>cpu_period</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpu_period" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit the CPU CFS (Completely Fair Scheduler) period</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpu_quota"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpu-quota"><strong>cpu_quota</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpu_quota" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit the CPU CFS (Completely Fair Scheduler) quota</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpu_rt_period"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpu-rt-period"><strong>cpu_rt_period</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpu_rt_period" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit the CPU real-time period in microseconds. Limit the container’s Real Time CPU usage. This flag tell the kernel to restrict the container’s Real Time CPU usage to the period you specify.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpu_rt_runtime"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpu-rt-runtime"><strong>cpu_rt_runtime</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpu_rt_runtime" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit the CPU real-time runtime in microseconds. This flag tells the kernel to limit the amount of time in a given CPU period Real Time tasks may consume.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpu_shares"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpu-shares"><strong>cpu_shares</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpu_shares" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>CPU shares (relative weight)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpus"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpus"><strong>cpus</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpus" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Number of CPUs. The default is 0.0 which means no limit.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpuset_cpus"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpuset-cpus"><strong>cpuset_cpus</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpuset_cpus" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>CPUs in which to allow execution (0-3, 0,1)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-cpuset_mems"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-cpuset-mems"><strong>cpuset_mems</strong></p>
<a class="ansibleOptionLink" href="#parameter-cpuset_mems" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Memory nodes (MEMs) in which to allow execution (0-3, 0,1). Only effective on NUMA systems.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-debug"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-debug"><strong>debug</strong></p>
<a class="ansibleOptionLink" href="#parameter-debug" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -251,6 +272,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-decryption_key"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-decryption-key"><strong>decryption_key</strong></p>
+<a class="ansibleOptionLink" href="#parameter-decryption_key" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>The “key-passphrase” to be used for decryption of images. Key can point to keys and/or certificates.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-delete_depend"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-delete-depend"><strong>delete_depend</strong></p>
<a class="ansibleOptionLink" href="#parameter-delete_depend" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
@@ -309,34 +337,41 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-device_cgroup_rule"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-device-cgroup-rule"><strong>device_cgroup_rule</strong></p>
+<a class="ansibleOptionLink" href="#parameter-device_cgroup_rule" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Add a rule to the cgroup allowed devices list. The rule is expected to be in the format specified in the Linux kernel documentation admin-guide/cgroup-v1/devices.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-device_read_bps"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-device-read-bps"><strong>device_read_bps</strong></p>
<a class="ansibleOptionLink" href="#parameter-device_read_bps" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit read rate (bytes per second) from a device (e.g. device-read-bps /dev/sda:1mb)</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-device_read_iops"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-device-read-iops"><strong>device_read_iops</strong></p>
<a class="ansibleOptionLink" href="#parameter-device_read_iops" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit read rate (IO per second) from a device (e.g. device-read-iops /dev/sda:1000)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-device_write_bps"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-device-write-bps"><strong>device_write_bps</strong></p>
<a class="ansibleOptionLink" href="#parameter-device_write_bps" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit write rate (bytes per second) to a device (e.g. device-write-bps /dev/sda:1mb)</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-device_write_iops"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-device-write-iops"><strong>device_write_iops</strong></p>
<a class="ansibleOptionLink" href="#parameter-device_write_iops" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Limit write rate (IO per second) to a device (e.g. device-write-iops /dev/sda:1000)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-dns"></div>
<div class="ansibleOptionAnchor" id="parameter-dns_servers"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-dns-servers"><span id="ansible-collections-containers-podman-podman-container-module-parameter-dns"></span><strong>dns</strong></p>
<a class="ansibleOptionLink" href="#parameter-dns" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: dns_servers</span></p>
@@ -345,7 +380,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Set custom DNS servers</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-dns_option"></div>
<div class="ansibleOptionAnchor" id="parameter-dns_opts"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-dns-opts"><span id="ansible-collections-containers-podman-podman-container-module-parameter-dns-option"></span><strong>dns_option</strong></p>
<a class="ansibleOptionLink" href="#parameter-dns_option" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: dns_opts</span></p>
@@ -354,7 +389,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Set custom DNS options</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-dns_search"></div>
<div class="ansibleOptionAnchor" id="parameter-dns_search_domains"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-dns-search-domains"><span id="ansible-collections-containers-podman-podman-container-module-parameter-dns-search"></span><strong>dns_search</strong></p>
<a class="ansibleOptionLink" href="#parameter-dns_search" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: dns_search_domains</span></p>
@@ -363,21 +398,21 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Set custom DNS search domains (Use dns_search with ‘’ if you don’t wish to set the search domain)</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-entrypoint"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-entrypoint"><strong>entrypoint</strong></p>
<a class="ansibleOptionLink" href="#parameter-entrypoint" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Overwrite the default ENTRYPOINT of the image</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-env"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-env"><strong>env</strong></p>
<a class="ansibleOptionLink" href="#parameter-env" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set environment variables. This option allows you to specify arbitrary environment variables that are available for the process that will be launched inside of the container.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-env_file"></div>
<div class="ansibleOptionAnchor" id="parameter-env_files"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-env-files"><span id="ansible-collections-containers-podman-podman-container-module-parameter-env-file"></span><strong>env_file</strong></p>
<a class="ansibleOptionLink" href="#parameter-env_file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: env_files</span></p>
@@ -386,7 +421,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Read in a line delimited file of environment variables. Doesn’t support idempotency. If users change the file with environment variables, it’s on them to recreate the container. The file must be present on the REMOTE machine where podman actually runs, not on the controller machine where Ansible is executing. If you need to copy the file from controller to remote machine, use the copy or slurp module.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-env_host"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-env-host"><strong>env_host</strong></p>
<a class="ansibleOptionLink" href="#parameter-env_host" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -398,6 +433,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-env_merge"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-env-merge"><strong>env_merge</strong></p>
+<a class="ansibleOptionLink" href="#parameter-env_merge" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Preprocess default environment variables for the containers</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-etc_hosts"></div>
<div class="ansibleOptionAnchor" id="parameter-add_hosts"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-etc-hosts"><span id="ansible-collections-containers-podman-podman-container-module-parameter-add-hosts"></span><strong>etc_hosts</strong></p>
@@ -592,6 +634,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-gpus"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-gpus"><strong>gpus</strong></p>
+<a class="ansibleOptionLink" href="#parameter-gpus" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>GPU devices to add to the container.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-group_add"></div>
<div class="ansibleOptionAnchor" id="parameter-groups"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-groups"><span id="ansible-collections-containers-podman-podman-container-module-parameter-group-add"></span><strong>group_add</strong></p>
<a class="ansibleOptionLink" href="#parameter-group_add" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: groups</span></p>
@@ -600,16 +649,62 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Add additional groups to run as</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-group_entry"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-group-entry"><strong>group_entry</strong></p>
+<a class="ansibleOptionLink" href="#parameter-group_entry" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Customize the entry that is written to the /etc/group file within the container when –user is used.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-healthcheck"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck"><strong>healthcheck</strong></p>
-<a class="ansibleOptionLink" href="#parameter-healthcheck" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+<div class="ansibleOptionAnchor" id="parameter-health_startup_cmd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-health-startup-cmd"><strong>health_startup_cmd</strong></p>
+<a class="ansibleOptionLink" href="#parameter-health_startup_cmd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
-<td><div class="ansible-option-cell"><p>Set or alter a healthcheck command for a container.</p>
+<td><div class="ansible-option-cell"><p>Set a startup healthcheck command for a container.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-health_startup_interval"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-health-startup-interval"><strong>health_startup_interval</strong></p>
+<a class="ansibleOptionLink" href="#parameter-health_startup_interval" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set an interval for the startup healthcheck.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-health_startup_retries"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-health-startup-retries"><strong>health_startup_retries</strong></p>
+<a class="ansibleOptionLink" href="#parameter-health_startup_retries" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>The number of attempts allowed before the startup healthcheck restarts the container. If set to 0, the container is never restarted. The default is 0.</p>
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-healthcheck_failure_action"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-failure-action"><strong>healthcheck_failure_action</strong></p>
-<a class="ansibleOptionLink" href="#parameter-healthcheck_failure_action" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+<div class="ansibleOptionAnchor" id="parameter-health_startup_success"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-health-startup-success"><strong>health_startup_success</strong></p>
+<a class="ansibleOptionLink" href="#parameter-health_startup_success" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>The number of successful runs required before the startup healthcheck succeeds and the regular healthcheck begins. A value of 0 means that any success begins the regular healthcheck. The default is 0.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-health_startup_timeout"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-health-startup-timeout"><strong>health_startup_timeout</strong></p>
+<a class="ansibleOptionLink" href="#parameter-health_startup_timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>The maximum time a startup healthcheck command has to complete before it is marked as failed.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-healthcheck"></div>
+<div class="ansibleOptionAnchor" id="parameter-health_cmd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck"><span id="ansible-collections-containers-podman-podman-container-module-parameter-health-cmd"></span><strong>healthcheck</strong></p>
+<a class="ansibleOptionLink" href="#parameter-healthcheck" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: health_cmd</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set or alter a healthcheck command for a container.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-healthcheck_failure_action"></div>
+<div class="ansibleOptionAnchor" id="parameter-health_on_failure"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-failure-action"><span id="ansible-collections-containers-podman-podman-container-module-parameter-health-on-failure"></span><strong>healthcheck_failure_action</strong></p>
+<a class="ansibleOptionLink" href="#parameter-healthcheck_failure_action" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: health_on_failure</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>The action to be taken when the container is considered unhealthy. The action must be one of “none”, “kill”, “restart”, or “stop”. The default policy is “none”.</p>
<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
@@ -621,48 +716,63 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-healthcheck_interval"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-interval"><strong>healthcheck_interval</strong></p>
-<a class="ansibleOptionLink" href="#parameter-healthcheck_interval" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-healthcheck_interval"></div>
+<div class="ansibleOptionAnchor" id="parameter-health_interval"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-interval"><span id="ansible-collections-containers-podman-podman-container-module-parameter-health-interval"></span><strong>healthcheck_interval</strong></p>
+<a class="ansibleOptionLink" href="#parameter-healthcheck_interval" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: health_interval</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set an interval for the healthchecks (a value of disable results in no automatic timer setup) (default “30s”)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-healthcheck_retries"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-retries"><strong>healthcheck_retries</strong></p>
-<a class="ansibleOptionLink" href="#parameter-healthcheck_retries" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-healthcheck_retries"></div>
+<div class="ansibleOptionAnchor" id="parameter-health_retries"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-retries"><span id="ansible-collections-containers-podman-podman-container-module-parameter-health-retries"></span><strong>healthcheck_retries</strong></p>
+<a class="ansibleOptionLink" href="#parameter-healthcheck_retries" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: health_retries</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>The number of retries allowed before a healthcheck is considered to be unhealthy. The default value is 3.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-healthcheck_start_period"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-start-period"><strong>healthcheck_start_period</strong></p>
-<a class="ansibleOptionLink" href="#parameter-healthcheck_start_period" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-healthcheck_start_period"></div>
+<div class="ansibleOptionAnchor" id="parameter-health_start_period"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-start-period"><span id="ansible-collections-containers-podman-podman-container-module-parameter-health-start-period"></span><strong>healthcheck_start_period</strong></p>
+<a class="ansibleOptionLink" href="#parameter-healthcheck_start_period" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: health_start_period</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>The initialization time needed for a container to bootstrap. The value can be expressed in time format like 2m3s. The default value is 0s</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-healthcheck_timeout"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-timeout"><strong>healthcheck_timeout</strong></p>
-<a class="ansibleOptionLink" href="#parameter-healthcheck_timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-healthcheck_timeout"></div>
+<div class="ansibleOptionAnchor" id="parameter-health_timeout"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-healthcheck-timeout"><span id="ansible-collections-containers-podman-podman-container-module-parameter-health-timeout"></span><strong>healthcheck_timeout</strong></p>
+<a class="ansibleOptionLink" href="#parameter-healthcheck_timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: health_timeout</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>The maximum time allowed to complete the healthcheck before an interval is considered failed. Like start-period, the value can be expressed in a time format such as 1m22s. The default value is 30s</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-hooks_dir"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-hooks-dir"><strong>hooks_dir</strong></p>
<a class="ansibleOptionLink" href="#parameter-hooks_dir" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Each .json file in the path configures a hook for Podman containers. For more details on the syntax of the JSON files and the semantics of hook injection, see oci-hooks(5). Can be set multiple times.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-hostname"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-hostname"><strong>hostname</strong></p>
<a class="ansibleOptionLink" href="#parameter-hostname" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Container host name. Sets the container host name that is available inside the container.</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-hostuser"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-hostuser"><strong>hostuser</strong></p>
+<a class="ansibleOptionLink" href="#parameter-hostuser" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Add a user account to /etc/passwd from the host to the container. The Username or UID must exist on the host system.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-http_proxy"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-http-proxy"><strong>http_proxy</strong></p>
<a class="ansibleOptionLink" href="#parameter-http_proxy" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
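The hunks above introduce the health_startup_* options and add health_* aliases to the existing healthcheck options. A hedged playbook sketch combining a regular healthcheck with a startup healthcheck follows; the container name, image, and command values are purely illustrative.

- name: Run a container with regular and startup healthchecks (sketch)
  containers.podman.podman_container:
    name: web1                                   # hypothetical container name
    image: quay.io/libpod/alpine_nginx:latest    # illustrative image
    state: started
    healthcheck: "curl -f http://localhost/ || exit 1"
    healthcheck_interval: 30s
    healthcheck_retries: 3
    health_startup_cmd: "curl -f http://localhost/ || exit 1"
    health_startup_interval: 5s
    health_startup_retries: 2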
@@ -721,13 +831,25 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-init_ctr"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-init-ctr"><strong>init_ctr</strong></p>
+<a class="ansibleOptionLink" href="#parameter-init_ctr" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>(Pods only). When using pods, create an init style container, which is run after the infra container is started but before regular pod containers are started.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;once&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;always&quot;</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-init_path"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-init-path"><strong>init_path</strong></p>
<a class="ansibleOptionLink" href="#parameter-init_path" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path to the container-init binary.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-interactive"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-interactive"><strong>interactive</strong></p>
<a class="ansibleOptionLink" href="#parameter-interactive" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -739,13 +861,20 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-ip"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-ip"><strong>ip</strong></p>
<a class="ansibleOptionLink" href="#parameter-ip" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Specify a static IP address for the container, for example ‘10.88.64.128’. Can only be used if no additional CNI networks to join were specified via ‘network:’, and if the container is not joining another container’s network namespace via ‘network container:&lt;name|id&gt;’. The address must be within the default CNI network’s pool (default 10.88.0.0/16).</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-ip6"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-ip6"><strong>ip6</strong></p>
+<a class="ansibleOptionLink" href="#parameter-ip6" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Specify a static IPv6 address for the container</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-ipc"></div>
<div class="ansibleOptionAnchor" id="parameter-ipc_mode"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-ipc-mode"><span id="ansible-collections-containers-podman-podman-container-module-parameter-ipc"></span><strong>ipc</strong></p>
@@ -902,13 +1031,27 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-network_aliases"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-network-aliases"><strong>network_aliases</strong></p>
-<a class="ansibleOptionLink" href="#parameter-network_aliases" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+<div class="ansibleOptionAnchor" id="parameter-network_aliases"></div>
+<div class="ansibleOptionAnchor" id="parameter-network_alias"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-network-aliases"><span id="ansible-collections-containers-podman-podman-container-module-parameter-network-alias"></span><strong>network_aliases</strong></p>
+<a class="ansibleOptionLink" href="#parameter-network_aliases" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: network_alias</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Add network-scoped alias for the container. A container will only have access to aliases on the first network that it joins. This is a limitation that will be removed in a later release.</p>
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-no_healthcheck"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-no-healthcheck"><strong>no_healthcheck</strong></p>
+<a class="ansibleOptionLink" href="#parameter-no_healthcheck" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Disable any defined healthchecks for container.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-no_hosts"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-no-hosts"><strong>no_hosts</strong></p>
<a class="ansibleOptionLink" href="#parameter-no_hosts" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -920,7 +1063,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-oom_kill_disable"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-oom-kill-disable"><strong>oom_kill_disable</strong></p>
<a class="ansibleOptionLink" href="#parameter-oom_kill_disable" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -932,14 +1075,47 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-oom_score_adj"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-oom-score-adj"><strong>oom_score_adj</strong></p>
<a class="ansibleOptionLink" href="#parameter-oom_score_adj" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Tune the host’s OOM preferences for containers (accepts -1000 to 1000)</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-os"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-os"><strong>os</strong></p>
+<a class="ansibleOptionLink" href="#parameter-os" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Override the OS, defaults to hosts, of the image to be pulled. For example, windows.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-passwd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-passwd"><strong>passwd</strong></p>
+<a class="ansibleOptionLink" href="#parameter-passwd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Allow Podman to add entries to /etc/passwd and /etc/group when used in conjunction with the –user option. This is used to override the Podman provided user setup in favor of entrypoint configurations such as libnss-extrausers.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-passwd_entry"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-passwd-entry"><strong>passwd_entry</strong></p>
+<a class="ansibleOptionLink" href="#parameter-passwd_entry" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Customize the entry that is written to the /etc/passwd file within the container when –passwd is used.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-personality"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-personality"><strong>personality</strong></p>
+<a class="ansibleOptionLink" href="#parameter-personality" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Personality sets the execution domain via Linux personality(2).</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-pid"></div>
<div class="ansibleOptionAnchor" id="parameter-pid_mode"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-pid-mode"><span id="ansible-collections-containers-podman-podman-container-module-parameter-pid"></span><strong>pid</strong></p>
<a class="ansibleOptionLink" href="#parameter-pid" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: pid_mode</span></p>
@@ -948,6 +1124,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Set the PID mode for the container</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-pid_file"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-pid-file"><strong>pid_file</strong></p>
+<a class="ansibleOptionLink" href="#parameter-pid_file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>When the pidfile location is specified, the container process’ PID is written to the pidfile.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-pids_limit"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-pids-limit"><strong>pids_limit</strong></p>
<a class="ansibleOptionLink" href="#parameter-pids_limit" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
@@ -956,12 +1139,40 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-platform"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-platform"><strong>platform</strong></p>
+<a class="ansibleOptionLink" href="#parameter-platform" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Specify the platform for selecting the image.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-pod"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-pod"><strong>pod</strong></p>
<a class="ansibleOptionLink" href="#parameter-pod" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Run container in an existing pod. If you want podman to make the pod for you, prefix the pod name with “new:”</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-pod_id_file"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-pod-id-file"><strong>pod_id_file</strong></p>
+<a class="ansibleOptionLink" href="#parameter-pod_id_file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Run container in an existing pod and read the pod’s ID from the specified file. When a container is run within a pod which has an infra-container, the infra-container starts first.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-preserve_fd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-preserve-fd"><strong>preserve_fd</strong></p>
+<a class="ansibleOptionLink" href="#parameter-preserve_fd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Pass down to the process the additional file descriptors specified in the comma separated list.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-preserve_fds"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-preserve-fds"><strong>preserve_fds</strong></p>
+<a class="ansibleOptionLink" href="#parameter-preserve_fds" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Pass down to the process N additional file descriptors (in addition to 0, 1, 2). The total FDs are 3\+N.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-privileged"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-privileged"><strong>privileged</strong></p>
<a class="ansibleOptionLink" href="#parameter-privileged" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
@@ -998,26 +1209,47 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-pull"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-pull"><strong>pull</strong></p>
+<a class="ansibleOptionLink" href="#parameter-pull" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Pull image policy. The default is ‘missing’.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;missing&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;always&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;never&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;newer&quot;</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_dir"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-quadlet-dir"><strong>quadlet_dir</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_dir" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path to the directory to write quadlet file in. By default, it will be set as <code class="docutils literal notranslate"><span class="pre">/etc/containers/systemd/</span></code> for root user, <code class="docutils literal notranslate"><span class="pre">~/.config/containers/systemd/</span></code> for non-root users.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_filename"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-quadlet-filename"><strong>quadlet_filename</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_filename" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Name of quadlet file to write. By default it takes <code class="docutils literal notranslate"><span class="pre">name</span></code> value.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_options"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-quadlet-options"><strong>quadlet_options</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_options" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Options for the quadlet file. Provide missing in usual container args options as a list of lines to add.</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-rdt_class"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-rdt-class"><strong>rdt_class</strong></p>
+<a class="ansibleOptionLink" href="#parameter-rdt_class" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Rdt-class sets the class of service (CLOS or COS) for the container to run in. Requires root.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-read_only"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-read-only"><strong>read_only</strong></p>
<a class="ansibleOptionLink" href="#parameter-read_only" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
@@ -1076,6 +1308,20 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-retry"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-retry"><strong>retry</strong></p>
+<a class="ansibleOptionLink" href="#parameter-retry" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Number of times to retry pulling or pushing images between the registry and local storage in case of failure. Default is 3.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-retry_delay"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-retry-delay"><strong>retry_delay</strong></p>
+<a class="ansibleOptionLink" href="#parameter-retry_delay" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Duration of delay between retry attempts when pulling or pushing images between the registry and local storage in case of failure.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-rm"></div>
<div class="ansibleOptionAnchor" id="parameter-remove"></div>
<div class="ansibleOptionAnchor" id="parameter-auto_remove"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-rm"><span id="ansible-collections-containers-podman-podman-container-module-parameter-remove"></span><span id="ansible-collections-containers-podman-podman-container-module-parameter-auto-remove"></span><strong>rm</strong></p>
@@ -1091,6 +1337,18 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-rmi"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-rmi"><strong>rmi</strong></p>
+<a class="ansibleOptionLink" href="#parameter-rmi" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>After exit of the container, remove the image unless another container is using it. Implies –rm on the new container. The default is false.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-rootfs"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-rootfs"><strong>rootfs</strong></p>
<a class="ansibleOptionLink" href="#parameter-rootfs" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -1102,13 +1360,20 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-sdnotify"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-sdnotify"><strong>sdnotify</strong></p>
<a class="ansibleOptionLink" href="#parameter-sdnotify" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Determines how to use the NOTIFY_SOCKET, as passed with systemd and Type=notify. Can be container, conmon, ignore.</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-seccomp_policy"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-seccomp-policy"><strong>seccomp_policy</strong></p>
+<a class="ansibleOptionLink" href="#parameter-seccomp_policy" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Specify the policy to select the seccomp profile.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-secrets"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-secrets"><strong>secrets</strong></p>
<a class="ansibleOptionLink" href="#parameter-secrets" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
@@ -1131,6 +1396,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-shm_size_systemd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-shm-size-systemd"><strong>shm_size_systemd</strong></p>
+<a class="ansibleOptionLink" href="#parameter-shm_size_systemd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Size of systemd-specific tmpfs mounts such as /run, /run/lock, /var/log/journal and /tmp.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-sig_proxy"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-sig-proxy"><strong>sig_proxy</strong></p>
<a class="ansibleOptionLink" href="#parameter-sig_proxy" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -1142,7 +1414,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-state"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-state"><strong>state</strong></p>
<a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -1163,55 +1435,62 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-stop_signal"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-stop-signal"><strong>stop_signal</strong></p>
<a class="ansibleOptionLink" href="#parameter-stop_signal" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Signal to stop a container. Default is SIGTERM.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-stop_time"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-stop-time"><strong>stop_time</strong></p>
<a class="ansibleOptionLink" href="#parameter-stop_time" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Seconds to wait before forcibly stopping the container. Use -1 for infinite wait. Applies to “stopped” status.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-stop_timeout"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-stop-timeout"><strong>stop_timeout</strong></p>
<a class="ansibleOptionLink" href="#parameter-stop_timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Timeout (in seconds) to stop a container. Default is 10.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-subgidname"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-subgidname"><strong>subgidname</strong></p>
<a class="ansibleOptionLink" href="#parameter-subgidname" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Run the container in a new user namespace using the map with ‘name’ in the /etc/subgid file.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-subuidname"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-subuidname"><strong>subuidname</strong></p>
<a class="ansibleOptionLink" href="#parameter-subuidname" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Run the container in a new user namespace using the map with ‘name’ in the /etc/subuid file.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-sysctl"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-sysctl"><strong>sysctl</strong></p>
<a class="ansibleOptionLink" href="#parameter-sysctl" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Configure namespaced kernel parameters at runtime</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-systemd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-systemd"><strong>systemd</strong></p>
<a class="ansibleOptionLink" href="#parameter-systemd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Run container in systemd mode. The default is true.</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-timeout"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-timeout"><strong>timeout</strong></p>
+<a class="ansibleOptionLink" href="#parameter-timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Maximum time (in seconds) a container is allowed to run before conmon sends it the kill signal. By default containers run until they exit or are stopped by “podman stop”.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-timezone"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-timezone"><strong>timezone</strong></p>
<a class="ansibleOptionLink" href="#parameter-timezone" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
@@ -1220,13 +1499,25 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-tls_verify"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-tls-verify"><strong>tls_verify</strong></p>
+<a class="ansibleOptionLink" href="#parameter-tls_verify" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Require HTTPS and verify certificates when pulling images.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-tmpfs"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-tmpfs"><strong>tmpfs</strong></p>
<a class="ansibleOptionLink" href="#parameter-tmpfs" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Create a tmpfs mount. For example tmpfs “/tmp” “rw,size=787448k,mode=1777”</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-tty"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-tty"><strong>tty</strong></p>
<a class="ansibleOptionLink" href="#parameter-tty" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -1238,14 +1529,14 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-uidmap"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-uidmap"><strong>uidmap</strong></p>
<a class="ansibleOptionLink" href="#parameter-uidmap" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Run the container in a new user namespace using the supplied mapping.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-ulimit"></div>
<div class="ansibleOptionAnchor" id="parameter-ulimits"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-ulimits"><span id="ansible-collections-containers-podman-podman-container-module-parameter-ulimit"></span><strong>ulimit</strong></p>
<a class="ansibleOptionLink" href="#parameter-ulimit" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: ulimits</span></p>
@@ -1254,6 +1545,32 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Ulimit options</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-umask"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-umask"><strong>umask</strong></p>
+<a class="ansibleOptionLink" href="#parameter-umask" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set the umask inside the container. Defaults to 0022. Remote connections use local containers.conf for defaults.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-unsetenv"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-unsetenv"><strong>unsetenv</strong></p>
+<a class="ansibleOptionLink" href="#parameter-unsetenv" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Unset default environment variables for the container.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-unsetenv_all"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-unsetenv-all"><strong>unsetenv_all</strong></p>
+<a class="ansibleOptionLink" href="#parameter-unsetenv_all" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Unset all default environment variables for the container.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-user"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-user"><strong>user</strong></p>
<a class="ansibleOptionLink" href="#parameter-user" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
@@ -1278,6 +1595,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-variant"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-variant"><strong>variant</strong></p>
+<a class="ansibleOptionLink" href="#parameter-variant" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Use VARIANT instead of the default architecture variant of the container image.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-volume"></div>
<div class="ansibleOptionAnchor" id="parameter-volumes"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-volumes"><span id="ansible-collections-containers-podman-podman-container-module-parameter-volume"></span><strong>volume</strong></p>
<a class="ansibleOptionLink" href="#parameter-volume" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: volumes</span></p>
@@ -1286,14 +1610,14 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Create a bind mount. If you specify, volume /HOST-DIR:/CONTAINER-DIR, podman bind mounts /HOST-DIR in the host to /CONTAINER-DIR in the podman container.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-volumes_from"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-volumes-from"><strong>volumes_from</strong></p>
<a class="ansibleOptionLink" href="#parameter-volumes_from" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Mount volumes from the specified container(s).</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-workdir"></div>
<div class="ansibleOptionAnchor" id="parameter-working_dir"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-container-module-parameter-working-dir"><span id="ansible-collections-containers-podman-podman-container-module-parameter-workdir"></span><strong>workdir</strong></p>
<a class="ansibleOptionLink" href="#parameter-workdir" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: working_dir</span></p>
@@ -1494,6 +1818,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_containers_module.html b/ansible_collections/containers/podman/docs/podman_containers_module.html
index adc0dca87..53914c128 100644
--- a/ansible_collections/containers/podman/docs/podman_containers_module.html
+++ b/ansible_collections/containers/podman/docs/podman_containers_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_containers module – Manage podman containers in a batch<a class="headerlink" href="#containers-podman-podman-containers-module-manage-podman-containers-in-a-batch" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -168,6 +168,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_export_module.html b/ansible_collections/containers/podman/docs/podman_export_module.html
index b5e525aa4..c74c58a05 100644
--- a/ansible_collections/containers/podman/docs/podman_export_module.html
+++ b/ansible_collections/containers/podman/docs/podman_export_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_export module – Export a podman container<a class="headerlink" href="#containers-podman-podman-export-module-export-a-podman-container" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -185,6 +185,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_generate_systemd_module.html b/ansible_collections/containers/podman/docs/podman_generate_systemd_module.html
index daec06e3f..cad291774 100644
--- a/ansible_collections/containers/podman/docs/podman_generate_systemd_module.html
+++ b/ansible_collections/containers/podman/docs/podman_generate_systemd_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_generate_systemd module – Generate systemd unit from a pod or a container<a class="headerlink" href="#containers-podman-podman-generate-systemd-module-generate-systemd-unit-from-a-pod-or-a-container" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -427,6 +427,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_image_info_module.html b/ansible_collections/containers/podman/docs/podman_image_info_module.html
index b64277209..0f493199d 100644
--- a/ansible_collections/containers/podman/docs/podman_image_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_image_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_image_info module – Gather info about images using podman<a class="headerlink" href="#containers-podman-podman-image-info-module-gather-info-about-images-using-podman" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.</p>
@@ -187,6 +187,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_image_module.html b/ansible_collections/containers/podman/docs/podman_image_module.html
index 941b06dfa..218526333 100644
--- a/ansible_collections/containers/podman/docs/podman_image_module.html
+++ b/ansible_collections/containers/podman/docs/podman_image_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_image module – Pull images for use by podman<a class="headerlink" href="#containers-podman-podman-image-module-pull-images-for-use-by-podman" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.</p>
@@ -117,6 +117,15 @@ To check whether it is installed, run <code class="code docutils literal notrans
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-build/container_file"></div>
+<div class="ansibleOptionAnchor" id="parameter-build_args/container_file"></div>
+<div class="ansibleOptionAnchor" id="parameter-buildargs/container_file"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-container-file"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-container-file"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-container-file"></span><strong>container_file</strong></p>
+<a class="ansibleOptionLink" href="#parameter-build/container_file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Content of the Containerfile to use for building the image. Mutually exclusive with the <code class="docutils literal notranslate"><span class="pre">file</span></code> option which is path to the existing Containerfile.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/extra_args"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/extra_args"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/extra_args"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-extra-args"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-extra-args"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-extra-args"></span><strong>extra_args</strong></p>
@@ -125,16 +134,16 @@ To check whether it is installed, run <code class="code docutils literal notrans
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Extra args to pass to build, if executed. Does not idempotently check for new build args.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/file"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/file"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/file"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-file"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-file"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-file"></span><strong>file</strong></p>
<a class="ansibleOptionLink" href="#parameter-build/file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
-<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Path to the Containerfile if it is not in the build context directory.</p>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Path to the Containerfile if it is not in the build context directory. Mutually exclusive with the <code class="docutils literal notranslate"><span class="pre">container_file</span></code> option.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/force_rm"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/force_rm"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/force_rm"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-force-rm"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-force-rm"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-force-rm"></span><strong>force_rm</strong></p>
@@ -148,7 +157,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/format"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/format"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/format"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-format"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-format"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-format"></span><strong>format</strong></p>
@@ -162,7 +171,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/rm"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/rm"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/rm"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-rm"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-rm"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-rm"></span><strong>rm</strong></p>
@@ -176,7 +185,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/target"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/target"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/target"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-target"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-target"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-target"></span><strong>target</strong></p>
@@ -185,7 +194,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Specify the target build stage to build.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-build/volume"></div>
<div class="ansibleOptionAnchor" id="parameter-build_args/volume"></div>
<div class="ansibleOptionAnchor" id="parameter-buildargs/volume"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-buildargs-volume"><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-args-volume"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-build-volume"></span><strong>volume</strong></p>
@@ -194,14 +203,14 @@ To check whether it is installed, run <code class="code docutils literal notrans
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Specify multiple volume / mount options to mount one or more mounts to a container.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-ca_cert_dir"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-ca-cert-dir"><strong>ca_cert_dir</strong></p>
<a class="ansibleOptionLink" href="#parameter-ca_cert_dir" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path to directory containing TLS certificates and keys to use.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-executable"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-executable"><strong>executable</strong></p>
<a class="ansibleOptionLink" href="#parameter-executable" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -209,7 +218,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">&quot;podman&quot;</span></code></p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-force"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-force"><strong>force</strong></p>
<a class="ansibleOptionLink" href="#parameter-force" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -222,28 +231,28 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-name"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-name"><strong>name</strong></p>
<a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span> / <span class="ansible-option-required">required</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Name of the image to pull, push, or delete. It may contain a tag using the format <code class="docutils literal notranslate"><span class="pre">image:tag</span></code>.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-password"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-password"><strong>password</strong></p>
<a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Password to use when authenticating to remote registries.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-path"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-path"><strong>path</strong></p>
<a class="ansibleOptionLink" href="#parameter-path" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path to the build context directory.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-pull"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-pull"><strong>pull</strong></p>
<a class="ansibleOptionLink" href="#parameter-pull" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -255,6 +264,13 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-pull_extra_args"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-pull-extra-args"><strong>pull_extra_args</strong></p>
+<a class="ansibleOptionLink" href="#parameter-pull_extra_args" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Extra arguments to pass to the pull command.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-push"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-push"><strong>push</strong></p>
<a class="ansibleOptionLink" href="#parameter-push" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
@@ -297,10 +313,17 @@ To check whether it is installed, run <code class="code docutils literal notrans
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-push_args/extra_args"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-push-args-extra-args"><strong>extra_args</strong></p>
+<a class="ansibleOptionLink" href="#parameter-push_args/extra_args" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Extra args to pass to push, if executed. Does not idempotently check for new push args.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-push_args/format"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-push-args-format"><strong>format</strong></p>
<a class="ansibleOptionLink" href="#parameter-push_args/format" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
-<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Manifest type to use when pushing an image using the ‘dir’ transport (default is manifest type of source).</p>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Manifest type to use when pushing an image using the ‘dir’ transport (default is manifest type of source)</p>
<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
<ul class="simple">
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;oci&quot;</span></code></p></li>
@@ -309,7 +332,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-push_args/remove_signatures"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-push-args-remove-signatures"><strong>remove_signatures</strong></p>
<a class="ansibleOptionLink" href="#parameter-push_args/remove_signatures" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -321,21 +344,22 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-push_args/sign_by"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-push-args-sign-by"><strong>sign_by</strong></p>
<a class="ansibleOptionLink" href="#parameter-push_args/sign_by" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Path to a key file to use to sign the image.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-push_args/transport"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-push-args-transport"><strong>transport</strong></p>
<a class="ansibleOptionLink" href="#parameter-push_args/transport" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
-<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Transport to use when pushing in image. If no transport is set, will attempt to push to a remote registry.</p>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Transport to use when pushing in image. If no transport is set, will attempt to push to a remote registry</p>
<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
<ul class="simple">
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;dir&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;docker&quot;</span></code></p></li>
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;docker-archive&quot;</span></code></p></li>
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;docker-daemon&quot;</span></code></p></li>
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;oci-archive&quot;</span></code></p></li>
@@ -343,28 +367,28 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_dir"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-quadlet-dir"><strong>quadlet_dir</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_dir" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path to the directory to write quadlet file in. By default, it will be set as <code class="docutils literal notranslate"><span class="pre">/etc/containers/systemd/</span></code> for root user, <code class="docutils literal notranslate"><span class="pre">~/.config/containers/systemd/</span></code> for non-root users.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_filename"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-quadlet-filename"><strong>quadlet_filename</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_filename" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Name of quadlet file to write. By default it takes image name without prefixes and tags.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_options"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-quadlet-options"><strong>quadlet_options</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_options" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Options for the quadlet file. Provide missing in usual network args options as a list of lines to add.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-state"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-state"><strong>state</strong></p>
<a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -378,7 +402,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-tag"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-tag"><strong>tag</strong></p>
<a class="ansibleOptionLink" href="#parameter-tag" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -386,14 +410,14 @@ To check whether it is installed, run <code class="code docutils literal notrans
<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">&quot;latest&quot;</span></code></p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-username"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-username"><strong>username</strong></p>
<a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>username to use when authenticating to remote registries.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-validate_certs"></div>
<div class="ansibleOptionAnchor" id="parameter-tlsverify"></div>
<div class="ansibleOptionAnchor" id="parameter-tls_verify"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-image-module-parameter-validate-certs"><span id="ansible-collections-containers-podman-podman-image-module-parameter-tlsverify"></span><span id="ansible-collections-containers-podman-podman-image-module-parameter-tls-verify"></span><strong>validate_certs</strong></p>
@@ -516,6 +540,15 @@ To check whether it is installed, run <code class="code docutils literal notrans
<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">nginx</span>
<span class="w"> </span><span class="nt">arch</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">amd64</span>
+<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Build a container from file inline</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_image</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">mycustom_image</span>
+<span class="w"> </span><span class="nt">state</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">build</span>
+<span class="w"> </span><span class="nt">build</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">container_file</span><span class="p">:</span><span class="w"> </span><span class="p p-Indicator">|-</span>
+<span class="w"> </span><span class="no">FROM alpine:latest</span>
+<span class="w"> </span><span class="no">CMD echo &quot;Hello, World!&quot;</span>
+
<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Create a quadlet file for an image</span>
<span class="w"> </span><span class="nt">containers.podman.podman_image</span><span class="p">:</span>
<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">docker.io/library/alpine:latest</span>
@@ -546,7 +579,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
</div></td>
<td><div class="ansible-option-cell"><p>Image inspection results for the image that was pulled, pushed, or built.</p>
<p class="ansible-option-line"><strong class="ansible-option-returned-bold">Returned:</strong> success</p>
-<p class="ansible-option-line ansible-option-sample"><strong class="ansible-option-sample-bold">Sample:</strong> <code class="ansible-option-sample docutils literal notranslate"><span class="pre">[{&quot;Annotations&quot;:</span> <span class="pre">{},</span> <span class="pre">&quot;Architecture&quot;:</span> <span class="pre">&quot;amd64&quot;,</span> <span class="pre">&quot;Author&quot;:</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;Comment&quot;:</span> <span class="pre">&quot;from</span> <span class="pre">Bitnami</span> <span class="pre">with</span> <span class="pre">love&quot;,</span> <span class="pre">&quot;ContainerConfig&quot;:</span> <span class="pre">{&quot;Cmd&quot;:</span> <span class="pre">[&quot;/run.sh&quot;],</span> <span class="pre">&quot;Entrypoint&quot;:</span> <span class="pre">[&quot;/app-entrypoint.sh&quot;],</span> <span class="pre">&quot;Env&quot;:</span> <span class="pre">[&quot;PATH=/opt/bitnami/java/bin:/opt/bitnami/wildfly/bin:/opt/bitnami/nami/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin&quot;,</span> <span class="pre">&quot;IMAGE_OS=debian-9&quot;,</span> <span class="pre">&quot;NAMI_VERSION=1.0.0-1&quot;,</span> <span class="pre">&quot;GPG_KEY_SERVERS_LIST=ha.pool.sks-keyservers.net&quot;,</span> <span class="pre">&quot;TINI_VERSION=v0.13.2&quot;,</span> <span class="pre">&quot;TINI_GPG_KEY=595E85A6B1B4779EA4DAAEC70B588DFF0527A9B7&quot;,</span> <span class="pre">&quot;GOSU_VERSION=1.10&quot;,</span> <span class="pre">&quot;GOSU_GPG_KEY=B42F6819007F00F88E364FD4036A9C25BF357DD4&quot;,</span> <span class="pre">&quot;BITNAMI_IMAGE_VERSION=16.0.0-debian-9-r27&quot;,</span> <span class="pre">&quot;BITNAMI_PKG_CHMOD=-R</span> <span class="pre">g+rwX&quot;,</span> <span class="pre">&quot;BITNAMI_PKG_EXTRA_DIRS=/home/wildfly&quot;,</span> <span class="pre">&quot;HOME=/&quot;,</span> <span class="pre">&quot;BITNAMI_APP_NAME=wildfly&quot;,</span> <span class="pre">&quot;NAMI_PREFIX=/.nami&quot;,</span> <span class="pre">&quot;WILDFLY_HOME=/home/wildfly&quot;,</span> <span class="pre">&quot;WILDFLY_JAVA_HOME=&quot;,</span> <span class="pre">&quot;WILDFLY_JAVA_OPTS=&quot;,</span> <span class="pre">&quot;WILDFLY_MANAGEMENT_HTTP_PORT_NUMBER=9990&quot;,</span> <span class="pre">&quot;WILDFLY_PASSWORD=bitnami&quot;,</span> <span class="pre">&quot;WILDFLY_PUBLIC_CONSOLE=true&quot;,</span> <span class="pre">&quot;WILDFLY_SERVER_AJP_PORT_NUMBER=8009&quot;,</span> <span class="pre">&quot;WILDFLY_SERVER_HTTP_PORT_NUMBER=8080&quot;,</span> <span class="pre">&quot;WILDFLY_SERVER_INTERFACE=0.0.0.0&quot;,</span> <span class="pre">&quot;WILDFLY_USERNAME=user&quot;,</span> <span class="pre">&quot;WILDFLY_WILDFLY_HOME=/home/wildfly&quot;,</span> <span class="pre">&quot;WILDFLY_WILDFLY_OPTS=-Dwildfly.as.deployment.ondemand=false&quot;],</span> <span class="pre">&quot;ExposedPorts&quot;:</span> <span class="pre">{&quot;8080/tcp&quot;:</span> <span class="pre">{},</span> <span class="pre">&quot;9990/tcp&quot;:</span> <span class="pre">{}},</span> <span class="pre">&quot;Labels&quot;:</span> <span class="pre">{&quot;maintainer&quot;:</span> <span class="pre">&quot;Bitnami</span> <span class="pre">&lt;containers&#64;bitnami.com&gt;&quot;},</span> <span class="pre">&quot;User&quot;:</span> <span class="pre">&quot;1001&quot;},</span> <span class="pre">&quot;Created&quot;:</span> <span class="pre">&quot;2019-04-10T05:48:03.553887623Z&quot;,</span> <span class="pre">&quot;Digest&quot;:</span> <span 
class="pre">&quot;sha256:5a8ab28e314c2222de3feaf6dac94a0436a37fc08979d2722c99d2bef2619a9b&quot;,</span> <span class="pre">&quot;GraphDriver&quot;:</span> <span class="pre">{&quot;Data&quot;:</span> <span class="pre">{&quot;LowerDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/142c1beadf1bb09fbd929465ec98c9dca3256638220450efb4214727d0d0680e/diff:/var/lib/containers/s&quot;,</span> <span class="pre">&quot;MergedDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99723ed787878dbfea/merged&quot;,</span> <span class="pre">&quot;UpperDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99723ed787878dbfea/diff&quot;,</span> <span class="pre">&quot;WorkDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99723ed787878dbfea/work&quot;},</span> <span class="pre">&quot;Name&quot;:</span> <span class="pre">&quot;overlay&quot;},</span> <span class="pre">&quot;History&quot;:</span> <span class="pre">[{&quot;comment&quot;:</span> <span class="pre">&quot;from</span> <span class="pre">Bitnami</span> <span class="pre">with</span> <span class="pre">love&quot;,</span> <span class="pre">&quot;created&quot;:</span> <span class="pre">&quot;2019-04-09T22:27:40.659377677Z&quot;},</span> <span class="pre">{&quot;created&quot;:</span> <span class="pre">&quot;2019-04-09T22:38:53.86336555Z&quot;,</span> <span class="pre">&quot;created_by&quot;:</span> <span class="pre">&quot;/bin/sh</span> <span class="pre">-c</span> <span class="pre">#(nop)</span>&#160; <span class="pre">LABEL</span> <span class="pre">maintainer=Bitnami</span> <span class="pre">&lt;containers&#64;bitnami.com&gt;&quot;,</span> <span class="pre">&quot;empty_layer&quot;:</span> <span class="pre">true},</span> <span class="pre">{&quot;created&quot;:</span> <span class="pre">&quot;2019-04-09T22:38:54.022778765Z&quot;,</span> <span class="pre">&quot;created_by&quot;:</span> <span class="pre">&quot;/bin/sh</span> <span class="pre">-c</span> <span class="pre">#(nop)</span>&#160; <span class="pre">ENV</span> <span class="pre">IMAGE_OS=debian-9&quot;,</span> <span class="pre">&quot;empty_layer&quot;:</span> <span class="pre">true}],</span> <span class="pre">&quot;Id&quot;:</span> <span class="pre">&quot;ace34da54e4af2145e1ad277005adb235a214e4dfe1114c2db9ab460b840f785&quot;,</span> <span class="pre">&quot;Labels&quot;:</span> <span class="pre">{&quot;maintainer&quot;:</span> <span class="pre">&quot;Bitnami</span> <span class="pre">&lt;containers&#64;bitnami.com&gt;&quot;},</span> <span class="pre">&quot;ManifestType&quot;:</span> <span class="pre">&quot;application/vnd.docker.distribution.manifest.v1+prettyjws&quot;,</span> <span class="pre">&quot;Os&quot;:</span> <span class="pre">&quot;linux&quot;,</span> <span class="pre">&quot;Parent&quot;:</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;RepoDigests&quot;:</span> <span class="pre">[&quot;quay.io/bitnami/wildfly&#64;sha256:5a8ab28e314c2222de3feaf6dac94a0436a37fc08979d2722c99d2bef2619a9b&quot;],</span> <span class="pre">&quot;RepoTags&quot;:</span> <span class="pre">[&quot;quay.io/bitnami/wildfly:latest&quot;],</span> <span class="pre">&quot;RootFS&quot;:</span> <span class="pre">{&quot;Layers&quot;:</span> <span class="pre">[&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span 
class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;],</span> <span class="pre">&quot;Type&quot;:</span> <span class="pre">&quot;layers&quot;},</span> <span class="pre">&quot;Size&quot;:</span> <span class="pre">466180019,</span> <span class="pre">&quot;User&quot;:</span> <span class="pre">&quot;1001&quot;,</span> <span class="pre">&quot;Version&quot;:</span> <span class="pre">&quot;18.09.3&quot;,</span> <span class="pre">&quot;VirtualSize&quot;:</span> <span class="pre">466180019}]</span></code></p>
+<p class="ansible-option-line ansible-option-sample"><strong class="ansible-option-sample-bold">Sample:</strong> <code class="ansible-option-sample docutils literal notranslate"><span class="pre">[{&quot;Annotations&quot;:</span> <span class="pre">{},</span> <span class="pre">&quot;Architecture&quot;:</span> <span class="pre">&quot;amd64&quot;,</span> <span class="pre">&quot;Author&quot;:</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;Comment&quot;:</span> <span class="pre">&quot;from</span> <span class="pre">Bitnami</span> <span class="pre">with</span> <span class="pre">love&quot;,</span> <span class="pre">&quot;ContainerConfig&quot;:</span> <span class="pre">{&quot;Cmd&quot;:</span> <span class="pre">[&quot;/run.sh&quot;],</span> <span class="pre">&quot;Entrypoint&quot;:</span> <span class="pre">[&quot;/app-entrypoint.sh&quot;],</span> <span class="pre">&quot;Env&quot;:</span> <span class="pre">[&quot;PATH=/opt/bitnami/java/bin:/opt/bitnami/wildfly/bin:/opt/bitnami/nami/bin:...&quot;,</span> <span class="pre">&quot;IMAGE_OS=debian-9&quot;,</span> <span class="pre">&quot;NAMI_VERSION=1.0.0-1&quot;,</span> <span class="pre">&quot;GPG_KEY_SERVERS_LIST=ha.pool.sks-keyservers.net&quot;,</span> <span class="pre">&quot;TINI_VERSION=v0.13.2&quot;,</span> <span class="pre">&quot;TINI_GPG_KEY=595E85A6B1B4779EA4DAAEC70B588DFF0527A9B7&quot;,</span> <span class="pre">&quot;GOSU_VERSION=1.10&quot;,</span> <span class="pre">&quot;GOSU_GPG_KEY=B42F6819007F00F88E364FD4036A9C25BF357DD4&quot;,</span> <span class="pre">&quot;BITNAMI_IMAGE_VERSION=16.0.0-debian-9-r27&quot;,</span> <span class="pre">&quot;BITNAMI_PKG_CHMOD=-R</span> <span class="pre">g+rwX&quot;,</span> <span class="pre">&quot;BITNAMI_PKG_EXTRA_DIRS=/home/wildfly&quot;,</span> <span class="pre">&quot;HOME=/&quot;,</span> <span class="pre">&quot;BITNAMI_APP_NAME=wildfly&quot;,</span> <span class="pre">&quot;NAMI_PREFIX=/.nami&quot;,</span> <span class="pre">&quot;WILDFLY_HOME=/home/wildfly&quot;,</span> <span class="pre">&quot;WILDFLY_JAVA_HOME=&quot;,</span> <span class="pre">&quot;WILDFLY_JAVA_OPTS=&quot;,</span> <span class="pre">&quot;WILDFLY_MANAGEMENT_HTTP_PORT_NUMBER=9990&quot;,</span> <span class="pre">&quot;WILDFLY_PASSWORD=bitnami&quot;,</span> <span class="pre">&quot;WILDFLY_PUBLIC_CONSOLE=true&quot;,</span> <span class="pre">&quot;WILDFLY_SERVER_AJP_PORT_NUMBER=8009&quot;,</span> <span class="pre">&quot;WILDFLY_SERVER_HTTP_PORT_NUMBER=8080&quot;,</span> <span class="pre">&quot;WILDFLY_SERVER_INTERFACE=0.0.0.0&quot;,</span> <span class="pre">&quot;WILDFLY_USERNAME=user&quot;,</span> <span class="pre">&quot;WILDFLY_WILDFLY_HOME=/home/wildfly&quot;,</span> <span class="pre">&quot;WILDFLY_WILDFLY_OPTS=-Dwildfly.as.deployment.ondemand=false&quot;],</span> <span class="pre">&quot;ExposedPorts&quot;:</span> <span class="pre">{&quot;8080/tcp&quot;:</span> <span class="pre">{},</span> <span class="pre">&quot;9990/tcp&quot;:</span> <span class="pre">{}},</span> <span class="pre">&quot;Labels&quot;:</span> <span class="pre">{&quot;maintainer&quot;:</span> <span class="pre">&quot;Bitnami</span> <span class="pre">&lt;containers&#64;bitnami.com&gt;&quot;},</span> <span class="pre">&quot;User&quot;:</span> <span class="pre">&quot;1001&quot;},</span> <span class="pre">&quot;Created&quot;:</span> <span class="pre">&quot;2019-04-10T05:48:03.553887623Z&quot;,</span> <span class="pre">&quot;Digest&quot;:</span> <span class="pre">&quot;sha256:5a8ab28e314c2222de3feaf6dac94a0436a37fc08979d2722c99d2bef2619a9b&quot;,</span> <span 
class="pre">&quot;GraphDriver&quot;:</span> <span class="pre">{&quot;Data&quot;:</span> <span class="pre">{&quot;LowerDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/142c1beadf1bb09fbd929465e..../diff:/var/lib/containers/s&quot;,</span> <span class="pre">&quot;MergedDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99/merged&quot;,</span> <span class="pre">&quot;UpperDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99/diff&quot;,</span> <span class="pre">&quot;WorkDir&quot;:</span> <span class="pre">&quot;/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99/work&quot;},</span> <span class="pre">&quot;Name&quot;:</span> <span class="pre">&quot;overlay&quot;},</span> <span class="pre">&quot;History&quot;:</span> <span class="pre">[{&quot;comment&quot;:</span> <span class="pre">&quot;from</span> <span class="pre">Bitnami</span> <span class="pre">with</span> <span class="pre">love&quot;,</span> <span class="pre">&quot;created&quot;:</span> <span class="pre">&quot;2019-04-09T22:27:40.659377677Z&quot;},</span> <span class="pre">{&quot;created&quot;:</span> <span class="pre">&quot;2019-04-09T22:38:53.86336555Z&quot;,</span> <span class="pre">&quot;created_by&quot;:</span> <span class="pre">&quot;/bin/sh</span> <span class="pre">-c</span> <span class="pre">#(nop)</span>&#160; <span class="pre">LABEL</span> <span class="pre">maintainer=Bitnami</span> <span class="pre">&lt;containers&#64;bitnami.com&gt;&quot;,</span> <span class="pre">&quot;empty_layer&quot;:</span> <span class="pre">true},</span> <span class="pre">{&quot;created&quot;:</span> <span class="pre">&quot;2019-04-09T22:38:54.022778765Z&quot;,</span> <span class="pre">&quot;created_by&quot;:</span> <span class="pre">&quot;/bin/sh</span> <span class="pre">-c</span> <span class="pre">#(nop)</span>&#160; <span class="pre">ENV</span> <span class="pre">IMAGE_OS=debian-9&quot;,</span> <span class="pre">&quot;empty_layer&quot;:</span> <span class="pre">true}],</span> <span class="pre">&quot;Id&quot;:</span> <span class="pre">&quot;ace34da54e4af2145e1ad277005adb235a214e4dfe1114c2db9ab460b840f785&quot;,</span> <span class="pre">&quot;Labels&quot;:</span> <span class="pre">{&quot;maintainer&quot;:</span> <span class="pre">&quot;Bitnami</span> <span class="pre">&lt;containers&#64;bitnami.com&gt;&quot;},</span> <span class="pre">&quot;ManifestType&quot;:</span> <span class="pre">&quot;application/vnd.docker.distribution.manifest.v1+prettyjws&quot;,</span> <span class="pre">&quot;Os&quot;:</span> <span class="pre">&quot;linux&quot;,</span> <span class="pre">&quot;Parent&quot;:</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;RepoDigests&quot;:</span> <span class="pre">[&quot;quay.io/bitnami/wildfly&#64;sha256:5a8ab28e314c2222de3feaf6dac94a0436a37fc08979d2722c99d2bef2619a9b&quot;],</span> <span class="pre">&quot;RepoTags&quot;:</span> <span class="pre">[&quot;quay.io/bitnami/wildfly:latest&quot;],</span> <span class="pre">&quot;RootFS&quot;:</span> <span class="pre">{&quot;Layers&quot;:</span> <span class="pre">[&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span 
class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;&quot;],</span> <span class="pre">&quot;Type&quot;:</span> <span class="pre">&quot;layers&quot;},</span> <span class="pre">&quot;Size&quot;:</span> <span class="pre">466180019,</span> <span class="pre">&quot;User&quot;:</span> <span class="pre">&quot;1001&quot;,</span> <span class="pre">&quot;Version&quot;:</span> <span class="pre">&quot;18.09.3&quot;,</span> <span class="pre">&quot;VirtualSize&quot;:</span> <span class="pre">466180019}]</span></code></p>
</div></td>
</tr>
</tbody>
@@ -606,6 +639,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
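Besides the inline container_file example added above, the podman_image hunks introduce a pull_extra_args option, a push_args.extra_args sub-option, and a new "docker" choice for push_args.transport, none of which appear in the updated examples. A minimal sketch of how they might be combined (the registry URL and the extra argument values are illustrative assumptions, not taken from the collection docs):

- name: Pull an image with extra pull arguments (illustrative flags)
  containers.podman.podman_image:
    name: quay.io/bitnami/wildfly
    tag: latest
    pull_extra_args: "--quiet"                    # any valid 'podman pull' flags; not checked for idempotence

- name: Push an image to a registry over the docker transport
  containers.podman.podman_image:
    name: registry.example.com/mycustom_image     # illustrative registry path
    push: true
    push_args:
      transport: docker                           # new transport choice added in this diff
      extra_args: "--compression-format zstd"     # any valid 'podman push' flags; not checked for idempotence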
diff --git a/ansible_collections/containers/podman/docs/podman_import_module.html b/ansible_collections/containers/podman/docs/podman_import_module.html
index b7140a6b9..c7ff14bd0 100644
--- a/ansible_collections/containers/podman/docs/podman_import_module.html
+++ b/ansible_collections/containers/podman/docs/podman_import_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_import module – Import Podman container from a tar file.<a class="headerlink" href="#containers-podman-podman-import-module-import-podman-container-from-a-tar-file" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -206,6 +206,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_load_module.html b/ansible_collections/containers/podman/docs/podman_load_module.html
index 256597ef5..851da1a40 100644
--- a/ansible_collections/containers/podman/docs/podman_load_module.html
+++ b/ansible_collections/containers/podman/docs/podman_load_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_load module – Load image from a tar file.<a class="headerlink" href="#containers-podman-podman-load-module-load-image-from-a-tar-file" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -180,6 +180,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_login_info_module.html b/ansible_collections/containers/podman/docs/podman_login_info_module.html
index 9b18049cd..24498f73b 100644
--- a/ansible_collections/containers/podman/docs/podman_login_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_login_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_login_info module – Return the logged-in user if any for a given registry<a class="headerlink" href="#containers-podman-podman-login-info-module-return-the-logged-in-user-if-any-for-a-given-registry" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -190,6 +190,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_login_module.html b/ansible_collections/containers/podman/docs/podman_login_module.html
index 61adafae2..6116da049 100644
--- a/ansible_collections/containers/podman/docs/podman_login_module.html
+++ b/ansible_collections/containers/podman/docs/podman_login_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_login module – Login to a container registry using podman<a class="headerlink" href="#containers-podman-podman-login-module-login-to-a-container-registry-using-podman" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -204,6 +204,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_logout_module.html b/ansible_collections/containers/podman/docs/podman_logout_module.html
index 9d0295c50..57c7c482d 100644
--- a/ansible_collections/containers/podman/docs/podman_logout_module.html
+++ b/ansible_collections/containers/podman/docs/podman_logout_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_logout module – Log out of a container registry using podman<a class="headerlink" href="#containers-podman-podman-logout-module-log-out-of-a-container-registry-using-podman" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -198,6 +198,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_network_info_module.html b/ansible_collections/containers/podman/docs/podman_network_info_module.html
index cb8f98328..c53da9ae0 100644
--- a/ansible_collections/containers/podman/docs/podman_network_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_network_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_network_info module – Gather info about podman networks<a class="headerlink" href="#containers-podman-podman-network-info-module-gather-info-about-podman-networks" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -182,6 +182,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_network_module.html b/ansible_collections/containers/podman/docs/podman_network_module.html
index daaf3bc22..0075d7951 100644
--- a/ansible_collections/containers/podman/docs/podman_network_module.html
+++ b/ansible_collections/containers/podman/docs/podman_network_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_network module – Manage podman networks<a class="headerlink" href="#containers-podman-podman-network-module-manage-podman-networks" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -102,13 +102,20 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-dns"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-dns"><strong>dns</strong></p>
+<a class="ansibleOptionLink" href="#parameter-dns" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set network-scoped DNS resolver/nameserver for containers in this network. If not set, the host servers from /etc/resolv.conf is used.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-driver"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-driver"><strong>driver</strong></p>
<a class="ansibleOptionLink" href="#parameter-driver" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Driver to manage the network (default “bridge”)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-executable"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-executable"><strong>executable</strong></p>
<a class="ansibleOptionLink" href="#parameter-executable" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -116,7 +123,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">&quot;podman&quot;</span></code></p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-force"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-force"><strong>force</strong></p>
<a class="ansibleOptionLink" href="#parameter-force" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -128,21 +135,21 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-gateway"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-gateway"><strong>gateway</strong></p>
<a class="ansibleOptionLink" href="#parameter-gateway" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>IPv4 or IPv6 gateway for the subnet</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-interface_name"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-interface-name"><strong>interface_name</strong></p>
<a class="ansibleOptionLink" href="#parameter-interface_name" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>For bridge, it uses the bridge interface name. For macvlan, it is the parent device on the host (it is the same as ‘opt.parent’)</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-internal"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-internal"><strong>internal</strong></p>
<a class="ansibleOptionLink" href="#parameter-internal" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -154,18 +161,31 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-ip_range"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-ip-range"><strong>ip_range</strong></p>
<a class="ansibleOptionLink" href="#parameter-ip_range" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Allocate container IP from range</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-ipam_driver"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-ipam-driver"><strong>ipam_driver</strong></p>
+<a class="ansibleOptionLink" href="#parameter-ipam_driver" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set the ipam driver (IP Address Management Driver) for the network. When unset podman chooses an ipam driver automatically based on the network driver</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;host-local&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;dhcp&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;none&quot;</span></code></p></li>
+</ul>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-ipv6"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-ipv6"><strong>ipv6</strong></p>
<a class="ansibleOptionLink" href="#parameter-ipv6" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
-<td><div class="ansible-option-cell"><p>Enable IPv6 (Dual Stack) networking. You must pass a IPv6 subnet. The subnet option must be used with the ipv6 option.</p>
+<td><div class="ansible-option-cell"><p>Enable IPv6 (Dual Stack) networking. You must pass a IPv6 subnet. The subnet option must be used with the ipv6 option. Idempotency is not supported because it generates subnets randomly.</p>
<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
<ul class="simple">
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
@@ -188,6 +208,34 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-net_config"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-net-config"><strong>net_config</strong></p>
+<a class="ansibleOptionLink" href="#parameter-net_config" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=dictionary</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>List of dictionaries with network configuration. Each dictionary should contain ‘subnet’ and ‘gateway’ keys. ‘ip_range’ is optional.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-net_config/gateway"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-net-config-gateway"><strong>gateway</strong></p>
+<a class="ansibleOptionLink" href="#parameter-net_config/gateway" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span> / <span class="ansible-option-required">required</span></p>
+</div></td>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Gateway for the subnet</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-net_config/ip_range"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-net-config-ip-range"><strong>ip_range</strong></p>
+<a class="ansibleOptionLink" href="#parameter-net_config/ip_range" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Allocate container IP from range</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-net_config/subnet"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-net-config-subnet"><strong>subnet</strong></p>
+<a class="ansibleOptionLink" href="#parameter-net_config/subnet" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span> / <span class="ansible-option-required">required</span></p>
+</div></td>
+<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Subnet in CIDR format</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-opt"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-opt"><strong>opt</strong></p>
<a class="ansibleOptionLink" href="#parameter-opt" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
@@ -275,6 +323,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-route"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-route"><strong>route</strong></p>
+<a class="ansibleOptionLink" href="#parameter-route" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>A static route in the format &lt;destination in CIDR notation&gt;,&lt;gateway&gt;,&lt;route metric (optional)&gt;. This route will be added to every container in this network.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-state"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-state"><strong>state</strong></p>
<a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -287,7 +342,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-subnet"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-network-module-parameter-subnet"><strong>subnet</strong></p>
<a class="ansibleOptionLink" href="#parameter-subnet" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -399,6 +454,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_play_module.html b/ansible_collections/containers/podman/docs/podman_play_module.html
index 30a3654c8..d05c146f8 100644
--- a/ansible_collections/containers/podman/docs/podman_play_module.html
+++ b/ansible_collections/containers/podman/docs/podman_play_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_play module – Play kubernetes YAML file using podman<a class="headerlink" href="#containers-podman-podman-play-module-play-kubernetes-yaml-file-using-podman" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -406,6 +406,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_pod_info_module.html b/ansible_collections/containers/podman/docs/podman_pod_info_module.html
index d93a4370c..61cf7fb14 100644
--- a/ansible_collections/containers/podman/docs/podman_pod_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_pod_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_pod_info module – Gather info about podman pods<a class="headerlink" href="#containers-podman-podman-pod-info-module-gather-info-about-podman-pods" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -182,6 +182,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_pod_module.html b/ansible_collections/containers/podman/docs/podman_pod_module.html
index 6cd068f33..47accd99a 100644
--- a/ansible_collections/containers/podman/docs/podman_pod_module.html
+++ b/ansible_collections/containers/podman/docs/podman_pod_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_pod module – Manage Podman pods<a class="headerlink" href="#containers-podman-podman-pod-module-manage-podman-pods" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -175,8 +175,10 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
-<div class="ansibleOptionAnchor" id="parameter-dns_opt"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-dns-opt"><strong>dns_opt</strong></p>
-<a class="ansibleOptionLink" href="#parameter-dns_opt" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+<div class="ansibleOptionAnchor" id="parameter-dns_opt"></div>
+<div class="ansibleOptionAnchor" id="parameter-dns_option"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-dns-option"><span id="ansible-collections-containers-podman-podman-pod-module-parameter-dns-opt"></span><strong>dns_opt</strong></p>
+<a class="ansibleOptionLink" href="#parameter-dns_opt" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: dns_option</span></p>
+<p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set custom DNS options in the /etc/resolv.conf file that will be shared between all containers in the pod.</p>
</div></td>
@@ -197,6 +199,18 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-exit_policy"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-exit-policy"><strong>exit_policy</strong></p>
+<a class="ansibleOptionLink" href="#parameter-exit_policy" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set the exit policy of the pod when the last container exits. Supported policies are stop and continue</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;stop&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;continue&quot;</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd"><strong>generate_systemd</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
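The exit_policy parameter added in the hunk above (together with the new dns_option alias for dns_opt) can be exercised with a short task. This is a sketch, not a verified configuration: the pod name and resolver option are assumed, and the state value is taken from the module's usual lifecycle states:

- name: Create a pod that keeps running after its last container exits
  containers.podman.podman_pod:
    name: webpod                  # hypothetical pod name
    exit_policy: continue         # documented choices: stop, continue
    dns_option:                   # alias of dns_opt; written to the pod's shared /etc/resolv.conf
      - ndots:2
    state: created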
@@ -204,21 +218,21 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">{}</span></code></p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/after"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-after"><strong>after</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/after" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Add the systemd unit after (After=) option, that ordering dependencies between the list of dependencies and this service.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/container_prefix"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-container-prefix"><strong>container_prefix</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/container_prefix" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Set the systemd unit name prefix for containers. The default is “container”.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/names"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-names"><strong>names</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/names" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -230,7 +244,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/new"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-new"><strong>new</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/new" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -242,7 +256,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/no_header"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-no-header"><strong>no_header</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/no_header" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -254,28 +268,28 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/path"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-path"><strong>path</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/path" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Specify a path to the directory where unit files will be generated. Required for this option. If it doesn’t exist, the directory will be created.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/pod_prefix"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-pod-prefix"><strong>pod_prefix</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/pod_prefix" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Set the systemd unit name prefix for pods. The default is “pod”.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/requires"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-requires"><strong>requires</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/requires" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Set the systemd unit requires (Requires=) option. Similar to wants, but declares a stronger requirement dependency.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/restart_policy"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-restart-policy"><strong>restart_policy</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/restart_policy" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -292,28 +306,28 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/restart_sec"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-restart-sec"><strong>restart_sec</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/restart_sec" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Set the systemd service restartsec value.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/separator"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-separator"><strong>separator</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/separator" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Set the systemd unit name separator between the name/id of a container/pod and the prefix. The default is “-” (dash).</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/start_timeout"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-start-timeout"><strong>start_timeout</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/start_timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Override the default start timeout for the container with the given value.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/stop_timeout"></div>
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/time"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-time"><span id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-stop-timeout"></span><strong>stop_timeout</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/stop_timeout" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: time</span></p>
@@ -322,20 +336,27 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Override the default stop timeout for the container with the given value. Called `time` before version 4.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-generate_systemd/wants"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-generate-systemd-wants"><strong>wants</strong></p>
<a class="ansibleOptionLink" href="#parameter-generate_systemd/wants" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-indent-desc"></div><div class="ansible-option-cell"><p>Add the systemd unit wants (Wants=) option, that this service is (weak) dependent on.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-gidmap"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-gidmap"><strong>gidmap</strong></p>
<a class="ansibleOptionLink" href="#parameter-gidmap" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>GID map for the user namespace. Using this flag will run the container with user namespace enabled. It conflicts with the `userns` and `subgidname` flags.</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-gpus"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-gpus"><strong>gpus</strong></p>
+<a class="ansibleOptionLink" href="#parameter-gpus" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>GPU devices to add to the container (‘all’ to pass all GPUs).</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-hostname"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-hostname"><strong>hostname</strong></p>
<a class="ansibleOptionLink" href="#parameter-hostname" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
@@ -391,27 +412,34 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-ip6"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-ip6"><strong>ip6</strong></p>
+<a class="ansibleOptionLink" href="#parameter-ip6" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set a static IPv6 for the pod’s shared network.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-label"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-label"><strong>label</strong></p>
<a class="ansibleOptionLink" href="#parameter-label" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Add metadata to a pod, pass dictionary of label keys and values.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-label_file"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-label-file"><strong>label_file</strong></p>
<a class="ansibleOptionLink" href="#parameter-label_file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Read in a line delimited file of labels.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-mac_address"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-mac-address"><strong>mac_address</strong></p>
<a class="ansibleOptionLink" href="#parameter-mac_address" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set a static MAC address for the pod’s shared network.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-memory"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-memory"><strong>memory</strong></p>
<a class="ansibleOptionLink" href="#parameter-memory" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
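The new gpus and ip6 parameters added in the hunks above can be combined in one task. Again a hedged sketch under assumptions: the pod and network names are placeholders, the IPv6 address is an example ULA, and static addressing is assumed to apply to the pod's shared bridge network as described in the parameter text:

- name: Start a GPU-enabled pod with a static IPv6 address on its shared network
  containers.podman.podman_pod:
    name: gpupod                  # hypothetical pod name
    gpus: all                     # pass all host GPUs ('all'), per the parameter description
    network:
      - appnet                    # assumes an existing bridge network
    ip6: "fd00:10:89::10"         # static IPv6 for the pod's shared network (placeholder)
    state: started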
@@ -419,7 +447,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<p>A unit can be b (bytes), k (kibibytes), m (mebibytes), or g (gibibytes).</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-memory_swap"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-memory-swap"><strong>memory_swap</strong></p>
<a class="ansibleOptionLink" href="#parameter-memory_swap" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -427,21 +455,21 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<p>A unit can be b (bytes), k (kibibytes), m (mebibytes), or g (gibibytes).</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-name"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-name"><strong>name</strong></p>
<a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span> / <span class="ansible-option-required">required</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Assign a name to the pod.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-network"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-network"><strong>network</strong></p>
<a class="ansibleOptionLink" href="#parameter-network" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set network mode for the pod. Supported values are bridge (the default), host (do not create a network namespace, all containers in the pod will use the host’s network), or a list of names of CNI networks to join.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-network_alias"></div>
<div class="ansibleOptionAnchor" id="parameter-network_aliases"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-network-aliases"><span id="ansible-collections-containers-podman-podman-pod-module-parameter-network-alias"></span><strong>network_alias</strong></p>
<a class="ansibleOptionLink" href="#parameter-network_alias" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: network_aliases</span></p>
@@ -450,7 +478,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Add a network-scoped alias for the pod, setting the alias for all networks that the pod joins. To set a name only for a specific network, use the alias option as described under the -`network` option. Network aliases work only with the bridge networking mode. This option can be specified multiple times.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-no_hosts"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-no-hosts"><strong>no_hosts</strong></p>
<a class="ansibleOptionLink" href="#parameter-no_hosts" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -462,21 +490,21 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-pid"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-pid"><strong>pid</strong></p>
<a class="ansibleOptionLink" href="#parameter-pid" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set the PID mode for the pod. The default is to create a private PID namespace for the pod. Requires the PID namespace to be shared via `share` option.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-pod_id_file"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-pod-id-file"><strong>pod_id_file</strong></p>
<a class="ansibleOptionLink" href="#parameter-pod_id_file" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Write the pod ID to the file.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-publish"></div>
<div class="ansibleOptionAnchor" id="parameter-ports"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-publish"><span id="ansible-collections-containers-podman-podman-pod-module-parameter-ports"></span><strong>publish</strong></p>
<a class="ansibleOptionLink" href="#parameter-publish" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-aliases">aliases: ports</span></p>
@@ -485,28 +513,28 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Publish a port or range of ports from the pod to the host.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_dir"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-quadlet-dir"><strong>quadlet_dir</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_dir" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Path to the directory to write quadlet file in. By default, it will be set as <code class="docutils literal notranslate"><span class="pre">/etc/containers/systemd/</span></code> for root user, <code class="docutils literal notranslate"><span class="pre">~/.config/containers/systemd/</span></code> for non-root users.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_filename"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-quadlet-filename"><strong>quadlet_filename</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_filename" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Name of quadlet file to write. By default it takes <em>name</em> value.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-quadlet_options"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-quadlet-options"><strong>quadlet_options</strong></p>
<a class="ansibleOptionLink" href="#parameter-quadlet_options" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Options for the quadlet file. Provide missing in usual container args options as a list of lines to add.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-recreate"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-recreate"><strong>recreate</strong></p>
<a class="ansibleOptionLink" href="#parameter-recreate" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -518,13 +546,53 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-restart_policy"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-restart-policy"><strong>restart_policy</strong></p>
+<a class="ansibleOptionLink" href="#parameter-restart_policy" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Restart policy to follow when containers exit.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-security_opt"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-security-opt"><strong>security_opt</strong></p>
+<a class="ansibleOptionLink" href="#parameter-security_opt" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Security options for the pod.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-share"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-share"><strong>share</strong></p>
<a class="ansibleOptionLink" href="#parameter-share" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>A comma delimited list of kernel namespaces to share. If none or “” is specified, no namespaces will be shared. The namespaces to choose from are ipc, net, pid, user, uts.</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-share_parent"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-share-parent"><strong>share_parent</strong></p>
+<a class="ansibleOptionLink" href="#parameter-share_parent" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>This boolean determines whether or not all containers entering the pod use the pod as their cgroup parent. The default value of this option in Podman is true.</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">false</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-shm_size"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-shm-size"><strong>shm_size</strong></p>
+<a class="ansibleOptionLink" href="#parameter-shm_size" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set the size of the /dev/shm shared memory space. A unit can be b (bytes), k (kibibytes), m (mebibytes), or g (gibibytes). If the unit is omitted, the system uses bytes. If the size is omitted, the default is 64m. When size is 0, there is no limit on the amount of memory used for IPC by the pod.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-shm_size_systemd"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-shm-size-systemd"><strong>shm_size_systemd</strong></p>
+<a class="ansibleOptionLink" href="#parameter-shm_size_systemd" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Size of systemd-specific tmpfs mounts such as /run, /run/lock, /var/log/journal and /tmp. A unit can be b (bytes), k (kibibytes), m (mebibytes), or g (gibibytes). If the unit is omitted, the system uses bytes. If the size is omitted, the default is 64m. When size is 0, the usage is limited to 50 percents of the host’s available memory.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-state"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-state"><strong>state</strong></p>
<a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
@@ -559,19 +627,33 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-sysctl"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-sysctl"><strong>sysctl</strong></p>
+<a class="ansibleOptionLink" href="#parameter-sysctl" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set kernel parameters for the pod.</p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-uidmap"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-uidmap"><strong>uidmap</strong></p>
<a class="ansibleOptionLink" href="#parameter-uidmap" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Run the container in a new user namespace using the supplied mapping. This option conflicts with the `userns` and `subuidname` options. This option provides a way to map host UIDs to container UIDs. It can be passed several times to map different ranges.</p>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-userns"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-userns"><strong>userns</strong></p>
<a class="ansibleOptionLink" href="#parameter-userns" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Set the user namespace mode for all the containers in a pod. It defaults to the PODMAN_USERNS environment variable. An empty value (“”) means user namespaces are disabled.</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-uts"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-uts"><strong>uts</strong></p>
+<a class="ansibleOptionLink" href="#parameter-uts" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Set the UTS namespace mode for the pod.</p>
+</div></td>
+</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-volume"></div>
<div class="ansibleOptionAnchor" id="parameter-volumes"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-volumes"><span id="ansible-collections-containers-podman-podman-pod-module-parameter-volume"></span><strong>volume</strong></p>
@@ -581,6 +663,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<td><div class="ansible-option-cell"><p>Create a bind mount.</p>
</div></td>
</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-volumes_from"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-pod-module-parameter-volumes-from"><strong>volumes_from</strong></p>
+<a class="ansibleOptionLink" href="#parameter-volumes_from" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Mount volumes from the specified container.</p>
+</div></td>
+</tr>
</tbody>
</table>
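+<p>A minimal sketch (not one of the collection’s shipped examples, mirroring the quadlet example in the Examples section below) of how <code class="docutils literal notranslate"><span class="pre">quadlet_options</span></code> entries are passed: each list item becomes an extra line in the generated Quadlet unit file. The pod name and the <code class="docutils literal notranslate"><span class="pre">[Install]</span></code> lines shown are placeholders using standard systemd syntax.</p>
+<div class="highlight-yaml+jinja notranslate"><div class="highlight"><pre><span></span># Illustrative sketch; "qpod" and the extra unit lines are placeholders
+- containers.podman.podman_pod:
+    name: qpod
+    state: quadlet
+    quadlet_options:
+      - |
+        [Install]
+        WantedBy=default.target
+</pre></div>
+</div>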
</section>
@@ -600,6 +689,62 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<span class="w"> </span><span class="nt">state</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">started</span>
<span class="w"> </span><span class="nt">publish</span><span class="p">:</span><span class="w"> </span><span class="s">&quot;127.0.0.1::80&quot;</span>
+<span class="c1"># Full workflow example with pod and containers</span>
+<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Create a pod with parameters</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_pod</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">mypod</span>
+<span class="w"> </span><span class="nt">state</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">created</span>
+<span class="w"> </span><span class="nt">network</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">host</span>
+<span class="w"> </span><span class="nt">share</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">net</span>
+<span class="w"> </span><span class="nt">userns</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">auto</span>
+<span class="w"> </span><span class="nt">security_opt</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">seccomp=unconfined</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">apparmor=unconfined</span>
+<span class="w"> </span><span class="nt">hostname</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">mypod</span>
+<span class="w"> </span><span class="nt">dns</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">1.1.1.1</span>
+<span class="w"> </span><span class="nt">volumes</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">/tmp:/tmp/:ro</span>
+<span class="w"> </span><span class="nt">label</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">key</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">cval</span>
+<span class="w"> </span><span class="nt">otherkey</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">kddkdk</span>
+<span class="w"> </span><span class="nt">somekey</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">someval</span>
+<span class="w"> </span><span class="nt">add_host</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="s">&quot;google:5.5.5.5&quot;</span>
+
+<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Create containers attached to the pod</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_container</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="s">&quot;</span><span class="cp">{{</span> <span class="nv">item</span> <span class="cp">}}</span><span class="s">&quot;</span>
+<span class="w"> </span><span class="nt">state</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">created</span>
+<span class="w"> </span><span class="nt">pod</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">mypod</span>
+<span class="w"> </span><span class="nt">image</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">alpine</span>
+<span class="w"> </span><span class="nt">command</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">sleep 1h</span>
+<span class="w"> </span><span class="nt">loop</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="s">&quot;container1&quot;</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="s">&quot;container2&quot;</span>
+
+<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Start pod</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_pod</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">mypod</span>
+<span class="w"> </span><span class="nt">state</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">started</span>
+<span class="w"> </span><span class="nt">network</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">host</span>
+<span class="w"> </span><span class="nt">share</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">net</span>
+<span class="w"> </span><span class="nt">userns</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">auto</span>
+<span class="w"> </span><span class="nt">security_opt</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">seccomp=unconfined</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">apparmor=unconfined</span>
+<span class="w"> </span><span class="nt">hostname</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">mypod</span>
+<span class="w"> </span><span class="nt">dns</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">1.1.1.1</span>
+<span class="w"> </span><span class="nt">volumes</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">/tmp:/tmp/:ro</span>
+<span class="w"> </span><span class="nt">label</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">key</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">cval</span>
+<span class="w"> </span><span class="nt">otherkey</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">kddkdk</span>
+<span class="w"> </span><span class="nt">somekey</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">someval</span>
+<span class="w"> </span><span class="nt">add_host</span><span class="p">:</span>
+<span class="w"> </span><span class="p p-Indicator">-</span><span class="w"> </span><span class="s">&quot;google:5.5.5.5&quot;</span>
+
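+# Illustrative sketch (not part of the module's shipped examples): exercising
+# some of the newly documented options; names and values are placeholders.
+- name: Create a pod with restart policy, shm size and sysctl settings
+  containers.podman.podman_pod:
+    name: tuned-pod
+    state: created
+    restart_policy: on-failure
+    shm_size: 128m
+    sysctl:
+      net.ipv4.ip_forward: "1"
+    volumes_from:
+      - datacontainer
+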
<span class="c1"># Create a Quadlet file for a pod</span>
<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">containers.podman.podman_pod</span><span class="p">:</span>
<span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">qpod</span>
@@ -688,6 +833,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_prune_module.html b/ansible_collections/containers/podman/docs/podman_prune_module.html
index 5340e199c..bfb4bae4a 100644
--- a/ansible_collections/containers/podman/docs/podman_prune_module.html
+++ b/ansible_collections/containers/podman/docs/podman_prune_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_prune module – Allows to prune various podman objects<a class="headerlink" href="#containers-podman-podman-prune-module-allows-to-prune-various-podman-objects" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -339,6 +339,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_runlabel_module.html b/ansible_collections/containers/podman/docs/podman_runlabel_module.html
index 9fa123e9d..4eb9cc87b 100644
--- a/ansible_collections/containers/podman/docs/podman_runlabel_module.html
+++ b/ansible_collections/containers/podman/docs/podman_runlabel_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_runlabel module – Run given label from given image<a class="headerlink" href="#containers-podman-podman-runlabel-module-run-given-label-from-given-image" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -163,6 +163,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_save_module.html b/ansible_collections/containers/podman/docs/podman_save_module.html
index ea7fe01b5..93356b0ed 100644
--- a/ansible_collections/containers/podman/docs/podman_save_module.html
+++ b/ansible_collections/containers/podman/docs/podman_save_module.html
@@ -15,7 +15,7 @@
<script src="_static/sphinx_highlight.js"></script>
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
- <link rel="next" title="containers.podman.podman_secret module – Manage podman secrets" href="podman_secret_module.html" />
+ <link rel="next" title="containers.podman.podman_search module – Search for remote images using podman" href="podman_search_module.html" />
<link rel="prev" title="containers.podman.podman_runlabel module – Run given label from given image" href="podman_runlabel_module.html" />
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_save module – Saves podman image to tar file<a class="headerlink" href="#containers-podman-podman-save-module-saves-podman-image-to-tar-file" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -221,6 +221,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
@@ -240,7 +241,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<ul>
<li><a href="index.html">Documentation overview</a><ul>
<li>Previous: <a href="podman_runlabel_module.html" title="previous chapter">containers.podman.podman_runlabel module – Run given label from given image</a></li>
- <li>Next: <a href="podman_secret_module.html" title="next chapter">containers.podman.podman_secret module – Manage podman secrets</a></li>
+ <li>Next: <a href="podman_search_module.html" title="next chapter">containers.podman.podman_search module – Search for remote images using podman</a></li>
</ul></li>
</ul>
</div>
diff --git a/ansible_collections/containers/podman/docs/podman_search_module.html b/ansible_collections/containers/podman/docs/podman_search_module.html
new file mode 100644
index 000000000..216d4c343
--- /dev/null
+++ b/ansible_collections/containers/podman/docs/podman_search_module.html
@@ -0,0 +1,273 @@
+<!DOCTYPE html>
+
+<html lang="en">
+ <head>
+ <meta charset="utf-8" />
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" /><meta name="generator" content="Docutils 0.19: https://docutils.sourceforge.io/" />
+<meta content="2.5.0" name="antsibull-docs" />
+
+ <title>containers.podman.podman_search module – Search for remote images using podman &#8212; Python documentation</title>
+ <link rel="stylesheet" type="text/css" href="_static/pygments.css" />
+ <link rel="stylesheet" type="text/css" href="_static/alabaster.css" />
+ <link rel="stylesheet" type="text/css" href="_static/antsibull-minimal.css" />
+ <script data-url_root="./" id="documentation_options" src="_static/documentation_options.js"></script>
+ <script src="_static/doctools.js"></script>
+ <script src="_static/sphinx_highlight.js"></script>
+ <link rel="index" title="Index" href="genindex.html" />
+ <link rel="search" title="Search" href="search.html" />
+ <link rel="next" title="containers.podman.podman_secret module – Manage podman secrets" href="podman_secret_module.html" />
+ <link rel="prev" title="containers.podman.podman_save module – Saves podman image to tar file" href="podman_save_module.html" />
+
+ <link rel="stylesheet" href="_static/custom.css" type="text/css" />
+
+
+ <meta name="viewport" content="width=device-width, initial-scale=0.9, maximum-scale=0.9" />
+
+ </head><body>
+
+
+ <div class="document">
+ <div class="documentwrapper">
+ <div class="bodywrapper">
+
+
+ <div class="body" role="main">
+
+ <span class="target" id="ansible-collections-containers-podman-podman-search-module"></span><section id="containers-podman-podman-search-module-search-for-remote-images-using-podman">
+<h1>containers.podman.podman_search module – Search for remote images using podman<a class="headerlink" href="#containers-podman-podman-search-module-search-for-remote-images-using-podman" title="Permalink to this heading">¶</a></h1>
+<div class="admonition note">
+<p class="admonition-title">Note</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
+<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
+To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
+<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.</p>
+<p>To use it in a playbook, specify: <code class="code docutils literal notranslate"><span class="pre">containers.podman.podman_search</span></code>.</p>
+</div>
+<nav class="contents local" id="contents">
+<ul class="simple">
+<li><p><a class="reference internal" href="#synopsis" id="id1">Synopsis</a></p></li>
+<li><p><a class="reference internal" href="#parameters" id="id2">Parameters</a></p></li>
+<li><p><a class="reference internal" href="#notes" id="id3">Notes</a></p></li>
+<li><p><a class="reference internal" href="#examples" id="id4">Examples</a></p></li>
+<li><p><a class="reference internal" href="#return-values" id="id5">Return Values</a></p></li>
+</ul>
+</nav>
+<section id="synopsis">
+<h2><a class="toc-backref" href="#id1" role="doc-backlink">Synopsis</a><a class="headerlink" href="#synopsis" title="Permalink to this heading">¶</a></h2>
+<ul class="simple">
+<li><p>Search for remote images using <code class="docutils literal notranslate"><span class="pre">podman</span></code></p></li>
+</ul>
+</section>
+<section id="parameters">
+<h2><a class="toc-backref" href="#id2" role="doc-backlink">Parameters</a><a class="headerlink" href="#parameters" title="Permalink to this heading">¶</a></h2>
+<table class="longtable ansible-option-table docutils align-default" style="width: 100%">
+<thead>
+<tr class="row-odd"><th class="head"><p>Parameter</p></th>
+<th class="head"><p>Comments</p></th>
+</tr>
+</thead>
+<tbody>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-executable"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-search-module-parameter-executable"><strong>executable</strong></p>
+<a class="ansibleOptionLink" href="#parameter-executable" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Path to <code class="docutils literal notranslate"><span class="pre">podman</span></code> executable if it is not in the <code class="docutils literal notranslate"><span class="pre">$PATH</span></code> on the machine running <code class="docutils literal notranslate"><span class="pre">podman</span></code></p>
+<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">&quot;podman&quot;</span></code></p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-limit"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-search-module-parameter-limit"><strong>limit</strong></p>
+<a class="ansibleOptionLink" href="#parameter-limit" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">integer</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Limit the number of image results returned from the search (per image registry)</p>
+<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">25</span></code></p>
+</div></td>
+</tr>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-list_tags"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-search-module-parameter-list-tags"><strong>list_tags</strong></p>
+<a class="ansibleOptionLink" href="#parameter-list_tags" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Whether or not to return the list of tags associated with each image</p>
+<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
+<ul class="simple">
+<li><p><code class="ansible-option-default-bold docutils literal notranslate"><strong><span class="pre">false</span></strong></code> <span class="ansible-option-choices-default-mark">← (default)</span></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">true</span></code></p></li>
+</ul>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-term"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-search-module-parameter-term"><strong>term</strong></p>
+<a class="ansibleOptionLink" href="#parameter-term" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span> / <span class="ansible-option-required">required</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>The search term to look for. Will search all default registries unless a registry is defined in the search term.</p>
+</div></td>
+</tr>
+</tbody>
+</table>
+</section>
+<section id="notes">
+<h2><a class="toc-backref" href="#id3" role="doc-backlink">Notes</a><a class="headerlink" href="#notes" title="Permalink to this heading">¶</a></h2>
+<div class="admonition note">
+<p class="admonition-title">Note</p>
+<ul class="simple">
+<li><p>Podman may require elevated privileges in order to run properly.</p></li>
+</ul>
+</div>
+</section>
+<section id="examples">
+<h2><a class="toc-backref" href="#id4" role="doc-backlink">Examples</a><a class="headerlink" href="#examples" title="Permalink to this heading">¶</a></h2>
+<div class="highlight-yaml+jinja notranslate"><div class="highlight"><pre><span></span><span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Search for any rhel images</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_search</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">term</span><span class="p">:</span><span class="w"> </span><span class="s">&quot;rhel&quot;</span>
+<span class="w"> </span><span class="nt">limit</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">3</span>
+
+<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Gather info on a specific remote image</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_search</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">term</span><span class="p">:</span><span class="w"> </span><span class="s">&quot;myimageregistry.com/ansible-automation-platform/ee-minimal-rhel8&quot;</span>
+
+<span class="p p-Indicator">-</span><span class="w"> </span><span class="nt">name</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">Gather tag info on a known remote image</span>
+<span class="w"> </span><span class="nt">containers.podman.podman_search</span><span class="p">:</span>
+<span class="w"> </span><span class="nt">term</span><span class="p">:</span><span class="w"> </span><span class="s">&quot;myimageregistry.com/ansible-automation-platform/ee-minimal-rhel8&quot;</span>
+<span class="w"> </span><span class="nt">list_tags</span><span class="p">:</span><span class="w"> </span><span class="l l-Scalar l-Scalar-Plain">True</span>
+</pre></div>
+</div>
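+<p>A minimal sketch (not part of the generated examples; the <code class="docutils literal notranslate"><span class="pre">search_result</span></code> variable name is ours) showing how the <code class="docutils literal notranslate"><span class="pre">images</span></code> return value can be consumed:</p>
+<div class="highlight-yaml+jinja notranslate"><div class="highlight"><pre><span></span># Illustrative sketch; "search_result" is an arbitrary register name
+- name: Search for rhel images and keep the result
+  containers.podman.podman_search:
+    term: "rhel"
+    limit: 3
+  register: search_result
+
+- name: Show the names of the images that were found
+  ansible.builtin.debug:
+    msg: "{{ search_result.images | map(attribute='Name') | list }}"
+</pre></div>
+</div>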
+</section>
+<section id="return-values">
+<h2><a class="toc-backref" href="#id5" role="doc-backlink">Return Values</a><a class="headerlink" href="#return-values" title="Permalink to this heading">¶</a></h2>
+<p>Common return values are documented <span class="xref std std-ref">here</span>, the following are the fields unique to this module:</p>
+<table class="longtable ansible-option-table docutils align-default" style="width: 100%">
+<thead>
+<tr class="row-odd"><th class="head"><p>Key</p></th>
+<th class="head"><p>Description</p></th>
+</tr>
+</thead>
+<tbody>
+<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="return-images"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-search-module-return-images"><strong>images</strong></p>
+<a class="ansibleOptionLink" href="#return-images" title="Permalink to this return value"></a><p class="ansible-option-type-line"><span class="ansible-option-type">list</span> / <span class="ansible-option-elements">elements=string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>info from all or specified images</p>
+<p class="ansible-option-line"><strong class="ansible-option-returned-bold">Returned:</strong> always</p>
+<p class="ansible-option-line ansible-option-sample"><strong class="ansible-option-sample-bold">Sample:</strong> <code class="ansible-option-sample docutils literal notranslate"><span class="pre">[{&quot;Automated&quot;:</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;Description&quot;:</span> <span class="pre">&quot;Red</span> <span class="pre">Hat</span> <span class="pre">Enterprise</span> <span class="pre">Linux</span> <span class="pre">Atomic</span> <span class="pre">Image</span> <span class="pre">is</span> <span class="pre">a</span> <span class="pre">minimal,</span> <span class="pre">fully</span> <span class="pre">supported</span> <span class="pre">base</span> <span class="pre">image.&quot;,</span> <span class="pre">&quot;Index&quot;:</span> <span class="pre">&quot;registry.access.redhat.com&quot;,</span> <span class="pre">&quot;Name&quot;:</span> <span class="pre">&quot;registry.access.redhat.com/rhel7-atomic&quot;,</span> <span class="pre">&quot;Official&quot;:</span> <span class="pre">&quot;&quot;,</span> <span class="pre">&quot;Stars&quot;:</span> <span class="pre">0,</span> <span class="pre">&quot;Tags&quot;:</span> <span class="pre">[&quot;1.0&quot;,</span> <span class="pre">&quot;1.1&quot;,</span> <span class="pre">&quot;1.1.1-devel&quot;]}]</span></code></p>
+</div></td>
+</tr>
+</tbody>
+</table>
+<section id="authors">
+<h3>Authors<a class="headerlink" href="#authors" title="Permalink to this heading">¶</a></h3>
+<ul class="simple">
+<li><p>Derek Waters (&#64;derekwaters)</p></li>
+</ul>
+</section>
+<section id="collection-links">
+<h3>Collection links<a class="headerlink" href="#collection-links" title="Permalink to this heading">¶</a></h3>
+<ul class="ansible-links">
+<li><span><a aria-role="button" class="ansible-link reference external" href="https://github.com/containers/ansible-podman-collections/issues?q=is%3Aissue+is%3Aopen+sort%3Aupdated-desc" rel="noopener external" target="_blank">Issue Tracker</a></span></li>
+<li><span><a aria-role="button" class="ansible-link reference external" href="https://github.com/containers/ansible-podman-collections" rel="noopener external" target="_blank">Repository (Sources)</a></span></li>
+</ul>
+</section>
+</section>
+</section>
+
+
+ </div>
+
+ </div>
+ </div>
+ <div class="sphinxsidebar" role="navigation" aria-label="main navigation">
+ <div class="sphinxsidebarwrapper">
+<h1 class="logo"><a href="index.html">Python</a></h1>
+
+
+
+
+
+
+
+
+<h3>Navigation</h3>
+<ul class="current">
+<li class="toctree-l1"><a class="reference internal" href="podman_container_module.html">containers.podman.podman_container module – Manage podman containers</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_container_exec_module.html">containers.podman.podman_container_exec module – Executes a command in a running container.</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_container_info_module.html">containers.podman.podman_container_info module – Gather facts about containers using podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_containers_module.html">containers.podman.podman_containers module – Manage podman containers in a batch</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_export_module.html">containers.podman.podman_export module – Export a podman container</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_generate_systemd_module.html">containers.podman.podman_generate_systemd module – Generate systemd unit from a pod or a container</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_image_module.html">containers.podman.podman_image module – Pull images for use by podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_image_info_module.html">containers.podman.podman_image_info module – Gather info about images using podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_import_module.html">containers.podman.podman_import module – Import Podman container from a tar file.</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_load_module.html">containers.podman.podman_load module – Load image from a tar file.</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_login_module.html">containers.podman.podman_login module – Login to a container registry using podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_login_info_module.html">containers.podman.podman_login_info module – Return the logged-in user if any for a given registry</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_logout_module.html">containers.podman.podman_logout module – Log out of a container registry using podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_network_module.html">containers.podman.podman_network module – Manage podman networks</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_network_info_module.html">containers.podman.podman_network_info module – Gather info about podman networks</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_play_module.html">containers.podman.podman_play module – Play kubernetes YAML file using podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_pod_module.html">containers.podman.podman_pod module – Manage Podman pods</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_pod_info_module.html">containers.podman.podman_pod_info module – Gather info about podman pods</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_search module – Search for remote images using podman</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_volume_module.html">containers.podman.podman_volume module – Manage Podman volumes</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_volume_info_module.html">containers.podman.podman_volume_info module – Gather info about podman volumes</a></li>
+</ul>
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="podman_unshare_become.html">containers.podman.podman_unshare become – Run tasks using podman unshare</a></li>
+</ul>
+<ul>
+<li class="toctree-l1"><a class="reference internal" href="buildah_connection.html">containers.podman.buildah connection – Interact with an existing buildah container</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_connection.html">containers.podman.podman connection – Interact with an existing podman container</a></li>
+</ul>
+
+<div class="relations">
+<h3>Related Topics</h3>
+<ul>
+ <li><a href="index.html">Documentation overview</a><ul>
+ <li>Previous: <a href="podman_save_module.html" title="previous chapter">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+ <li>Next: <a href="podman_secret_module.html" title="next chapter">containers.podman.podman_secret module – Manage podman secrets</a></li>
+ </ul></li>
+</ul>
+</div>
+<div id="searchbox" style="display: none" role="search">
+ <h3 id="searchlabel">Quick search</h3>
+ <div class="searchformwrapper">
+ <form class="search" action="search.html" method="get">
+ <input type="text" name="q" aria-labelledby="searchlabel" autocomplete="off" autocorrect="off" autocapitalize="off" spellcheck="false"/>
+ <input type="submit" value="Go" />
+ </form>
+ </div>
+</div>
+<script>document.getElementById('searchbox').style.display = "block"</script>
+
+
+
+
+
+
+
+
+ </div>
+ </div>
+ <div class="clearer"></div>
+ </div>
+ <div class="footer">
+ &copy;.
+
+ |
+ Powered by <a href="http://sphinx-doc.org/">Sphinx 7.0.1</a>
+ &amp; <a href="https://github.com/bitprophet/alabaster">Alabaster 0.7.13</a>
+
+ |
+ <a href="_sources/podman_search_module.rst.txt"
+ rel="nofollow">Page source</a>
+ </div>
+
+
+
+
+ </body>
+</html> \ No newline at end of file
diff --git a/ansible_collections/containers/podman/docs/podman_secret_info_module.html b/ansible_collections/containers/podman/docs/podman_secret_info_module.html
index 7c8f69c54..adef8bd36 100644
--- a/ansible_collections/containers/podman/docs/podman_secret_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_secret_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_secret_info module – Gather info about podman secrets<a class="headerlink" href="#containers-podman-podman-secret-info-module-gather-info-about-podman-secrets" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -193,6 +193,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_secret_module.html b/ansible_collections/containers/podman/docs/podman_secret_module.html
index 11b33f02c..c8c6c3e80 100644
--- a/ansible_collections/containers/podman/docs/podman_secret_module.html
+++ b/ansible_collections/containers/podman/docs/podman_secret_module.html
@@ -16,7 +16,7 @@
<link rel="index" title="Index" href="genindex.html" />
<link rel="search" title="Search" href="search.html" />
<link rel="next" title="containers.podman.podman_secret_info module – Gather info about podman secrets" href="podman_secret_info_module.html" />
- <link rel="prev" title="containers.podman.podman_save module – Saves podman image to tar file" href="podman_save_module.html" />
+ <link rel="prev" title="containers.podman.podman_search module – Search for remote images using podman" href="podman_search_module.html" />
<link rel="stylesheet" href="_static/custom.css" type="text/css" />
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_secret module – Manage podman secrets<a class="headerlink" href="#containers-podman-podman-secret-module-manage-podman-secrets" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -80,14 +80,14 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<div class="ansibleOptionAnchor" id="parameter-data"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-data"><strong>data</strong></p>
<a class="ansibleOptionLink" href="#parameter-data" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
-<td><div class="ansible-option-cell"><p>The value of the secret. Required when <code class="docutils literal notranslate"><span class="pre">state</span></code> is <code class="docutils literal notranslate"><span class="pre">present</span></code>.</p>
+<td><div class="ansible-option-cell"><p>The value of the secret. Required when <code class="docutils literal notranslate"><span class="pre">state</span></code> is <code class="docutils literal notranslate"><span class="pre">present</span></code>. Mutually exclusive with <code class="docutils literal notranslate"><span class="pre">env</span></code> and <code class="docutils literal notranslate"><span class="pre">path</span></code>.</p>
</div></td>
</tr>
<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-debug"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-debug"><strong>debug</strong></p>
<a class="ansibleOptionLink" href="#parameter-debug" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
-<td><div class="ansible-option-cell"><p>Enable debug mode for module.</p>
+<td><div class="ansible-option-cell"><p>Enable debug mode for module. It prints secrets diff.</p>
<p class="ansible-option-line"><strong class="ansible-option-choices">Choices:</strong></p>
<ul class="simple">
<li><p><code class="ansible-option-default-bold docutils literal notranslate"><strong><span class="pre">false</span></strong></code> <span class="ansible-option-choices-default-mark">← (default)</span></p></li>
@@ -110,6 +110,13 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</div></td>
</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-env"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-env"><strong>env</strong></p>
+<a class="ansibleOptionLink" href="#parameter-env" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>The name of the environment variable that contains the secret. Mutually exclusive with <code class="docutils literal notranslate"><span class="pre">data</span></code> and <code class="docutils literal notranslate"><span class="pre">path</span></code>.</p>
+</div></td>
+</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-executable"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-executable"><strong>executable</strong></p>
<a class="ansibleOptionLink" href="#parameter-executable" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span></p>
</div></td>
@@ -117,7 +124,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<p class="ansible-option-line"><strong class="ansible-option-default-bold">Default:</strong> <code class="ansible-option-default docutils literal notranslate"><span class="pre">&quot;podman&quot;</span></code></p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-force"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-force"><strong>force</strong></p>
<a class="ansibleOptionLink" href="#parameter-force" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
</div></td>
@@ -129,20 +136,27 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
</ul>
</div></td>
</tr>
-<tr class="row-even"><td><div class="ansible-option-cell">
+<tr class="row-odd"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-labels"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-labels"><strong>labels</strong></p>
<a class="ansibleOptionLink" href="#parameter-labels" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">dictionary</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>Labels to set on the secret.</p>
</div></td>
</tr>
-<tr class="row-odd"><td><div class="ansible-option-cell">
+<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-name"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-name"><strong>name</strong></p>
<a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">string</span> / <span class="ansible-option-required">required</span></p>
</div></td>
<td><div class="ansible-option-cell"><p>The name of the secret.</p>
</div></td>
</tr>
+<tr class="row-odd"><td><div class="ansible-option-cell">
+<div class="ansibleOptionAnchor" id="parameter-path"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-path"><strong>path</strong></p>
+<a class="ansibleOptionLink" href="#parameter-path" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">path</span></p>
+</div></td>
+<td><div class="ansible-option-cell"><p>Path to the file that contains the secret. Mutually exclusive with <code class="docutils literal notranslate"><span class="pre">data</span></code> and <code class="docutils literal notranslate"><span class="pre">env</span></code>.</p>
+</div></td>
+</tr>
<tr class="row-even"><td><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-skip_existing"></div><p class="ansible-option-title" id="ansible-collections-containers-podman-podman-secret-module-parameter-skip-existing"><strong>skip_existing</strong></p>
<a class="ansibleOptionLink" href="#parameter-skip_existing" title="Permalink to this option"></a><p class="ansible-option-type-line"><span class="ansible-option-type">boolean</span></p>
@@ -253,6 +267,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
@@ -271,7 +286,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<h3>Related Topics</h3>
<ul>
<li><a href="index.html">Documentation overview</a><ul>
- <li>Previous: <a href="podman_save_module.html" title="previous chapter">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+ <li>Previous: <a href="podman_search_module.html" title="previous chapter">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li>Next: <a href="podman_secret_info_module.html" title="next chapter">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
</ul></li>
</ul>
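The data, env, and path sources documented above are mutually exclusive. In an Ansible module this kind of constraint is typically declared through the mutually_exclusive option of AnsibleModule; the snippet below is only a hedged sketch of that pattern, not the collection's actual podman_secret implementation:

from ansible.module_utils.basic import AnsibleModule

# Illustrative argument spec only; the real podman_secret module defines more options.
module = AnsibleModule(
    argument_spec=dict(
        name=dict(type='str', required=True),
        data=dict(type='str', no_log=True),
        env=dict(type='str'),
        path=dict(type='path'),
        state=dict(type='str', default='present', choices=['present', 'absent']),
    ),
    # Only one of the three secret sources may be supplied at a time.
    mutually_exclusive=[('data', 'env', 'path')],
)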
diff --git a/ansible_collections/containers/podman/docs/podman_tag_module.html b/ansible_collections/containers/podman/docs/podman_tag_module.html
index 17687a89a..c832b3647 100644
--- a/ansible_collections/containers/podman/docs/podman_tag_module.html
+++ b/ansible_collections/containers/podman/docs/podman_tag_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_tag module – Add an additional name to a local image<a class="headerlink" href="#containers-podman-podman-tag-module-add-an-additional-name-to-a-local-image" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -165,6 +165,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1 current"><a class="current reference internal" href="#">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_unshare_become.html b/ansible_collections/containers/podman/docs/podman_unshare_become.html
index ae5b9003b..f57812095 100644
--- a/ansible_collections/containers/podman/docs/podman_unshare_become.html
+++ b/ansible_collections/containers/podman/docs/podman_unshare_become.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_unshare become – Run tasks using podman unshare<a class="headerlink" href="#containers-podman-podman-unshare-become-run-tasks-using-podman-unshare" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This become plugin is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This become plugin is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.</p>
@@ -254,6 +254,7 @@ To check whether it is installed, run <code class="code docutils literal notrans
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_volume_info_module.html b/ansible_collections/containers/podman/docs/podman_volume_info_module.html
index 35af76fc6..8118bbd4f 100644
--- a/ansible_collections/containers/podman/docs/podman_volume_info_module.html
+++ b/ansible_collections/containers/podman/docs/podman_volume_info_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_volume_info module – Gather info about podman volumes<a class="headerlink" href="#containers-podman-podman-volume-info-module-gather-info-about-podman-volumes" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -181,6 +181,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/podman_volume_module.html b/ansible_collections/containers/podman/docs/podman_volume_module.html
index 11f57bf97..34b562578 100644
--- a/ansible_collections/containers/podman/docs/podman_volume_module.html
+++ b/ansible_collections/containers/podman/docs/podman_volume_module.html
@@ -37,7 +37,7 @@
<h1>containers.podman.podman_volume module – Manage Podman volumes<a class="headerlink" href="#containers-podman-podman-volume-module-manage-podman-volumes" title="Permalink to this heading">¶</a></h1>
<div class="admonition note">
<p class="admonition-title">Note</p>
-<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.13.0).</p>
+<p>This module is part of the <a class="reference external" href="https://galaxy.ansible.com/ui/repo/published/containers/podman/">containers.podman collection</a> (version 1.15.2).</p>
<p>It is not included in <code class="docutils literal notranslate"><span class="pre">ansible-core</span></code>.
To check whether it is installed, run <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">list</span></code>.</p>
<p>To install it, use: <code class="code docutils literal notranslate"><span class="pre">ansible-galaxy</span> <span class="pre">collection</span> <span class="pre">install</span> <span class="pre">containers.podman</span></code>.
@@ -167,6 +167,8 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<ul class="simple">
<li><p><code class="ansible-option-default-bold docutils literal notranslate"><strong><span class="pre">&quot;present&quot;</span></strong></code> <span class="ansible-option-choices-default-mark">← (default)</span></p></li>
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;absent&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;mounted&quot;</span></code></p></li>
+<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;unmounted&quot;</span></code></p></li>
<li><p><code class="ansible-option-choices-entry docutils literal notranslate"><span class="pre">&quot;quadlet&quot;</span></code></p></li>
</ul>
</div></td>
@@ -276,6 +278,7 @@ see <a class="reference internal" href="#ansible-collections-containers-podman-p
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/docs/search.html b/ansible_collections/containers/podman/docs/search.html
index aeb228ad7..de1515205 100644
--- a/ansible_collections/containers/podman/docs/search.html
+++ b/ansible_collections/containers/podman/docs/search.html
@@ -104,6 +104,7 @@
<li class="toctree-l1"><a class="reference internal" href="podman_prune_module.html">containers.podman.podman_prune module – Allows to prune various podman objects</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_runlabel_module.html">containers.podman.podman_runlabel module – Run given label from given image</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_save_module.html">containers.podman.podman_save module – Saves podman image to tar file</a></li>
+<li class="toctree-l1"><a class="reference internal" href="podman_search_module.html">containers.podman.podman_search module – Search for remote images using podman</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_module.html">containers.podman.podman_secret module – Manage podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_secret_info_module.html">containers.podman.podman_secret_info module – Gather info about podman secrets</a></li>
<li class="toctree-l1"><a class="reference internal" href="podman_tag_module.html">containers.podman.podman_tag module – Add an additional name to a local image</a></li>
diff --git a/ansible_collections/containers/podman/plugins/module_utils/podman/common.py b/ansible_collections/containers/podman/plugins/module_utils/podman/common.py
index cbb6b080e..aac7b60eb 100644
--- a/ansible_collections/containers/podman/plugins/module_utils/podman/common.py
+++ b/ansible_collections/containers/podman/plugins/module_utils/podman/common.py
@@ -337,3 +337,88 @@ def get_podman_version(module, fail=True):
(executable, err))
return None
return out.split("version")[1].strip()
+
+
+def createcommand(argument, info_config, boolean_type=False):
+ """Returns list of values for given argument from CreateCommand
+ from Podman container inspect output.
+
+ Args:
+ argument (str): argument name
+ info_config (dict): dictionary with container info
+ boolean_type (bool): if True, the argument is a boolean flag without a value
+
+ Returns:
+
+ all_values: list of values for the given argument from createcommand
+ """
+ if "createcommand" not in info_config:
+ return []
+ cr_com = info_config["createcommand"]
+ argument_values = ARGUMENTS_OPTS_DICT.get(argument, [argument])
+ all_values = []
+ for arg in argument_values:
+ for ind, cr_opt in enumerate(cr_com):
+ if arg == cr_opt:
+ if boolean_type:
+ # This is a boolean argument and doesn't have a value
+ return [True]
+ if not cr_com[ind + 1].startswith("-"):
+ # This is a key=value argument
+ all_values.append(cr_com[ind + 1])
+ else:
+ # This is also a false/true switching argument
+ return [True]
+ if cr_opt.startswith("%s=" % arg):
+ all_values.append(cr_opt.split("=", 1)[1])
+ return all_values
+
+
+def diff_generic(params, info_config, module_arg, cmd_arg, boolean_type=False):
+ """
+ Generic diff function for module arguments from CreateCommand
+ in Podman inspection output.
+
+ Args:
+ params (dict): module parameters
+ info_config (dict): dictionary with container info
+ module_arg (str): module argument name
+ cmd_arg (str): command line argument name
+ boolean_type (bool): if True, the argument is a boolean flag without a value
+
+ Returns:
+ tuple: the (before, after) values prepared for comparison
+
+ """
+ before = createcommand(cmd_arg, info_config, boolean_type=boolean_type)
+ if before == []:
+ before = None
+ after = params[module_arg]
+ if boolean_type and (before, after) in [(None, False), (False, None)]:
+ before, after = False, False
+ return before, after
+ if before is None and after is None:
+ return before, after
+ if after is not None:
+ if isinstance(after, list):
+ after = ",".join(sorted([str(i).lower() for i in after]))
+ if before:
+ before = ",".join(sorted([str(i).lower() for i in before]))
+ elif isinstance(after, dict):
+ after = ",".join(sorted(
+ [str(k).lower() + "=" + str(v).lower() for k, v in after.items() if v is not None]))
+ if before:
+ before = ",".join(sorted([j.lower() for j in before]))
+ elif isinstance(after, bool):
+ after = str(after).capitalize()
+ if before is not None:
+ before = str(before[0]).capitalize()
+ elif isinstance(after, int):
+ after = str(after)
+ if before is not None:
+ before = str(before[0])
+ else:
+ before = before[0] if before else None
+ else:
+ before = ",".join(sorted(before)) if len(before) > 1 else before[0]
+ return before, after
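As a quick illustration of the new helpers, the snippet below exercises createcommand() and diff_generic() against a made-up, heavily trimmed inspect config; it assumes the patched collection is importable, and all sample values are purely hypothetical:

from ansible_collections.containers.podman.plugins.module_utils.podman.common import (
    createcommand,
    diff_generic,
)

# Trimmed, made-up container inspect config with a CreateCommand entry.
info_config = {
    "createcommand": [
        "podman", "run", "--name", "web",
        "--env", "A=1", "--env=B=2",    # both "--opt value" and "--opt=value" forms
        "--privileged",                 # boolean flag without a value
        "nginx:latest",
    ]
}

# createcommand() collects every value given for an option, in either form.
print(createcommand("--env", info_config))                             # ['A=1', 'B=2']
# Boolean flags are reported as [True] when boolean_type is set.
print(createcommand("--privileged", info_config, boolean_type=True))   # [True]

# diff_generic() normalizes the module parameter and the CreateCommand values
# into comparable strings, so an unchanged env dict produces identical sides.
params = {"env": {"A": 1, "B": 2}}
print(diff_generic(params, info_config, "env", "--env"))               # ('a=1,b=2', 'a=1,b=2')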
diff --git a/ansible_collections/containers/podman/plugins/module_utils/podman/podman_container_lib.py b/ansible_collections/containers/podman/plugins/module_utils/podman/podman_container_lib.py
index bf42ffdee..76458f144 100644
--- a/ansible_collections/containers/podman/plugins/module_utils/podman/podman_container_lib.py
+++ b/ansible_collections/containers/podman/plugins/module_utils/podman/podman_container_lib.py
@@ -8,8 +8,8 @@ from ansible_collections.containers.podman.plugins.module_utils.podman.common im
from ansible_collections.containers.podman.plugins.module_utils.podman.common import lower_keys
from ansible_collections.containers.podman.plugins.module_utils.podman.common import generate_systemd
from ansible_collections.containers.podman.plugins.module_utils.podman.common import delete_systemd
-from ansible_collections.containers.podman.plugins.module_utils.podman.common import normalize_signal
-from ansible_collections.containers.podman.plugins.module_utils.podman.common import ARGUMENTS_OPTS_DICT
+from ansible_collections.containers.podman.plugins.module_utils.podman.common import diff_generic
+from ansible_collections.containers.podman.plugins.module_utils.podman.common import createcommand
from ansible_collections.containers.podman.plugins.module_utils.podman.quadlet import create_quadlet_state
from ansible_collections.containers.podman.plugins.module_utils.podman.quadlet import ContainerQuadlet
@@ -23,15 +23,18 @@ ARGUMENTS_SPEC_CONTAINER = dict(
'absent', 'present', 'stopped', 'started', 'created', 'quadlet']),
image=dict(type='str'),
annotation=dict(type='dict'),
+ arch=dict(type='str'),
attach=dict(type='list', elements='str', choices=['stdout', 'stderr', 'stdin']),
authfile=dict(type='path'),
blkio_weight=dict(type='int'),
blkio_weight_device=dict(type='dict'),
cap_add=dict(type='list', elements='str', aliases=['capabilities']),
cap_drop=dict(type='list', elements='str'),
+ cgroup_conf=dict(type='dict'),
cgroup_parent=dict(type='path'),
cgroupns=dict(type='str'),
cgroups=dict(type='str'),
+ chrootdirs=dict(type='str'),
cidfile=dict(type='path'),
cmd_args=dict(type='list', elements='str'),
conmon_pidfile=dict(type='path'),
@@ -44,6 +47,7 @@ ARGUMENTS_SPEC_CONTAINER = dict(
cpus=dict(type='str'),
cpuset_cpus=dict(type='str'),
cpuset_mems=dict(type='str'),
+ decryption_key=dict(type='str', no_log=False),
delete_depend=dict(type='bool'),
delete_time=dict(type='str'),
delete_volumes=dict(type='bool'),
@@ -51,6 +55,7 @@ ARGUMENTS_SPEC_CONTAINER = dict(
debug=dict(type='bool', default=False),
detach_keys=dict(type='str', no_log=False),
device=dict(type='list', elements='str'),
+ device_cgroup_rule=dict(type='str'),
device_read_bps=dict(type='list', elements='str'),
device_read_iops=dict(type='list', elements='str'),
device_write_bps=dict(type='list', elements='str'),
@@ -62,6 +67,7 @@ ARGUMENTS_SPEC_CONTAINER = dict(
env=dict(type='dict'),
env_file=dict(type='list', elements='path', aliases=['env_files']),
env_host=dict(type='bool'),
+ env_merge=dict(type='dict'),
etc_hosts=dict(type='dict', aliases=['add_hosts']),
expose=dict(type='list', elements='str', aliases=[
'exposed', 'exposed_ports']),
@@ -70,23 +76,33 @@ ARGUMENTS_SPEC_CONTAINER = dict(
force_delete=dict(type='bool', default=True),
generate_systemd=dict(type='dict', default={}),
gidmap=dict(type='list', elements='str'),
+ gpus=dict(type='str'),
group_add=dict(type='list', elements='str', aliases=['groups']),
- healthcheck=dict(type='str'),
- healthcheck_interval=dict(type='str'),
- healthcheck_retries=dict(type='int'),
- healthcheck_start_period=dict(type='str'),
- healthcheck_timeout=dict(type='str'),
+ group_entry=dict(type='str'),
+ healthcheck=dict(type='str', aliases=['health_cmd']),
+ healthcheck_interval=dict(type='str', aliases=['health_interval']),
+ healthcheck_retries=dict(type='int', aliases=['health_retries']),
+ healthcheck_start_period=dict(type='str', aliases=['health_start_period']),
+ health_startup_cmd=dict(type='str'),
+ health_startup_interval=dict(type='str'),
+ health_startup_retries=dict(type='int'),
+ health_startup_success=dict(type='int'),
+ health_startup_timeout=dict(type='str'),
+ healthcheck_timeout=dict(type='str', aliases=['health_timeout']),
healthcheck_failure_action=dict(type='str', choices=[
- 'none', 'kill', 'restart', 'stop']),
+ 'none', 'kill', 'restart', 'stop'], aliases=['health_on_failure']),
hooks_dir=dict(type='list', elements='str'),
hostname=dict(type='str'),
+ hostuser=dict(type='str'),
http_proxy=dict(type='bool'),
image_volume=dict(type='str', choices=['bind', 'tmpfs', 'ignore']),
image_strict=dict(type='bool', default=False),
init=dict(type='bool'),
+ init_ctr=dict(type='str', choices=['once', 'always']),
init_path=dict(type='str'),
interactive=dict(type='bool'),
ip=dict(type='str'),
+ ip6=dict(type='str'),
ipc=dict(type='str', aliases=['ipc_mode']),
kernel_memory=dict(type='str'),
label=dict(type='dict', aliases=['labels']),
@@ -108,32 +124,49 @@ ARGUMENTS_SPEC_CONTAINER = dict(
memory_swappiness=dict(type='int'),
mount=dict(type='list', elements='str', aliases=['mounts']),
network=dict(type='list', elements='str', aliases=['net', 'network_mode']),
- network_aliases=dict(type='list', elements='str'),
+ network_aliases=dict(type='list', elements='str', aliases=['network_alias']),
+ no_healthcheck=dict(type='bool'),
no_hosts=dict(type='bool'),
oom_kill_disable=dict(type='bool'),
oom_score_adj=dict(type='int'),
+ os=dict(type='str'),
+ passwd=dict(type='bool', no_log=False),
+ passwd_entry=dict(type='str', no_log=False),
+ personality=dict(type='str'),
pid=dict(type='str', aliases=['pid_mode']),
+ pid_file=dict(type='path'),
pids_limit=dict(type='str'),
+ platform=dict(type='str'),
pod=dict(type='str'),
+ pod_id_file=dict(type='path'),
+ preserve_fd=dict(type='list', elements='str'),
+ preserve_fds=dict(type='str'),
privileged=dict(type='bool'),
publish=dict(type='list', elements='str', aliases=[
'ports', 'published', 'published_ports']),
publish_all=dict(type='bool'),
+ pull=dict(type='str', choices=['always', 'missing', 'never', 'newer']),
quadlet_dir=dict(type='path'),
quadlet_filename=dict(type='str'),
quadlet_options=dict(type='list', elements='str'),
+ rdt_class=dict(type='str'),
read_only=dict(type='bool'),
read_only_tmpfs=dict(type='bool'),
recreate=dict(type='bool', default=False),
requires=dict(type='list', elements='str'),
restart_policy=dict(type='str'),
restart_time=dict(type='str'),
+ retry=dict(type='int'),
+ retry_delay=dict(type='str'),
rm=dict(type='bool', aliases=['remove', 'auto_remove']),
+ rmi=dict(type='bool'),
rootfs=dict(type='bool'),
+ seccomp_policy=dict(type='str'),
secrets=dict(type='list', elements='str', no_log=True),
sdnotify=dict(type='str'),
security_opt=dict(type='list', elements='str'),
shm_size=dict(type='str'),
+ shm_size_systemd=dict(type='str'),
sig_proxy=dict(type='bool'),
stop_signal=dict(type='int'),
stop_timeout=dict(type='int'),
@@ -142,14 +175,20 @@ ARGUMENTS_SPEC_CONTAINER = dict(
subuidname=dict(type='str'),
sysctl=dict(type='dict'),
systemd=dict(type='str'),
+ timeout=dict(type='int'),
timezone=dict(type='str'),
+ tls_verify=dict(type='bool'),
tmpfs=dict(type='dict'),
tty=dict(type='bool'),
uidmap=dict(type='list', elements='str'),
ulimit=dict(type='list', elements='str', aliases=['ulimits']),
+ umask=dict(type='str'),
+ unsetenv=dict(type='list', elements='str'),
+ unsetenv_all=dict(type='bool'),
user=dict(type='str'),
userns=dict(type='str', aliases=['userns_mode']),
uts=dict(type='str'),
+ variant=dict(type='str'),
volume=dict(type='list', elements='str', aliases=['volumes']),
volumes_from=dict(type='list', elements='str'),
workdir=dict(type='str', aliases=['working_dir'])
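Each argument added to ARGUMENTS_SPEC_CONTAINER is translated into podman CLI flags by a matching addparam_<name>() method on PodmanModuleParams, as the hunks below show. The sketch that follows illustrates that naming convention with a simplified, hypothetical dispatcher; it is not the collection's actual command builder:

class SketchParams:
    """Illustrative stand-in for PodmanModuleParams (simplified)."""

    def __init__(self, params):
        self.params = params

    # One addparam_<name>() per module argument, mirroring the real class.
    def addparam_gpus(self, c):
        return c + ['--gpus', self.params['gpus']]

    def addparam_rmi(self, c):
        if self.params['rmi']:
            c += ['--rmi']
        return c

    def build(self, base):
        # Hypothetical driver: call addparam_<name>() for every set parameter.
        cmd = list(base)
        for name, value in sorted(self.params.items()):
            if value is None:
                continue
            handler = getattr(self, 'addparam_%s' % name, None)
            if handler is not None:
                cmd = handler(cmd)
        return cmd


print(SketchParams({'gpus': 'all', 'rmi': True}).build(['podman', 'run']))
# ['podman', 'run', '--gpus', 'all', '--rmi']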
@@ -292,6 +331,9 @@ class PodmanModuleParams:
c += ['--annotation', '='.join(annotate)]
return c
+ def addparam_arch(self, c):
+ return c + ['--arch=%s' % self.params['arch']]
+
def addparam_attach(self, c):
for attach in self.params['attach']:
c += ['--attach=%s' % attach]
@@ -329,6 +371,14 @@ class PodmanModuleParams:
def addparam_cgroup_parent(self, c):
return c + ['--cgroup-parent', self.params['cgroup_parent']]
+ def addparam_cgroup_conf(self, c):
+ for cgroup in self.params['cgroup_conf'].items():
+ c += ['--cgroup-conf=%s' % '='.join([str(i) for i in cgroup])]
+ return c
+
+ def addparam_chrootdirs(self, c):
+ return c + ['--chrootdirs', self.params['chrootdirs']]
+
def addparam_cidfile(self, c):
return c + ['--cidfile', self.params['cidfile']]
@@ -359,6 +409,9 @@ class PodmanModuleParams:
def addparam_cpuset_mems(self, c):
return c + ['--cpuset-mems', self.params['cpuset_mems']]
+ def addparam_decryption_key(self, c):
+ return c + ['--decryption-key=%s' % self.params['decryption_key']]
+
def addparam_detach(self, c):
# Remove detach from create command and don't set if attach is true
if self.action == 'create' or self.params['attach']:
@@ -373,6 +426,9 @@ class PodmanModuleParams:
c += ['--device', dev]
return c
+ def addparam_device_cgroup_rule(self, c):
+ return c + ['--device-cgroup-rule=%s' % self.params['device_cgroup_rule']]
+
def addparam_device_read_bps(self, c):
for dev in self.params['device_read_bps']:
c += ['--device-read-bps', dev]
@@ -426,6 +482,13 @@ class PodmanModuleParams:
c += ['--add-host', ':'.join(host_ip)]
return c
+ def addparam_env_merge(self, c):
+ for env_merge in self.params['env_merge'].items():
+ c += ['--env-merge',
+ b"=".join([to_bytes(k, errors='surrogate_or_strict')
+ for k in env_merge])]
+ return c
+
def addparam_expose(self, c):
for exp in self.params['expose']:
c += ['--expose', exp]
@@ -436,11 +499,17 @@ class PodmanModuleParams:
c += ['--gidmap', gidmap]
return c
+ def addparam_gpus(self, c):
+ return c + ['--gpus', self.params['gpus']]
+
def addparam_group_add(self, c):
for g in self.params['group_add']:
c += ['--group-add', g]
return c
+ def addparam_group_entry(self, c):
+ return c + ['--group-entry', self.params['group_entry']]
+
def addparam_healthcheck(self, c):
return c + ['--healthcheck-command', self.params['healthcheck']]
@@ -456,10 +525,25 @@ class PodmanModuleParams:
return c + ['--healthcheck-start-period',
self.params['healthcheck_start_period']]
+ def addparam_health_startup_cmd(self, c):
+ return c + ['--health-startup-command', self.params['health_startup_cmd']]
+
+ def addparam_health_startup_interval(self, c):
+ return c + ['--health-startup-interval', self.params['health_startup_interval']]
+
def addparam_healthcheck_timeout(self, c):
return c + ['--healthcheck-timeout',
self.params['healthcheck_timeout']]
+ def addparam_health_startup_retries(self, c):
+ return c + ['--health-startup-retries', self.params['health_startup_retries']]
+
+ def addparam_health_startup_success(self, c):
+ return c + ['--health-startup-success', self.params['health_startup_success']]
+
+ def addparam_health_startup_timeout(self, c):
+ return c + ['--health-startup-timeout', self.params['health_startup_timeout']]
+
def addparam_healthcheck_failure_action(self, c):
return c + ['--health-on-failure',
self.params['healthcheck_failure_action']]
@@ -472,6 +556,9 @@ class PodmanModuleParams:
def addparam_hostname(self, c):
return c + ['--hostname', self.params['hostname']]
+ def addparam_hostuser(self, c):
+ return c + ['--hostuser', self.params['hostuser']]
+
def addparam_http_proxy(self, c):
return c + ['--http-proxy=%s' % self.params['http_proxy']]
@@ -486,12 +573,18 @@ class PodmanModuleParams:
def addparam_init_path(self, c):
return c + ['--init-path', self.params['init_path']]
+ def addparam_init_ctr(self, c):
+ return c + ['--init-ctr', self.params['init_ctr']]
+
def addparam_interactive(self, c):
return c + ['--interactive=%s' % self.params['interactive']]
def addparam_ip(self, c):
return c + ['--ip', self.params['ip']]
+ def addparam_ip6(self, c):
+ return c + ['--ip6', self.params['ip6']]
+
def addparam_ipc(self, c):
return c + ['--ipc', self.params['ipc']]
@@ -559,21 +652,57 @@ class PodmanModuleParams:
def addparam_no_hosts(self, c):
return c + ['--no-hosts=%s' % self.params['no_hosts']]
+ def addparam_no_healthcheck(self, c):
+ if self.params['no_healthcheck']:
+ c += ['--no-healthcheck']
+ return c
+
def addparam_oom_kill_disable(self, c):
return c + ['--oom-kill-disable=%s' % self.params['oom_kill_disable']]
def addparam_oom_score_adj(self, c):
return c + ['--oom-score-adj', self.params['oom_score_adj']]
+ def addparam_os(self, c):
+ return c + ['--os', self.params['os']]
+
+ def addparam_passwd(self, c):
+ if self.params['passwd']:
+ c += ['--passwd']
+ return c
+
+ def addparam_passwd_entry(self, c):
+ return c + ['--passwd-entry', self.params['passwd_entry']]
+
+ def addparam_personality(self, c):
+ return c + ['--personality', self.params['personality']]
+
def addparam_pid(self, c):
return c + ['--pid', self.params['pid']]
+ def addparam_pid_file(self, c):
+ return c + ['--pid-file', self.params['pid_file']]
+
def addparam_pids_limit(self, c):
return c + ['--pids-limit', self.params['pids_limit']]
+ def addparam_platform(self, c):
+ return c + ['--platform', self.params['platform']]
+
def addparam_pod(self, c):
return c + ['--pod', self.params['pod']]
+ def addparam_pod_id_file(self, c):
+ return c + ['--pod-id-file', self.params['pod_id_file']]
+
+ def addparam_preserve_fd(self, c):
+ for fd in self.params['preserve_fd']:
+ c += ['--preserve-fd', fd]
+ return c
+
+ def addparam_preserve_fds(self, c):
+ return c + ['--preserve-fds', self.params['preserve_fds']]
+
def addparam_privileged(self, c):
return c + ['--privileged=%s' % self.params['privileged']]
@@ -585,6 +714,12 @@ class PodmanModuleParams:
def addparam_publish_all(self, c):
return c + ['--publish-all=%s' % self.params['publish_all']]
+ def addparam_pull(self, c):
+ return c + ['--pull=%s' % self.params['pull']]
+
+ def addparam_rdt_class(self, c):
+ return c + ['--rdt-class', self.params['rdt_class']]
+
def addparam_read_only(self, c):
return c + ['--read-only=%s' % self.params['read_only']]
@@ -597,17 +732,31 @@ class PodmanModuleParams:
def addparam_restart_policy(self, c):
return c + ['--restart=%s' % self.params['restart_policy']]
+ def addparam_retry(self, c):
+ return c + ['--retry', self.params['retry']]
+
+ def addparam_retry_delay(self, c):
+ return c + ['--retry-delay', self.params['retry_delay']]
+
def addparam_rm(self, c):
if self.params['rm']:
c += ['--rm']
return c
+ def addparam_rmi(self, c):
+ if self.params['rmi']:
+ c += ['--rmi']
+ return c
+
def addparam_rootfs(self, c):
return c + ['--rootfs=%s' % self.params['rootfs']]
def addparam_sdnotify(self, c):
return c + ['--sdnotify=%s' % self.params['sdnotify']]
+ def addparam_seccomp_policy(self, c):
+ return c + ['--seccomp-policy', self.params['seccomp_policy']]
+
def addparam_secrets(self, c):
for secret in self.params['secrets']:
c += ['--secret', secret]
@@ -621,6 +770,9 @@ class PodmanModuleParams:
def addparam_shm_size(self, c):
return c + ['--shm-size', self.params['shm_size']]
+ def addparam_shm_size_systemd(self, c):
+ return c + ['--shm-size-systemd', self.params['shm_size_systemd']]
+
def addparam_sig_proxy(self, c):
return c + ['--sig-proxy=%s' % self.params['sig_proxy']]
@@ -646,14 +798,20 @@ class PodmanModuleParams:
def addparam_systemd(self, c):
return c + ['--systemd=%s' % str(self.params['systemd']).lower()]
+ def addparam_timeout(self, c):
+ return c + ['--timeout', self.params['timeout']]
+
+ def addparam_timezone(self, c):
+ return c + ['--tz=%s' % self.params['timezone']]
+
+ def addparam_tls_verify(self, c):
+ return c + ['--tls-verify=%s' % self.params['tls_verify']]
+
def addparam_tmpfs(self, c):
for tmpfs in self.params['tmpfs'].items():
c += ['--tmpfs', ':'.join(tmpfs)]
return c
- def addparam_timezone(self, c):
- return c + ['--tz=%s' % self.params['timezone']]
-
def addparam_tty(self, c):
return c + ['--tty=%s' % self.params['tty']]
@@ -667,6 +825,19 @@ class PodmanModuleParams:
c += ['--ulimit', u]
return c
+ def addparam_umask(self, c):
+ return c + ['--umask', self.params['umask']]
+
+ def addparam_unsetenv(self, c):
+ for unsetenv in self.params['unsetenv']:
+ c += ['--unsetenv', unsetenv]
+ return c
+
+ def addparam_unsetenv_all(self, c):
+ if self.params['unsetenv_all']:
+ c += ['--unsetenv-all']
+ return c
+
def addparam_user(self, c):
return c + ['--user', self.params['user']]
@@ -676,6 +847,9 @@ class PodmanModuleParams:
def addparam_uts(self, c):
return c + ['--uts', self.params['uts']]
+ def addparam_variant(self, c):
+ return c + ['--variant', self.params['variant']]
+
def addparam_volume(self, c):
for vol in self.params['volume']:
if vol:
@@ -700,42 +874,9 @@ class PodmanDefaults:
self.version = podman_version
self.image_info = image_info
self.defaults = {
- "blkio_weight": 0,
- "cgroups": "default",
- "cidfile": "",
- "cpus": 0.0,
- "cpu_shares": 0,
- "cpu_quota": 0,
- "cpu_period": 0,
- "cpu_rt_runtime": 0,
- "cpu_rt_period": 0,
- "cpuset_cpus": "",
- "cpuset_mems": "",
"detach": True,
- "device": [],
- "env_host": False,
- "etc_hosts": {},
- "group_add": [],
- "ipc": "",
- "kernelmemory": "0",
"log_level": "error",
- "memory": "0",
- "memory_swap": "0",
- "memory_reservation": "0",
- # "memory_swappiness": -1,
- "no_hosts": False,
- # libpod issue with networks in inspection
- "oom_score_adj": 0,
- "pid": "",
- "privileged": False,
- "read_only": False,
- "rm": False,
- "security_opt": [],
- "stop_signal": self.image_info.get('config', {}).get('stopsignal', "15"),
"tty": False,
- "user": self.image_info.get('user', ''),
- "workdir": self.image_info.get('config', {}).get('workingdir', '/'),
- "uts": "",
}
def default_dict(self):
@@ -744,17 +885,8 @@ class PodmanDefaults:
if (LooseVersion(self.version) >= LooseVersion('1.8.0')
and LooseVersion(self.version) < LooseVersion('1.9.0')):
self.defaults['cpu_shares'] = 1024
- if (LooseVersion(self.version) >= LooseVersion('2.0.0')):
- self.defaults['network'] = ["slirp4netns"]
- self.defaults['ipc'] = "private"
- self.defaults['uts'] = "private"
- self.defaults['pid'] = "private"
- if (LooseVersion(self.version) >= LooseVersion('5.0.0')):
- self.defaults['network'] = ["pasta"]
if (LooseVersion(self.version) >= LooseVersion('3.0.0')):
self.defaults['log_level'] = "warning"
- if (LooseVersion(self.version) >= LooseVersion('4.1.0')):
- self.defaults['ipc'] = "shareable"
return self.defaults
@@ -781,35 +913,6 @@ class PodmanContainerDiff:
params_with_defaults[p] = self.module_params[p]
return params_with_defaults
- def _createcommand(self, argument):
- """Returns list of values for given argument from CreateCommand
- from Podman container inspect output.
-
- Args:
- argument (str): argument name
-
- Returns:
-
- all_values: list of values for given argument from createcommand
- """
- if "createcommand" not in self.info["config"]:
- return []
- cr_com = self.info["config"]["createcommand"]
- argument_values = ARGUMENTS_OPTS_DICT.get(argument, [argument])
- all_values = []
- for arg in argument_values:
- for ind, cr_opt in enumerate(cr_com):
- if arg == cr_opt:
- # This is a key=value argument
- if not cr_com[ind + 1].startswith("-"):
- all_values.append(cr_com[ind + 1])
- else:
- # This is a false/true switching argument
- return [True]
- if cr_opt.startswith("%s=" % arg):
- all_values.append(cr_opt.split("=", 1)[1])
- return all_values
-
def _diff_update_and_compare(self, param_name, before, after):
if before != after:
self.diff['before'].update({param_name: before})
@@ -817,6 +920,24 @@ class PodmanContainerDiff:
return True
return False
+ def _diff_generic(self, module_arg, cmd_arg, boolean_type=False):
+ """
+ Generic diff function for module arguments from CreateCommand
+ in Podman inspection output.
+
+ Args:
+ module_arg (str): module argument name
+ cmd_arg (str): command line argument name
+ boolean_type (bool): if True, the argument is a boolean flag without a value
+
+ Returns:
+ bool: True if there is a difference, False otherwise
+
+ """
+ info_config = self.info["config"]
+ before, after = diff_generic(self.params, info_config, module_arg, cmd_arg, boolean_type)
+ return self._diff_update_and_compare(module_arg, before, after)
+
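A hedged sketch of how _diff_generic() ties diff_generic() from common.py to _diff_update_and_compare(); the class below mimics only the relevant pieces of PodmanContainerDiff and uses made-up inspect data:

from ansible_collections.containers.podman.plugins.module_utils.podman.common import diff_generic

class SketchDiff:
    def __init__(self, params, info):
        self.params = params
        self.info = info
        self.diff = {'before': {}, 'after': {}}

    def _diff_update_and_compare(self, param_name, before, after):
        # Same logic as PodmanContainerDiff: record and report any change.
        if before != after:
            self.diff['before'].update({param_name: before})
            self.diff['after'].update({param_name: after})
            return True
        return False

    def _diff_generic(self, module_arg, cmd_arg, boolean_type=False):
        before, after = diff_generic(self.params, self.info['config'], module_arg, cmd_arg, boolean_type)
        return self._diff_update_and_compare(module_arg, before, after)


d = SketchDiff(
    params={'hostname': 'web2'},
    info={'config': {'createcommand': ['podman', 'run', '--hostname', 'web1', 'nginx']}},
)
print(d._diff_generic('hostname', '--hostname'))   # True -> container differs and needs recreation
print(d.diff)   # {'before': {'hostname': 'web1'}, 'after': {'hostname': 'web2'}}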
def diffparam_annotation(self):
before = self.info['config']['annotations'] or {}
after = before.copy()
@@ -824,24 +945,17 @@ class PodmanContainerDiff:
after.update(self.params['annotation'])
return self._diff_update_and_compare('annotation', before, after)
- def diffparam_env_host(self):
- # It's impossible to get from inspect, recreate it if not default
- before = False
- after = self.params['env_host']
- return self._diff_update_and_compare('env_host', before, after)
+ def diffparam_arch(self):
+ return self._diff_generic('arch', '--arch')
+
+ def diffparam_authfile(self):
+ return self._diff_generic('authfile', '--authfile')
def diffparam_blkio_weight(self):
- before = self.info['hostconfig']['blkioweight']
- after = self.params['blkio_weight']
- return self._diff_update_and_compare('blkio_weight', before, after)
+ return self._diff_generic('blkio_weight', '--blkio-weight')
def diffparam_blkio_weight_device(self):
- before = self.info['hostconfig']['blkioweightdevice']
- if before == [] and self.module_params['blkio_weight_device'] is None:
- after = []
- else:
- after = self.params['blkio_weight_device']
- return self._diff_update_and_compare('blkio_weight_device', before, after)
+ return self._diff_generic('blkio_weight_device', '--blkio-weight-device')
def diffparam_cap_add(self):
before = self.info['effectivecaps'] or []
@@ -869,30 +983,27 @@ class PodmanContainerDiff:
before, after = sorted(list(set(before))), sorted(list(set(after)))
return self._diff_update_and_compare('cap_drop', before, after)
+ def diffparam_cgroup_conf(self):
+ return self._diff_generic('cgroup_conf', '--cgroup-conf')
+
def diffparam_cgroup_parent(self):
- before = self.info['hostconfig']['cgroupparent']
- after = self.params['cgroup_parent']
- if after is None:
- after = before
- return self._diff_update_and_compare('cgroup_parent', before, after)
-
- def diffparam_cgroups(self):
- # Cgroups output is not supported in all versions
- if 'cgroups' in self.info['hostconfig']:
- before = self.info['hostconfig']['cgroups']
- after = self.params['cgroups']
- return self._diff_update_and_compare('cgroups', before, after)
- return False
+ return self._diff_generic('cgroup_parent', '--cgroup-parent')
- def diffparam_cidfile(self):
- before = self.info['hostconfig']['containeridfile']
- after = self.params['cidfile']
- labels = self.info['config']['labels'] or {}
- # Ignore cidfile that is coming from systemd files
- # https://github.com/containers/ansible-podman-collections/issues/276
- if 'podman_systemd_unit' in labels:
- after = before
- return self._diff_update_and_compare('cidfile', before, after)
+ def diffparam_cgroupns(self):
+ return self._diff_generic('cgroupns', '--cgroupns')
+
+ # Disabling idempotency check for cgroups as it's added by the systemd generator
+ # https://github.com/containers/ansible-podman-collections/issues/775
+ # def diffparam_cgroups(self):
+ # return self._diff_generic('cgroups', '--cgroups')
+
+ def diffparam_chrootdirs(self):
+ return self._diff_generic('chrootdirs', '--chrootdirs')
+
+ # Disabling idempotency check for cidfile as it's added by the systemd generator
+ # https://github.com/containers/ansible-podman-collections/issues/775
+ # def diffparam_cidfile(self):
+ # return self._diff_generic('cidfile', '--cidfile')
def diffparam_command(self):
# TODO(sshnaidm): to inspect image to get the default command
@@ -905,107 +1016,73 @@ class PodmanContainerDiff:
return False
def diffparam_conmon_pidfile(self):
- before = self.info['conmonpidfile']
- if self.module_params['conmon_pidfile'] is None:
- after = before
- else:
- after = self.params['conmon_pidfile']
- return self._diff_update_and_compare('conmon_pidfile', before, after)
+ return self._diff_generic('conmon_pidfile', '--conmon-pidfile')
def diffparam_cpu_period(self):
- before = self.info['hostconfig']['cpuperiod']
- # if cpu_period left to default keep settings
- after = self.params['cpu_period'] or before
- return self._diff_update_and_compare('cpu_period', before, after)
+ return self._diff_generic('cpu_period', '--cpu-period')
def diffparam_cpu_quota(self):
- before = self.info['hostconfig']['cpuquota']
- # if cpu_quota left to default keep settings
- after = self.params['cpu_quota'] or before
- return self._diff_update_and_compare('cpu_quota', before, after)
+ return self._diff_generic('cpu_quota', '--cpu-quota')
def diffparam_cpu_rt_period(self):
- before = self.info['hostconfig']['cpurealtimeperiod']
- after = self.params['cpu_rt_period']
- return self._diff_update_and_compare('cpu_rt_period', before, after)
+ return self._diff_generic('cpu_rt_period', '--cpu-rt-period')
def diffparam_cpu_rt_runtime(self):
- before = self.info['hostconfig']['cpurealtimeruntime']
- after = self.params['cpu_rt_runtime']
- return self._diff_update_and_compare('cpu_rt_runtime', before, after)
+ return self._diff_generic('cpu_rt_runtime', '--cpu-rt-runtime')
def diffparam_cpu_shares(self):
- before = self.info['hostconfig']['cpushares']
- after = self.params['cpu_shares']
- return self._diff_update_and_compare('cpu_shares', before, after)
+ return self._diff_generic('cpu_shares', '--cpu-shares')
def diffparam_cpus(self):
- before = self.info['hostconfig']['nanocpus'] / 1000000000
- # if cpus left to default keep settings
- after = float(self.params['cpus'] or before)
- return self._diff_update_and_compare('cpus', before, after)
+ return self._diff_generic('cpus', '--cpus')
def diffparam_cpuset_cpus(self):
- before = self.info['hostconfig']['cpusetcpus']
- after = self.params['cpuset_cpus']
- return self._diff_update_and_compare('cpuset_cpus', before, after)
+ return self._diff_generic('cpuset_cpus', '--cpuset-cpus')
def diffparam_cpuset_mems(self):
- before = self.info['hostconfig']['cpusetmems']
- after = self.params['cpuset_mems']
- return self._diff_update_and_compare('cpuset_mems', before, after)
+ return self._diff_generic('cpuset_mems', '--cpuset-mems')
+
+ def diffparam_decryption_key(self):
+ return self._diff_generic('decryption_key', '--decryption-key')
def diffparam_device(self):
- before = [":".join([i['pathonhost'], i['pathincontainer']])
- for i in self.info['hostconfig']['devices']]
- if not before and 'createcommand' in self.info['config']:
- before = [i.lower() for i in self._createcommand('--device')]
- before = [":".join((i, i))
- if len(i.split(":")) == 1 else i for i in before]
- after = [":".join(i.split(":")[:2]) for i in self.params['device']]
- after = [":".join((i, i))
- if len(i.split(":")) == 1 else i for i in after]
- before, after = [i.lower() for i in before], [i.lower() for i in after]
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('devices', before, after)
+ return self._diff_generic('device', '--device')
+
+ def diffparam_device_cgroup_rule(self):
+ return self._diff_generic('device_cgroup_rule', '--device-cgroup-rule')
def diffparam_device_read_bps(self):
- before = self.info['hostconfig']['blkiodevicereadbps'] or []
- before = ["%s:%s" % (i['path'], i['rate']) for i in before]
- after = self.params['device_read_bps'] or []
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('device_read_bps', before, after)
+ return self._diff_generic('device_read_bps', '--device-read-bps')
def diffparam_device_read_iops(self):
- before = self.info['hostconfig']['blkiodevicereadiops'] or []
- before = ["%s:%s" % (i['path'], i['rate']) for i in before]
- after = self.params['device_read_iops'] or []
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('device_read_iops', before, after)
+ return self._diff_generic('device_read_iops', '--device-read-iops')
def diffparam_device_write_bps(self):
- before = self.info['hostconfig']['blkiodevicewritebps'] or []
- before = ["%s:%s" % (i['path'], i['rate']) for i in before]
- after = self.params['device_write_bps'] or []
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('device_write_bps', before, after)
+ return self._diff_generic('device_write_bps', '--device-write-bps')
def diffparam_device_write_iops(self):
- before = self.info['hostconfig']['blkiodevicewriteiops'] or []
- before = ["%s:%s" % (i['path'], i['rate']) for i in before]
- after = self.params['device_write_iops'] or []
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('device_write_iops', before, after)
+ return self._diff_generic('device_write_iops', '--device-write-iops')
+
+ def diffparam_dns(self):
+ return self._diff_generic('dns', '--dns')
+
+ def diffparam_dns_option(self):
+ return self._diff_generic('dns_option', '--dns-option')
+
+ def diffparam_dns_search(self):
+ return self._diff_generic('dns_search', '--dns-search')
- # Limited idempotency, it can't guess default values
def diffparam_env(self):
- env_before = self.info['config']['env'] or {}
- before = {i.split("=")[0]: "=".join(i.split("=")[1:])
- for i in env_before}
- after = before.copy()
- if self.params['env']:
- after.update({k: str(v) for k, v in self.params['env'].items()})
- return self._diff_update_and_compare('env', before, after)
+ return self._diff_generic('env', '--env')
+
+ def diffparam_env_file(self):
+ return self._diff_generic('env_file', '--env-file')
+
+ def diffparam_env_merge(self):
+ return self._diff_generic('env_merge', '--env-merge')
+
+ def diffparam_env_host(self):
+ return self._diff_generic('env_host', '--env-host')
def diffparam_etc_hosts(self):
if self.info['hostconfig']['extrahosts']:
@@ -1013,13 +1090,23 @@ class PodmanContainerDiff:
for i in self.info['hostconfig']['extrahosts']])
else:
before = {}
- after = self.params['etc_hosts']
+ after = self.params['etc_hosts'] or {}
return self._diff_update_and_compare('etc_hosts', before, after)
+ def diffparam_expose(self):
+ return self._diff_generic('expose', '--expose')
+
+ def diffparam_gidmap(self):
+ return self._diff_generic('gidmap', '--gidmap')
+
+ def diffparam_gpus(self):
+ return self._diff_generic('gpus', '--gpus')
+
def diffparam_group_add(self):
- before = self.info['hostconfig']['groupadd']
- after = self.params['group_add']
- return self._diff_update_and_compare('group_add', before, after)
+ return self._diff_generic('group_add', '--group-add')
+
+ def diffparam_group_entry(self):
+ return self._diff_generic('group_entry', '--group-entry')
# Healthcheck is only defined in container config if a healthcheck
# was configured; otherwise the config key isn't part of the config.
@@ -1041,11 +1128,44 @@ class PodmanContainerDiff:
after = self.params['healthcheck_failure_action'] or before
return self._diff_update_and_compare('healthcheckonfailureaction', before, after)
- # Because of hostname is random generated, this parameter has partial idempotency only.
+ def diffparam_healthcheck_interval(self):
+ return self._diff_generic('healthcheck_interval', '--healthcheck-interval')
+
+ def diffparam_healthcheck_retries(self):
+ return self._diff_generic('healthcheck_retries', '--healthcheck-retries')
+
+ def diffparam_healthcheck_start_period(self):
+ return self._diff_generic('healthcheck_start_period', '--healthcheck-start-period')
+
+ def diffparam_health_startup_cmd(self):
+ return self._diff_generic('health_startup_cmd', '--health-startup-cmd')
+
+ def diffparam_health_startup_interval(self):
+ return self._diff_generic('health_startup_interval', '--health-startup-interval')
+
+ def diffparam_health_startup_retries(self):
+ return self._diff_generic('health_startup_retries', '--health-startup-retries')
+
+ def diffparam_health_startup_success(self):
+ return self._diff_generic('health_startup_success', '--health-startup-success')
+
+ def diffparam_health_startup_timeout(self):
+ return self._diff_generic('health_startup_timeout', '--health-startup-timeout')
+
+ def diffparam_healthcheck_timeout(self):
+ return self._diff_generic('healthcheck_timeout', '--healthcheck-timeout')
+
+ def diffparam_hooks_dir(self):
+ return self._diff_generic('hooks_dir', '--hooks-dir')
+
def diffparam_hostname(self):
- before = self.info['config']['hostname']
- after = self.params['hostname'] or before
- return self._diff_update_and_compare('hostname', before, after)
+ return self._diff_generic('hostname', '--hostname')
+
+ def diffparam_hostuser(self):
+ return self._diff_generic('hostuser', '--hostuser')
+
+ def diffparam_http_proxy(self):
+ return self._diff_generic('http_proxy', '--http-proxy')
def diffparam_image(self):
before_id = self.info['image'] or self.info['rootfs']
@@ -1066,12 +1186,29 @@ class PodmanContainerDiff:
return self._diff_update_and_compare('image', before_id, after_id)
return self._diff_update_and_compare('image', before, after)
+ def diffparam_image_volume(self):
+ return self._diff_generic('image_volume', '--image-volume')
+
+ def diffparam_init(self):
+ return self._diff_generic('init', '--init', boolean_type=True)
+
+ def diffparam_init_ctr(self):
+ return self._diff_generic('init_ctr', '--init-ctr')
+
+ def diffparam_init_path(self):
+ return self._diff_generic('init_path', '--init-path')
+
+ def diffparam_interactive(self):
+ return self._diff_generic('interactive', '--interactive')
+
+ def diffparam_ip(self):
+ return self._diff_generic('ip', '--ip')
+
+ def diffparam_ip6(self):
+ return self._diff_generic('ip6', '--ip6')
+
def diffparam_ipc(self):
- before = self.info['hostconfig']['ipcmode']
- after = self.params['ipc']
- if self.params['pod'] and not self.module_params['ipc']:
- after = before
- return self._diff_update_and_compare('ipc', before, after)
+ return self._diff_generic('ipc', '--ipc')
def diffparam_label(self):
before = self.info['config']['labels'] or {}
@@ -1088,282 +1225,208 @@ class PodmanContainerDiff:
before.pop('podman_systemd_unit', None)
return self._diff_update_and_compare('label', before, after)
+ def diffparam_label_file(self):
+ return self._diff_generic('label_file', '--label-file')
+
def diffparam_log_driver(self):
- before = self.info['hostconfig']['logconfig']['type']
- if self.module_params['log_driver'] is not None:
- after = self.params['log_driver']
- else:
- after = before
- return self._diff_update_and_compare('log_driver', before, after)
+ return self._diff_generic('log_driver', '--log-driver')
- # Parameter has limited idempotency, unable to guess the default log_path
def diffparam_log_opt(self):
- before, after = {}, {}
-
- # Log path
- path_before = None
- if 'logpath' in self.info:
- path_before = self.info['logpath']
- # For Podman v3
- if ('logconfig' in self.info['hostconfig'] and
- 'path' in self.info['hostconfig']['logconfig']):
- path_before = self.info['hostconfig']['logconfig']['path']
- if path_before is not None:
- if (self.module_params['log_opt'] and
- 'path' in self.module_params['log_opt'] and
- self.module_params['log_opt']['path'] is not None):
- path_after = self.params['log_opt']['path']
- else:
- path_after = path_before
- if path_before != path_after:
- before.update({'log-path': path_before})
- after.update({'log-path': path_after})
-
- # Log tag
- tag_before = None
- if 'logtag' in self.info:
- tag_before = self.info['logtag']
- # For Podman v3
- if ('logconfig' in self.info['hostconfig'] and
- 'tag' in self.info['hostconfig']['logconfig']):
- tag_before = self.info['hostconfig']['logconfig']['tag']
- if tag_before is not None:
- if (self.module_params['log_opt'] and
- 'tag' in self.module_params['log_opt'] and
- self.module_params['log_opt']['tag'] is not None):
- tag_after = self.params['log_opt']['tag']
- else:
- tag_after = ''
- if tag_before != tag_after:
- before.update({'log-tag': tag_before})
- after.update({'log-tag': tag_after})
-
- # Log size
- # For Podman v3
- # size_before = '0B'
- # TODO(sshnaidm): integrate B/KB/MB/GB calculation for sizes
- # if ('logconfig' in self.info['hostconfig'] and
- # 'size' in self.info['hostconfig']['logconfig']):
- # size_before = self.info['hostconfig']['logconfig']['size']
- # if size_before != '0B':
- # if (self.module_params['log_opt'] and
- # 'max_size' in self.module_params['log_opt'] and
- # self.module_params['log_opt']['max_size'] is not None):
- # size_after = self.params['log_opt']['max_size']
- # else:
- # size_after = ''
- # if size_before != size_after:
- # before.update({'log-size': size_before})
- # after.update({'log-size': size_after})
-
- return self._diff_update_and_compare('log_opt', before, after)
+ return self._diff_generic('log_opt', '--log-opt')
def diffparam_mac_address(self):
- before = str(self.info['networksettings']['macaddress'])
- if not before and self.info['networksettings'].get('networks'):
- nets = self.info['networksettings']['networks']
- macs = [
- nets[i]["macaddress"] for i in nets if nets[i]["macaddress"]]
- if macs:
- before = macs[0]
- if not before and 'createcommand' in self.info['config']:
- before = [i.lower() for i in self._createcommand('--mac-address')]
- before = before[0] if before else ''
- if self.module_params['mac_address'] is not None:
- after = self.params['mac_address']
- else:
- after = before
- return self._diff_update_and_compare('mac_address', before, after)
+ return self._diff_generic('mac_address', '--mac-address')
+
+ def diffparam_memory(self):
+ return self._diff_generic('memory', '--memory')
+
+ def diffparam_memory_reservation(self):
+ return self._diff_generic('memory_reservation', '--memory-reservation')
+
+ def diffparam_memory_swap(self):
+ return self._diff_generic('memory_swap', '--memory-swap')
+
+ def diffparam_memory_swappiness(self):
+ return self._diff_generic('memory_swappiness', '--memory-swappiness')
+
+ def diffparam_mount(self):
+ return self._diff_generic('mount', '--mount')
def diffparam_network(self):
- net_mode_before = self.info['hostconfig']['networkmode']
- net_mode_after = ''
- before = list(self.info['networksettings'].get('networks', {}))
- # Remove default 'podman' network in v3 for comparison
- if before == ['podman']:
- before = []
- # Special case for options for slirp4netns rootless networking from v2
- if net_mode_before == 'slirp4netns' and 'createcommand' in self.info['config']:
- cr_net = [i.lower() for i in self._createcommand('--network')]
- for cr_net_opt in cr_net:
- if 'slirp4netns:' in cr_net_opt:
- before = [cr_net_opt]
- if net_mode_before == 'pasta':
- cr_net = [i.lower() for i in self._createcommand('--network')]
- for cr_net_opt in cr_net:
- if 'pasta:' in cr_net_opt:
- before = [cr_net_opt]
- after = self.params['network'] or []
- after = [i.lower() for i in after]
- # If container is in pod and no networks are provided
- if not self.module_params['network'] and self.params['pod']:
- after = before
- return self._diff_update_and_compare('network', before, after)
- # Check special network modes
- if after in [['bridge'], ['host'], ['slirp4netns'], ['none'], ['pasta']]:
- net_mode_after = after[0]
- # If changes are only for network mode and container has no networks
- if net_mode_after and not before:
- # Remove differences between v1 and v2
- net_mode_after = net_mode_after.replace('bridge', 'default')
- net_mode_after = net_mode_after.replace('slirp4netns', 'default')
- net_mode_after = net_mode_after.replace('pasta', 'default')
- net_mode_before = net_mode_before.replace('bridge', 'default')
- net_mode_before = net_mode_before.replace('slirp4netns', 'default')
- net_mode_before = net_mode_before.replace('pasta', 'default')
- return self._diff_update_and_compare('network', net_mode_before, net_mode_after)
- # If container is attached to network of a different container
- if "container" in net_mode_before:
- for netw in after:
- if "container" in netw:
- before = after = netw
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('network', before, after)
+ return self._diff_generic('network', '--network')
+
+ def diffparam_network_aliases(self):
+ return self._diff_generic('network_aliases', '--network-alias')
+
+ def diffparam_no_healthcheck(self):
+ return self._diff_generic('no_healthcheck', '--no-healthcheck', boolean_type=True)
+
+ def diffparam_no_hosts(self):
+ return self._diff_generic('no_hosts', '--no-hosts')
+
+ def diffparam_oom_kill_disable(self):
+ return self._diff_generic('oom_kill_disable', '--oom-kill-disable')
def diffparam_oom_score_adj(self):
- before = self.info['hostconfig']['oomscoreadj']
- after = self.params['oom_score_adj']
- return self._diff_update_and_compare('oom_score_adj', before, after)
+ return self._diff_generic('oom_score_adj', '--oom-score-adj')
- def diffparam_privileged(self):
- before = self.info['hostconfig']['privileged']
- after = self.params['privileged']
- return self._diff_update_and_compare('privileged', before, after)
+ def diffparam_os(self):
+ return self._diff_generic('os', '--os')
+
+ def diffparam_passwd(self):
+ return self._diff_generic('passwd', '--passwd', boolean_type=True)
+
+ def diffparam_passwd_entry(self):
+ return self._diff_generic('passwd_entry', '--passwd-entry')
+
+ def diffparam_personality(self):
+ return self._diff_generic('personality', '--personality')
def diffparam_pid(self):
- def get_container_id_by_name(name):
- rc, podman_inspect_info, err = self.module.run_command(
- [self.module.params["executable"], "inspect", name, "-f", "{{.Id}}"])
- if rc != 0:
- return None
- return podman_inspect_info.strip()
-
- before = self.info['hostconfig']['pidmode']
- after = self.params['pid']
- if after is not None and "container:" in after and "container:" in before:
- if after.split(":")[1] == before.split(":")[1]:
- return self._diff_update_and_compare('pid', before, after)
- after = "container:" + get_container_id_by_name(after.split(":")[1])
- return self._diff_update_and_compare('pid', before, after)
-
- # TODO(sshnaidm) Need to add port ranges support
+ return self._diff_generic('pid', '--pid')
+
+ def diffparam_pid_file(self):
+ return self._diff_generic('pid_file', '--pid-file')
+
+ def diffparam_pids_limit(self):
+ return self._diff_generic('pids_limit', '--pids-limit')
+
+ def diffparam_platform(self):
+ return self._diff_generic('platform', '--platform')
+
+ # def diffparam_pod(self):
+ # return self._diff_generic('pod', '--pod')
+
+ def diffparam_pod_id_file(self):
+ return self._diff_generic('pod_id_file', '--pod-id-file')
+
+ def diffparam_privileged(self):
+ return self._diff_generic('privileged', '--privileged')
+
def diffparam_publish(self):
- def compose(p, h):
- s = ":".join(
- [str(h["hostport"]), p.replace('/tcp', '')]
- ).strip(":")
- if h['hostip'] == '0.0.0.0' and LooseVersion(self.version) >= LooseVersion('5.0.0'):
- return s
- if h['hostip']:
- return ":".join([h['hostip'], s])
- return s
-
- ports = self.info['hostconfig']['portbindings']
- before = []
- for port, hosts in ports.items():
- if hosts:
- for h in hosts:
- before.append(compose(port, h))
- after = self.params['publish'] or []
- if self.params['publish_all']:
- image_ports = self.image_info.get('config', {}).get('exposedports', {})
- if image_ports:
- after += list(image_ports.keys())
- after = [
- i.replace("/tcp", "").replace("[", "").replace("]", "").replace("0.0.0.0:", "")
- for i in after]
- # No support for port ranges yet
- for ports in after:
- if "-" in ports:
- return self._diff_update_and_compare('publish', '', '')
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('publish', before, after)
+ return self._diff_generic('publish', '--publish')
+
+ def diffparam_publish_all(self):
+ return self._diff_generic('publish_all', '--publish-all')
+
+ def diffparam_pull(self):
+ return self._diff_generic('pull', '--pull')
+
+ def diffparam_rdt_class(self):
+ return self._diff_generic('rdt_class', '--rdt-class')
def diffparam_read_only(self):
- before = self.info['hostconfig']['readonlyrootfs']
- after = self.params['read_only']
- return self._diff_update_and_compare('read_only', before, after)
+ return self._diff_generic('read_only', '--read-only')
+
+ def diffparam_read_only_tmpfs(self):
+ return self._diff_generic('read_only_tmpfs', '--read-only-tmpfs')
+
+ def diffparam_requires(self):
+ return self._diff_generic('requires', '--requires')
def diffparam_restart_policy(self):
- before = self.info['hostconfig']['restartpolicy']['name']
- before_max_count = int(self.info['hostconfig']['restartpolicy'].get('maximumretrycount', 0))
- after = self.params['restart_policy'] or ""
- if ':' in after:
- after, after_max_count = after.rsplit(':', 1)
- after_max_count = int(after_max_count)
- else:
- after_max_count = 0
- before = "%s:%i" % (before, before_max_count)
- after = "%s:%i" % (after, after_max_count)
- return self._diff_update_and_compare('restart_policy', before, after)
+ return self._diff_generic('restart_policy', '--restart')
+
+ def diffparam_retry(self):
+ return self._diff_generic('retry', '--retry')
+
+ def diffparam_retry_delay(self):
+ return self._diff_generic('retry_delay', '--retry-delay')
+
+ def diffparam_rootfs(self):
+ return self._diff_generic('rootfs', '--rootfs')
+
+    # Disabling idempotency check for sdnotify as it's added by the systemd generator
+ # https://github.com/containers/ansible-podman-collections/issues/775
+ # def diffparam_sdnotify(self):
+ # return self._diff_generic('sdnotify', '--sdnotify')
def diffparam_rm(self):
before = self.info['hostconfig']['autoremove']
after = self.params['rm']
+ if after is None:
+ return self._diff_update_and_compare('rm', '', '')
return self._diff_update_and_compare('rm', before, after)
+ def diffparam_rmi(self):
+ return self._diff_generic('rmi', '--rmi', boolean_type=True)
+
+ def diffparam_seccomp_policy(self):
+ return self._diff_generic('seccomp_policy', '--seccomp-policy')
+
+ def diffparam_secrets(self):
+ return self._diff_generic('secrets', '--secret')
+
def diffparam_security_opt(self):
- unsorted_before = self.info['hostconfig']['securityopt']
- unsorted_after = self.params['security_opt']
- # In rootful containers with apparmor there is a profile, "container-default",
- # which is already added by default
- # Since SElinux labels are basically annotations, they are merged in a single list
- # element by podman so we need to split them in a (sorted) list if we want to compare it
- # to the list we provide to the module
- before = sorted(item for element in unsorted_before for item in element.split(',')
- if 'apparmor=container-default' not in item)
- after = sorted(list(set(unsorted_after)))
- return self._diff_update_and_compare('security_opt', before, after)
+ return self._diff_generic('security_opt', '--security-opt')
+
+ def diffparam_shm_size(self):
+ return self._diff_generic('shm_size', '--shm-size')
+
+ def diffparam_shm_size_systemd(self):
+ return self._diff_generic('shm_size_systemd', '--shm-size-systemd')
def diffparam_stop_signal(self):
- before = normalize_signal(self.info['config']['stopsignal'])
- after = normalize_signal(self.params['stop_signal'])
- return self._diff_update_and_compare('stop_signal', before, after)
+ return self._diff_generic('stop_signal', '--stop-signal')
+
+ def diffparam_stop_timeout(self):
+ return self._diff_generic('stop_timeout', '--stop-timeout')
+
+ def diffparam_subgidname(self):
+ return self._diff_generic('subgidname', '--subgidname')
+
+ def diffparam_subuidname(self):
+ return self._diff_generic('subuidname', '--subuidname')
+
+ def diffparam_sysctl(self):
+ return self._diff_generic('sysctl', '--sysctl')
+
+ def diffparam_systemd(self):
+ return self._diff_generic('systemd', '--systemd')
+
+ def diffparam_timeout(self):
+ return self._diff_generic('timeout', '--timeout')
def diffparam_timezone(self):
- before = self.info['config'].get('timezone')
- after = self.params['timezone']
- return self._diff_update_and_compare('timezone', before, after)
+ return self._diff_generic('timezone', '--tz')
+
+ def diffparam_tls_verify(self):
+ return self._diff_generic('tls_verify', '--tls-verify')
def diffparam_tty(self):
before = self.info['config']['tty']
after = self.params['tty']
return self._diff_update_and_compare('tty', before, after)
- def diffparam_user(self):
- before = self.info['config']['user']
- after = self.params['user']
- return self._diff_update_and_compare('user', before, after)
+ def diffparam_tmpfs(self):
+ return self._diff_generic('tmpfs', '--tmpfs')
+
+ def diffparam_uidmap(self):
+ return self._diff_generic('uidmap', '--uidmap')
def diffparam_ulimit(self):
- after = self.params['ulimit'] or []
- # In case of latest podman
- if 'createcommand' in self.info['config']:
- before = self._createcommand('--ulimit')
- before, after = sorted(before), sorted(after)
- return self._diff_update_and_compare('ulimit', before, after)
- if after:
- ulimits = self.info['hostconfig']['ulimits']
- before = {
- u['name'].replace('rlimit_', ''): "%s:%s" % (u['soft'], u['hard']) for u in ulimits}
- after = {i.split('=')[0]: i.split('=')[1]
- for i in self.params['ulimit']}
- new_before = []
- new_after = []
- for u in list(after.keys()):
- # We don't support unlimited ulimits because it depends on platform
- if u in before and "-1" not in after[u]:
- new_before.append([u, before[u]])
- new_after.append([u, after[u]])
- return self._diff_update_and_compare('ulimit', new_before, new_after)
- return self._diff_update_and_compare('ulimit', '', '')
+ return self._diff_generic('ulimit', '--ulimit')
+
+ def diffparam_umask(self):
+ return self._diff_generic('umask', '--umask')
+
+ def diffparam_unsetenv(self):
+ return self._diff_generic('unsetenv', '--unsetenv')
+
+ def diffparam_unsetenv_all(self):
+ return self._diff_generic('unsetenv_all', '--unsetenv-all', boolean_type=True)
+
+ def diffparam_user(self):
+ return self._diff_generic('user', '--user')
+
+ def diffparam_userns(self):
+ return self._diff_generic('userns', '--userns')
def diffparam_uts(self):
- before = self.info['hostconfig']['utsmode']
- after = self.params['uts']
- if self.params['pod'] and not self.module_params['uts']:
- after = before
- return self._diff_update_and_compare('uts', before, after)
+ return self._diff_generic('uts', '--uts')
+
+ def diffparam_variant(self):
+ return self._diff_generic('variant', '--variant')
def diffparam_volume(self):
def clean_volume(x):
@@ -1372,44 +1435,29 @@ class PodmanContainerDiff:
return "/"
return x.replace("//", "/").rstrip("/")
- before = self.info['mounts']
- before_local_vols = []
- if before:
- volumes = []
- local_vols = []
- for m in before:
- if m['type'] != 'volume':
- volumes.append(
- [
- clean_volume(m['source']),
- clean_volume(m['destination'])
- ])
- elif m['type'] == 'volume':
- local_vols.append(
- [m['name'], clean_volume(m['destination'])])
- before = [":".join(v) for v in volumes]
- before_local_vols = [":".join(v) for v in local_vols]
- if self.params['volume'] is not None:
+ before = createcommand('--volume', self.info['config'])
+ if before == []:
+ before = None
+ after = self.params['volume']
+ if after is not None:
after = [":".join(
- [clean_volume(i) for i in v.split(":")[:2]]
- ) for v in self.params['volume']]
- else:
- after = []
- if before_local_vols:
- after = list(set(after).difference(before_local_vols))
- before, after = sorted(list(set(before))), sorted(list(set(after)))
+ [clean_volume(i) for i in v.split(":")[:2]]) for v in self.params['volume']]
+ if before is not None:
+ before = [":".join([clean_volume(i) for i in v.split(":")[:2]]) for v in before]
+ self.module.log("PODMAN Before: %s and After: %s" % (before, after))
+ if before is None and after is None:
+ return self._diff_update_and_compare('volume', before, after)
+ if after is not None:
+ after = ",".join(sorted([str(i).lower() for i in after]))
+ if before:
+ before = ",".join(sorted([str(i).lower() for i in before]))
return self._diff_update_and_compare('volume', before, after)
def diffparam_volumes_from(self):
- # Possibly volumesfrom is not in config
- before = self.info['hostconfig'].get('volumesfrom', []) or []
- after = self.params['volumes_from'] or []
- return self._diff_update_and_compare('volumes_from', before, after)
+ return self._diff_generic('volumes_from', '--volumes-from')
def diffparam_workdir(self):
- before = self.info['config']['workingdir']
- after = self.params['workdir']
- return self._diff_update_and_compare('workdir', before, after)
+ return self._diff_generic('workdir', '--workdir')
def is_different(self):
diff_func_list = [func for func in dir(self)
diff --git a/ansible_collections/containers/podman/plugins/module_utils/podman/podman_pod_lib.py b/ansible_collections/containers/podman/plugins/module_utils/podman/podman_pod_lib.py
index e0031351f..8f315a5cc 100644
--- a/ansible_collections/containers/podman/plugins/module_utils/podman/podman_pod_lib.py
+++ b/ansible_collections/containers/podman/plugins/module_utils/podman/podman_pod_lib.py
@@ -6,6 +6,8 @@ from ansible_collections.containers.podman.plugins.module_utils.podman.common im
from ansible_collections.containers.podman.plugins.module_utils.podman.common import lower_keys
from ansible_collections.containers.podman.plugins.module_utils.podman.common import generate_systemd
from ansible_collections.containers.podman.plugins.module_utils.podman.common import delete_systemd
+from ansible_collections.containers.podman.plugins.module_utils.podman.common import diff_generic
+from ansible_collections.containers.podman.plugins.module_utils.podman.common import createcommand
from ansible_collections.containers.podman.plugins.module_utils.podman.quadlet import create_quadlet_state, PodQuadlet
@@ -28,9 +30,9 @@ ARGUMENTS_SPEC_POD = dict(
]),
recreate=dict(type='bool', default=False),
add_host=dict(type='list', required=False, elements='str'),
- cgroup_parent=dict(type='str', required=False),
blkio_weight=dict(type='str', required=False),
blkio_weight_device=dict(type='list', elements='str', required=False),
+ cgroup_parent=dict(type='str', required=False),
cpus=dict(type='str', required=False),
cpuset_cpus=dict(type='str', required=False),
cpuset_mems=dict(type='str', required=False),
@@ -39,10 +41,12 @@ ARGUMENTS_SPEC_POD = dict(
device_read_bps=dict(type='list', elements='str', required=False),
device_write_bps=dict(type='list', elements='str', required=False),
dns=dict(type='list', elements='str', required=False),
- dns_opt=dict(type='list', elements='str', required=False),
+ dns_opt=dict(type='list', elements='str', aliases=['dns_option'], required=False),
dns_search=dict(type='list', elements='str', required=False),
+ exit_policy=dict(type='str', required=False, choices=['continue', 'stop']),
generate_systemd=dict(type='dict', default={}),
gidmap=dict(type='list', elements='str', required=False),
+ gpus=dict(type='str', required=False),
hostname=dict(type='str', required=False),
infra=dict(type='bool', required=False),
infra_conmon_pidfile=dict(type='str', required=False),
@@ -50,6 +54,7 @@ ARGUMENTS_SPEC_POD = dict(
infra_image=dict(type='str', required=False),
infra_name=dict(type='str', required=False),
ip=dict(type='str', required=False),
+ ip6=dict(type='str', required=False),
label=dict(type='dict', required=False),
label_file=dict(type='str', required=False),
mac_address=dict(type='str', required=False),
@@ -67,13 +72,21 @@ ARGUMENTS_SPEC_POD = dict(
quadlet_dir=dict(type='path'),
quadlet_filename=dict(type='str'),
quadlet_options=dict(type='list', elements='str'),
+ restart_policy=dict(type='str', required=False),
+ security_opt=dict(type='list', elements='str', required=False),
share=dict(type='str', required=False),
+ share_parent=dict(type='bool', required=False),
+ shm_size=dict(type='str', required=False),
+ shm_size_systemd=dict(type='str', required=False),
subgidname=dict(type='str', required=False),
subuidname=dict(type='str', required=False),
+ sysctl=dict(type='dict', required=False),
uidmap=dict(type='list', elements='str', required=False),
userns=dict(type='str', required=False),
+ uts=dict(type='str', required=False),
volume=dict(type='list', elements='str', aliases=['volumes'],
required=False),
+ volumes_from=dict(type='list', elements='str', required=False),
executable=dict(type='str', required=False, default='podman'),
debug=dict(type='bool', default=False),
)
@@ -200,7 +213,7 @@ class PodmanPodModuleParams:
def addparam_dns_opt(self, c):
for g in self.params['dns_opt']:
- c += ['--dns-opt', g]
+ c += ['--dns-option', g]
return c
def addparam_dns_search(self, c):
@@ -208,11 +221,17 @@ class PodmanPodModuleParams:
c += ['--dns-search', g]
return c
+ def addparam_exit_policy(self, c):
+ return c + ['--exit-policy=%s' % self.params['exit_policy']]
+
def addparam_gidmap(self, c):
for gidmap in self.params['gidmap']:
c += ['--gidmap', gidmap]
return c
+ def addparam_gpus(self, c):
+ return c + ['--gpus', self.params['gpus']]
+
def addparam_hostname(self, c):
return c + ['--hostname', self.params['hostname']]
@@ -236,6 +255,9 @@ class PodmanPodModuleParams:
def addparam_ip(self, c):
return c + ['--ip', self.params['ip']]
+ def addparam_ip6(self, c):
+ return c + ['--ip6', self.params['ip6']]
+
def addparam_label(self, c):
for label in self.params['label'].items():
c += ['--label', b'='.join(
@@ -285,15 +307,39 @@ class PodmanPodModuleParams:
c += ['--publish', g]
return c
+ def addparam_restart_policy(self, c):
+ return c + ['--restart=%s' % self.params['restart_policy']]
+
+ def addparam_security_opt(self, c):
+ for g in self.params['security_opt']:
+ c += ['--security-opt', g]
+ return c
+
def addparam_share(self, c):
return c + ['--share', self.params['share']]
+ def addparam_share_parent(self, c):
+ if self.params['share_parent'] is not None:
+ return c + ['--share-parent=%s' % self.params['share_parent']]
+ return c
+
+ def addparam_shm_size(self, c):
+ return c + ['--shm-size=%s' % self.params['shm_size']]
+
+ def addparam_shm_size_systemd(self, c):
+ return c + ['--shm-size-systemd=%s' % self.params['shm_size_systemd']]
+
def addparam_subgidname(self, c):
return c + ['--subgidname', self.params['subgidname']]
def addparam_subuidname(self, c):
return c + ['--subuidname', self.params['subuidname']]
+ def addparam_sysctl(self, c):
+ for k, v in self.params['sysctl'].items():
+ c += ['--sysctl', "%s=%s" % (k, v)]
+ return c
+
def addparam_uidmap(self, c):
for uidmap in self.params['uidmap']:
c += ['--uidmap', uidmap]
@@ -302,22 +348,26 @@ class PodmanPodModuleParams:
def addparam_userns(self, c):
return c + ['--userns', self.params['userns']]
+ def addparam_uts(self, c):
+ return c + ['--uts', self.params['uts']]
+
def addparam_volume(self, c):
for vol in self.params['volume']:
if vol:
c += ['--volume', vol]
return c
+ def addparam_volumes_from(self, c):
+ for vol in self.params['volumes_from']:
+ c += ['--volumes-from', vol]
+ return c
+
class PodmanPodDefaults:
def __init__(self, module, podman_version):
self.module = module
self.version = podman_version
self.defaults = {
- 'add_host': [],
- 'dns': [],
- 'dns_opt': [],
- 'dns_search': [],
'infra': True,
'label': {},
}
@@ -361,50 +411,79 @@ class PodmanPodDiff:
return True
return False
+ def _diff_generic(self, module_arg, cmd_arg, boolean_type=False):
+ """
+ Generic diff function for module arguments from CreateCommand
+ in Podman inspection output.
+
+ Args:
+ module_arg (str): module argument name
+ cmd_arg (str): command line argument name
+            boolean_type (bool): if True, the argument is treated as a boolean flag
+
+ Returns:
+ bool: True if there is a difference, False otherwise
+
+ """
+ info_config = self.info
+ before, after = diff_generic(self.params, info_config, module_arg, cmd_arg, boolean_type)
+ return self._diff_update_and_compare(module_arg, before, after)
+
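The new helper collapses what used to be bespoke per-parameter comparisons into one-line wrappers that name the module argument and its CLI flag, as the methods below show. A minimal sketch of wiring up one more pod option under this pattern (the parameter name 'foo' and flag '--foo' are hypothetical, chosen only for illustration):

    def diffparam_foo(self):
        # Compare the module's 'foo' value against the '--foo' flag recorded
        # in the pod's CreateCommand (hypothetical parameter, for illustration).
        return self._diff_generic('foo', '--foo')

Boolean flags would pass boolean_type=True, as diffparam_no_hosts() does further down.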
def diffparam_add_host(self):
- if not self.infra_info:
- return self._diff_update_and_compare('add_host', '', '')
- before = self.infra_info['hostconfig']['extrahosts'] or []
- after = self.params['add_host']
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('add_host', before, after)
+ return self._diff_generic('add_host', '--add-host')
+
+ def diffparam_blkio_weight(self):
+ return self._diff_generic('blkio_weight', '--blkio-weight')
+
+ def diffparam_blkio_weight_device(self):
+ return self._diff_generic('blkio_weight_device', '--blkio-weight-device')
def diffparam_cgroup_parent(self):
- before = (self.info.get('cgroupparent', '')
- or self.info.get('hostconfig', {}).get('cgroupparent', ''))
- after = self.params['cgroup_parent'] or before
- return self._diff_update_and_compare('cgroup_parent', before, after)
+ return self._diff_generic('cgroup_parent', '--cgroup-parent')
+
+ def diffparam_cpu_shares(self):
+ return self._diff_generic('cpu_shares', '--cpu-shares')
+
+ def diffparam_cpus(self):
+ return self._diff_generic('cpus', '--cpus')
+
+ def diffparam_cpuset_cpus(self):
+ return self._diff_generic('cpuset_cpus', '--cpuset-cpus')
+
+ def diffparam_cpuset_mems(self):
+ return self._diff_generic('cpuset_mems', '--cpuset-mems')
+
+ def diffparam_device(self):
+ return self._diff_generic('device', '--device')
+
+ def diffparam_device_read_bps(self):
+ return self._diff_generic('device_read_bps', '--device-read-bps')
+
+ def diffparam_device_write_bps(self):
+ return self._diff_generic('device_write_bps', '--device-write-bps')
def diffparam_dns(self):
- if not self.infra_info:
- return self._diff_update_and_compare('dns', '', '')
- before = self.infra_info['hostconfig']['dns'] or []
- after = self.params['dns']
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('dns', before, after)
+ return self._diff_generic('dns', '--dns')
def diffparam_dns_opt(self):
- if not self.infra_info:
- return self._diff_update_and_compare('dns_opt', '', '')
- before = self.infra_info['hostconfig']['dnsoptions'] or []
- after = self.params['dns_opt']
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('dns_opt', before, after)
+ return self._diff_generic('dns_opt', '--dns-option')
def diffparam_dns_search(self):
- if not self.infra_info:
- return self._diff_update_and_compare('dns_search', '', '')
- before = self.infra_info['hostconfig']['dnssearch'] or []
- after = self.params['dns_search']
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('dns_search', before, after)
+ return self._diff_generic('dns_search', '--dns-search')
+
+    # Disabling idempotency check for exit policy as it's added by the systemd generator
+ # https://github.com/containers/ansible-podman-collections/issues/774
+ # def diffparam_exit_policy(self):
+ # return self._diff_generic('exit_policy', '--exit-policy')
+
+ def diffparam_gidmap(self):
+ return self._diff_generic('gidmap', '--gidmap')
+
+ def diffparam_gpus(self):
+ return self._diff_generic('gpus', '--gpus')
def diffparam_hostname(self):
- if not self.infra_info:
- return self._diff_update_and_compare('hostname', '', '')
- before = self.infra_info['config']['hostname']
- after = self.params['hostname'] or before
- return self._diff_update_and_compare('hostname', before, after)
+ return self._diff_generic('hostname', '--hostname')
# TODO(sshnaidm): https://github.com/containers/podman/issues/6968
def diffparam_infra(self):
@@ -416,30 +495,25 @@ class PodmanPodDiff:
after = self.params['infra']
return self._diff_update_and_compare('infra', before, after)
- # TODO(sshnaidm): https://github.com/containers/podman/issues/6969
- # def diffparam_infra_command(self):
- # before = str(self.info['hostconfig']['infra_command'])
- # after = self.params['infra_command']
- # return self._diff_update_and_compare('infra_command', before, after)
+ def diffparam_infra_command(self):
+ return self._diff_generic('infra_command', '--infra-command')
+
+    # Disabling idempotency check for infra_conmon_pidfile as it's added by the systemd generator
+ # https://github.com/containers/ansible-podman-collections/issues/774
+ # def diffparam_infra_conmon_pidfile(self):
+ # return self._diff_generic('infra_conmon_pidfile', '--infra-conmon-pidfile')
def diffparam_infra_image(self):
- if not self.infra_info:
- return self._diff_update_and_compare('infra_image', '', '')
- before = str(self.infra_info['imagename'])
- after = before
- if self.module_params['infra_image']:
- after = self.params['infra_image']
- before = before.replace(":latest", "")
- after = after.replace(":latest", "")
- before = before.split("/")[-1] # pylint: disable=W,C,R
- after = after.split("/")[-1] # pylint: disable=W,C,R
- return self._diff_update_and_compare('infra_image', before, after)
-
- # TODO(sshnaidm): https://github.com/containers/podman/pull/6956
- # def diffparam_ip(self):
- # before = str(self.info['hostconfig']['ip'])
- # after = self.params['ip']
- # return self._diff_update_and_compare('ip', before, after)
+ return self._diff_generic('infra_image', '--infra-image')
+
+ def diffparam_infra_name(self):
+ return self._diff_generic('infra_name', '--infra-name')
+
+ def diffparam_ip(self):
+ return self._diff_generic('ip', '--ip')
+
+ def diffparam_ip6(self):
+ return self._diff_generic('ip6', '--ip6')
def diffparam_label(self):
if 'config' in self.info and 'labels' in self.info['config']:
@@ -454,129 +528,101 @@ class PodmanPodDiff:
before.pop('podman_systemd_unit', None)
return self._diff_update_and_compare('label', before, after)
- # TODO(sshnaidm): https://github.com/containers/podman/pull/6956
- # def diffparam_mac_address(self):
- # before = str(self.info['hostconfig']['mac_address'])
- # after = self.params['mac_address']
- # return self._diff_update_and_compare('mac_address', before, after)
+ def diffparam_label_file(self):
+ return self._diff_generic('label_file', '--label-file')
+
+ def diffparam_mac_address(self):
+ return self._diff_generic('mac_address', '--mac-address')
+
+ def diffparam_memory(self):
+ return self._diff_generic('memory', '--memory')
+
+ def diffparam_memory_swap(self):
+ return self._diff_generic('memory_swap', '--memory-swap')
def diffparam_network(self):
- if not self.infra_info:
- return self._diff_update_and_compare('network', [], [])
- net_mode_before = self.infra_info['hostconfig']['networkmode']
- net_mode_after = ''
- before = list(self.infra_info['networksettings'].get('networks', {}))
- # Remove default 'podman' network in v3 for comparison
- if before == ['podman']:
- before = []
- after = self.params['network'] or []
- after = [i.lower() for i in after]
- # Special case for options for slirp4netns rootless networking from v2
- if net_mode_before == 'slirp4netns' and 'createcommand' in self.info:
- cr_com = self.info['createcommand']
- if '--network' in cr_com:
- cr_net = cr_com[cr_com.index('--network') + 1].lower()
- if 'slirp4netns:' in cr_net:
- before = [cr_net]
- if net_mode_before == 'pasta' and 'createcommand' in self.info:
- cr_com = self.info['createcommand']
- if '--network' in cr_com:
- cr_net = cr_com[cr_com.index('--network') + 1].lower()
- if 'pasta:' in cr_net:
- before = [cr_net]
- # Currently supported only 'host' and 'none' network modes idempotency
- if after in [['bridge'], ['host'], ['slirp4netns'], ['pasta']]:
- net_mode_after = after[0]
-
- if net_mode_after and not before:
- # Remove differences between v1 and v2
- net_mode_after = net_mode_after.replace('bridge', 'default')
- net_mode_after = net_mode_after.replace('slirp4netns', 'default')
- net_mode_after = net_mode_after.replace('pasta', 'default')
- net_mode_before = net_mode_before.replace('bridge', 'default')
- net_mode_before = net_mode_before.replace('slirp4netns', 'default')
- net_mode_before = net_mode_before.replace('pasta', 'default')
- return self._diff_update_and_compare('network', net_mode_before, net_mode_after)
- # For 4.4.0+ podman versions with no network specified
- if not net_mode_after and net_mode_before == 'slirp4netns' and not after:
- net_mode_after = 'slirp4netns'
- if before == ['slirp4netns']:
- after = ['slirp4netns']
- if not net_mode_after and net_mode_before == 'bridge' and not after:
- net_mode_after = 'bridge'
- if before == ['bridge']:
- after = ['bridge']
- # For pasta networking for Podman v5
- if not net_mode_after and net_mode_before == 'pasta' and not after:
- net_mode_after = 'pasta'
- if before == ['pasta']:
- after = ['pasta']
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('network', before, after)
-
- # TODO(sshnaidm)
- # def diffparam_no_hosts(self):
- # before = str(self.info['hostconfig']['no_hosts'])
- # after = self.params['no_hosts']
- # return self._diff_update_and_compare('no_hosts', before, after)
-
- # TODO(sshnaidm) Need to add port ranges support
+ return self._diff_generic('network', '--network')
+
+ def diffparam_network_alias(self):
+ return self._diff_generic('network_alias', '--network-alias')
+
+ def diffparam_no_hosts(self):
+ return self._diff_generic('no_hosts', '--no-hosts', boolean_type=True)
+
+ def diffparam_pid(self):
+ return self._diff_generic('pid', '--pid')
+
+    # Disabling idempotency check for pod id file as it's added by the systemd generator
+ # https://github.com/containers/ansible-podman-collections/issues/774
+ # def diffparam_pod_id_file(self):
+ # return self._diff_generic('pod_id_file', '--pod-id-file')
+
def diffparam_publish(self):
- def compose(p, h):
- s = ":".join(
- [str(h["hostport"]), p.replace('/tcp', '')]
- ).strip(":")
- if h['hostip'] == '0.0.0.0' and LooseVersion(self.version) >= LooseVersion('5.0.0'):
- return s
- if h['hostip']:
- return ":".join([h['hostip'], s])
- return s
-
- if not self.infra_info:
- return self._diff_update_and_compare('publish', '', '')
-
- ports = self.infra_info['hostconfig']['portbindings']
- before = []
- for port, hosts in ports.items():
- if hosts:
- for h in hosts:
- before.append(compose(port, h))
- after = self.params['publish'] or []
- after = [
- i.replace("/tcp", "").replace("[", "").replace("]", "")
- for i in after]
- # No support for port ranges yet
- for ports in after:
- if "-" in ports:
- return self._diff_update_and_compare('publish', '', '')
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('publish', before, after)
+ return self._diff_generic('publish', '--publish')
+
+ def diffparam_restart_policy(self):
+ return self._diff_generic('restart_policy', '--restart')
+
+ def diffparam_security_opt(self):
+ return self._diff_generic('security_opt', '--security-opt')
def diffparam_share(self):
- if not self.infra_info:
- return self._diff_update_and_compare('share', '', '')
- if 'sharednamespaces' in self.info:
- before = self.info['sharednamespaces']
- elif 'config' in self.info:
- before = [
- i.split('shares')[1].lower()
- for i in self.info['config'] if 'shares' in i]
- # TODO(sshnaidm): to discover why in podman v1 'cgroup' appears
- before.remove('cgroup')
- else:
- before = []
- if self.params['share'] is not None:
- after = self.params['share'].split(",")
- else:
- after = ['uts', 'ipc', 'net']
- # TODO: find out why on Ubuntu the 'net' is not present
- if 'net' not in before:
- after.remove('net')
- if self.params["uidmap"] or self.params["gidmap"] or self.params["userns"]:
- after.append('user')
-
- before, after = sorted(list(set(before))), sorted(list(set(after)))
- return self._diff_update_and_compare('share', before, after)
+ return self._diff_generic('share', '--share')
+
+ def diffparam_share_parent(self):
+ return self._diff_generic('share_parent', '--share-parent')
+
+ def diffparam_shm_size(self):
+ return self._diff_generic('shm_size', '--shm-size')
+
+ def diffparam_shm_size_systemd(self):
+ return self._diff_generic('shm_size_systemd', '--shm-size-systemd')
+
+ def diffparam_subgidname(self):
+ return self._diff_generic('subgidname', '--subgidname')
+
+ def diffparam_subuidname(self):
+ return self._diff_generic('subuidname', '--subuidname')
+
+ def diffparam_sysctl(self):
+ return self._diff_generic('sysctl', '--sysctl')
+
+ def diffparam_uidmap(self):
+ return self._diff_generic('uidmap', '--uidmap')
+
+ def diffparam_userns(self):
+ return self._diff_generic('userns', '--userns')
+
+ def diffparam_uts(self):
+ return self._diff_generic('uts', '--uts')
+
+ def diffparam_volume(self):
+ def clean_volume(x):
+ '''Remove trailing and double slashes from volumes.'''
+ if not x.rstrip("/"):
+ return "/"
+ return x.replace("//", "/").rstrip("/")
+
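+        # Illustration (hypothetical values): clean_volume('/data//logs/') returns
+        # '/data/logs' and clean_volume('/') stays '/', so volume paths that differ
+        # only by duplicated or trailing slashes compare as equal below.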
+ before = createcommand('--volume', self.info)
+ if before == []:
+ before = None
+ after = self.params['volume']
+ if after is not None:
+ after = [":".join(
+ [clean_volume(i) for i in v.split(":")[:2]]) for v in self.params['volume']]
+ if before is not None:
+ before = [":".join([clean_volume(i) for i in v.split(":")[:2]]) for v in before]
+ self.module.log("PODMAN Before: %s and After: %s" % (before, after))
+ if before is None and after is None:
+ return self._diff_update_and_compare('volume', before, after)
+ if after is not None:
+ after = ",".join(sorted([str(i).lower() for i in after]))
+ if before:
+ before = ",".join(sorted([str(i).lower() for i in before]))
+ return self._diff_update_and_compare('volume', before, after)
+
+ def diffparam_volumes_from(self):
+ return self._diff_generic('volumes_from', '--volumes-from')
def is_different(self):
diff_func_list = [func for func in dir(self)
diff --git a/ansible_collections/containers/podman/plugins/module_utils/podman/quadlet.py b/ansible_collections/containers/podman/plugins/module_utils/podman/quadlet.py
index 17764b60d..6296c7b81 100644
--- a/ansible_collections/containers/podman/plugins/module_utils/podman/quadlet.py
+++ b/ansible_collections/containers/podman/plugins/module_utils/podman/quadlet.py
@@ -88,25 +88,21 @@ class ContainerQuadlet(Quadlet):
'gidmap': 'GIDMap',
'global_args': 'GlobalArgs',
'group': 'Group', # Does not exist in module parameters
- 'healthcheck': 'HealthCheckCmd',
+ 'healthcheck': 'HealthCmd',
'healthcheck_interval': 'HealthInterval',
'healthcheck_failure_action': 'HealthOnFailure',
'healthcheck_retries': 'HealthRetries',
'healthcheck_start_period': 'HealthStartPeriod',
'healthcheck_timeout': 'HealthTimeout',
- # the following are not implemented yet in Podman module
- 'HealthStartupCmd': 'HealthStartupCmd',
- 'HealthStartupInterval': 'HealthStartupInterval',
- 'HealthStartupRetries': 'HealthStartupRetries',
- 'HealthStartupSuccess': 'HealthStartupSuccess',
- 'HealthStartupTimeout': 'HealthStartupTimeout',
- # end of not implemented yet
+ 'health_startup_cmd': 'HealthStartupCmd',
+ 'health_startup_interval': 'HealthStartupInterval',
+ 'health_startup_retries': 'HealthStartupRetries',
+ 'health_startup_success': 'HealthStartupSuccess',
+ 'health_startup_timeout': 'HealthStartupTimeout',
'hostname': 'HostName',
'image': 'Image',
'ip': 'IP',
- # the following are not implemented yet in Podman module
- 'IP6': 'IP6',
- # end of not implemented yet
+ 'ip6': 'IP6',
'label': 'Label',
'log_driver': 'LogDriver',
"Mask": "Mask", # add it in security_opt
@@ -117,9 +113,7 @@ class ContainerQuadlet(Quadlet):
'pids_limit': 'PidsLimit',
'pod': 'Pod',
'publish': 'PublishPort',
- # the following are not implemented yet in Podman module
- "Pull": "Pull",
- # end of not implemented yet
+ "pull": "Pull",
'read_only': 'ReadOnly',
'read_only_tmpfs': 'ReadOnlyTmpfs',
'rootfs': 'Rootfs',
@@ -194,6 +188,8 @@ class ContainerQuadlet(Quadlet):
# Work on params which are not in the param_map and add them to PodmanArgs
params["podman_args"] = []
+ if params["arch"]:
+ params["podman_args"].append(f"--arch {params['arch']}")
if params["authfile"]:
params["podman_args"].append(f"--authfile {params['authfile']}")
if params["attach"]:
@@ -206,8 +202,13 @@ class ContainerQuadlet(Quadlet):
f"--blkio-weight-device {':'.join(blkio)}" for blkio in params["blkio_weight_device"].items()]))
if params["cgroupns"]:
params["podman_args"].append(f"--cgroupns {params['cgroupns']}")
+ if params["cgroup_conf"]:
+ for k, v in params["cgroup_conf"].items():
+ params["podman_args"].append(f"--cgroup-conf {k}={v}")
if params["cgroup_parent"]:
params["podman_args"].append(f"--cgroup-parent {params['cgroup_parent']}")
+ if params["chrootdirs"]:
+ params["podman_args"].append(f"--chrootdirs {params['chrootdirs']}")
if params["cidfile"]:
params["podman_args"].append(f"--cidfile {params['cidfile']}")
if params["conmon_pidfile"]:
@@ -226,6 +227,10 @@ class ContainerQuadlet(Quadlet):
params["podman_args"].append(f"--cpu-rt-runtime {params['cpu_rt_runtime']}")
if params["cpu_shares"]:
params["podman_args"].append(f"--cpu-shares {params['cpu_shares']}")
+ if params["decryption_key"]:
+ params["podman_args"].append(f"--decryption-key {params['decryption_key']}")
+ if params["device_cgroup_rule"]:
+ params["podman_args"].append(f"--device-cgroup-rule {params['device_cgroup_rule']}")
if params["device_read_bps"]:
for i in params["device_read_bps"]:
params["podman_args"].append(f"--device-read-bps {i}")
@@ -241,6 +246,15 @@ class ContainerQuadlet(Quadlet):
if params["etc_hosts"]:
for host_ip in params['etc_hosts'].items():
params["podman_args"].append(f"--add-host {':'.join(host_ip)}")
+ if params["env_merge"]:
+ for k, v in params["env_merge"].items():
+ params["podman_args"].append(f"--env {k}={v}")
+ if params["gpus"]:
+ params["podman_args"].append(f"--gpus {params['gpus']}")
+ if params["group_entry"]:
+ params["podman_args"].append(f"--group-entry {params['group_entry']}")
+ if params["hostuser"]:
+ params["podman_args"].append(f"--hostuser {params['hostuser']}")
if params["hooks_dir"]:
for hook in params["hooks_dir"]:
params["podman_args"].append(f"--hooks-dir {hook}")
@@ -248,6 +262,8 @@ class ContainerQuadlet(Quadlet):
params["podman_args"].append(f"--http-proxy {params['http_proxy']}")
if params["image_volume"]:
params["podman_args"].append(f"--image-volume {params['image_volume']}")
+ if params["init_ctr"]:
+ params["podman_args"].append(f"--init-ctr {params['init_ctr']}")
if params["init_path"]:
params["podman_args"].append(f"--init-path {params['init_path']}")
if params["interactive"]:
@@ -274,37 +290,79 @@ class ContainerQuadlet(Quadlet):
if params["network_aliases"]:
for alias in params["network_aliases"]:
params["podman_args"].append(f"--network-alias {alias}")
+ if params["no_healthcheck"]:
+ params["podman_args"].append("--no-healthcheck")
if params["no_hosts"] is not None:
params["podman_args"].append(f"--no-hosts={params['no_hosts']}")
if params["oom_kill_disable"]:
params["podman_args"].append(f"--oom-kill-disable={params['oom_kill_disable']}")
if params["oom_score_adj"]:
params["podman_args"].append(f"--oom-score-adj {params['oom_score_adj']}")
+ if params["os"]:
+ params["podman_args"].append(f"--os {params['os']}")
+ if params["passwd"]:
+ params["podman_args"].append("--passwd")
+ if params["passwd_entry"]:
+ params["podman_args"].append(f"--passwd-entry {params['passwd_entry']}")
+ if params["personality"]:
+ params["podman_args"].append(f"--personality {params['personality']}")
if params["pid"]:
params["podman_args"].append(f"--pid {params['pid']}")
+ if params["pid_file"]:
+ params["podman_args"].append(f"--pid-file {params['pid_file']}")
+ if params["preserve_fd"]:
+ for pres in params["preserve_fd"]:
+ params["podman_args"].append(f"--preserve-fd {pres}")
+ if params["preserve_fds"]:
+ params["podman_args"].append(f"--preserve-fds {params['preserve_fds']}")
if params["privileged"]:
params["podman_args"].append("--privileged")
if params["publish_all"]:
params["podman_args"].append("--publish-all")
+ if params["rdt_class"]:
+ params["podman_args"].append(f"--rdt-class {params['rdt_class']}")
if params["requires"]:
params["podman_args"].append(f"--requires {','.join(params['requires'])}")
if params["restart_policy"]:
params["podman_args"].append(f"--restart-policy {params['restart_policy']}")
+ if params["retry"]:
+ params["podman_args"].append(f"--retry {params['retry']}")
+ if params["retry_delay"]:
+ params["podman_args"].append(f"--retry-delay {params['retry_delay']}")
if params["rm"]:
params["podman_args"].append("--rm")
+ if params["rmi"]:
+ params["podman_args"].append("--rmi")
+ if params["seccomp_policy"]:
+ params["podman_args"].append(f"--seccomp-policy {params['seccomp_policy']}")
if params["security_opt"]:
for security_opt in params["security_opt"]:
params["podman_args"].append(f"--security-opt {security_opt}")
+ if params["shm_size_systemd"]:
+ params["podman_args"].append(f"--shm-size-systemd {params['shm_size_systemd']}")
if params["sig_proxy"]:
params["podman_args"].append(f"--sig-proxy {params['sig_proxy']}")
if params["stop_signal"]:
params["podman_args"].append(f"--stop-signal {params['stop_signal']}")
if params["systemd"]:
params["podman_args"].append(f"--systemd={str(params['systemd']).lower()}")
+ if params["timeout"]:
+ params["podman_args"].append(f"--timeout {params['timeout']}")
+ if params["tls_verify"]:
+ params["podman_args"].append(f"--tls-verify={str(params['tls_verify']).lower()}")
if params["tty"]:
params["podman_args"].append("--tty")
+ if params["umask"]:
+ params["podman_args"].append(f"--umask {params['umask']}")
+ if params["unsetenv"]:
+ for unset in params["unsetenv"]:
+ params["podman_args"].append(f"--unsetenv {unset}")
+ if params["unsetenv_all"]:
+ params["podman_args"].append("--unsetenv-all")
if params["uts"]:
params["podman_args"].append(f"--uts {params['uts']}")
+ if params["variant"]:
+ params["podman_args"].append(f"--variant {params['variant']}")
if params["volumes_from"]:
for volume in params["volumes_from"]:
params["podman_args"].append(f"--volumes-from {volume}")
@@ -416,6 +474,10 @@ class PodQuadlet(Quadlet):
if params["gidmap"]:
for gidmap in params["gidmap"]:
params["podman_args"].append(f"--gidmap {gidmap}")
+ if params["exit_policy"]:
+            params["podman_args"].append(f"--exit-policy={params['exit_policy']}")
+ if params["gpus"]:
+ params["podman_args"].append(f"--gpus {params['gpus']}")
if params["hostname"]:
params["podman_args"].append(f"--hostname {params['hostname']}")
if params["infra"]:
@@ -430,6 +492,8 @@ class PodQuadlet(Quadlet):
params["podman_args"].append(f"--infra-name {params['infra_name']}")
if params["ip"]:
params["podman_args"].append(f"--ip {params['ip']}")
+ if params["ip6"]:
+ params["podman_args"].append(f"--ip6 {params['ip6']}")
if params["label"]:
for label, label_v in params["label"].items():
params["podman_args"].append(f"--label {label}={label_v}")
@@ -447,17 +511,36 @@ class PodQuadlet(Quadlet):
params["podman_args"].append(f"--pid {params['pid']}")
if params["pod_id_file"]:
params["podman_args"].append(f"--pod-id-file {params['pod_id_file']}")
+ if params["restart_policy"]:
+ params["podman_args"].append(f"--restart={params['restart_policy']}")
+ if params["security_opt"]:
+ for security_opt in params["security_opt"]:
+ params["podman_args"].append(f"--security-opt {security_opt}")
if params["share"]:
params["podman_args"].append(f"--share {params['share']}")
+ if params["share_parent"] is not None:
+ params["podman_args"].append(f"--share-parent={str(params['share_parent']).lower()}")
+ if params["shm_size"]:
+ params["podman_args"].append(f"--shm-size {params['shm_size']}")
+ if params["shm_size_systemd"]:
+ params["podman_args"].append(f"--shm-size-systemd {params['shm_size_systemd']}")
if params["subgidname"]:
params["podman_args"].append(f"--subgidname {params['subgidname']}")
if params["subuidname"]:
params["podman_args"].append(f"--subuidname {params['subuidname']}")
+ if params["sysctl"]:
+ for k, v in params["sysctl"].items():
+ params["podman_args"].append(f"--sysctl {k}={v}")
if params["uidmap"]:
for uidmap in params["uidmap"]:
params["podman_args"].append(f"--uidmap {uidmap}")
if params["userns"]:
params["podman_args"].append(f"--userns {params['userns']}")
+ if params["uts"]:
+ params["podman_args"].append(f"--uts {params['uts']}")
+ if params["volumes_from"]:
+ for volume in params["volumes_from"]:
+ params["podman_args"].append(f"--volumes-from {volume}")
if params["debug"]:
params["global_args"].append("--log-level debug")
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_container.py b/ansible_collections/containers/podman/plugins/modules/podman_container.py
index 75349f14e..b06c9ae9e 100644
--- a/ansible_collections/containers/podman/plugins/modules/podman_container.py
+++ b/ansible_collections/containers/podman/plugins/modules/podman_container.py
@@ -79,6 +79,11 @@ options:
- Add an annotation to the container. The format is key value, multiple
times.
type: dict
+ arch:
+ description:
+ - Set the architecture for the container.
+        Override the architecture of the image to be pulled (defaults to the host's architecture). For example, arm.
+ type: str
attach:
description:
- Attach to STDIN, STDOUT or STDERR. The default in Podman is false.
@@ -125,6 +130,10 @@ options:
the cgroups path of the init process. Cgroups will be created if they
do not already exist.
type: path
+ cgroup_conf:
+ description:
+ - When running on cgroup v2, specify the cgroup file to write to and its value.
+ type: dict
cgroupns:
description:
- Path to cgroups under which the cgroup for the container will be
@@ -137,6 +146,10 @@ options:
The disabled option will force the container to not create CGroups,
and thus conflicts with CGroup options cgroupns and cgroup-parent.
type: str
+ chrootdirs:
+ description:
+ - Path to a directory inside the container that is treated as a chroot directory.
+ type: str
cidfile:
description:
- Write the container ID to the file
@@ -196,6 +209,10 @@ options:
- Memory nodes (MEMs) in which to allow execution (0-3, 0,1). Only
effective on NUMA systems.
type: str
+ decryption_key:
+ description:
+ - The "key-passphrase" to be used for decryption of images. Key can point to keys and/or certificates.
+ type: str
delete_depend:
description:
- Remove selected container and recursively remove all containers that depend on it.
@@ -234,6 +251,12 @@ options:
(e.g. device /dev/sdc:/dev/xvdc:rwm)
type: list
elements: str
+ device_cgroup_rule:
+ description:
+ - Add a rule to the cgroup allowed devices list.
+ The rule is expected to be in the format specified in the Linux kernel
+ documentation admin-guide/cgroup-v1/devices.
+ type: str
device_read_bps:
description:
- Limit read rate (bytes per second) from a device
@@ -307,6 +330,10 @@ options:
- Use all current host environment variables in container.
Defaults to false.
type: bool
+ env_merge:
+ description:
+ - Preprocess default environment variables for the containers
+ type: dict
etc_hosts:
description:
- Dict of host-to-IP mappings, where each host name is a key in the
@@ -436,6 +463,10 @@ options:
- Run the container in a new user namespace using the supplied mapping.
type: list
elements: str
+ gpus:
+ description:
+ - GPU devices to add to the container.
+ type: str
group_add:
description:
- Add additional groups to run as
@@ -443,33 +474,70 @@ options:
elements: str
aliases:
- groups
+ group_entry:
+ description:
+ - Customize the entry that is written to the /etc/group file within the container when --user is used.
+ type: str
healthcheck:
description:
- Set or alter a healthcheck command for a container.
type: str
+ aliases:
+ - health_cmd
healthcheck_interval:
description:
- Set an interval for the healthchecks
(a value of disable results in no automatic timer setup)
(default "30s")
type: str
+ aliases:
+ - health_interval
healthcheck_retries:
description:
- The number of retries allowed before a healthcheck is considered to be
unhealthy. The default value is 3.
type: int
+ aliases:
+ - health_retries
healthcheck_start_period:
description:
- The initialization time needed for a container to bootstrap.
The value can be expressed in time format like 2m3s. The default value
is 0s
type: str
+ aliases:
+ - health_start_period
+ health_startup_cmd:
+ description:
+ - Set a startup healthcheck command for a container.
+ type: str
+ health_startup_interval:
+ description:
+ - Set an interval for the startup healthcheck.
+ type: str
+ health_startup_retries:
+ description:
+ - The number of attempts allowed before the startup healthcheck restarts the container.
+ If set to 0, the container is never restarted. The default is 0.
+ type: int
+ health_startup_success:
+ description:
+ - The number of successful runs required before the startup healthcheck succeeds
+ and the regular healthcheck begins. A value of 0 means that any success begins the regular healthcheck.
+ The default is 0.
+ type: int
+ health_startup_timeout:
+ description:
+ - The maximum time a startup healthcheck command has to complete before it is marked as failed.
+ type: str
healthcheck_timeout:
description:
- The maximum time allowed to complete the healthcheck before an interval
is considered failed. Like start-period, the value can be expressed in
a time format such as 1m22s. The default value is 30s
type: str
+ aliases:
+ - health_timeout
healthcheck_failure_action:
description:
- The action to be taken when the container is considered unhealthy. The action must be one of
@@ -481,6 +549,8 @@ options:
- 'kill'
- 'restart'
- 'stop'
+ aliases:
+ - health_on_failure
hooks_dir:
description:
- Each .json file in the path configures a hook for Podman containers.
@@ -493,6 +563,11 @@ options:
- Container host name. Sets the container host name that is available
inside the container.
type: str
+ hostuser:
+ description:
+ - Add a user account to /etc/passwd from the host to the container.
+ The Username or UID must exist on the host system.
+ type: str
http_proxy:
description:
- By default proxy environment variables are passed into the container if
@@ -522,6 +597,14 @@ options:
- Run an init inside the container that forwards signals and reaps
processes. The default is false.
type: bool
+ init_ctr:
+ description:
+ - (Pods only). When using pods, create an init style container,
+ which is run after the infra container is started but before regular pod containers are started.
+ type: str
+ choices:
+ - 'once'
+ - 'always'
init_path:
description:
- Path to the container-init binary.
@@ -542,6 +625,10 @@ options:
The address must be within the default CNI network's pool
(default 10.88.0.0/16).
type: str
+ ip6:
+ description:
+ - Specify a static IPv6 address for the container
+ type: str
ipc:
description:
- Default is to create a private IPC namespace (POSIX SysV IPC) for the
@@ -671,6 +758,12 @@ options:
This is a limitation that will be removed in a later release.
type: list
elements: str
+ aliases:
+ - network_alias
+ no_healthcheck:
+ description:
+ - Disable any defined healthchecks for container.
+ type: bool
no_hosts:
description:
- Do not create /etc/hosts for the container
@@ -685,23 +778,64 @@ options:
description:
- Tune the host's OOM preferences for containers (accepts -1000 to 1000)
type: int
+ os:
+ description:
+ - Override the OS of the image to be pulled (defaults to the host's OS). For example, windows.
+ type: str
+ passwd:
+ description:
+ - Allow Podman to add entries to /etc/passwd and /etc/group when used in conjunction with the --user option.
+ This is used to override the Podman provided user setup in favor of entrypoint configurations
+ such as libnss-extrausers.
+ type: bool
+ passwd_entry:
+ description:
+ - Customize the entry that is written to the /etc/passwd file within the container when --passwd is used.
+ type: str
+ personality:
+ description:
+ - Personality sets the execution domain via Linux personality(2).
+ type: str
pid:
description:
- Set the PID mode for the container
type: str
aliases:
- pid_mode
+ pid_file:
+ description:
+ - When the pidfile location is specified, the container process' PID is written to the pidfile.
+ type: path
pids_limit:
description:
- Tune the container's PIDs limit. Set -1 to have unlimited PIDs for the
container.
type: str
+ platform:
+ description:
+ - Specify the platform for selecting the image.
+ type: str
pod:
description:
- Run container in an existing pod.
If you want podman to make the pod for you, prefix the pod name
with "new:"
type: str
+ pod_id_file:
+ description:
+ - Run container in an existing pod and read the pod's ID from the specified file.
+ When a container is run within a pod which has an infra-container,
+ the infra-container starts first.
+ type: path
+ preserve_fd:
+ description:
+ - Pass down to the process the additional file descriptors specified in the comma separated list.
+ type: list
+ elements: str
+ preserve_fds:
+ description:
+ - Pass down to the process N additional file descriptors (in addition to 0, 1, 2). The total FDs are 3+N.
+ type: str
privileged:
description:
- Give extended privileges to this container. The default is false.
@@ -724,6 +858,15 @@ options:
- Publish all exposed ports to random ports on the host interfaces. The
default is false.
type: bool
+ pull:
+ description:
+ - Pull image policy. The default is 'missing'.
+ type: str
+ choices:
+ - 'missing'
+ - 'always'
+ - 'never'
+ - 'newer'
quadlet_dir:
description:
- Path to the directory to write quadlet file in.
@@ -740,6 +883,10 @@ options:
options as a list of lines to add.
type: list
elements: str
+ rdt_class:
+ description:
+ - Rdt-class sets the class of service (CLOS or COS) for the container to run in. Requires root.
+ type: str
read_only:
description:
- Mount the container's root filesystem as read only. Default is false
@@ -779,6 +926,15 @@ options:
- Seconds to wait before forcibly stopping the container when restarting. Use -1 for infinite wait.
Applies to "restarted" status.
type: str
+ retry:
+ description:
+ - Number of times to retry pulling or pushing images between the registry and local storage in case of failure.
+ Default is 3.
+ type: int
+ retry_delay:
+ description:
+ - Duration of delay between retry attempts when pulling or pushing images between the registry and local storage in case of failure.
+ type: str
rm:
description:
- Automatically remove the container when it exits. The default is false.
@@ -786,6 +942,11 @@ options:
aliases:
- remove
- auto_remove
+ rmi:
+ description:
+ - After exit of the container, remove the image unless another container is using it.
+ Implies --rm on the new container. The default is false.
+ type: bool
rootfs:
description:
- If true, the first argument refers to an exploded container on the file
@@ -803,6 +964,10 @@ options:
L(documentation,https://docs.podman.io/en/latest/markdown/podman-run.1.html#secret-secret-opt-opt) for more details.
type: list
elements: str
+ seccomp_policy:
+ description:
+ - Specify the policy to select the seccomp profile.
+ type: str
security_opt:
description:
- Security Options. For example security_opt "seccomp=unconfined"
@@ -817,6 +982,10 @@ options:
If you omit the unit, the system uses bytes. If you omit the size
entirely, the system uses 64m
type: str
+ shm_size_systemd:
+ description:
+ - Size of systemd-specific tmpfs mounts such as /run, /run/lock, /var/log/journal and /tmp.
+ type: str
sig_proxy:
description:
- Proxy signals sent to the podman run command to the container process.
@@ -853,6 +1022,11 @@ options:
description:
- Run container in systemd mode. The default is true.
type: str
+ timeout:
+ description:
+ - Maximum time (in seconds) a container is allowed to run before conmon sends it the kill signal.
+ By default containers run until they exit or are stopped by "podman stop".
+ type: int
timezone:
description:
- Set timezone in container. This flag takes area-based timezones,
@@ -861,6 +1035,10 @@ options:
See /usr/share/zoneinfo/ for valid timezones.
Remote connections use local containers.conf for defaults.
type: str
+ tls_verify:
+ description:
+ - Require HTTPS and verify certificates when pulling images.
+ type: bool
tmpfs:
description:
- Create a tmpfs mount. For example tmpfs
@@ -882,6 +1060,20 @@ options:
elements: str
aliases:
- ulimits
+ umask:
+ description:
+ - Set the umask inside the container. Defaults to 0022.
+ Remote connections use local containers.conf for defaults.
+ type: str
+ unsetenv:
+ description:
+ - Unset default environment variables for the container.
+ type: list
+ elements: str
+ unsetenv_all:
+ description:
+ - Unset all default environment variables for the container.
+ type: bool
user:
description:
- Sets the username or UID used and optionally the groupname or GID for
@@ -899,6 +1091,10 @@ options:
description:
- Set the UTS mode for the container
type: str
+ variant:
+ description:
+ - Use VARIANT instead of the default architecture variant of the container image.
+ type: str
volume:
description:
- Create a bind mount. If you specify, volume /HOST-DIR:/CONTAINER-DIR,
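The new podman_container options above map directly onto podman run flags. A minimal task sketch exercising a few of them; the image reference and values are hypothetical, option names follow the documentation just added:

- name: Run a container using several of the newly documented options
  containers.podman.podman_container:
    name: web
    image: quay.io/libpod/alpine:latest   # hypothetical image reference
    state: started
    command: sleep 1h
    arch: amd64
    pull: newer
    retry: 5
    retry_delay: 10s
    tls_verify: true
    umask: "0022"
    no_healthcheck: true
    unsetenv:
      - HOME
    timeout: 3600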
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_image.py b/ansible_collections/containers/podman/plugins/modules/podman_image.py
index 7fcb0041a..a46a6c3c5 100644
--- a/ansible_collections/containers/podman/plugins/modules/podman_image.py
+++ b/ansible_collections/containers/podman/plugins/modules/podman_image.py
@@ -42,6 +42,10 @@ DOCUMENTATION = r'''
description: Whether or not to pull the image.
default: True
type: bool
+ pull_extra_args:
+ description:
+ - Extra arguments to pass to the pull command.
+ type: str
push:
description: Whether or not to push an image.
default: False
@@ -67,7 +71,8 @@ DOCUMENTATION = r'''
- quadlet
validate_certs:
description:
- - Require HTTPS and validate certificates when pulling or pushing. Also used during build if a pull or push is necessary.
+ - Require HTTPS and validate certificates when pulling or pushing.
+ Also used during build if a pull or push is necessary.
type: bool
aliases:
- tlsverify
@@ -94,9 +99,15 @@ DOCUMENTATION = r'''
- build_args
- buildargs
suboptions:
+ container_file:
+ description:
+ - Content of the Containerfile to use for building the image.
+ Mutually exclusive with the C(file) option, which is the path to an existing Containerfile.
+ type: str
file:
description:
- Path to the Containerfile if it is not in the build context directory.
+ Mutually exclusive with the C(container_file) option.
type: path
volume:
description:
@@ -105,7 +116,8 @@ DOCUMENTATION = r'''
elements: str
annotation:
description:
- - Dictionary of key=value pairs to add to the image. Only works with OCI images. Ignored for Docker containers.
+ - Dictionary of key=value pairs to add to the image. Only works with OCI images.
+ Ignored for Docker containers.
type: dict
force_rm:
description:
@@ -148,7 +160,7 @@ DOCUMENTATION = r'''
type: bool
format:
description:
- - Manifest type to use when pushing an image using the 'dir' transport (default is manifest type of source).
+ - Manifest type to use when pushing an image using the 'dir' transport (default is manifest type of source)
type: str
choices:
- oci
@@ -168,14 +180,19 @@ DOCUMENTATION = r'''
- destination
transport:
description:
- - Transport to use when pushing in image. If no transport is set, will attempt to push to a remote registry.
+ - Transport to use when pushing in image. If no transport is set, will attempt to push to a remote registry
type: str
choices:
- dir
+ - docker
- docker-archive
- docker-daemon
- oci-archive
- ostree
+ extra_args:
+ description:
+ - Extra args to pass to push, if executed. Does not idempotently check for new push args.
+ type: str
quadlet_dir:
description:
- Path to the directory to write quadlet file in.
@@ -300,6 +317,15 @@ EXAMPLES = r"""
name: nginx
arch: amd64
+- name: Build a container from file inline
+ containers.podman.podman_image:
+ name: mycustom_image
+ state: build
+ build:
+ container_file: |-
+ FROM alpine:latest
+ CMD echo "Hello, World!"
+
- name: Create a quadlet file for an image
containers.podman.podman_image:
name: docker.io/library/alpine:latest
@@ -333,7 +359,7 @@ RETURN = r"""
"/app-entrypoint.sh"
],
"Env": [
- "PATH=/opt/bitnami/java/bin:/opt/bitnami/wildfly/bin:/opt/bitnami/nami/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin",
+ "PATH=/opt/bitnami/java/bin:/opt/bitnami/wildfly/bin:/opt/bitnami/nami/bin:...",
"IMAGE_OS=debian-9",
"NAMI_VERSION=1.0.0-1",
"GPG_KEY_SERVERS_LIST=ha.pool.sks-keyservers.net",
@@ -373,10 +399,10 @@ RETURN = r"""
"Digest": "sha256:5a8ab28e314c2222de3feaf6dac94a0436a37fc08979d2722c99d2bef2619a9b",
"GraphDriver": {
"Data": {
- "LowerDir": "/var/lib/containers/storage/overlay/142c1beadf1bb09fbd929465ec98c9dca3256638220450efb4214727d0d0680e/diff:/var/lib/containers/s",
- "MergedDir": "/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99723ed787878dbfea/merged",
- "UpperDir": "/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99723ed787878dbfea/diff",
- "WorkDir": "/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99723ed787878dbfea/work"
+ "LowerDir": "/var/lib/containers/storage/overlay/142c1beadf1bb09fbd929465e..../diff:/var/lib/containers/s",
+ "MergedDir": "/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99/merged",
+ "UpperDir": "/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99/diff",
+ "WorkDir": "/var/lib/containers/storage/overlay/9aa10191f5bddb59e28508e721fdeb43505e5b395845fa99/work"
},
"Name": "overlay"
},
@@ -434,9 +460,12 @@ RETURN = r"""
]
"""
-import json
-import re
-import shlex
+import json # noqa: E402
+import os # noqa: E402
+import re # noqa: E402
+import shlex # noqa: E402
+import tempfile # noqa: E402
+import time # noqa: E402
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
@@ -456,6 +485,7 @@ class PodmanImageManager(object):
self.executable = self.module.get_bin_path(module.params.get('executable'), required=True)
self.tag = self.module.params.get('tag')
self.pull = self.module.params.get('pull')
+ self.pull_extra_args = self.module.params.get('pull_extra_args')
self.push = self.module.params.get('push')
self.path = self.module.params.get('path')
self.force = self.module.params.get('force')
@@ -509,7 +539,7 @@ class PodmanImageManager(object):
if not layer_ids:
layer_ids = lines.splitlines()
- return (layer_ids[-1])
+ return layer_ids[-1]
def present(self):
image = self.find_image()
@@ -520,9 +550,18 @@ class PodmanImageManager(object):
digest_before = None
if not image or self.force:
- if self.path:
+ if self.state == 'build' or self.path:
# Build the image
- self.results['actions'].append('Built image {image_name} from {path}'.format(image_name=self.image_name, path=self.path))
+ build_file = self.build.get('file') if self.build else None
+ container_file_txt = self.build.get('container_file') if self.build else None
+ if build_file and container_file_txt:
+ self.module.fail_json(msg='Cannot specify both build file and container file content!')
+ if not self.path and build_file:
+ self.path = os.path.dirname(build_file)
+ elif not self.path and not build_file and not container_file_txt:
+ self.module.fail_json(msg='Path to build context or file is required when building an image')
+ self.results['actions'].append('Built image {image_name} from {path}'.format(
+ image_name=self.image_name, path=self.path or 'default context'))
if not self.module.check_mode:
self.results['image'], self.results['stdout'] = self.build_image()
image = self.results['image']
@@ -541,16 +580,8 @@ class PodmanImageManager(object):
self.results['changed'] = True
if self.push:
- # Push the image
- if '/' in self.image_name:
- push_format_string = 'Pushed image {image_name}'
- else:
- push_format_string = 'Pushed image {image_name} to {dest}'
- self.results['actions'].append(push_format_string.format(image_name=self.image_name, dest=self.push_args['dest']))
- self.results['changed'] = True
- if not self.module.check_mode:
- self.results['image'], output = self.push_image()
- self.results['stdout'] += "\n" + output
+ self.results['image'], output = self.push_image()
+ self.results['stdout'] += "\n" + output
if image and not self.results.get('image'):
self.results['image'] = image
@@ -654,13 +685,18 @@ class PodmanImageManager(object):
if self.ca_cert_dir:
args.extend(['--cert-dir', self.ca_cert_dir])
+ if self.pull_extra_args:
+ args.extend(shlex.split(self.pull_extra_args))
+
rc, out, err = self._run(args, ignore_errors=True)
if rc != 0:
if not self.pull:
- self.module.fail_json(msg='Failed to find image {image_name} locally, image pull set to {pull_bool}'.format(
- pull_bool=self.pull, image_name=image_name))
+ self.module.fail_json(
+ msg='Failed to find image {image_name} locally, image pull set to {pull_bool}'.format(
+ pull_bool=self.pull, image_name=image_name))
else:
- self.module.fail_json(msg='Failed to pull image {image_name}'.format(image_name=image_name))
+ self.module.fail_json(
+ msg='Failed to pull image {image_name}'.format(image_name=image_name))
return self.inspect_image(out.strip())
def build_image(self):
@@ -697,6 +733,17 @@ class PodmanImageManager(object):
containerfile = self.build.get('file')
if containerfile:
args.extend(['--file', containerfile])
+ container_file_txt = self.build.get('container_file')
+ if container_file_txt:
+ # create a temporary file with the content of the Containerfile
+ if self.path:
+ container_file_path = os.path.join(self.path, 'Containerfile.generated_by_ansible_%s' % time.time())
+ else:
+ container_file_path = os.path.join(
+ tempfile.gettempdir(), 'Containerfile.generated_by_ansible_%s' % time.time())
+ with open(container_file_path, 'w') as f:
+ f.write(container_file_txt)
+ args.extend(['--file', container_file_path])
volume = self.build.get('volume')
if volume:
@@ -717,13 +764,16 @@ class PodmanImageManager(object):
target = self.build.get('target')
if target:
args.extend(['--target', target])
-
- args.append(self.path)
+ if self.path:
+ args.append(self.path)
rc, out, err = self._run(args, ignore_errors=True)
if rc != 0:
- self.module.fail_json(msg="Failed to build image {image}: {out} {err}".format(image=self.image_name, out=out, err=err))
-
+ self.module.fail_json(msg="Failed to build image {image}: {out} {err}".format(
+ image=self.image_name, out=out, err=err))
+ # remove the temporary file if it was created
+ if container_file_txt:
+ os.remove(container_file_path)
last_id = self._get_id_from_output(out, startswith='-->')
return self.inspect_image(last_id), out + err
@@ -760,49 +810,55 @@ class PodmanImageManager(object):
if sign_by_key:
args.extend(['--sign-by', sign_by_key])
+ push_extra_args = self.push_args.get('extra_args')
+ if push_extra_args:
+ args.extend(shlex.split(push_extra_args))
+
args.append(self.image_name)
# Build the destination argument
dest = self.push_args.get('dest')
- dest_format_string = '{dest}/{image_name}'
- regexp = re.compile(r'/{name}(:{tag})?'.format(name=self.name, tag=self.tag))
- if not dest:
- if '/' not in self.name:
- self.module.fail_json(msg="'push_args['dest']' is required when pushing images that do not have the remote registry in the image name")
-
- # If the push destination contains the image name and/or the tag
- # remove it and warn since it's not needed.
- elif regexp.search(dest):
- dest = regexp.sub('', dest)
- self.module.warn("Image name and tag are automatically added to push_args['dest']. Destination changed to {dest}".format(dest=dest))
+ transport = self.push_args.get('transport')
- if dest and dest.endswith('/'):
- dest = dest[:-1]
+ if dest is None:
+ dest = self.image_name
- transport = self.push_args.get('transport')
if transport:
- if not dest:
- self.module.fail_json("'push_args['transport'] requires 'push_args['dest'] but it was not provided.")
if transport == 'docker':
dest_format_string = '{transport}://{dest}'
elif transport == 'ostree':
dest_format_string = '{transport}:{name}@{dest}'
else:
dest_format_string = '{transport}:{dest}'
-
- dest_string = dest_format_string.format(transport=transport, name=self.name, dest=dest, image_name=self.image_name,)
-
- # Only append the destination argument if the image name is not a URL
- if '/' not in self.name:
- args.append(dest_string)
-
- rc, out, err = self._run(args, ignore_errors=True)
- if rc != 0:
- self.module.fail_json(msg="Failed to push image {image_name}: {err}".format(image_name=self.image_name, err=err))
- last_id = self._get_id_from_output(
- out + err, contains=':', split_on=':')
-
- return self.inspect_image(last_id), out + err
+ if transport == 'docker-daemon' and ":" not in dest:
+ dest_format_string = '{transport}:{dest}:latest'
+ dest_string = dest_format_string.format(transport=transport, name=self.name, dest=dest)
+ else:
+ dest_string = dest
+ # In case of dest as a repository with org name only, append image name to it
+ if ":" not in dest and "@" not in dest and len(dest.rstrip("/").split("/")) == 2:
+ dest_string = dest.rstrip("/") + "/" + self.image_name
+
+ if "/" not in dest_string and "@" not in dest_string and "docker-daemon" not in dest_string:
+ self.module.fail_json(msg="Destination must be a full URL or path to a directory.")
+
+ args.append(dest_string)
+ self.module.log("PODMAN-IMAGE-DEBUG: Pushing image {image_name} to {dest_string}".format(
+ image_name=self.image_name, dest_string=dest_string))
+ self.results['actions'].append(" ".join(args))
+ self.results['podman_actions'].append(" ".join([self.executable] + args))
+ self.results['changed'] = True
+ out, err = '', ''
+ if not self.module.check_mode:
+ rc, out, err = self._run(args, ignore_errors=True)
+ if rc != 0:
+ self.module.fail_json(msg="Failed to push image {image_name}".format(
+ image_name=self.image_name),
+ stdout=out, stderr=err,
+ actions=self.results['actions'],
+ podman_actions=self.results['podman_actions'])
+
+ return self.inspect_image(self.image_name), out + err
def remove_image(self, image_name=None):
if image_name is None:
@@ -813,7 +869,8 @@ class PodmanImageManager(object):
args.append('--force')
rc, out, err = self._run(args, ignore_errors=True)
if rc != 0:
- self.module.fail_json(msg='Failed to remove image {image_name}. {err}'.format(image_name=image_name, err=err))
+ self.module.fail_json(msg='Failed to remove image {image_name}. {err}'.format(
+ image_name=image_name, err=err))
return out
def remove_image_id(self, image_id=None):
@@ -847,6 +904,7 @@ def main():
arch=dict(type='str'),
tag=dict(type='str', default='latest'),
pull=dict(type='bool', default=True),
+ pull_extra_args=dict(type='str'),
push=dict(type='bool', default=False),
path=dict(type='str'),
force=dict(type='bool', default=False),
@@ -868,6 +926,7 @@ def main():
annotation=dict(type='dict'),
force_rm=dict(type='bool', default=False),
file=dict(type='path'),
+ container_file=dict(type='str'),
format=dict(
type='str',
choices=['oci', 'docker'],
@@ -889,6 +948,7 @@ def main():
remove_signatures=dict(type='bool'),
sign_by=dict(type='str'),
dest=dict(type='str', aliases=['destination'],),
+ extra_args=dict(type='str'),
transport=dict(
type='str',
choices=[
@@ -897,6 +957,7 @@ def main():
'docker-daemon',
'oci-archive',
'ostree',
+ 'docker'
]
),
),
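Beyond the inline-Containerfile example already shown above, the new pull_extra_args and push_args extra_args options are plain pass-through strings, and 'docker' becomes a valid push transport. A hypothetical sketch; the registry, image name and the extra flags are made up for illustration:

- name: Pull with extra arguments, then push through the docker transport
  containers.podman.podman_image:
    name: quay.io/example/app              # hypothetical image
    tag: "1.0"
    pull_extra_args: "--arch amd64"
    push: true
    push_args:
      transport: docker
      dest: registry.example.com/example   # hypothetical destination
      extra_args: "--compression-format zstd"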
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_network.py b/ansible_collections/containers/podman/plugins/modules/podman_network.py
index 37bfefede..7623fffc1 100644
--- a/ansible_collections/containers/podman/plugins/modules/podman_network.py
+++ b/ansible_collections/containers/podman/plugins/modules/podman_network.py
@@ -33,6 +33,12 @@ options:
description:
- disable dns plugin (default "false")
type: bool
+ dns:
+ description:
+ - Set network-scoped DNS resolver/nameserver for containers in this network.
+ If not set, the host servers from /etc/resolv.conf are used.
+ type: list
+ elements: str
driver:
description:
- Driver to manage the network (default "bridge")
@@ -61,11 +67,27 @@ options:
description:
- Allocate container IP from range
type: str
+ ipam_driver:
+ description:
+ - Set the ipam driver (IP Address Management Driver) for the network.
+ When unset podman chooses an ipam driver automatically based on the network driver
+ type: str
+ choices:
+ - host-local
+ - dhcp
+ - none
ipv6:
description:
- Enable IPv6 (Dual Stack) networking. You must pass a IPv6 subnet.
The subnet option must be used with the ipv6 option.
+ Idempotency is not supported because it generates subnets randomly.
type: bool
+ route:
+ description:
+ - A static route in the format <destination in CIDR notation>,<gateway>,<route metric (optional)>.
+ This route will be added to every container in this network.
+ type: list
+ elements: str
subnet:
description:
- Subnet in CIDR format
@@ -74,6 +96,29 @@ options:
description:
- Create a Macvlan connection based on this device
type: str
+ net_config:
+ description:
+ - List of dictionaries with network configuration.
+ Each dictionary should contain 'subnet' and 'gateway' keys.
+ 'ip_range' is optional.
+ type: list
+ elements: dict
+ suboptions:
+ subnet:
+ description:
+ - Subnet in CIDR format
+ type: str
+ required: true
+ gateway:
+ description:
+ - Gateway for the subnet
+ type: str
+ required: true
+ ip_range:
+ description:
+ - Allocate container IP from range
+ type: str
+ required: false
opt:
description:
- Add network options. Currently 'vlan' and 'mtu' are supported.
@@ -297,6 +342,11 @@ class PodmanNetworkModuleParams:
def addparam_gateway(self, c):
return c + ['--gateway', self.params['gateway']]
+ def addparam_dns(self, c):
+ for dns in self.params['dns']:
+ c += ['--dns', dns]
+ return c
+
def addparam_driver(self, c):
return c + ['--driver', self.params['driver']]
@@ -312,6 +362,13 @@ class PodmanNetworkModuleParams:
def addparam_macvlan(self, c):
return c + ['--macvlan', self.params['macvlan']]
+ def addparam_net_config(self, c):
+ for net in self.params['net_config']:
+ for kw in ('subnet', 'gateway', 'ip_range'):
+ if kw in net and net[kw]:
+ c += ['--%s=%s' % (kw.replace('_', '-'), net[kw])]
+ return c
+
def addparam_interface_name(self, c):
return c + ['--interface-name', self.params['interface_name']]
@@ -326,6 +383,14 @@ class PodmanNetworkModuleParams:
for k in opt])]
return c
+ def addparam_route(self, c):
+ for route in self.params['route']:
+ c += ['--route', route]
+ return c
+
+ def addparam_ipam_driver(self, c):
+ return c + ['--ipam-driver=%s' % self.params['ipam_driver']]
+
def addparam_disable_dns(self, c):
return c + ['--disable-dns=%s' % self.params['disable_dns']]
@@ -337,7 +402,6 @@ class PodmanNetworkDefaults:
self.defaults = {
'driver': 'bridge',
'internal': False,
- 'ipv6': False
}
def default_dict(self):
@@ -385,32 +449,45 @@ class PodmanNetworkDiff:
before = after = self.params['disable_dns']
return self._diff_update_and_compare('disable_dns', before, after)
+ def diffparam_dns(self):
+ before = self.info.get('network_dns_servers', [])
+ after = self.params['dns'] or []
+ return self._diff_update_and_compare('dns', sorted(before), sorted(after))
+
def diffparam_driver(self):
# Currently only bridge is supported
before = after = 'bridge'
return self._diff_update_and_compare('driver', before, after)
def diffparam_ipv6(self):
- if LooseVersion(self.version) >= LooseVersion('4.0.0'):
- before = self.info.get('ipv6_enabled', False)
- after = self.params['ipv6']
- return self._diff_update_and_compare('ipv6', before, after)
- before = after = ''
- return self._diff_update_and_compare('ipv6', before, after)
+ # We don't support dual stack because it generates subnets randomly
+ return self._diff_update_and_compare('ipv6', '', '')
def diffparam_gateway(self):
# Disable idempotency of subnet for v4, subnets are added automatically
# TODO(sshnaidm): check if it's still the issue in v5
- if LooseVersion(self.version) >= LooseVersion('4.0.0'):
- return self._diff_update_and_compare('gateway', '', '')
- try:
- before = self.info['plugins'][0]['ipam']['ranges'][0][0]['gateway']
- except (IndexError, KeyError):
- before = ''
- after = before
- if self.params['gateway'] is not None:
+ if LooseVersion(self.version) < LooseVersion('4.0.0'):
+ try:
+ before = self.info['plugins'][0]['ipam']['ranges'][0][0]['gateway']
+ except (IndexError, KeyError):
+ before = ''
+ after = before
+ if self.params['gateway'] is not None:
+ after = self.params['gateway']
+ return self._diff_update_and_compare('gateway', before, after)
+ else:
+ before_subs = self.info.get('subnets')
after = self.params['gateway']
- return self._diff_update_and_compare('gateway', before, after)
+ if not before_subs:
+ before = None
+ if before_subs:
+ if len(before_subs) > 1 and after:
+ return self._diff_update_and_compare(
+ 'gateway', ",".join([i['gateway'] for i in before_subs]), after)
+ before = [i.get('gateway') for i in before_subs][0]
+ if not after:
+ after = before
+ return self._diff_update_and_compare('gateway', before, after)
def diffparam_internal(self):
if LooseVersion(self.version) >= LooseVersion('4.0.0'):
@@ -429,21 +506,62 @@ class PodmanNetworkDiff:
before = after = ''
return self._diff_update_and_compare('ip_range', before, after)
- def diffparam_subnet(self):
- # Disable idempotency of subnet for v4, subnets are added automatically
- # TODO(sshnaidm): check if it's still the issue in v5
- if LooseVersion(self.version) >= LooseVersion('4.0.0'):
- return self._diff_update_and_compare('subnet', '', '')
- try:
- before = self.info['plugins'][0]['ipam']['ranges'][0][0]['subnet']
- except (IndexError, KeyError):
+ def diffparam_ipam_driver(self):
+ before = self.info.get("ipam_options", {}).get("driver", "")
+ after = self.params['ipam_driver']
+ if not after:
+ after = before
+ return self._diff_update_and_compare('ipam_driver', before, after)
+
+ def diffparam_net_config(self):
+ after = self.params['net_config']
+ if not after:
+ return self._diff_update_and_compare('net_config', '', '')
+ before_subs = self.info.get('subnets', [])
+ if before_subs:
+ before = ":".join(sorted([",".join([i['subnet'], i['gateway']]).rstrip(",") for i in before_subs]))
+ else:
before = ''
- after = before
- if self.params['subnet'] is not None:
+ after = ":".join(sorted([",".join([i['subnet'], i['gateway']]).rstrip(",") for i in after]))
+ return self._diff_update_and_compare('net_config', before, after)
+
+ def diffparam_route(self):
+ routes = self.info.get('routes', [])
+ if routes:
+ before = [",".join([
+ r['destination'], r['gateway'], str(r.get('metric', ''))]).rstrip(",") for r in routes]
+ else:
+ before = []
+ after = self.params['route'] or []
+ return self._diff_update_and_compare('route', sorted(before), sorted(after))
+
+ def diffparam_subnet(self):
+ # Disable idempotency of subnet for v3 and below
+ if LooseVersion(self.version) < LooseVersion('4.0.0'):
+ try:
+ before = self.info['plugins'][0]['ipam']['ranges'][0][0]['subnet']
+ except (IndexError, KeyError):
+ before = ''
+ after = before
+ if self.params['subnet'] is not None:
+ after = self.params['subnet']
+ if HAS_IP_ADDRESS_MODULE:
+ after = ipaddress.ip_network(after).compressed
+ return self._diff_update_and_compare('subnet', before, after)
+ else:
+ if self.params['ipv6'] is not None:
+ # We can't support dual stack, it generates subnets randomly
+ return self._diff_update_and_compare('subnet', '', '')
after = self.params['subnet']
- if HAS_IP_ADDRESS_MODULE:
- after = ipaddress.ip_network(after).compressed
- return self._diff_update_and_compare('subnet', before, after)
+ if after is None:
+ # We can't guess what subnet was used before by default
+ return self._diff_update_and_compare('subnet', '', '')
+ before = self.info.get('subnets')
+ if before:
+ if len(before) > 1 and after:
+ return self._diff_update_and_compare('subnet', ",".join([i['subnet'] for i in before]), after)
+ before = [i['subnet'] for i in before][0]
+ return self._diff_update_and_compare('subnet', before, after)
def diffparam_macvlan(self):
before = after = ''
@@ -694,12 +812,15 @@ def main():
choices=['present', 'absent', 'quadlet']),
name=dict(type='str', required=True),
disable_dns=dict(type='bool', required=False),
+ dns=dict(type='list', elements='str', required=False),
driver=dict(type='str', required=False),
force=dict(type='bool', default=False),
gateway=dict(type='str', required=False),
interface_name=dict(type='str', required=False),
internal=dict(type='bool', required=False),
ip_range=dict(type='str', required=False),
+ ipam_driver=dict(type='str', required=False,
+ choices=['host-local', 'dhcp', 'none']),
ipv6=dict(type='bool', required=False),
subnet=dict(type='str', required=False),
macvlan=dict(type='str', required=False),
@@ -715,14 +836,23 @@ def main():
executable=dict(type='str', required=False, default='podman'),
debug=dict(type='bool', default=False),
recreate=dict(type='bool', default=False),
+ route=dict(type='list', elements='str', required=False),
quadlet_dir=dict(type='path', required=False),
quadlet_filename=dict(type='str', required=False),
quadlet_options=dict(type='list', elements='str', required=False),
+ net_config=dict(type='list', required=False, elements='dict',
+ options=dict(
+ subnet=dict(type='str', required=True),
+ gateway=dict(type='str', required=True),
+ ip_range=dict(type='str', required=False),
+ )),
),
required_by=dict( # for IP range and GW to set 'subnet' is required
ip_range=('subnet'),
gateway=('subnet'),
- ))
+ ),
+ # either subnet or net_config can be defined, but not both
+ mutually_exclusive=[['subnet', 'net_config']])
PodmanNetworkManager(module).execute()
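The options added here (dns, route, ipam_driver and the net_config list, which is an alternative to a single subnet and is mutually exclusive with it) compose as in this hypothetical sketch; addresses and the network name are illustrative only:

- name: Create a network with explicit subnet configuration
  containers.podman.podman_network:
    name: appnet                      # hypothetical network name
    dns:
      - 10.10.10.10
    ipam_driver: host-local
    route:
      - "10.89.1.0/24,10.10.10.1"
    net_config:
      - subnet: 10.10.10.0/24
        gateway: 10.10.10.1
        ip_range: 10.10.10.128/25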
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_pod.py b/ansible_collections/containers/podman/plugins/modules/podman_pod.py
index a975921ea..cdf728243 100644
--- a/ansible_collections/containers/podman/plugins/modules/podman_pod.py
+++ b/ansible_collections/containers/podman/plugins/modules/podman_pod.py
@@ -117,6 +117,8 @@ options:
all containers in the pod.
type: list
elements: str
+ aliases:
+ - dns_option
required: false
dns_search:
description:
@@ -125,6 +127,14 @@ options:
type: list
elements: str
required: false
+ exit_policy:
+ description:
+ - Set the exit policy of the pod when the last container exits. Supported policies are stop and continue.
+ choices:
+ - stop
+ - continue
+ type: str
+ required: false
generate_systemd:
description:
- Generate systemd unit file for container.
@@ -227,6 +237,11 @@ options:
elements: str
required: false
type: list
+ gpus:
+ description:
+ - GPU devices to add to the container ('all' to pass all GPUs).
+ type: str
+ required: false
hostname:
description:
- Set a hostname to the pod
@@ -266,6 +281,11 @@ options:
- Set a static IP for the pod's shared network.
type: str
required: false
+ ip6:
+ description:
+ - Set a static IPv6 for the pod's shared network.
+ type: str
+ required: false
label:
description:
- Add metadata to a pod, pass dictionary of label keys and values.
@@ -357,6 +377,16 @@ options:
options as a list of lines to add.
type: list
elements: str
+ restart_policy:
+ description:
+ - Restart policy to follow when containers exit.
+ type: str
+ security_opt:
+ description:
+ - Security options for the pod.
+ type: list
+ elements: str
+ required: false
share:
description:
- A comma delimited list of kernel namespaces to share. If none or "" is specified,
@@ -364,6 +394,30 @@ options:
user, uts.
type: str
required: false
+ share_parent:
+ description:
+ - This boolean determines whether or not all containers entering the pod use the pod as their cgroup parent.
+ The default value of this option in Podman is true.
+ type: bool
+ required: false
+ shm_size:
+ description:
+ - Set the size of the /dev/shm shared memory space.
+ A unit can be b (bytes), k (kibibytes), m (mebibytes), or g (gibibytes).
+ If the unit is omitted, the system uses bytes.
+ If the size is omitted, the default is 64m.
+ When size is 0, there is no limit on the amount of memory used for IPC by the pod.
+ type: str
+ required: false
+ shm_size_systemd:
+ description:
+ - Size of systemd-specific tmpfs mounts such as /run, /run/lock, /var/log/journal and /tmp.
+ A unit can be b (bytes), k (kibibytes), m (mebibytes), or g (gibibytes).
+ If the unit is omitted, the system uses bytes.
+ If the size is omitted, the default is 64m.
+ When size is 0, the usage is limited to 50 percent of the host's available memory.
+ type: str
+ required: false
subgidname:
description:
- Name for GID map from the /etc/subgid file. Using this flag will run the container
@@ -377,6 +431,11 @@ options:
This flag conflicts with `userns` and `uidmap`.
required: false
type: str
+ sysctl:
+ description:
+ - Set kernel parameters for the pod.
+ type: dict
+ required: false
uidmap:
description:
- Run the container in a new user namespace using the supplied mapping.
@@ -393,6 +452,11 @@ options:
An empty value ("") means user namespaces are disabled.
required: false
type: str
+ uts:
+ description:
+ - Set the UTS namespace mode for the pod.
+ required: false
+ type: str
volume:
description:
- Create a bind mount.
@@ -401,6 +465,12 @@ options:
elements: str
required: false
type: list
+ volumes_from:
+ description:
+ - Mount volumes from the specified container.
+ elements: str
+ required: false
+ type: list
executable:
description:
- Path to C(podman) executable if it is not in the C($PATH) on the
@@ -450,7 +520,7 @@ pod:
'''
-EXAMPLES = '''
+EXAMPLES = r'''
# What modules does for example
- containers.podman.podman_pod:
name: pod1
@@ -465,6 +535,62 @@ EXAMPLES = '''
state: started
publish: "127.0.0.1::80"
+# Full workflow example with pod and containers
+- name: Create a pod with parameters
+ containers.podman.podman_pod:
+ name: mypod
+ state: created
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.1
+ volumes:
+ - /tmp:/tmp/:ro
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+
+- name: Create containers attached to the pod
+ containers.podman.podman_container:
+ name: "{{ item }}"
+ state: created
+ pod: mypod
+ image: alpine
+ command: sleep 1h
+ loop:
+ - "container1"
+ - "container2"
+
+- name: Start pod
+ containers.podman.podman_pod:
+ name: mypod
+ state: started
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.1
+ volumes:
+ - /tmp:/tmp/:ro
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+
# Create a Quadlet file for a pod
- containers.podman.podman_pod:
name: qpod
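As a complement to the full workflow example earlier in this hunk, a short sketch combining several of the pod-level options added above; the pod name and values are hypothetical:

- name: Create a pod with the newly documented tuning options
  containers.podman.podman_pod:
    name: tuned-pod                   # hypothetical pod name
    state: created
    exit_policy: stop
    restart_policy: always
    share_parent: true
    shm_size: 256m
    sysctl:
      net.ipv4.ip_unprivileged_port_start: 80
    security_opt:
      - label=disable
    uts: private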
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_search.py b/ansible_collections/containers/podman/plugins/modules/podman_search.py
new file mode 100644
index 000000000..128e3ce03
--- /dev/null
+++ b/ansible_collections/containers/podman/plugins/modules/podman_search.py
@@ -0,0 +1,131 @@
+#!/usr/bin/python
+# Copyright (c) 2024 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+__metaclass__ = type
+
+
+DOCUMENTATION = r'''
+module: podman_search
+author:
+ - Derek Waters (@derekwaters)
+short_description: Search for remote images using podman
+notes:
+ - Podman may require elevated privileges in order to run properly.
+description:
+ - Search for remote images using C(podman)
+options:
+ executable:
+ description:
+ - Path to C(podman) executable if it is not in the C($PATH) on the machine running C(podman)
+ default: 'podman'
+ type: str
+ term:
+ description:
+ - The search term to look for. Will search all default registries unless a registry is defined in the search term.
+ type: str
+ required: True
+ limit:
+ description:
+ - Limit the number of image results returned from the search (per image registry)
+ required: False
+ default: 25
+ type: int
+ list_tags:
+ description:
+ - Whether or not to return the list of tags associated with each image
+ required: False
+ default: False
+ type: bool
+
+'''
+
+EXAMPLES = r"""
+- name: Search for any rhel images
+ containers.podman.podman_search:
+ term: "rhel"
+ limit: 3
+
+- name: Gather info on a specific remote image
+ containers.podman.podman_search:
+ term: "myimageregistry.com/ansible-automation-platform/ee-minimal-rhel8"
+
+- name: Gather tag info on a known remote image
+ containers.podman.podman_search:
+ term: "myimageregistry.com/ansible-automation-platform/ee-minimal-rhel8"
+ list_tags: True
+"""
+
+RETURN = r"""
+images:
+ description: info from all or specified images
+ returned: always
+ type: list
+ sample: [
+ {
+ "Automated": "",
+ "Description": "Red Hat Enterprise Linux Atomic Image is a minimal, fully supported base image.",
+ "Index": "registry.access.redhat.com",
+ "Name": "registry.access.redhat.com/rhel7-atomic",
+ "Official": "",
+ "Stars": 0,
+ "Tags": ["1.0", "1.1", "1.1.1-devel"]
+ }
+ ]
+"""
+
+import json
+
+from ansible.module_utils.basic import AnsibleModule
+
+
+def search_images(module, executable, term, limit, list_tags):
+ command = [executable, 'search', term, '--format', 'json']
+ command.extend(['--limit', "{0}".format(limit)])
+ if list_tags:
+ command.extend(['--list-tags'])
+
+ rc, out, err = module.run_command(command)
+
+ if rc != 0:
+ module.fail_json(msg="Unable to gather info for '{0}': {1}".format(term, err))
+ return out
+
+
+def main():
+ module = AnsibleModule(
+ argument_spec=dict(
+ executable=dict(type='str', default='podman'),
+ term=dict(type='str', required=True),
+ limit=dict(type='int', required=False, default=25),
+ list_tags=dict(type='bool', required=False, default=False)
+ ),
+ supports_check_mode=True,
+ )
+
+ executable = module.params['executable']
+ term = module.params.get('term')
+ limit = module.params.get('limit')
+ list_tags = module.params.get('list_tags')
+ executable = module.get_bin_path(executable, required=True)
+
+ result_str = search_images(module, executable, term, limit, list_tags)
+ if result_str == "":
+ results = []
+ else:
+ try:
+ results = json.loads(result_str)
+ except json.decoder.JSONDecodeError:
+ module.fail_json(msg='Failed to parse JSON output from podman search: {out}'.format(out=result_str))
+
+ results = dict(
+ changed=False,
+ images=results
+ )
+
+ module.exit_json(**results)
+
+
+if __name__ == '__main__':
+ main()
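Since the module only gathers data, the typical pattern is to register the result and work with the returned images list (keys as in the RETURN sample above). A minimal, hypothetical follow-up:

- name: Search for rhel images and keep the result
  containers.podman.podman_search:
    term: "rhel"
    limit: 5
  register: search_result

- name: Report the names of the images that were found
  ansible.builtin.debug:
    msg: "{{ search_result.images | map(attribute='Name') | list }}"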
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_secret.py b/ansible_collections/containers/podman/plugins/modules/podman_secret.py
index a31aae9dc..76b10ad39 100644
--- a/ansible_collections/containers/podman/plugins/modules/podman_secret.py
+++ b/ansible_collections/containers/podman/plugins/modules/podman_secret.py
@@ -21,6 +21,7 @@ options:
data:
description:
- The value of the secret. Required when C(state) is C(present).
+ Mutually exclusive with C(env) and C(path).
type: str
driver:
description:
@@ -31,6 +32,11 @@ options:
description:
- Driver-specific key-value options.
type: dict
+ env:
+ description:
+ - The name of the environment variable that contains the secret.
+ Mutually exclusive with C(data) and C(path).
+ type: str
executable:
description:
- Path to C(podman) executable if it is not in the C($PATH) on the
@@ -53,6 +59,11 @@ options:
- The name of the secret.
required: True
type: str
+ path:
+ description:
+ - Path to the file that contains the secret.
+ Mutually exclusive with C(data) and C(env).
+ type: path
state:
description:
- Whether to create or remove the named secret.
@@ -67,7 +78,7 @@ options:
type: dict
debug:
description:
- - Enable debug mode for module.
+ - Enable debug mode for the module. It prints the diff of secret values.
type: bool
default: False
'''
@@ -99,6 +110,8 @@ EXAMPLES = r"""
name: mysecret
"""
+import os
+
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.containers.podman.plugins.module_utils.podman.common import LooseVersion
from ansible_collections.containers.podman.plugins.module_utils.podman.common import get_podman_version
@@ -116,14 +129,15 @@ def podman_secret_exists(module, executable, name, version):
return rc == 0
-def need_update(module, executable, name, data, driver, driver_opts, debug, labels):
-
+def need_update(module, executable, name, data, path, env, skip, driver, driver_opts, debug, labels):
cmd = [executable, 'secret', 'inspect', '--showsecret', name]
rc, out, err = module.run_command(cmd)
if rc != 0:
if debug:
module.log("PODMAN-SECRET-DEBUG: Unable to get secret info: %s" % err)
return True
+ if skip:
+ return False
try:
secret = module.from_json(out)[0]
# We support only file driver for now
@@ -131,10 +145,37 @@ def need_update(module, executable, name, data, driver, driver_opts, debug, labe
if debug:
module.log("PODMAN-SECRET-DEBUG: Idempotency of driver %s is not supported" % driver)
return True
- if secret['SecretData'] != data:
- diff['after'] = "<different-secret>"
- diff['before'] = "<secret>"
- return True
+ if data:
+ if secret['SecretData'] != data:
+ if debug:
+ diff['after'] = data
+ diff['before'] = secret['SecretData']
+ else:
+ diff['after'] = "<different-secret>"
+ diff['before'] = "<secret>"
+ return True
+ if path:
+ with open(path, 'rb') as f:
+ text = f.read().decode('utf-8')
+ if secret['SecretData'] != text:
+ if debug:
+ diff['after'] = text
+ diff['before'] = secret['SecretData']
+ else:
+ diff['after'] = "<different-secret>"
+ diff['before'] = "<secret>"
+ return True
+ if env:
+ env_data = os.environ.get(env)
+ if secret['SecretData'] != env_data:
+ if debug:
+ diff['after'] = env_data
+ diff['before'] = secret['SecretData']
+ else:
+ diff['after'] = "<different-secret>"
+ diff['before'] = "<secret>"
+ return True
+
if driver_opts:
for k, v in driver_opts.items():
if secret['Spec']['Driver']['Options'].get(k) != v:
@@ -154,13 +195,13 @@ def need_update(module, executable, name, data, driver, driver_opts, debug, labe
return False
-def podman_secret_create(module, executable, name, data, force, skip,
+def podman_secret_create(module, executable, name, data, path, env, force, skip,
driver, driver_opts, debug, labels):
podman_version = get_podman_version(module, fail=False)
if (podman_version is not None and
LooseVersion(podman_version) >= LooseVersion('4.7.0')
and (driver is None or driver == 'file')):
- if not skip and need_update(module, executable, name, data, driver, driver_opts, debug, labels):
+ if need_update(module, executable, name, data, path, env, skip, driver, driver_opts, debug, labels):
podman_secret_remove(module, executable, name)
else:
return {"changed": False}
@@ -182,9 +223,20 @@ def podman_secret_create(module, executable, name, data, force, skip,
cmd.append('--label')
cmd.append("=".join([k, v]))
cmd.append(name)
- cmd.append('-')
+ if data:
+ cmd.append('-')
+ elif path:
+ cmd.append(path)
+ elif env:
+ if os.environ.get(env) is None:
+ module.fail_json(msg="Environment variable %s is not set" % env)
+ cmd.append("--env")
+ cmd.append(env)
- rc, out, err = module.run_command(cmd, data=data, binary_data=True)
+ if data:
+ rc, out, err = module.run_command(cmd, data=data, binary_data=True)
+ else:
+ rc, out, err = module.run_command(cmd)
if rc != 0:
module.fail_json(msg="Unable to create secret: %s" % err)
@@ -219,6 +271,8 @@ def main():
state=dict(type='str', default='present', choices=['absent', 'present']),
name=dict(type='str', required=True),
data=dict(type='str', no_log=True),
+ env=dict(type='str'),
+ path=dict(type='path'),
force=dict(type='bool', default=False),
skip_existing=dict(type='bool', default=False),
driver=dict(type='str'),
@@ -226,6 +280,8 @@ def main():
labels=dict(type='dict'),
debug=dict(type='bool', default=False),
),
+ required_if=[('state', 'present', ['path', 'env', 'data'], True)],
+ mutually_exclusive=[['path', 'env', 'data']],
)
state = module.params['state']
@@ -234,16 +290,16 @@ def main():
if state == 'present':
data = module.params['data']
- if data is None:
- raise Exception("'data' is required when 'state' is 'present'")
force = module.params['force']
skip = module.params['skip_existing']
driver = module.params['driver']
driver_opts = module.params['driver_opts']
debug = module.params['debug']
labels = module.params['labels']
+ path = module.params['path']
+ env = module.params['env']
results = podman_secret_create(module, executable,
- name, data, force, skip,
+ name, data, path, env, force, skip,
driver, driver_opts, debug, labels)
else:
results = podman_secret_remove(module, executable, name)
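With the new path and env sources (mutually exclusive with data), a secret can be created without putting its value in the playbook; a hypothetical sketch, with made-up secret names, file path and variable name:

- name: Create a secret from a file on the managed host
  containers.podman.podman_secret:
    name: tls_key
    path: /etc/pki/tls/private/server.key   # hypothetical file path
    state: present

- name: Create a secret from an environment variable on the managed host
  containers.podman.podman_secret:
    name: db_password
    env: DB_PASSWORD                         # hypothetical variable name
    state: present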
diff --git a/ansible_collections/containers/podman/plugins/modules/podman_volume.py b/ansible_collections/containers/podman/plugins/modules/podman_volume.py
index 0b990354a..cb958cc50 100644
--- a/ansible_collections/containers/podman/plugins/modules/podman_volume.py
+++ b/ansible_collections/containers/podman/plugins/modules/podman_volume.py
@@ -24,6 +24,8 @@ options:
choices:
- present
- absent
+ - mounted
+ - unmounted
- quadlet
recreate:
description:
@@ -131,6 +133,7 @@ EXAMPLES = '''
'''
# noqa: F402
import json # noqa: F402
+import os # noqa: F402
from ansible.module_utils.basic import AnsibleModule # noqa: F402
from ansible.module_utils._text import to_bytes, to_native # noqa: F402
@@ -160,7 +163,7 @@ class PodmanVolumeModuleParams:
Returns:
list -- list of byte strings for Popen command
"""
- if self.action in ['delete']:
+ if self.action in ['delete', 'mount', 'unmount']:
return self._simple_action()
if self.action in ['create']:
return self._create_action()
@@ -169,6 +172,12 @@ class PodmanVolumeModuleParams:
if self.action == 'delete':
cmd = ['rm', '-f', self.params['name']]
return [to_bytes(i, errors='surrogate_or_strict') for i in cmd]
+ if self.action == 'mount':
+ cmd = ['mount', self.params['name']]
+ return [to_bytes(i, errors='surrogate_or_strict') for i in cmd]
+ if self.action == 'unmount':
+ cmd = ['unmount', self.params['name']]
+ return [to_bytes(i, errors='surrogate_or_strict') for i in cmd]
def _create_action(self):
cmd = [self.action, self.params['name']]
@@ -326,6 +335,7 @@ class PodmanVolume:
self.module = module
self.name = name
self.stdout, self.stderr = '', ''
+ self.mount_point = None
self.info = self.get_info()
self.version = self._get_podman_version()
self.diff = {}
@@ -380,7 +390,7 @@ class PodmanVolume:
"""Perform action with volume.
Arguments:
- action {str} -- action to perform - create, stop, delete
+ action {str} -- action to perform - create, delete, mount, unmount
"""
b_command = PodmanVolumeModuleParams(action,
self.module.params,
@@ -389,11 +399,14 @@ class PodmanVolume:
).construct_command_from_params()
full_cmd = " ".join([self.module.params['executable'], 'volume']
+ [to_native(i) for i in b_command])
+ # if not running as root, mount must go through 'podman unshare'
+ if os.getuid() != 0 and action == 'mount':
+ full_cmd = f"{self.module.params['executable']} unshare {full_cmd}"
self.module.log("PODMAN-VOLUME-DEBUG: %s" % full_cmd)
self.actions.append(full_cmd)
if not self.module.check_mode:
rc, out, err = self.module.run_command(
- [self.module.params['executable'], b'volume'] + b_command,
+ full_cmd,
expand_user_and_vars=False)
self.stdout = out
self.stderr = err
@@ -401,6 +414,9 @@ class PodmanVolume:
self.module.fail_json(
msg="Can't %s volume %s" % (action, self.name),
stdout=out, stderr=err)
+ # in case of mount/unmount, return path to the volume from stdout
+ if action in ['mount']:
+ self.mount_point = out.strip()
def delete(self):
"""Delete the volume."""
@@ -410,6 +426,14 @@ class PodmanVolume:
"""Create the volume."""
self._perform_action('create')
+ def mount(self):
+ """Delete the volume."""
+ self._perform_action('mount')
+
+ def unmount(self):
+ """Create the volume."""
+ self._perform_action('unmount')
+
def recreate(self):
"""Recreate the volume."""
self.delete()
@@ -468,6 +492,8 @@ class PodmanVolumeManager:
states_map = {
'present': self.make_present,
'absent': self.make_absent,
+ 'mounted': self.make_mount,
+ 'unmounted': self.make_unmount,
'quadlet': self.make_quadlet,
}
process_action = states_map[self.state]
@@ -501,6 +527,26 @@ class PodmanVolumeManager:
'podman_actions': self.volume.actions})
self.module.exit_json(**self.results)
+ def make_mount(self):
+ """Run actions if desired state is 'mounted'."""
+ if not self.volume.exists:
+ self.volume.create()
+ self.results['actions'].append('created %s' % self.volume.name)
+ self.volume.mount()
+ self.results['actions'].append('mounted %s' % self.volume.name)
+ if self.volume.mount_point:
+ self.results.update({'mount_point': self.volume.mount_point})
+ self.update_volume_result()
+
+ def make_unmount(self):
+ """Run actions if desired state is 'unmounted'."""
+ if self.volume.exists:
+ self.volume.unmount()
+ self.results['actions'].append('unmounted %s' % self.volume.name)
+ self.update_volume_result()
+ else:
+ self.module.fail_json(msg="Volume %s does not exist!" % self.name)
+
def make_quadlet(self):
results_update = create_quadlet_state(self.module, "volume")
self.results.update(results_update)
@@ -511,7 +557,7 @@ def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default="present",
- choices=['present', 'absent', 'quadlet']),
+ choices=['present', 'absent', 'mounted', 'unmounted', 'quadlet']),
name=dict(type='str', required=True),
label=dict(type='dict', required=False),
driver=dict(type='str', required=False),
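The new mounted/unmounted states expose the volume's path on the host through a mount_point return value (set from the podman volume mount output above). A hypothetical usage sketch; the volume and file names are illustrative:

- name: Mount a volume and capture its mount point
  containers.podman.podman_volume:
    name: cache                              # hypothetical volume name
    state: mounted
  register: vol

- name: Place a file inside the mounted volume
  ansible.builtin.copy:
    src: warmup.dat                          # hypothetical file
    dest: "{{ vol.mount_point }}/warmup.dat"

- name: Unmount the volume again
  containers.podman.podman_volume:
    name: cache
    state: unmounted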
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml
index 023878349..0babf0395 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_all.yml
@@ -162,10 +162,9 @@
command: 1h
register: test14
-# We can't guess the default log path
- name: Check info with default log opt path
assert:
- that: test14 is not changed
+ that: test14 is changed
- name: Run container with all log-opts
containers.podman.podman_container:
@@ -238,8 +237,8 @@
that:
- test15 is changed
- test16 is not changed
- - test17 is not changed
- - test18 is not changed
+ - test17 is changed
+ - test18 is changed
- test19 is changed
- name: Run container with etc_hosts
@@ -381,7 +380,7 @@
- name: Check info of second container with PID of container ID again
assert:
- that: test29 is not changed
+ that: test29 is changed
- name: Remove dependent test container
containers.podman.podman_container:
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_bool_list_dict.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_bool_list_dict.yml
new file mode 100644
index 000000000..2d4771ceb
--- /dev/null
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_bool_list_dict.yml
@@ -0,0 +1,468 @@
+- name: Remove container
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ state: absent
+
+- name: Run container with boolean key-value type - 1
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultx1
+
+- name: Run container with boolean key-value type - 2
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultx2
+
+- name: Run container with boolean key-value type - 3
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ tls_verify: false
+ register: resultx3
+
+- name: Run container with boolean key-value type - 4
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ tls_verify: false
+ register: resultx4
+
+- name: Run container with boolean key-value type - 5
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ tls_verify: true
+ register: resultx5
+
+- name: Run container with boolean key-value type - 6
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ tls_verify: true
+ register: resultx6
+
+- name: Run container with boolean key-value type - 7
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ tls_verify: false
+ register: resultx7
+
+- name: Run container with boolean key-value type - 8
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ # tls_verify: false
+ register: resultx8
+
+- name: Run container with boolean key-value type - 9
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ tls_verify: true
+ register: resultx9
+
+- name: Run container with boolean key-value type - 10
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ # tls_verify: true
+ register: resultx10
+
+- name: Assert checks
+ assert:
+ that:
+ - resultx1.changed == true
+ - resultx2.changed == false
+ - resultx3.changed == true
+ - resultx4.changed == false
+ - resultx5.changed == true
+ - resultx6.changed == false
+ - resultx7.changed == true
+ - resultx8.changed == true
+ - resultx9.changed == true
+ - resultx10.changed == true
+
+- name: Remove container
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ state: absent
+
+- name: Run container with list type - 1
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ unsetenv:
+ - HOME
+ - TERM
+ - USER
+ register: resultq1
+
+- name: Run container with list type - 2
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ unsetenv:
+ - HOME
+ - TERM
+ - USER
+ register: resultq2
+
+- name: Run container with list type - 3
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ unsetenv:
+ - HOME
+ register: resultq3
+
+- name: Run container with list type - 4
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ unsetenv:
+ - HOME
+ register: resultq4
+
+- name: Run container with list type - 5
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultq5
+
+- name: Run container with list type - 6
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultq6
+
+- name: Run container with list type - 7
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ unsetenv:
+ - USER
+ register: resultq7
+
+- name: Run container with list type - 8
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ unsetenv:
+ - USER
+ register: resultq8
+
+- name: Run container with list type - 9
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultq9
+
+- name: Assert checks
+ assert:
+ that:
+ - resultq1.changed == true
+ - resultq2.changed == false
+ - resultq3.changed == true
+ - resultq4.changed == false
+ - resultq5.changed == true
+ - resultq6.changed == false
+ - resultq7.changed == true
+ - resultq8.changed == false
+ - resultq9.changed == true
+
+- name: Remove container
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ state: absent
+
+- name: Run container with boolean trigger type - 1
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resulty1
+
+- name: Run container with boolean trigger type - 2
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resulty2
+
+- name: Run container with boolean trigger type - 3
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ no_healthcheck: false
+ register: resulty3
+
+- name: Run container with boolean trigger type - 4
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ no_healthcheck: false
+ register: resulty4
+
+- name: Run container with boolean trigger type - 5
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ no_healthcheck: true
+ register: resulty5
+
+- name: Run container with boolean trigger type - 6
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ no_healthcheck: true
+ register: resulty6
+
+- name: Run container with boolean trigger type - 7
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ no_healthcheck: false
+ register: resulty7
+
+- name: Run container with boolean trigger type - 8
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ # no_healthcheck: false
+ register: resulty8
+
+- name: Run container with boolean trigger type - 9
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ no_healthcheck: true
+ register: resulty9
+
+- name: Run container with boolean trigger type - 10
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ # no_healthcheck: true
+ register: resulty10
+
+- name: Assert checks
+ assert:
+ that:
+ - resulty1.changed == true
+ - resulty2.changed == false
+ - resulty3.changed == false
+ - resulty4.changed == false
+ - resulty5.changed == true
+ - resulty6.changed == false
+ - resulty7.changed == true
+ - resulty8.changed == false
+ - resulty9.changed == true
+ - resulty10.changed == true
+
+
+- name: Remove container
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ state: absent
+
+- name: Run container with dict type - 1
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ log_opt:
+ max_size: 10m
+ tag: test
+ path: /var/log
+ register: resultv1
+
+- name: Run container with dict type - 2
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ log_opt:
+ max_size: 10m
+ tag: test
+ path: /var/log
+ register: resultv2
+
+- name: Run container with dict type - 3
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ log_opt:
+ max_size: 10m
+ register: resultv3
+
+- name: Run container with dict type - 4
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ log_opt:
+ max_size: 10m
+ register: resultv4
+
+- name: Run container with dict type - 5
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultv5
+
+- name: Run container with dict type - 6
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultv6
+
+- name: Run container with dict type - 7
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ log_opt:
+ tag: test
+ register: resultv7
+
+- name: Run container with dict type - 8
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ log_opt:
+ tag: test
+ register: resultv8
+
+- name: Run container with dict type - 9
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "idempotency"
+ image: "{{ idem_image }}"
+ state: present
+ command: 1h
+ register: resultv9
+
+- name: Assert checks
+ assert:
+ that:
+      - resultv1.changed == true
+      - resultv2.changed == false
+      - resultv3.changed == true
+      - resultv4.changed == false
+      - resultv5.changed == true
+      - resultv6.changed == false
+      - resultv7.changed == true
+      - resultv8.changed == false
+      - resultv9.changed == true
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml
index 81b7baf06..29ad10c6d 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_ports.yml
@@ -33,7 +33,7 @@
- "4444:4444/tcp"
- "1212:5555"
- "8888:19191/udp"
- - "1900:1900/udp"
+ - "19000:19000/udp"
- "127.0.0.1:7671:7676/udp"
- "127.0.0.1:12122:8876/udp"
- "127.0.0.1:13122:8871/tcp"
@@ -41,6 +41,10 @@
- "127.0.0.2:43423:8872/tcp"
- "127.0.0.3:43423:8872"
- "0.0.0.0:15674:7846"
+ - 127.0.0.1:20000-20010:20000-20010/udp
+ - 0.0.0.0:20000-20010:20000-20010/tcp
+ - "10000-10010:10000-10010/udp"
+ - "[::1]:3001-3003:3001-3003"
register: test2
- name: check test2
@@ -57,7 +61,7 @@
- "4444:4444/tcp"
- "1212:5555"
- "8888:19191/udp"
- - "1900:1900/udp"
+ - "19000:19000/udp"
- "127.0.0.1:7671:7676/udp"
- "127.0.0.1:12122:8876/udp"
- "127.0.0.1:13122:8871/tcp"
@@ -65,6 +69,10 @@
- "127.0.0.2:43423:8872/tcp"
- "127.0.0.3:43423:8872"
- "0.0.0.0:15674:7846"
+ - 127.0.0.1:20000-20010:20000-20010/udp
+ - 0.0.0.0:20000-20010:20000-20010/tcp
+ - "10000-10010:10000-10010/udp"
+ - "[::1]:3001-3003:3001-3003"
register: test3
- name: check test3
@@ -95,7 +103,7 @@
- name: check test5
assert:
- that: test5 is changed
+ that: test5 is not changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -149,7 +157,7 @@
- name: check test9
assert:
- that: test9 is not changed
+ that: test9 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -164,7 +172,7 @@
- name: check test9a
assert:
- that: test9a is not changed
+ that: test9a is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -213,7 +221,7 @@
- name: check test11
assert:
- that: test11 is not changed
+ that: test11 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -226,7 +234,7 @@
- name: check test11a
assert:
- that: test11a is not changed
+ that: test11a is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml
index 98586aece..b806120d1 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_stopsignal.yml
@@ -34,7 +34,20 @@
- name: check test2
assert:
- that: test2 is not changed
+ that: test2 is changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: "{{ idem_image }}"
+ name: idempotency
+ state: present
+ stop_signal: 9
+ command: 1h
+ register: test2a
+
+- name: check test2a
+ assert:
+ that: test2a is not changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -46,7 +59,7 @@
- name: check test3
assert:
- that: test3 is not changed
+ that: test3 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -122,7 +135,7 @@
- name: check test9
assert:
- that: test9 is not changed
+ that: test9 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -184,7 +197,7 @@
- name: check test11
assert:
- that: test11 is not changed
+ that: test11 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml
index 6be03bb1a..9b82ae332 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_users.yml
@@ -34,7 +34,7 @@
- name: check test2
assert:
- that: test2 is not changed
+ that: test2 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -46,7 +46,7 @@
- name: check test3
assert:
- that: test3 is not changed
+ that: test3 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -122,7 +122,7 @@
- name: check test9
assert:
- that: test9 is not changed
+ that: test9 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml
index 3fff4e34a..26e7040f8 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_volumes.yml
@@ -126,7 +126,7 @@
- name: check test9
assert:
- that: test9 is not changed
+ that: test9 is changed
- name: Create volumes
shell: |
@@ -190,7 +190,7 @@
- name: check test13
assert:
- that: test13 is not changed
+ that: test13 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -253,3 +253,99 @@
- name: check test17
assert:
that: test17 is not changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: alpine
+ name: idempotency1
+ state: present
+ command: sleep 1h
+ mounts:
+ - "type=bind,source=/tmp,destination=/tmp"
+ register: test18
+
+- name: check test18
+ assert:
+ that: test18 is changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: alpine
+ name: idempotency1
+ state: present
+ command: sleep 1h
+ mounts:
+ - "type=bind,source=/tmp,destination=/tmp"
+ register: test19
+
+- name: check test19
+ assert:
+ that: test19 is not changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: alpine
+ name: idempotency1
+ state: present
+ command: sleep 1h
+ mounts:
+ - "type=bind,source=/tmp,destination=/tmp"
+ - "type=bind,source=/var,destination=/var"
+ volumes:
+ - /opt:/data
+ - "local_volume2:/data2"
+ register: test20
+
+- name: check test20
+ assert:
+ that: test20 is changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: alpine
+ name: idempotency1
+ state: present
+ command: sleep 1h
+ mounts:
+ - "type=bind,source=/tmp,destination=/tmp"
+ - "type=bind,source=/var,destination=/var"
+ volumes:
+ - /opt:/data
+ - "local_volume2:/data2"
+ register: test21
+
+- name: check test21
+ assert:
+ that: test21 is not changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: alpine
+ name: idempotency1
+ state: present
+ command: sleep 1h
+ mounts:
+ - "type=bind,source=/tmp,destination=/tmp"
+ volumes:
+ - /opt:/data
+ register: test22
+
+- name: check test22
+ assert:
+ that: test22 is changed
+
+- containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ image: alpine
+ name: idempotency1
+ state: present
+ command: sleep 1h
+ mounts:
+ - "type=bind,source=/tmp,destination=/tmp"
+ volumes:
+ - /opt:/data
+ register: test23
+
+- name: check test23
+ assert:
+ that: test23 is not changed
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml
index ef1103188..f413090cc 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/idem_workdir.yml
@@ -34,13 +34,14 @@
- name: check test2
assert:
- that: test2 is not changed
+ that: test2 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
image: "{{ idem_image }}"
name: idempotency
state: present
+ workdir: /work
command: 1h
register: test3
@@ -122,7 +123,7 @@
- name: check test9
assert:
- that: test9 is not changed
+ that: test9 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
@@ -184,7 +185,7 @@
- name: check test11
assert:
- that: test11 is not changed
+ that: test11 is changed
- containers.podman.podman_container:
executable: "{{ test_executable | default('podman') }}"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/main.yml
index 02b66d250..2d27906a5 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/main.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/main.yml
@@ -45,6 +45,7 @@
- name: Test idempotency of systemd generation
include_tasks: idem_systemd.yml
+ when: podman_version | int > 3
- name: Test idempotency of other settings
include_tasks: idem_all.yml
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml
index f41587367..0d9032754 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/root-podman.yml
@@ -162,7 +162,7 @@
- name: Check that it is recreated
assert:
that:
- - info7 is not changed
+ - info7 is changed
- name: Run container with publishing ports and ipv6
containers.podman.podman_container:
@@ -178,6 +178,7 @@
- "127.0.0.1:7671:7676/udp"
- "127.0.0.3:43423:8872"
- "[::1]:34523:35425"
+ - "40001-40010"
register: info8
- name: Check that it is recreated
@@ -199,6 +200,7 @@
- "127.0.0.1:7671:7676/udp"
- "127.0.0.3:43423:8872"
- "[::1]:34523:35425"
+ - "40001-40010"
register: info9
- name: Check that it is recreated
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml
index 3c16efac2..74c21ff4d 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_container_idempotency/tasks/rootless-podman-network.yml
@@ -109,7 +109,7 @@
that:
- info5 is changed
- - when: podman_version < 5
+ - when: podman_version | int < 5
name: Run container tasks with slirp4netns options before v5
block:
- name: Run container with slirp4netns options
@@ -161,7 +161,7 @@
- info8 is changed
- - when: podman_version >= 5
+ - when: podman_version | int >= 5
name: Run container tasks with pasta options for v5 and later
block:
- name: Run container with pasta options
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/idem_push.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/idem_push.yml
new file mode 100644
index 000000000..064ce8fc4
--- /dev/null
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/idem_push.yml
@@ -0,0 +1,534 @@
+---
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ ignore_errors: true
+ register: img_result1
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result1 is failed
+ - "'Destination must be a full URL or path to a directory' in img_result1.msg"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ ignore_errors: true
+ register: img_result2
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result2.stderr"
+ - "'push quay.io/testing/testimage:latest quay.io/testing/testimage:latest' in img_result2.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage:draft
+ pull: false
+ push: true
+ ignore_errors: true
+ register: img_result2a
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result2a.stderr"
+ - "'push quay.io/testing/testimage:draft quay.io/testing/testimage:draft' in img_result2a.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/testimage
+ ignore_errors: true
+ register: img_result3
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result3.stderr"
+ - "'push quay.io/testing/testimage:latest quay.io/testing/testimage' in img_result3.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage:draft
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/testimage
+ ignore_errors: true
+ register: img_result3a
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result3a.stderr"
+ - "'push quay.io/testing/testimage:draft quay.io/testing/testimage' in img_result3a.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage:draft
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/testimage:nodraft
+ ignore_errors: true
+ register: img_result3b
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result3b.stderr"
+ - "'push quay.io/testing/testimage:draft quay.io/testing/testimage:nodraft' in img_result3b.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ tag: testtag
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/testimage
+ ignore_errors: true
+ register: img_result4
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result4.stderr"
+ - "'push testimage2:testtag quay.io/testing/testimage' in img_result4.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ tag: testtag
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing
+ ignore_errors: true
+ register: img_result4a
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result4a.stderr"
+ - "'push testimage2:testtag quay.io/testing/testimage2:testtag' in img_result4a.actions"
+
+- containers.podman.podman_image:
+ name: testimage2:testtag
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/
+ ignore_errors: true
+ register: img_result4b
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result4b.stderr"
+ - "'push testimage2:testtag quay.io/testing/testimage2:testtag' in img_result4b.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/
+ ignore_errors: true
+ register: img_result4c
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result4c.stderr"
+ - "'push testimage2:latest quay.io/testing/testimage2:latest' in img_result4c.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: /tmp/testimage-dir
+ transport: dir
+ ignore_errors: true
+ register: img_result5
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result5 is changed
+ - "'push testimage2:latest dir:/tmp/testimage-dir' in img_result5.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: dir:/tmp/testimage-dir1
+ ignore_errors: true
+ register: img_result6
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result6 is changed
+ - "'push testimage2:latest dir:/tmp/testimage-dir1' in img_result6.actions"
+
+- name: Remove directories
+ file:
+ path: "{{ item }}"
+ state: absent
+ loop:
+ - /tmp/test-docker-arch
+ - /tmp/test-docker-arch1
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: /tmp/test-docker-arch
+ transport: docker-archive
+ ignore_errors: true
+ register: img_result7
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result7 is changed
+ - "'push testimage2:latest docker-archive:/tmp/test-docker-arch' in img_result7.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: docker-archive:/tmp/test-docker-arch1
+ ignore_errors: true
+ register: img_result8
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result8 is changed
+ - "'push testimage2:latest docker-archive:/tmp/test-docker-arch1' in img_result8.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: /tmp/test-oci-arch
+ transport: oci-archive
+ ignore_errors: true
+ register: img_result9
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result9 is changed
+ - "'push testimage2:latest oci-archive:/tmp/test-oci-arch' in img_result9.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: oci-archive:/tmp/test-oci-arch1
+ ignore_errors: true
+ register: img_result10
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result10 is changed
+ - "'push testimage2:latest oci-archive:/tmp/test-oci-arch1' in img_result10.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/testimage1
+ transport: docker
+ ignore_errors: true
+ register: img_result11
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result11.stderr"
+ - "'push testimage2:latest docker://quay.io/testing/testimage1' in img_result11.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: docker://quay.io/testing/testimage22
+ ignore_errors: true
+ register: img_result12
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result12.stderr"
+ - "'push testimage2:latest docker://quay.io/testing/testimage22' in img_result12.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: dockerimage
+ transport: docker-daemon
+ ignore_errors: true
+ register: img_result13
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result13 is changed
+ - "'push testimage2:latest docker-daemon:dockerimage:latest' in img_result13.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: docker-daemon:dockerimage2
+ ignore_errors: true
+ register: img_result14
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result14 is failure
+ - "'push testimage2:latest docker-daemon:dockerimage2' in img_result14.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: dockerimage:tagged1
+ transport: docker-daemon
+ ignore_errors: true
+ register: img_result15
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result15 is success
+ - "'push testimage2:latest docker-daemon:dockerimage:tagged1' in img_result15.actions"
+
+- containers.podman.podman_image:
+ name: testimage2
+ pull: false
+ push: true
+ push_args:
+ dest: docker-daemon:dockerimage:tagged
+ ignore_errors: true
+ register: img_result16
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result16 is success
+ - "'push testimage2:latest docker-daemon:dockerimage:tagged' in img_result16.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: /tmp/testimage-dir25
+ transport: dir
+ ignore_errors: true
+ register: img_result17
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result17 is success
+ - "'push quay.io/testing/testimage:latest dir:/tmp/testimage-dir25' in img_result17.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: dir:/tmp/testimage-dir15
+ ignore_errors: true
+ register: img_result18
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result18 is success
+ - "'push quay.io/testing/testimage:latest dir:/tmp/testimage-dir15' in img_result18.actions"
+
+- name: Remove directories for docker-archive
+ file:
+ path: "{{ item }}"
+ state: absent
+ loop:
+ - /tmp/test-docker-arch5
+ - /tmp/test-docker-arch15
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: /tmp/test-docker-arch5
+ transport: docker-archive
+ ignore_errors: true
+ register: img_result19
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result19 is success
+ - "'push quay.io/testing/testimage:latest docker-archive:/tmp/test-docker-arch5' in img_result19.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: docker-archive:/tmp/test-docker-arch15
+ ignore_errors: true
+ register: img_result20
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result20 is success
+ - "'push quay.io/testing/testimage:latest docker-archive:/tmp/test-docker-arch15' in img_result20.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: /tmp/test-oci-arch5
+ transport: oci-archive
+ ignore_errors: true
+ register: img_result21
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result21 is success
+ - "'push quay.io/testing/testimage:latest oci-archive:/tmp/test-oci-arch5' in img_result21.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: oci-archive:/tmp/test-oci-arch15
+ ignore_errors: true
+ register: img_result22
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result22 is success
+ - "'push quay.io/testing/testimage:latest oci-archive:/tmp/test-oci-arch15' in img_result22.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: quay.io/testing/testimage5
+ transport: docker
+ ignore_errors: true
+ register: img_result23
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result23.stderr"
+ - "'push quay.io/testing/testimage:latest docker://quay.io/testing/testimage5' in img_result23.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: docker://quay.io/testing/testimage22
+ ignore_errors: true
+ register: img_result24
+
+- name: Check outputs
+ assert:
+ that:
+ - "'authentication required' in img_result24.stderr"
+ - "'push quay.io/testing/testimage:latest docker://quay.io/testing/testimage22' in img_result24.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: dockerimage5
+ transport: docker-daemon
+ ignore_errors: true
+ register: img_result25
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result25 is success
+ - "'push quay.io/testing/testimage:latest docker-daemon:dockerimage5:latest' in img_result25.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: docker-daemon:dockerimage25
+ ignore_errors: true
+ register: img_result26
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result26 is failed
+ - "'push quay.io/testing/testimage:latest docker-daemon:dockerimage25' in img_result26.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: dockerimage:tagged15
+ transport: docker-daemon
+ ignore_errors: true
+ register: img_result27
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result27 is success
+ - "'push quay.io/testing/testimage:latest docker-daemon:dockerimage:tagged15' in img_result27.actions"
+
+- containers.podman.podman_image:
+ name: quay.io/testing/testimage
+ pull: false
+ push: true
+ push_args:
+ dest: docker-daemon:dockerimage:tagged5
+ ignore_errors: true
+ register: img_result28
+
+- name: Check outputs
+ assert:
+ that:
+ - img_result28 is success
+ - "'push quay.io/testing/testimage:latest docker-daemon:dockerimage:tagged5' in img_result28.actions"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/main.yml
index 96133b9da..dca1ee53e 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/main.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_image/tasks/main.yml
@@ -209,15 +209,34 @@
register: oci_build3
ignore_errors: true
- - name: Build OCI image, point to location of Containerfile
+ - name: Build OCI image, point to location of Containerfile without path
containers.podman.podman_image:
executable: "{{ test_executable | default('podman') }}"
name: testimage2
- path: /var/tmp/build/subdir
+ state: build
build:
file: /var/tmp/build/Dockerfile
register: oci_build4
+ - name: Build OCI image, point to location of Containerfile and path
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: testimage3
+ path: /var/tmp/build
+ build:
+ file: /var/tmp/build/Dockerfile
+ register: oci_build5
+
+  - name: Build OCI image with no Containerfile and no path
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: testimage5x
+ state: build
+ build:
+ format: oci
+ register: oci_build6
+ ignore_errors: true
+
- name: Inspect first image
containers.podman.podman_image_info:
executable: "{{ test_executable | default('podman') }}"
@@ -237,7 +256,9 @@
- oci_build2 is not changed
- oci_build3 is not changed
- oci_build3 is failed
- - oci_build4 is changed
+ - oci_build4 is success
+ - oci_build5 is success
+ - oci_build6 is failed
- "'localhost/testimage:latest' in testimage_info.images[0]['RepoTags'][0]"
- "'localhost/testimage2:latest' in testimage2_info.images[0]['RepoTags'][0]"
- "'no such file or directory' in oci_build3.msg"
@@ -329,6 +350,72 @@
- item.Architecture == "arm"
loop: "{{ imageinfo_arch.images }}"
+ - name: Build Docker image
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: quay.io/testing/testimage
+ path: /var/tmp/build
+ register: build_image1
+
+ - name: Build Docker image - 2
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: quay.io/testing/testimage:draft
+ path: /var/tmp/build
+
+  - name: Build Docker image with a custom tag
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: testimage2:testtag
+ path: /var/tmp/build
+ register: build_image2
+
+ - name: Build image from a given Containerfile
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: testimage2:customfile
+ path: "{{ playbook_dir }}"
+ build:
+ container_file: |-
+ FROM quay.io/coreos/alpine-sh
+ RUN echo "Hello World" > /tmp/hello.txt
+ register: build_custom1
+
+ - name: Build image from a given Containerfile w/o path
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: testimage2:customfile2
+ state: build
+ build:
+ container_file: |-
+ FROM quay.io/coreos/alpine-sh
+ RUN echo "Hello2 World" > /tmp/hello2.txt
+ force: true
+ register: build_custom2
+
+ - name: Build image from a given Containerfile and file (fail)
+ containers.podman.podman_image:
+ executable: "{{ test_executable | default('podman') }}"
+ name: testimage2:failme
+ state: build
+ build:
+ container_file: |-
+ FROM quay.io/coreos/alpine-sh
+ RUN echo "Hello2 World" > /tmp/hello2.txt
+ file: /var/tmp/build/Dockerfile
+ force: true
+ register: fail_custom_image
+ ignore_errors: true
+
+ - name: Check if image was built properly
+ assert:
+ that:
+ - build_custom1 is changed
+ - build_custom2 is changed
+ - fail_custom_image is failed
+
+ - include_tasks: idem_push.yml
+
- name: Create a Quadlet for image with filename
containers.podman.podman_image:
executable: "{{ test_executable | default('podman') }}"
@@ -469,4 +556,8 @@
- quay.io/coreos/etcd:v3.3.11
- localhost/testimage
- localhost/testimage2
+ - localhost/testimage2:testtag
+ - localhost/testimage3
- localhost/dockerimage
+ - quay.io/testing/testimage
+ - quay.io/testing/testimage:draft
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_network/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_network/tasks/main.yml
index 272ac6b43..4bfb4b0c1 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_network/tasks/main.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_network/tasks/main.yml
@@ -328,20 +328,20 @@
that:
- info16 is not changed
- - name: Create network with IPv6 'exploded'
+ - name: Create network with IPv6 'exploded' - dual stack
containers.podman.podman_network:
executable: "{{ test_executable | default('podman') }}"
name: "{{ network_name }}"
ipv6: true
- subnet: fd4f:552c:830f:0000::/64
+ subnet: fd4f:552c:830f::/64
state: present
- - name: Create network with IPv6 'exploded' again
+ - name: Create network with IPv6 'exploded' - dual stack again
containers.podman.podman_network:
executable: "{{ test_executable | default('podman') }}"
name: "{{ network_name }}"
ipv6: true
- subnet: fd4f:552c:830f:0000::/64
+ subnet: fd4f:552c:830f::/64
state: present
register: info17
@@ -350,6 +350,358 @@
that:
- info17 is not changed
+ - name: Create network with DNS
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ dns:
+ - 1.1.1.1
+ - 2.2.2.2
+ register: resultn2
+
+ - name: Check output - Create network with DNS
+ assert:
+ that:
+ - resultn2 is changed
+
+ - name: Create network with DNS again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ dns:
+ - 1.1.1.1
+ - 2.2.2.2
+ register: resultn3
+
+ - name: Check output - Create network with DNS again
+ assert:
+ that:
+ - resultn3 is not changed
+
+ - name: Create network with DNS - different
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ dns:
+ - 1.1.1.1
+ register: resultn4
+
+ - name: Check output - Create network with DNS - different
+ assert:
+ that:
+ - resultn4 is changed
+
+ - name: Create network with routes
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ route:
+ - 1.1.1.1/32,2.2.2.2
+ - 3.3.3.0/24,4.4.4.4,240
+ - 7.7.0.0/16,9.9.9.9,100
+ register: resultn5
+
+ - name: Check output - Create network with routes
+ assert:
+ that:
+ - resultn5 is changed
+
+ - name: Create network with routes again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ route:
+ - 1.1.1.1/32,2.2.2.2
+ - 3.3.3.0/24,4.4.4.4,240
+ - 7.7.0.0/16,9.9.9.9,100
+ register: resultn6
+
+ - name: Check output - Create network with routes again
+ assert:
+ that:
+ - resultn6 is not changed
+
+ - name: Create network with routes - different
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ route:
+ - 1.1.1.1/32,2.2.2.2
+ - 3.3.3.0/24,4.4.4.4,280
+ register: resultn7
+
+ - name: Check output - Create network with routes - different
+ assert:
+ that:
+ - resultn7 is changed
+
+ - name: Create network with IPAM driver none
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ ipam_driver: none
+ register: resultn8
+
+ - name: Check output - Create network with IPAM driver none
+ assert:
+ that:
+ - resultn8 is changed
+
+ - name: Create network with IPAM driver none again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ ipam_driver: none
+ register: resultn9
+
+ - name: Check output - Create network with IPAM driver none again
+ assert:
+ that:
+ - resultn9 is not changed
+
+ - name: Create network without IPAM driver
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ register: resultn10
+
+ - name: Check output - Create network without IPAM driver
+ assert:
+ that:
+ - resultn10 is not changed
+
+ - name: Create network with IPAM driver host-local
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: present
+ ipam_driver: "host-local"
+ register: resultn11
+
+ - name: Check output - Create network with IPAM driver host-local
+ assert:
+ that:
+ - resultn11 is changed
+
+ - name: Delete network
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ state: absent
+
+ - name: Create a network with multiple subnets
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.44
+ ip_range: 10.44.44.128/30
+ - subnet: 10.22.22.0/24
+ gateway: 10.22.22.244
+ - subnet: 10.11.1.0/24
+ gateway: 10.11.1.33
+
+ - name: Create a network with multiple subnets again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.44
+ ip_range: 10.44.44.128/30
+ - subnet: 10.22.22.0/24
+ gateway: 10.22.22.244
+ - subnet: 10.11.1.0/24
+ gateway: 10.11.1.33
+ register: net2conf
+
+ - name: Check output - Create a network with multiple subnets again
+ assert:
+ that:
+ - net2conf is not changed
+
+ - name: Create a network with multiple subnets - different
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.44
+ ip_range: 10.44.44.128/30
+ - subnet: 10.11.12.0/24
+ gateway: 10.11.12.33
+ register: net2conf1
+
+ - name: Check output - Create a network with multiple subnets - different
+ assert:
+ that:
+ - net2conf1 is changed
+
+ - name: Create a network with multiple subnets - different gateway
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.41
+ ip_range: 10.44.44.128/30
+ - subnet: 10.11.12.0/24
+ gateway: 10.11.12.33
+ register: net2conf2
+
+ - name: Check output - Create a network with multiple subnets - different gateway
+ assert:
+ that:
+ - net2conf2 is changed
+
+ - name: Create a network with multiple subnets again - same
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.41
+ ip_range: 10.44.44.128/30
+ - subnet: 10.11.12.0/24
+ gateway: 10.11.12.33
+ register: net2conf3
+
+ - name: Check output - Create a network with multiple subnets again - same
+ assert:
+ that:
+ - net2conf3 is not changed
+
+ - name: Create a network with single subnet
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ subnet: 10.44.44.0/24
+ gateway: 10.44.44.41
+ register: net2conf4
+
+ - name: Check output - Create a network with single subnet
+ assert:
+ that:
+ - net2conf4 is changed
+
+ - name: Create a network with single subnet - again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ subnet: 10.44.44.0/24
+ gateway: 10.44.44.41
+ register: net2conf5
+
+ - name: Check output - Create a network with single subnet - again
+ assert:
+ that:
+ - net2conf5 is not changed
+
+ - name: Create a network with single subnet and changed gateway
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ subnet: 10.44.44.0/24
+ gateway: 10.44.44.42
+ register: net2conf6
+
+ - name: Check output - Create a network with single subnet and changed gateway
+ assert:
+ that:
+ - net2conf6 is changed
+
+ - name: Create a network with single subnet - without gateway
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ subnet: 10.44.44.0/24
+ register: net2conf7
+
+ - name: Check output - Create a network with single subnet - without gateway
+ assert:
+ that:
+ - net2conf7 is not changed
+
+ - name: Create a network with multiple subnets
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.44
+ ip_range: 10.44.44.128/30
+ - subnet: 10.22.22.0/24
+ gateway: 10.22.22.244
+ - subnet: 10.17.1.0/24
+ gateway: 10.17.1.33
+
+ - name: Create a network with multiple subnets again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 10.44.44.0/24
+ gateway: 10.44.44.44
+ ip_range: 10.44.44.128/30
+ - subnet: 10.22.22.0/24
+ gateway: 10.22.22.244
+ - subnet: 10.17.1.0/24
+ gateway: 10.17.1.33
+ register: net2conf
+
+ - name: Check output - Create a network with multiple subnets again
+ assert:
+ that:
+ - net2conf is not changed
+
+  - name: Create an IPv6 network with multiple subnets
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 2002:db8:1::/64
+ gateway: 2002:db8:1::12
+ ip_range: 2002:db8:1::128/68
+ - subnet: 2004:db9:1::/64
+ gateway: 2004:db9:1::15
+ - subnet: 10.15.11.0/24
+ gateway: 10.15.11.124
+ register: net6conf
+
+  - name: Check output - Create an IPv6 network with multiple subnets
+ assert:
+ that:
+ - net6conf is changed
+
+  - name: Create an IPv6 network with multiple subnets again
+ containers.podman.podman_network:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ network_name }}"
+ net_config:
+ - subnet: 2002:db8:1::/64
+ gateway: 2002:db8:1::12
+ ip_range: 2002:db8:1::128/68
+ - subnet: 2004:db9:1::/64
+ gateway: 2004:db9:1::15
+ - subnet: 10.15.11.0/24
+ gateway: 10.15.11.124
+ register: net6conf1
+
+  - name: Check output - Create an IPv6 network with multiple subnets again
+ assert:
+ that:
+ - net6conf1 is not changed
+
- name: Create a Quadlet for network with filename
containers.podman.podman_network:
executable: "{{ test_executable | default('podman') }}"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/idempotency.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/idempotency.yml
new file mode 100644
index 000000000..26c3bd8f9
--- /dev/null
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/idempotency.yml
@@ -0,0 +1,363 @@
+- name: Test podman pod idempotency
+ block:
+
+ - name: Delete all pods leftovers from tests
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ item }}"
+ state: absent
+ loop:
+ - "podidem1"
+ - "podidem2"
+
+ - name: Delete all container leftovers from tests
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ item }}"
+ state: absent
+ loop:
+ - "container1"
+ - "container2"
+
+ - name: Create pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: created
+ register: podidem1_info
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info is changed
+ - >-
+ (podidem1_info.pod['State']['status'] is defined and
+ podidem1_info.pod['State']['status'] == 'Created') or
+ (podidem1_info.pod['State']['status'] is not defined and
+ podidem1_info.pod['State'] == 'Created')
+
+ - name: Create pod again
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: created
+ register: podidem1_info2
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info2 is not changed
+
+ - name: Start pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: started
+ register: podidem1_info3
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info3 is changed
+ - >-
+ (podidem1_info3.pod['State']['status'] is defined and
+ podidem1_info3.pod['State']['status'] == 'Running') or
+ (podidem1_info3.pod['State']['status'] is not defined and
+ podidem1_info3.pod['State'] == 'Running')
+
+ - name: Start pod again
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: started
+ register: podidem1_info4
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info4 is not changed
+
+ - name: Stop pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: stopped
+ register: podidem1_info5
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info5 is changed
+
+ - name: Stop pod again
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: stopped
+ register: podidem1_info6
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info6 is not changed
+
+ - name: Create stopped pod again
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem1
+ state: created
+ register: podidem1_info7
+
+ - name: Check info
+ assert:
+ that:
+ - podidem1_info7 is not changed
+
+ - name: Create pod with multiple options and containers
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: created
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.1
+ volumes:
+ - /tmp:/tmp/:ro
+ - /var/run/://var/run
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+ register: podidem2_info
+
+ - name: Check info
+ assert:
+ that:
+ - podidem2_info is changed
+
+ - name: Create same pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: created
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.1
+ volumes:
+ - /tmp:/tmp/:ro
+ - /var/run/://var/run
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+ register: podidem2_info2
+
+ - name: Check info
+ assert:
+ that:
+ - podidem2_info2 is not changed
+
+ - name: Change the pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: created
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.2
+ volumes:
+ - /tmp:/tmp/:ro
+ - /var/run/://var/run
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+ register: podidem2_info3
+
+ - name: Check info
+ assert:
+ that:
+ - podidem2_info3 is changed
+
+ - name: Start the pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: started
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.2
+ volumes:
+ - /tmp:/tmp/:ro
+ - /var/run/://var/run
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+
+ - name: Configure and start the pod again
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: started
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.2
+ volumes:
+ - /tmp:/tmp/:ro
+ - /var/run/://var/run
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+ register: podidem2_info4
+
+ - name: Check info
+ assert:
+ that:
+ - podidem2_info4 is not changed
+
+ - name: Remove pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: absent
+
+ - name: Create a pod with parameters - created
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: created
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.2
+ volumes:
+ - /tmp:/tmp/:ro
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+ register: pod1_set
+
+ - name: Create containers
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ item }}"
+ state: created
+ pod: podidem2
+ image: alpine
+ command: sleep 1h
+ loop:
+ - "container1"
+ - "container2"
+
+ - name: Start pod
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: podidem2
+ state: started
+ infra: true
+ network: host
+ share: net
+ userns: auto
+ security_opt:
+ - seccomp=unconfined
+ - apparmor=unconfined
+ hostname: mypod
+ dns:
+ - 1.1.1.2
+ volumes:
+ - /tmp:/tmp/:ro
+ label:
+ key: cval
+ otherkey: kddkdk
+ somekey: someval
+ add_host:
+ - "google:5.5.5.5"
+ register: pod2_set
+
+ - name: Check info for pod creation
+ assert:
+ that:
+ - pod1_set is changed
+ - pod2_set is changed
+ - pod1_set.pod['State'] == 'Created'
+ - pod1_set.pod['Containers'][0]['State'] == 'created'
+ - pod2_set.pod['State'] == 'Running'
+ - pod2_set.pod['Containers'][0]['State'] == 'running'
+ - pod2_set.pod['Containers'][1]['State'] == 'running'
+ - pod2_set.pod['Containers'][2]['State'] == 'running'
+
+ always:
+
+ - name: Delete all pod leftovers from tests
+ containers.podman.podman_pod:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ item }}"
+ state: absent
+ loop:
+ - "podidem1"
+ - "podidem1"
+
+ - name: Delete all container leftovers from tests
+ containers.podman.podman_container:
+ executable: "{{ test_executable | default('podman') }}"
+ name: "{{ item }}"
+ state: absent
+ loop:
+ - "container1"
+ - "container2"
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/main.yml
index cb455aa23..c148f9a99 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/main.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/main.yml
@@ -1010,6 +1010,8 @@
subuidname: username1
userns: auto
publish: 8000:8001
+ sysctl:
+ "net.ipv4.ip_forward": 1
add_host:
- host1
volume:
@@ -1052,6 +1054,7 @@
- "PodmanArgs=--subuidname username1"
- "PodmanArgs=--userns auto"
- "PodmanArgs=--add-host host1"
+ - "PodmanArgs=--sysctl net.ipv4.ip_forward=1"
- "Label=somelabel=labelvalue"
- "WantedBy=default.target"
loop_control:
@@ -1075,6 +1078,8 @@
subuidname: username1
userns: auto
publish: 8000:8001
+ sysctl:
+ "net.ipv4.ip_forward": 1
add_host:
- host1
volume:
@@ -1103,6 +1108,8 @@
subuidname: username1
userns: auto
publish: 8000:8001
+ sysctl:
+ "net.ipv4.ip_forward": 1
add_host:
- host1
volume:
@@ -1146,6 +1153,9 @@
- "container1"
- "container2"
+- name: Test idempotency for rootless pods
+ include_tasks: idempotency.yml
+
- name: Test idempotency for root pods
include_tasks: root-pod.yml
vars:
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/resource-limit.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/resource-limit.yml
index 8727d6e50..83b673af1 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/resource-limit.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/resource-limit.yml
@@ -8,6 +8,7 @@
cpuset_mems: '0-1'
cpu_shares: 1024
device_write_bps: ['/dev/zero:1048576']
+ shm_size: 1G
- name: Create pod for limiting resources
containers.podman.podman_pod:
@@ -18,6 +19,7 @@
cpuset_mems: "{{ limit.cpuset_mems }}"
cpu_shares: "{{ limit.cpu_shares }}"
device_write_bps: "{{ limit.device_write_bps }}"
+ shm_size: "{{ limit.shm_size }}"
- name: Get information on pod for limiting resources
containers.podman.podman_pod_info:
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/root-pod.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/root-pod.yml
index 968ec2cf3..557d0854f 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/root-pod.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_pod/tasks/root-pod.yml
@@ -132,6 +132,7 @@
- "127.0.0.2:7671:7676/udp"
- "127.0.0.1:13122:8871/tcp"
- "127.0.0.1:43423:8872"
+ - "0.0.0.0:44444:44445"
- "[::1]:8743:8745"
register: pod8_info
@@ -152,6 +153,7 @@
- "127.0.0.2:7671:7676/udp"
- "127.0.0.1:13122:8871/tcp"
- "127.0.0.1:43423:8872"
+ - "0.0.0.0:44444:44445"
- "[::1]:8743:8745"
register: pod9_info
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_search/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_search/tasks/main.yml
new file mode 100644
index 000000000..66c5ebda0
--- /dev/null
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_search/tasks/main.yml
@@ -0,0 +1,50 @@
+- name: Test podman_search
+ block:
+
+ - name: Search for an invalid image
+ containers.podman.podman_search:
+ term: invalidtermnope
+ executable: "{{ test_executable | default('podman') }}"
+ register: info_0
+
+ - name: Check results for no matching images
+ assert:
+ that:
+ - info_0.images | length == 0
+
+ - name: Search for matching images
+ containers.podman.podman_search:
+ term: etcd
+ executable: "{{ test_executable | default('podman') }}"
+ register: info_1
+
+ - name: Check results for matching images
+ assert:
+ that:
+ - info_1.images | length > 0
+
+ - name: Search for a specific image
+ containers.podman.podman_search:
+ term: registry.access.redhat.com/rhel7/rsyslog
+ executable: "{{ test_executable | default('podman') }}"
+ register: info_2
+
+ - name: Check result for matching image
+ assert:
+ that:
+ - info_2.images | length == 1
+ - info_2.images[0].Index == "registry.access.redhat.com"
+ - info_2.images[0].Name == "registry.access.redhat.com/rhel7/rsyslog"
+
+ - name: Search for specific image tags
+ containers.podman.podman_search:
+ term: registry.access.redhat.com/rhel7/rsyslog
+ list_tags: true
+ executable: "{{ test_executable | default('podman') }}"
+ register: info_3
+
+ - name: Check result for image tags
+ assert:
+ that:
+ - info_3.images | length == 1
+ - info_3.images[0].Tags | length > 0
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_secret/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_secret/tasks/main.yml
index 66f827df5..c47ebb4bf 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_secret/tasks/main.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_secret/tasks/main.yml
@@ -17,7 +17,10 @@
containers.podman.podman_secret:
executable: "{{ test_executable | default('podman') }}"
state: absent
- name: mysecret
+ name: "{{ item }}"
+ loop:
+ - mysecret
+ - mysecret2
- name: Create secret
containers.podman.podman_secret:
@@ -283,7 +286,158 @@
containers.podman.podman_secret:
executable: "{{ test_executable | default('podman') }}"
state: absent
- name: mysecret
+ name: "{{ item }}"
+ loop:
+ - mysecret
+ - mysecret2
+
+ - name: Create secret if it does not exist and skip existing
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret2
+ data: secret content
+ state: present
+ skip_existing: true
+ register: secretskip1
+
+ - name: Create secret if it does not exist and skip existing - again
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret2
+ data: secret content
+ state: present
+ skip_existing: true
+ register: secretskip2
+
+ - name: Check secret
+ containers.podman.podman_secret_info:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret2
+ register: secret_info
+
+ - name: Check outputs
+ assert:
+ that:
+ - secretskip1 is changed
+ - secretskip2 is not changed
+ - secret_info is success
+ - secret_info.secrets | length > 0
+
+ - name: Remove secret
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ state: absent
+ name: mysecret2
+
+
+ - when: podman_version_gt470
+ block:
+
+ - name: Create a file with secret data
+ copy:
+ content: "secret content 1"
+ dest: ~/mysecret-1
+
+ - name: Create secret from file
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret2
+ path: ~/mysecret-1
+ state: present
+ register: secret1
+
+ - name: Create secret again
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret2
+ path: ~/mysecret-1
+ state: present
+ register: secret2
+
+ - name: Check outputs
+ assert:
+ that:
+ - secret1 is changed
+ - secret2 is not changed
+
+ - name: Create another file with secret data
+ copy:
+ content: "secret content 2"
+ dest: ~/mysecret-2
+
+ - name: Create secret from other file
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret3
+ path: ~/mysecret-2
+ state: present
+ debug: true
+ register: secret3
+
+ - name: Check outputs
+ assert:
+ that:
+ - secret3 is changed
+
+ - name: Create a secret from a non-existing file
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret4
+ path: ~/mysecret-3
+ state: present
+ debug: true
+ register: secret4
+ ignore_errors: true
+
+ - name: Check outputs
+ assert:
+ that:
+ - secret4 is failed
+
+ - name: Create a secret from non-existing environment variable
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret5
+ env: NON_EXISTING_ENV
+ state: present
+ register: secret5
+ ignore_errors: true
+
+ - name: Check outputs
+ assert:
+ that:
+ - secret5 is failed
+ - "'Environment variable NON_EXISTING_ENV is not set' in secret5.msg"
+
+ - name: Create a secret from existing environment variable
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret5
+ env: EXISTING_ENV
+ state: present
+ environment:
+ EXISTING_ENV: "secret env content"
+ register: secret6
+
+ - name: Show secret6
+ containers.podman.podman_secret_info:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mysecret5
+ showsecret: true
+ register: secret6_info
+
+ - name: Check outputs
+ assert:
+ that:
+ - secret6 is changed
+ - secret6_info is success
+ - secret6_info.secrets.0.SecretData == "secret env content"
+
+ - name: Remove secret
+ containers.podman.podman_secret:
+ executable: "{{ test_executable | default('podman') }}"
+ state: absent
+ name: mysecret5
always:
- name: Remove container that uses secret
diff --git a/ansible_collections/containers/podman/tests/integration/targets/podman_volume/tasks/main.yml b/ansible_collections/containers/podman/tests/integration/targets/podman_volume/tasks/main.yml
index 9d43f3f1c..0b52fbfc8 100644
--- a/ansible_collections/containers/podman/tests/integration/targets/podman_volume/tasks/main.yml
+++ b/ansible_collections/containers/podman/tests/integration/targets/podman_volume/tasks/main.yml
@@ -161,6 +161,79 @@
- info10 is failed
- delete.volume == {}
+ - name: Mount non-existing volume
+ containers.podman.podman_volume:
+ executable: "{{ test_executable | default('podman') }}"
+ name: nonexistent
+ state: mounted
+ register: mount1
+
+ - name: Check results
+ assert:
+ that:
+ - mount1 is success
+ - "'mount_point' in mount1"
+
+ - name: Create volume for mount
+ containers.podman.podman_volume:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mountme
+ state: present
+
+ - name: Mount existing volume
+ containers.podman.podman_volume:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mountme
+ state: mounted
+ register: mount2
+
+ - name: Check results
+ assert:
+ that:
+ - mount2 is success
+ - "'mount_point' in mount2"
+
+ - name: Unmount volume
+ containers.podman.podman_volume:
+ executable: "{{ test_executable | default('podman') }}"
+ name: mountme
+ state: unmounted
+ register: unmount
+
+ - name: Check results
+ assert:
+ that:
+ - unmount is success
+ - "'mount_point' not in unmount"
+
+ - name: Mount as root
+ become: true
+ containers.podman.podman_volume:
+ executable: "{{ test_executable | default('podman') }}"
+ name: rootmount
+ state: mounted
+ register: mount3
+
+ - name: Check results
+ assert:
+ that:
+ - mount3 is success
+ - "'mount_point' in mount3"
+
+ - name: Unmount as root
+ become: true
+ containers.podman.podman_volume:
+ executable: "{{ test_executable | default('podman') }}"
+ name: rootmount
+ state: unmounted
+ register: unmount2
+
+ - name: Check results
+ assert:
+ that:
+ - unmount2 is success
+ - "'mount_point' not in unmount2"
+
- name: Create a Quadlet for volume with filename
containers.podman.podman_volume:
executable: "{{ test_executable | default('podman') }}"
diff --git a/ansible_collections/containers/podman/tests/unit/plugins/modules/test_container_lib.py b/ansible_collections/containers/podman/tests/unit/plugins/modules/test_container_lib.py
index 20dd4e66c..ea336419a 100644
--- a/ansible_collections/containers/podman/tests/unit/plugins/modules/test_container_lib.py
+++ b/ansible_collections/containers/podman/tests/unit/plugins/modules/test_container_lib.py
@@ -66,7 +66,15 @@ def test_container_add_params(test_input, expected):
[
None, # module
{"conmon_pidfile": "bbb"}, # module params
- {"conmonpidfile": "ccc"}, # container info
+ {"conmonpidfile": "ccc",
+ "config": {
+ "createcommand": [
+ "podman",
+ "create",
+ "--conmon-pidfile=ccc",
+ "testcont",
+ ]}
+ }, # container info
{}, # image info
"4.1.1", # podman version
],
@@ -76,7 +84,67 @@ def test_container_add_params(test_input, expected):
[
None, # module
{"conmon_pidfile": None}, # module params
- {"conmonpidfile": "ccc"}, # container info
+ {"conmonpidfile": "ccc",
+ "config": {
+ "createcommand": [
+ "podman",
+ "create",
+ "--conmon-pidfile=ccc",
+ "testcont",
+ ]}
+ }, # container info
+ {}, # image info
+ "4.1.1", # podman version
+ ],
+ True,
+ ),
+ (
+ [
+ None, # module
+ {"conmon_pidfile": None}, # module params
+ {"conmonpidfile": None,
+ "config": {
+ "createcommand": [
+ "podman",
+ "create",
+ "testcont",
+ ]}
+ }, # container info
+ {}, # image info
+ "4.1.1", # podman version
+ ],
+ False,
+ ),
+ (
+ [
+ None, # module
+ {"conmon_pidfile": 'aaa'}, # module params
+ {"conmonpidfile": None,
+ "config": {
+ "createcommand": [
+ "podman",
+ "create",
+ "testcont",
+ ]}
+ }, # container info
+ {}, # image info
+ "4.1.1", # podman version
+ ],
+ True,
+ ),
+ (
+ [
+ None, # module
+ {"conmon_pidfile": 'aaa'}, # module params
+ {"conmonpidfile": 'aaa',
+ "config": {
+ "createcommand": [
+ "podman",
+ "create",
+ "--conmon-pidfile=aaa",
+ "testcont",
+ ]}
+ }, # container info
{}, # image info
"4.1.1", # podman version
],
diff --git a/ansible_collections/cyberark/conjur/.github/workflows/ansible-test.yml b/ansible_collections/cyberark/conjur/.github/workflows/ansible-test.yml
index aa74f4dea..091efc318 100644
--- a/ansible_collections/cyberark/conjur/.github/workflows/ansible-test.yml
+++ b/ansible_collections/cyberark/conjur/.github/workflows/ansible-test.yml
@@ -74,7 +74,7 @@ jobs:
# The docker container has all the pinned dependencies that are required.
# Explicitly specify the version of Python we want to test
- name: Run sanity tests
- run: ansible-test sanity --docker -v --color --python ${{ matrix.python }}
+ run: ansible-test sanity --docker -v --color --python ${{ matrix.python }} --exclude dev/ --exclude ci/ --exclude secrets.yml
working-directory: ./ansible_collections/cyberark/conjur
###
diff --git a/ansible_collections/cyberark/conjur/.gitignore b/ansible_collections/cyberark/conjur/.gitignore
index c7b557863..abf17ead9 100644
--- a/ansible_collections/cyberark/conjur/.gitignore
+++ b/ansible_collections/cyberark/conjur/.gitignore
@@ -21,6 +21,7 @@ reports
*.js
*.html
output/
+importer_result.json
# Pycache directories and files
**/__pycache__/
diff --git a/ansible_collections/cyberark/conjur/CHANGELOG.md b/ansible_collections/cyberark/conjur/CHANGELOG.md
index 887e2a89b..048c56f3f 100644
--- a/ansible_collections/cyberark/conjur/CHANGELOG.md
+++ b/ansible_collections/cyberark/conjur/CHANGELOG.md
@@ -6,6 +6,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
## [Unreleased]
+## [1.3.0] - 2024-05-23
+
+### Added
+- Lookup plugin supports configuration by Ansible variables
+ [cyberark/ansible-conjur-collection#203](https://github.com/cyberark/ansible-conjur-collection/pull/203)
+
## [1.2.2] - 2023-09-28
### Changed
diff --git a/ansible_collections/cyberark/conjur/CONTRIBUTING.md b/ansible_collections/cyberark/conjur/CONTRIBUTING.md
index 82319a287..7f232a2cd 100644
--- a/ansible_collections/cyberark/conjur/CONTRIBUTING.md
+++ b/ansible_collections/cyberark/conjur/CONTRIBUTING.md
@@ -38,7 +38,7 @@ Before getting started, the following tools need to be installed:
## Set up a development environment
-The `dev` directory contains a `docker-compose` file which creates a development
+The `dev` directory contains a `docker-compose.yml` file which creates a development
environment :
- A Conjur Open Source instance
- An Ansible control node
@@ -142,7 +142,7 @@ Generate the master key, which will be used to encrypt Conjur's database. Store
this value as an environment variable.
```sh-session
-docker-compose run --no-deps --rm conjur data-key generate > data_key
+docker compose run --no-deps --rm conjur data-key generate > data_key
export CONJUR_DATA_KEY="$(< data_key)"
```
@@ -150,20 +150,20 @@ Start the Conjur OSS environment. An account, named `cucumber`, will be
automatically created.
```sh-session
-docker-compose up -d conjur
+docker compose up -d conjur
```
Retrieve the admin user's API key, and store the value in an environment variable.
```sh-session
-export CLI_CONJUR_AUTHN_API_KEY="$(docker-compose exec conjur conjurctl role retrieve-key cucumber:user:admin)"
+export CLI_CONJUR_AUTHN_API_KEY="$(docker compose exec conjur conjurctl role retrieve-key cucumber:user:admin)"
```
Start the Conjur CLI container. The CLI will be automatically authenticated as
the user `cucumber:user:admin`.
```sh-session
-docker-compose up -d conjur_cli
+docker compose up -d conjur_cli
```
## Load policy to set up Conjur Ansible integration
@@ -174,15 +174,15 @@ be a policy, a host, a user, a layer, a group, or a variable.
Check out the policy file, and load it into Conjur:
```sh-session
-docker-compose exec conjur_cli cat /policy/root.yml
-docker-compose exec conjur_cli conjur policy load root /policy/root.yml
+docker compose exec conjur_cli cat /policy/root.yml
+docker compose exec conjur_cli conjur policy load root /policy/root.yml
```
Also, load a dummy secret value into the `ansible/target-password` variable.
This is a variable required by remote nodes in order to complete their workloads.
```sh-session
-docker-compose exec conjur_cli conjur variable values add ansible/target-password S3cretV@lue
+docker compose exec conjur_cli conjur variable values add ansible/target-password S3cretV@lue
```
## Create Ansible managed nodes
@@ -192,15 +192,15 @@ nodes. First, retrieve the API key for the Conjur host representing the control
node, then create it:
```sh-session
-export ANSIBLE_CONJUR_AUTHN_API_KEY="$(docker-compose exec conjur conjurctl role retrieve-key cucumber:host:ansible/ansible-master)"
-docker-compose up -d ansible
+export ANSIBLE_CONJUR_AUTHN_API_KEY="$(docker compose exec conjur conjurctl role retrieve-key cucumber:host:ansible/ansible-master)"
+docker compose up -d ansible
```
Next, create two instances of each managed node:
```sh-session
-docker-compose up -d --scale test_app_ubuntu=2 test_app_ubuntu
-docker-compose up -d --scale test_app_centos=2 test_app_centos
+docker compose up -d --scale test_app_ubuntu=2 test_app_ubuntu
+docker compose up -d --scale test_app_centos=2 test_app_centos
```
## Use Conjur Ansible Role to set up identity on managed nodes
@@ -209,13 +209,13 @@ To grant your Ansible host a Conjur identity, first install the Conjur
Collection on your Ansible control node:
```sh-session
-docker-compose exec ansible ansible-galaxy collection install cyberark.conjur
+docker compose exec ansible ansible-galaxy collection install cyberark.conjur
```
Set up the host factory token in the HFTOKEN env var
```sh-session
-export HFTOKEN="$(docker-compose exec conjur_cli conjur hostfactory tokens create ansible/ansible-factory | jq -r '.[0].token')"
+export HFTOKEN="$(docker compose exec conjur_cli conjur hostfactory tokens create ansible/ansible-factory | jq -r '.[0].token')"
```
Once you've done this, you can configure each Ansible node with a Conjur
diff --git a/ansible_collections/cyberark/conjur/FILES.json b/ansible_collections/cyberark/conjur/FILES.json
index 4ae72bb59..a0df958ea 100644
--- a/ansible_collections/cyberark/conjur/FILES.json
+++ b/ansible_collections/cyberark/conjur/FILES.json
@@ -8,38 +8,73 @@
"format": 1
},
{
- "name": "README.md",
+ "name": "meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "91243325e5fdfc7e9f6a4b5bbbab1b91c433349af7df860af23d5e1932d8a341",
+ "chksum_sha256": "de1872da7735269d370a0ea862d4776d2910afc36ec88476f7d2785941027389",
"format": 1
},
{
- "name": ".github",
+ "name": "plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows",
+ "name": "plugins/lookup",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows/ansible-test.yml",
+ "name": "plugins/lookup/conjur_variable.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dee0253dc4b4a22ee8eb759ff1edb195e903f44911341e7f6389d0a3bb29f63f",
+ "chksum_sha256": "6af3ac635df7b6ab2aefe766a466ad774a8221ef18ad4e7707552eb1fcbac1bc",
"format": 1
},
{
- "name": ".github/CODEOWNERS",
+ "name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1dee7b6ae693cebe88547d034b17710489e515c3def06dad75252c8b19bfc51",
+ "chksum_sha256": "e092618211b1d864e3caf325abbd567f997e6ffb98d9fb97188d4fa280334bbe",
+ "format": 1
+ },
+ {
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "91243325e5fdfc7e9f6a4b5bbbab1b91c433349af7df860af23d5e1932d8a341",
+ "format": 1
+ },
+ {
+ "name": "Jenkinsfile",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b064c627bfc189a0e5ba53ca39f939799a86a95848fd89f72629aaa15ab8f266",
+ "format": 1
+ },
+ {
+ "name": ".gitmodules",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6204ac468423a4b2a3796e91656026cb91670ce5ab56a95bc756f42418d3dc60",
+ "format": 1
+ },
+ {
+ "name": "CHANGELOG.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8ffcd5f648c85b61d3b612fc3d5e838ea49b7e217869e7b1a1f527bf094a3eb4",
"format": 1
},
{
@@ -50,24 +85,31 @@
"format": 1
},
{
- "name": "plugins",
+ "name": ".github",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/lookup",
+ "name": ".github/workflows",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/lookup/conjur_variable.py",
+ "name": ".github/workflows/ansible-test.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a34673fa117a06bedb8444f0685cb9f726e1c9f34a770f6e92d20e57a1262e2e",
+ "format": 1
+ },
+ {
+ "name": ".github/CODEOWNERS",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "071df1f77b15701960faf7cee3ce8967143abedf76953f71550645068a57fcb2",
+ "chksum_sha256": "f1dee7b6ae693cebe88547d034b17710489e515c3def06dad75252c8b19bfc51",
"format": 1
},
{
@@ -85,24 +127,24 @@
"format": 1
},
{
- "name": "roles/conjur_host_identity/README.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a712c1e5c56fb9a3917ebfd4a55109801da9e57dff304d1941aaf2cf2ab471ca",
- "format": 1
- },
- {
- "name": "roles/conjur_host_identity/defaults",
+ "name": "roles/conjur_host_identity/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/defaults/main.yml",
+ "name": "roles/conjur_host_identity/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7f30d049a541e0fd98faeb5dcd8d83839e00db1f68ece8b14431630fb779e13",
+ "chksum_sha256": "0103f8e65603e7da7511224456dacf19c6a0e92c6e6f1fbf152f171d33741f80",
+ "format": 1
+ },
+ {
+ "name": "roles/conjur_host_identity/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a712c1e5c56fb9a3917ebfd4a55109801da9e57dff304d1941aaf2cf2ab471ca",
"format": 1
},
{
@@ -113,38 +155,38 @@
"format": 1
},
{
- "name": "roles/conjur_host_identity/tasks/uninstall.yml",
+ "name": "roles/conjur_host_identity/tasks/summon-conjur.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d40f3bd7fdddbd8eddeb983e28ba959685e086d7382789b717e54a7f469d1f77",
+ "chksum_sha256": "b75ef3c1b8db527e7469e50d3b1f4f13ce09cc17ccec05ec598273f2afb79f34",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tasks/identity.yml",
+ "name": "roles/conjur_host_identity/tasks/summon.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4a249b32c8b6d90b37e9787c6a22f6683820cf03af4f1038b8d73dbabf3fd1b",
+ "chksum_sha256": "40ad1a387e7f7a0a7c342614d9cdd5cd7ad4334634f4da733929e8e3b0a7ab4f",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tasks/install.yml",
+ "name": "roles/conjur_host_identity/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "723de78f2785f8cb7737c2e0e0801d674eba41636a7b1c5915dd41cec7083fed",
+ "chksum_sha256": "5f2195992211d603a73dead76e8e60e511ae0da2293a8a3167836d129ffe9bbf",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tasks/main.yml",
+ "name": "roles/conjur_host_identity/tasks/identity.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f2195992211d603a73dead76e8e60e511ae0da2293a8a3167836d129ffe9bbf",
+ "chksum_sha256": "e4a249b32c8b6d90b37e9787c6a22f6683820cf03af4f1038b8d73dbabf3fd1b",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tasks/summon.yml",
+ "name": "roles/conjur_host_identity/tasks/uninstall.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40ad1a387e7f7a0a7c342614d9cdd5cd7ad4334634f4da733929e8e3b0a7ab4f",
+ "chksum_sha256": "d40f3bd7fdddbd8eddeb983e28ba959685e086d7382789b717e54a7f469d1f77",
"format": 1
},
{
@@ -155,10 +197,10 @@
"format": 1
},
{
- "name": "roles/conjur_host_identity/tasks/summon-conjur.yml",
+ "name": "roles/conjur_host_identity/tasks/install.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b75ef3c1b8db527e7469e50d3b1f4f13ce09cc17ccec05ec598273f2afb79f34",
+ "chksum_sha256": "723de78f2785f8cb7737c2e0e0801d674eba41636a7b1c5915dd41cec7083fed",
"format": 1
},
{
@@ -183,17 +225,17 @@
"format": 1
},
{
- "name": "roles/conjur_host_identity/meta",
+ "name": "roles/conjur_host_identity/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/meta/main.yml",
+ "name": "roles/conjur_host_identity/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0103f8e65603e7da7511224456dacf19c6a0e92c6e6f1fbf152f171d33741f80",
+ "chksum_sha256": "c7f30d049a541e0fd98faeb5dcd8d83839e00db1f68ece8b14431630fb779e13",
"format": 1
},
{
@@ -204,27 +246,6 @@
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/junit",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "roles/conjur_host_identity/tests/junit/configure-conjur-identity",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "43eeedf90a26a10532a10c174115a19abc03be3cb49b55b233b0041a0588f1d1",
- "format": 1
- },
- {
- "name": "roles/conjur_host_identity/tests/junit/cleanup-conjur-identity",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9a5067e650b6e98e39eda4bc661a1199a64b45206713e7fb52dfdb9751a418d7",
- "format": 1
- },
- {
"name": "roles/conjur_host_identity/tests/test_cases",
"ftype": "dir",
"chksum_type": null,
@@ -232,136 +253,136 @@
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity",
+ "name": "roles/conjur_host_identity/tests/test_cases/not-conjurized",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml",
+ "name": "roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18dec0d8ca7c324adae8bcf96843531e568799a23237b93be089989a46baf0a2",
+ "chksum_sha256": "741c4dcf1a42dfb96f2485d3b2a2230fbec440fcad165c063b555bac3ccdabc6",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/tests",
+ "name": "roles/conjur_host_identity/tests/test_cases/bad-ssl-config",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/tests/test_default.py",
+ "name": "roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f668ca0bbd274a6b5f6b3894ee901a9240d68015153d78b4fd79308152de3a2",
+ "chksum_sha256": "63ef031c2940fba555a7bdfa5304ddee32917d6ad77cab0cf00666d89a3ec9bb",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity",
+ "name": "roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml",
+ "name": "roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d1c3f284f2be73ce5f3f533fb67b0e26f5809704e11f74965d1a2de1e5bb608",
+ "chksum_sha256": "60531ded85de255ac154c919bd9a093c81bf021d17127d9c8c52b5f257b3b086",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/tests",
+ "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/tests/test_default.py",
+ "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2390200888250358407b3ae321abb9fb0d4dbef50cf330438b00f3e207f938b4",
+ "chksum_sha256": "885a58b434a59e6321bab832ff84e6d6320545d3799fa32b7af39a414af3cdf5",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/not-conjurized",
+ "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml",
+ "name": "roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1eaabc866dada7379694872a1aa3e1ccff58ba60c589c58c0db0173b1d8f9d68",
+ "chksum_sha256": "2f668ca0bbd274a6b5f6b3894ee901a9240d68015153d78b4fd79308152de3a2",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity",
+ "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml",
+ "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7b3002d1d9927f3aa012e8abba3164c3c822d0e6830ab9b463341e839bb82b6",
+ "chksum_sha256": "57b7637ac64e48f434942aaa9843573b0b83eedec3a990135f682eb0c9b4fc08",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/bad-ssl-config",
+ "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml",
+ "name": "roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d215e320462650efd9a1fa88f8fa769620377c9d8b1b0b1df0275ccf64bc241",
+ "chksum_sha256": "2390200888250358407b3ae321abb9fb0d4dbef50cf330438b00f3e207f938b4",
"format": 1
},
{
"name": "roles/conjur_host_identity/tests/test.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca5f445a253831f7d22c2bb208e42307253086115644ca0edf11ae7c950f7861",
+ "chksum_sha256": "4470918bf5bbb2cc8a29d33a667ec086741f9da48db477dbb219fc67d6d669ea",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/.pytest_cache",
+ "name": "roles/conjur_host_identity/tests/junit",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/.pytest_cache/README.md",
+ "name": "roles/conjur_host_identity/tests/junit/configure-conjur-identity",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73fd6fccdd802c419a6b2d983d6c3173b7da97558ac4b589edec2dfe443db9ad",
+ "chksum_sha256": "d9ce4ce1faeb20918127918421a1b20b9fd2a6d24283ed26fd6708d978f718dd",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/.pytest_cache/.gitignore",
+ "name": "roles/conjur_host_identity/tests/junit/cleanup-conjur-identity",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ed731b65d06150c138e2dadb0be0697550888a6b47eb8c45ecc9adba8b8e9bd",
+ "chksum_sha256": "25033a22dba4a120c623e43344ffd6c8185f0cbf8151680b42ca830745ea3b83",
"format": 1
},
{
- "name": "roles/conjur_host_identity/tests/.pytest_cache/CACHEDIR.TAG",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "37dc88ef9a0abeddbe81053a6dd8fdfb13afb613045ea1eb4a5c815a74a3bde4",
+ "name": "roles/conjur_host_identity/tests/.pytest_cache",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
@@ -393,73 +414,38 @@
"format": 1
},
{
- "name": "CONTRIBUTING.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "51665e44857e861e04b64120a17ea6f6960a1ca496d9ef6e2fec24bc01f585b6",
- "format": 1
- },
- {
- "name": ".gitignore",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b87b7493010ee0eaff5a1e8b2b20a403e03e0a9970b282d28c70cc34679df409",
- "format": 1
- },
- {
- "name": "SECURITY.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e913fcef04d2f2652839b896dd875dd3268b67d4669105e0e4b1a0249ef843a",
- "format": 1
- },
- {
- "name": "requirements.txt",
+ "name": "roles/conjur_host_identity/tests/.pytest_cache/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a2f8c8f38472c11f47c93fe7ac69bb6e08b2d09dcc5a2b9fdd7054366822a21",
- "format": 1
- },
- {
- "name": "examples",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "73fd6fccdd802c419a6b2d983d6c3173b7da97558ac4b589edec2dfe443db9ad",
"format": 1
},
{
- "name": "examples/test.yml",
+ "name": "roles/conjur_host_identity/tests/.pytest_cache/CACHEDIR.TAG",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94fcc880e144e39d06fb0fd4957ca4aac77578627b509a110f4ec5b2ffc7f9ff",
+ "chksum_sha256": "37dc88ef9a0abeddbe81053a6dd8fdfb13afb613045ea1eb4a5c815a74a3bde4",
"format": 1
},
{
- "name": "Jenkinsfile",
+ "name": "roles/conjur_host_identity/tests/.pytest_cache/.gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b064c627bfc189a0e5ba53ca39f939799a86a95848fd89f72629aaa15ab8f266",
+ "chksum_sha256": "3ed731b65d06150c138e2dadb0be0697550888a6b47eb8c45ecc9adba8b8e9bd",
"format": 1
},
{
- "name": ".gitmodules",
+ "name": "SECURITY.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6204ac468423a4b2a3796e91656026cb91670ce5ab56a95bc756f42418d3dc60",
- "format": 1
- },
- {
- "name": "meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "1e913fcef04d2f2652839b896dd875dd3268b67d4669105e0e4b1a0249ef843a",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de1872da7735269d370a0ea862d4776d2910afc36ec88476f7d2785941027389",
+ "chksum_sha256": "fbb106317526ffc7993cdea0a385432ec8ae1db6ea090cd8262fc821af9fe7a1",
"format": 1
},
{
@@ -494,14 +480,7 @@
"name": "tests/unit/plugins/lookup/test_conjur_variable.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61fd56fd948d13be3d30094d2b6ccd2828585a2f851ea64448a8f59dfc5682fe",
- "format": 1
- },
- {
- "name": "tests/unit/plugins/lookup/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fd97e205015ff8784b511ce645e3961117c48fe893391b7549fdb0dd0635c1b9",
"format": 1
},
{
@@ -519,178 +498,157 @@
"format": 1
},
{
- "name": "tests/sanity",
+ "name": "tests/conjur_variable",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.10.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f045d340441215a5303e2854a50b88c7656ee4e603331cb50f42f99d0b8161bf",
- "format": 1
- },
- {
- "name": "tests/sanity/ignore-2.15.txt",
+ "name": "tests/conjur_variable/pytest.ini",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fb43913e33d45cc72021c443619154459a731ee3213afdc81f0af6e0caa157b",
+ "chksum_sha256": "4b8dce177a1820e68b4a821d858669035b849f9c5fbc4cfd6d4718325a7e69c8",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.12.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "91fd6123db0c8a19145dd93733f8dfb3ea537ef9ddbc0f0568c2a8121ea023f3",
+ "name": "tests/conjur_variable/test_cases",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.9.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3193867f74b8b551868226d231cd572d45f541838294893a32ef46193cdc031f",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/sanity/ignore-2.13.txt",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/env",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fb43913e33d45cc72021c443619154459a731ee3213afdc81f0af6e0caa157b",
+ "chksum_sha256": "e1d90dec2be9f840d4f27dcfd2bf1c67be44c5d801ee57ea45c94ff6895ddf62",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.14.txt",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/bad-cert.pem",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fb43913e33d45cc72021c443619154459a731ee3213afdc81f0af6e0caa157b",
+ "chksum_sha256": "7529afc89345f5dde282fb51014d158769d3fb22ddf20744d093eb8fa820b8d3",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.11.txt",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c81b4555220e56eb03ee55d1d12c2f38a6c4b27b0e6b798d29e71d19ac205c2c",
- "format": 1
- },
- {
- "name": "tests/conjur_variable",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "bb4764e18fcc10f83c16d10cbc4b7eac8c0abd2668f1b158649fc1a0d47df2c8",
"format": 1
},
{
- "name": "tests/conjur_variable/junit",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-with-spaces-secret",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "739441ec5ebe746549090a0c2c622c477bc7c675c8e227ff35c69a3d2e96158d",
- "format": 1
- },
- {
- "name": "tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e39962d8d46d3ce0e86e15e5853168d8e825cadc62341dd199709dacd7e7b08c",
+ "chksum_sha256": "5f2cb44532e32a0e4c55aa765c53cf52728d8c1c512e0eb606a5460cfc06532b",
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-bad-cert-path",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c5cbf82d39043d5cd79c45b6c83b69c12f7949777e3e1b2b6356ee184c6c5c46",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-no-cert-provided",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c692b3d4d648d53411adb491a16b9fef7242338f14e3e0c4b3605e0c659d2d4",
+ "chksum_sha256": "d94bc0b090fc07e738c0bfc05b3b6747850b8f06e5e290771d200efeb3044e35",
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "aeccf0044a93b226d471017be1d7a8a906661565e6f32abb6f9fb3f9443b3a9c",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-into-file",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "311ba1466f21f2377778c95d8eea977faaed8df674cdf5fd6fd52fbd12a99dde",
+ "chksum_sha256": "e6b50f356ee08724cd4535c1831566274c2b913fe9b24d85deb2c695a3c422a9",
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-with-authn-token",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "30c648fa1e7599e0e098ed8afb4c1dffff299f7c697febc381941b5769640374",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-bad-certs",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/env",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "315ac99b99c9ca992510859ce660641275f859caf504c2ee41d82d39198197aa",
+ "chksum_sha256": "0f79221a2dba5e7c47cf03aed09ac314d3a0185b73d98735c499d3bfabc01c26",
"format": 1
},
{
- "name": "tests/conjur_variable/junit/retrieve-variable-disable-verify-certs",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3b1c3e40859a1321db5f0928383f6b8b2fc1f1656176cb2ceb27f712829ef3c",
+ "chksum_sha256": "a57510d743d358c0405b3de73cbae3e5945c492fb8ca64be0d3777f1e4de811e",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/tests/test_default.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e6b50f356ee08724cd4535c1831566274c2b913fe9b24d85deb2c695a3c422a9",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/env",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bf54c0c9c02929e148e92803bfe2e2daa791960688f6c2f8f4330241bec77068",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/playbook.yml",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e1a6f8f4cded9369fbf9c6bc55f725cd0aa8da7ff1fba59e91b338511ed20736",
+ "chksum_sha256": "bb4764e18fcc10f83c16d10cbc4b7eac8c0abd2668f1b158649fc1a0d47df2c8",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/tests",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/tests/test_default.py",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ed19681bbd132fe9faf61bafa8f1039de0e315ae62c78ba48ede77027d1bfd2",
+ "chksum_sha256": "5f2cb44532e32a0e4c55aa765c53cf52728d8c1c512e0eb606a5460cfc06532b",
"format": 1
},
{
@@ -736,101 +694,108 @@
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/env",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/env",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9f5febf65f45e537c0666df07ea12f0568f1ee5afa7bc9eef5a36370e6b5dfa4",
+ "chksum_sha256": "bf54c0c9c02929e148e92803bfe2e2daa791960688f6c2f8f4330241bec77068",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/playbook.yml",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb4764e18fcc10f83c16d10cbc4b7eac8c0abd2668f1b158649fc1a0d47df2c8",
+ "chksum_sha256": "e1a6f8f4cded9369fbf9c6bc55f725cd0aa8da7ff1fba59e91b338511ed20736",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/tests",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/tests/test_default.py",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-with-spaces-secret/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f2cb44532e32a0e4c55aa765c53cf52728d8c1c512e0eb606a5460cfc06532b",
+ "chksum_sha256": "8ed19681bbd132fe9faf61bafa8f1039de0e315ae62c78ba48ede77027d1bfd2",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided/playbook.yml",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable/env",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb4764e18fcc10f83c16d10cbc4b7eac8c0abd2668f1b158649fc1a0d47df2c8",
+ "chksum_sha256": "bf54c0c9c02929e148e92803bfe2e2daa791960688f6c2f8f4330241bec77068",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided/tests",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable/playbook.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6bce626301d8259174bfa57b2bc7ff543b267f2f93be4a1a77df69b8cf515801",
+ "format": 1
+ },
+ {
+ "name": "tests/conjur_variable/test_cases/retrieve-variable/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-no-cert-provided/tests/test_default.py",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f2cb44532e32a0e4c55aa765c53cf52728d8c1c512e0eb606a5460cfc06532b",
+ "chksum_sha256": "dee229d35c099b379b2e11d1c502ca0158eb506c9065358b969213ccdb489693",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable/env",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/env",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf54c0c9c02929e148e92803bfe2e2daa791960688f6c2f8f4330241bec77068",
+ "chksum_sha256": "9f5febf65f45e537c0666df07ea12f0568f1ee5afa7bc9eef5a36370e6b5dfa4",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable/playbook.yml",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/playbook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6bce626301d8259174bfa57b2bc7ff543b267f2f93be4a1a77df69b8cf515801",
+ "chksum_sha256": "bb4764e18fcc10f83c16d10cbc4b7eac8c0abd2668f1b158649fc1a0d47df2c8",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable/tests",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable/tests/test_default.py",
+ "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-cert-path/tests/test_default.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dee229d35c099b379b2e11d1c502ca0158eb506c9065358b969213ccdb489693",
+ "chksum_sha256": "5f2cb44532e32a0e4c55aa765c53cf52728d8c1c512e0eb606a5460cfc06532b",
"format": 1
},
{
@@ -869,192 +834,220 @@
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token",
+ "name": "tests/conjur_variable/test.sh",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7d461af6848ab63aefea2ef7c5465ff2c6bc72578920c7a2e12c1465325b2381",
+ "format": 1
+ },
+ {
+ "name": "tests/conjur_variable/junit",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/env",
+ "name": "tests/conjur_variable/junit/retrieve-variable-bad-certs",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f79221a2dba5e7c47cf03aed09ac314d3a0185b73d98735c499d3bfabc01c26",
+ "chksum_sha256": "0ef55c7934d417649a1313aae4b0b7eabb404f3cf64ff6ff2e6c37cbd39d4d05",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/playbook.yml",
+ "name": "tests/conjur_variable/junit/retrieve-variable-disable-verify-certs",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a57510d743d358c0405b3de73cbae3e5945c492fb8ca64be0d3777f1e4de811e",
+ "chksum_sha256": "77156f57f44418b716a535a663b0b155cb6975720cc256e2fac3fbb598b2db82",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/conjur_variable/junit/retrieve-variable-with-authn-token",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "550be09b44f712be5a7eb5f2cc825caca21cac4ed18dd81b2dbbf2443ce496fc",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-with-authn-token/tests/test_default.py",
+ "name": "tests/conjur_variable/junit/retrieve-variable-no-cert-provided",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6b50f356ee08724cd4535c1831566274c2b913fe9b24d85deb2c695a3c422a9",
+ "chksum_sha256": "78603c751dec995c32becb12177ff1bf9c6789f739f7001c9f69b5a004c44b0b",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "334595eecd27a5e40344583b59d2c8c1e5010f870dcf37ff03b4b4e342e5cc51",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/env",
+ "name": "tests/conjur_variable/junit/retrieve-variable-with-spaces-secret",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e1d90dec2be9f840d4f27dcfd2bf1c67be44c5d801ee57ea45c94ff6895ddf62",
+ "chksum_sha256": "51ed0f376d250f01bff8b43b5e302685b3b06059aa909692bb841a6f9a226bc4",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/bad-cert.pem",
+ "name": "tests/conjur_variable/junit/retrieve-variable",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7529afc89345f5dde282fb51014d158769d3fb22ddf20744d093eb8fa820b8d3",
+ "chksum_sha256": "079a81b156f581d0f2370377b2c8a8e515518da0f217fd3acc2f2f931e91fdaf",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/playbook.yml",
+ "name": "tests/conjur_variable/junit/retrieve-variable-bad-cert-path",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb4764e18fcc10f83c16d10cbc4b7eac8c0abd2668f1b158649fc1a0d47df2c8",
+ "chksum_sha256": "f497365b2e3f719a6210a71a277e6969bf0634398153d057d308828697a5cf02",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/tests",
+ "name": "tests/conjur_variable/junit/retrieve-variable-into-file",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f919784a4b1ce86f8dc683a4d1405fb2651752517f163d8325d69015f7c31951",
+ "format": 1
+ },
+ {
+ "name": "tests/conjur_variable/.pytest_cache",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-bad-certs/tests/test_default.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5f2cb44532e32a0e4c55aa765c53cf52728d8c1c512e0eb606a5460cfc06532b",
+ "name": "tests/conjur_variable/.pytest_cache/v",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs",
+ "name": "tests/conjur_variable/.pytest_cache/v/cache",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs/playbook.yml",
+ "name": "tests/conjur_variable/.pytest_cache/v/cache/nodeids",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d94bc0b090fc07e738c0bfc05b3b6747850b8f06e5e290771d200efeb3044e35",
+ "chksum_sha256": "d6a49f2d4432db81b4a2d792008159f6cff2deffb9a47c45718eff2b23bc075e",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/conjur_variable/.pytest_cache/v/cache/stepwise",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4f53cda18c2baa0c0354bb5f9a3ecbe5ed12ab4d8e11ba873c2f11161202b945",
"format": 1
},
{
- "name": "tests/conjur_variable/test_cases/retrieve-variable-disable-verify-certs/tests/test_default.py",
+ "name": "tests/conjur_variable/.pytest_cache/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6b50f356ee08724cd4535c1831566274c2b913fe9b24d85deb2c695a3c422a9",
+ "chksum_sha256": "73fd6fccdd802c419a6b2d983d6c3173b7da97558ac4b589edec2dfe443db9ad",
"format": 1
},
{
- "name": "tests/conjur_variable/pytest.ini",
+ "name": "tests/conjur_variable/.pytest_cache/CACHEDIR.TAG",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b8dce177a1820e68b4a821d858669035b849f9c5fbc4cfd6d4718325a7e69c8",
+ "chksum_sha256": "37dc88ef9a0abeddbe81053a6dd8fdfb13afb613045ea1eb4a5c815a74a3bde4",
"format": 1
},
{
- "name": "tests/conjur_variable/test.sh",
+ "name": "tests/conjur_variable/.pytest_cache/.gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0e338fdee47ad4ca994120b6e97ee50c556391b2ca8f8a7cb0af14bb5fae2cc",
+ "chksum_sha256": "3ed731b65d06150c138e2dadb0be0697550888a6b47eb8c45ecc9adba8b8e9bd",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache",
+ "name": "tests/sanity",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/README.md",
+ "name": "tests/sanity/ignore-2.11.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "73fd6fccdd802c419a6b2d983d6c3173b7da97558ac4b589edec2dfe443db9ad",
+ "chksum_sha256": "f79319d5da69edbabc9e008b849cc9fac2a60e75eea8c974f82e33aff3b6ec90",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/.gitignore",
+ "name": "tests/sanity/ignore-2.13.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ed731b65d06150c138e2dadb0be0697550888a6b47eb8c45ecc9adba8b8e9bd",
+ "chksum_sha256": "b8e334841d92f6524a674975c27baf4559d939485530318bb057ce77af6ee7d2",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/CACHEDIR.TAG",
+ "name": "tests/sanity/ignore-2.9.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "37dc88ef9a0abeddbe81053a6dd8fdfb13afb613045ea1eb4a5c815a74a3bde4",
+ "chksum_sha256": "1deb934aa3193bb20d8fd05c0ae18d90b7cf354e81535cf70732eacae73033f6",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/v",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/sanity/ignore-2.10.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f79319d5da69edbabc9e008b849cc9fac2a60e75eea8c974f82e33aff3b6ec90",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/v/cache",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/sanity/ignore-2.12.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a23220bdf7d5aee641cc7e9a2549c5c631b02d41e97079430265cbe5255a875a",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/v/cache/nodeids",
+ "name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d6a49f2d4432db81b4a2d792008159f6cff2deffb9a47c45718eff2b23bc075e",
+ "chksum_sha256": "b8e334841d92f6524a674975c27baf4559d939485530318bb057ce77af6ee7d2",
"format": 1
},
{
- "name": "tests/conjur_variable/.pytest_cache/v/cache/stepwise",
+ "name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f53cda18c2baa0c0354bb5f9a3ecbe5ed12ab4d8e11ba873c2f11161202b945",
+ "chksum_sha256": "b8e334841d92f6524a674975c27baf4559d939485530318bb057ce77af6ee7d2",
"format": 1
},
{
- "name": "LICENSE",
+ "name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e092618211b1d864e3caf325abbd567f997e6ffb98d9fb97188d4fa280334bbe",
+ "chksum_sha256": "4e04ac42de48a1b26574f63c37c1bdcd6ec3a0497fc82906cb015f5f4225bc6b",
"format": 1
},
{
- "name": "CHANGELOG.md",
+ "name": "examples",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "examples/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7194fe5e661a279cedfd17870f63d1e6c6682b3c5cfc785780cc8e54bbf3f735",
+ "chksum_sha256": "94fcc880e144e39d06fb0fd4957ca4aac77578627b509a110f4ec5b2ffc7f9ff",
+ "format": 1
+ },
+ {
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0a2f8c8f38472c11f47c93fe7ac69bb6e08b2d09dcc5a2b9fdd7054366822a21",
"format": 1
}
],
diff --git a/ansible_collections/cyberark/conjur/MANIFEST.json b/ansible_collections/cyberark/conjur/MANIFEST.json
index 94249cfd4..82ee35c8b 100644
--- a/ansible_collections/cyberark/conjur/MANIFEST.json
+++ b/ansible_collections/cyberark/conjur/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "cyberark",
"name": "conjur",
- "version": "1.2.2",
+ "version": "1.3.0",
"authors": [
"CyberArk Business Development (@cyberark-bizdev)",
"(@cyberark/community-and-integrations-team)"
@@ -36,7 +36,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5eef7fc8156f49fae3c3f275f146563380090cf88f9d722f421d15917eb8a0bb",
+ "chksum_sha256": "f68cdfd855c5490531761c6f094397f42929d7884d7340214dce8350cfb1eecd",
"format": 1
},
"format": 1
diff --git a/ansible_collections/cyberark/conjur/plugins/lookup/conjur_variable.py b/ansible_collections/cyberark/conjur/plugins/lookup/conjur_variable.py
index db3d209e4..fd3bf5c28 100644
--- a/ansible_collections/cyberark/conjur/plugins/lookup/conjur_variable.py
+++ b/ansible_collections/cyberark/conjur/plugins/lookup/conjur_variable.py
@@ -15,9 +15,10 @@ DOCUMENTATION = """
author:
- CyberArk BizDev (@cyberark-bizdev)
description:
- Retrieves credentials from Conjur using the controlling host's Conjur identity
- or environment variables.
+ Retrieves credentials from Conjur using the controlling host's Conjur identity,
+ environment variables, or extra-vars.
Environment variables could be CONJUR_ACCOUNT, CONJUR_APPLIANCE_URL, CONJUR_CERT_FILE, CONJUR_AUTHN_LOGIN, CONJUR_AUTHN_API_KEY, CONJUR_AUTHN_TOKEN_FILE
+ Extra-vars could be conjur_account, conjur_appliance_url, conjur_cert_file, conjur_authn_login, conjur_authn_api_key, conjur_authn_token_file
Conjur info - U(https://www.conjur.org/).
requirements:
- 'The controlling host running Ansible has a Conjur identity.
@@ -46,16 +47,6 @@ DOCUMENTATION = """
key: identity_file_path
env:
- name: CONJUR_IDENTITY_FILE
- authn_token_file:
- description: Path to the access token file.
- type: path
- default: /var/run/conjur/access-token
- required: False
- ini:
- - section: conjur,
- key: authn_token_file
- env:
- - name: CONJUR_AUTHN_TOKEN_FILE
config_file:
description: Path to the Conjur configuration file. The configuration file is a YAML file.
type: path
@@ -66,6 +57,72 @@ DOCUMENTATION = """
key: config_file_path
env:
- name: CONJUR_CONFIG_FILE
+ conjur_appliance_url:
+ description: Conjur appliance url
+ type: string
+ required: False
+ ini:
+ - section: conjur,
+ key: appliance_url
+ vars:
+ - name: conjur_appliance_url
+ env:
+ - name: CONJUR_APPLIANCE_URL
+ conjur_authn_login:
+ description: Conjur authn login
+ type: string
+ required: False
+ ini:
+ - section: conjur,
+ key: authn_login
+ vars:
+ - name: conjur_authn_login
+ env:
+ - name: CONJUR_AUTHN_LOGIN
+ conjur_account:
+ description: Conjur account
+ type: string
+ required: False
+ ini:
+ - section: conjur,
+ key: account
+ vars:
+ - name: conjur_account
+ env:
+ - name: CONJUR_ACCOUNT
+ conjur_authn_api_key:
+ description: Conjur authn api key
+ type: string
+ required: False
+ ini:
+ - section: conjur,
+ key: authn_api_key
+ vars:
+ - name: conjur_authn_api_key
+ env:
+ - name: CONJUR_AUTHN_API_KEY
+ conjur_cert_file:
+ description: Path to the Conjur cert file
+ type: path
+ required: False
+ ini:
+ - section: conjur,
+ key: cert_file
+ vars:
+ - name: conjur_cert_file
+ env:
+ - name: CONJUR_CERT_FILE
+ conjur_authn_token_file:
+ description: Path to the access token file
+ type: path
+ required: False
+ ini:
+ - section: conjur,
+ key: authn_token_file
+ vars:
+ - name: conjur_authn_token_file
+ env:
+ - name: CONJUR_AUTHN_TOKEN_FILE
"""
EXAMPLES = """
@@ -87,14 +144,13 @@ RETURN = """
import os.path
import socket
+import ansible.module_utils.six.moves.urllib.error as urllib_error
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from base64 import b64encode
from netrc import netrc
-from os import environ
from time import sleep
from ansible.module_utils.six.moves.urllib.parse import quote
-from ansible.module_utils.urls import urllib_error
from stat import S_IRUSR, S_IWUSR
from tempfile import gettempdir, NamedTemporaryFile
import yaml
@@ -272,7 +328,25 @@ class LookupModule(LookupBase):
elif not terms[0] or terms[0].isspace():
raise AnsibleError("Invalid secret path: empty secret path not accepted.")
- self.set_options(direct=kwargs)
+ # We should register the variables as LookupModule options.
+ #
+ # Doing this has some nice advantages if we're considering supporting
+ # a set of Ansible variables that could sometimes replace environment
+ # variables.
+ #
+ # Registering the variables as options forces them to adhere to the
+ # behavior described in the DOCUMENTATION variable. An option can have
+ # both an Ansible variable and an environment variable source, which means
+ # Ansible will do some juggling on our behalf.
+ self.set_options(var_options=variables, direct=kwargs)
+
+ appliance_url = self.get_var_value("conjur_appliance_url")
+ account = self.get_var_value("conjur_account")
+ authn_login = self.get_var_value("conjur_authn_login")
+ authn_api_key = self.get_var_value("conjur_authn_api_key")
+ cert_file = self.get_var_value("conjur_cert_file")
+ authn_token_file = self.get_var_value("conjur_authn_token_file")
+
validate_certs = self.get_option('validate_certs')
conf_file = self.get_option('config_file')
as_file = self.get_option('as_file')
@@ -280,53 +354,59 @@ class LookupModule(LookupBase):
if validate_certs is False:
display.warning('Certificate validation has been disabled. Please enable with validate_certs option.')
- if 'http://' in str(environ.get("CONJUR_APPLIANCE_URL")):
+ if 'http://' in str(appliance_url):
raise AnsibleError(('[WARNING]: Conjur URL uses insecure connection. Please consider using HTTPS.'))
conf = _merge_dictionaries(
_load_conf_from_file(conf_file),
{
- "account": environ.get('CONJUR_ACCOUNT'),
- "appliance_url": environ.get("CONJUR_APPLIANCE_URL")
+ "account": account,
+ "appliance_url": appliance_url
} if (
- environ.get('CONJUR_ACCOUNT') is not None
- and environ.get('CONJUR_APPLIANCE_URL') is not None
+ account is not None
+ and appliance_url is not None
)
else {},
{
- "cert_file": environ.get('CONJUR_CERT_FILE')
- } if (environ.get('CONJUR_CERT_FILE') is not None)
+ "cert_file": cert_file
+ } if (cert_file is not None)
else {},
{
- "authn_token_file": environ.get('CONJUR_AUTHN_TOKEN_FILE')
- } if (environ.get('CONJUR_AUTHN_TOKEN_FILE') is not None)
+ "authn_token_file": authn_token_file
+ } if authn_token_file is not None
else {}
)
+ if 'account' not in conf or 'appliance_url' not in conf:
+ raise AnsibleError(
+ """Configuration must define options `conjur_account` and `conjur_appliance_url`.
+ This config can be set by any of the following methods, listed in order of priority:
+ - Ansible variables of the same name, set either in the parent playbook or passed to
+ the ansible-playbook command with the --extra-vars flag
+ - Environment variables `CONJUR_ACCOUNT` and `CONJUR_APPLIANCE_URL`
+ - A configuration file on the controlling host with fields `account` and `appliance_url`"""
+ )
+
if 'authn_token_file' not in conf:
identity_file = self.get_option('identity_file')
identity = _merge_dictionaries(
_load_identity_from_file(identity_file, conf['appliance_url']),
{
- "id": environ.get('CONJUR_AUTHN_LOGIN'),
- "api_key": environ.get('CONJUR_AUTHN_API_KEY')
- } if (environ.get('CONJUR_AUTHN_LOGIN') is not None
- and environ.get('CONJUR_AUTHN_API_KEY') is not None)
+ "id": authn_login,
+ "api_key": authn_api_key
+ } if authn_login is not None
+ and authn_api_key is not None
else {}
)
- if 'account' not in conf or 'appliance_url' not in conf:
- raise AnsibleError(
- ("Configuration file on the controlling host must "
- "define `account` and `appliance_url`"
- "entries or they should be environment variables")
- )
-
if 'id' not in identity or 'api_key' not in identity:
raise AnsibleError(
- ("Identity file on the controlling host must contain "
- "`login` and `password` entries for Conjur appliance"
- " URL or they should be environment variables")
+ """Configuration must define options `conjur_authn_login` and `conjur_authn_api_key`.
+ This config can be set by any of the following methods, listed in order of priority:
+ - Ansible variables of the same name, set either in the parent playbook or passed to
+ the ansible-playbook command with the --extra-vars flag
+ - Environment variables `CONJUR_AUTHN_LOGIN` and `CONJUR_AUTHN_API_KEY`
+ - An identity file on the controlling host with the fields `login` and `password`"""
)
cert_file = None
@@ -364,3 +444,11 @@ class LookupModule(LookupBase):
return _store_secret_in_file(conjur_variable)
return conjur_variable
+
+ def get_var_value(self, key):
+ try:
+ variable_value = self.get_option(key)
+ except KeyError:
+ raise AnsibleError("{0} was not defined in configuration".format(key))
+
+ return variable_value
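For orientation, the net effect of the hunks above: the lookup now resolves its connection settings from Ansible variables first, then the CONJUR_* environment variables, then the configuration/identity files, as the rewritten error messages spell out. A minimal playbook sketch of the variable-based configuration (option names come from the DOCUMENTATION block above; the URL, account, identity, and secret path are placeholders, and the collection-qualified lookup name is assumed from the file path):

    - hosts: localhost
      gather_facts: false
      vars:
        conjur_appliance_url: "https://conjur.example.com"    # placeholder appliance URL
        conjur_account: "myaccount"                            # placeholder Conjur account
        conjur_authn_login: "host/ansible/demo-host"           # placeholder host identity
        conjur_authn_api_key: "{{ vault_conjur_api_key }}"     # placeholder; keep real keys vaulted
      tasks:
        - name: Retrieve a secret through the Conjur lookup
          ansible.builtin.debug:
            msg: "{{ lookup('cyberark.conjur.conjur_variable', 'ansible/fake-secret') }}"

The same names can also be supplied with --extra-vars on the ansible-playbook command line; per the new error text, these variables take priority over the environment variables and over the config and identity files.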
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/cleanup-conjur-identity b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/cleanup-conjur-identity
index d5cf77577..5f69dd9a4 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/cleanup-conjur-identity
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/cleanup-conjur-identity
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="8" time="3.385" timestamp="2023-09-28T17:31:14.495645" hostname="3bd1d1ebca35"><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-1]" time="0.554" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_centos-1]" time="0.182" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-2]" time="0.524" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_centos-2]" time="0.169" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-1]" time="0.517" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_ubuntu-1]" time="0.182" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-2]" time="0.551" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_ubuntu-2]" time="0.178" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="8" time="3.233" timestamp="2024-05-24T10:41:52.994874" hostname="c471efefee8a"><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-1]" time="0.541" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_centos-1]" time="0.155" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-2]" time="0.512" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_centos-2]" time="0.173" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-1]" time="0.499" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_ubuntu-1]" time="0.166" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-2]" time="0.521" /><testcase classname="test_cases.cleanup-conjur-identity.tests.test_default" name="test_is_not_conjurized[docker://dev-test_app_ubuntu-2]" time="0.160" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/configure-conjur-identity b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/configure-conjur-identity
index aec86fd9d..05c489723 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/configure-conjur-identity
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/junit/configure-conjur-identity
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="12" time="4.685" timestamp="2023-09-28T17:32:48.189759" hostname="3bd1d1ebca35"><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-1]" time="0.551" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_centos-1]" time="0.356" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_centos-1]" time="0.130" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-2]" time="0.524" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_centos-2]" time="0.363" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_centos-2]" time="0.134" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-1]" time="0.530" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_ubuntu-1]" time="0.413" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_ubuntu-1]" time="0.122" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-2]" time="0.536" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_ubuntu-2]" time="0.358" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_ubuntu-2]" time="0.139" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="12" time="4.388" timestamp="2024-05-24T10:43:04.792928" hostname="c471efefee8a"><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-1]" time="0.499" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_centos-1]" time="0.342" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_centos-1]" time="0.120" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_centos-2]" time="0.532" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_centos-2]" time="0.351" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_centos-2]" time="0.119" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-1]" time="0.495" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_ubuntu-1]" time="0.350" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_ubuntu-1]" time="0.119" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_hosts_file[docker://dev-test_app_ubuntu-2]" time="0.530" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_is_conjurized[docker://dev-test_app_ubuntu-2]" time="0.305" /><testcase classname="test_cases.configure-conjur-identity.tests.test_default" name="test_retrieve_secret_with_summon[docker://dev-test_app_ubuntu-2]" time="0.121" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test.sh b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test.sh
index 1f7e28145..61d3e7f67 100755
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test.sh
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test.sh
@@ -20,7 +20,7 @@ function run_test_case {
docker exec -e HFTOKEN="$(hf_token)" \
"$(ansible_cid)" bash -ec "
- cd /cyberark/tests/conjur-host-identity
+ cd /cyberark/tests/conjur_host_identity
# You can add -vvvvv here for debugging
ansible-playbook test_cases/$test_case/playbook.yml
@@ -28,7 +28,7 @@ function run_test_case {
if [ -d "test_cases/${test_case}/tests/" ]; then
docker exec "$(ansible_cid)" bash -ec "
- cd /cyberark/tests/conjur-host-identity
+ cd /cyberark/tests/conjur_host_identity
py.test --junitxml=./junit/${test_case} --connection docker -v test_cases/${test_case}/tests/test_default.py
"
fi
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml
index 2d1282c41..570a47f6b 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/bad-ssl-config/playbook.yml
@@ -5,7 +5,7 @@
- name: Attempt to configure Conjur identity
block:
- import_role:
- name: "cyberark.conjur.conjur-host-identity"
+ name: "cyberark.conjur.conjur_host_identity"
vars:
conjur_account: cucumber
conjur_appliance_url: "https://conjur-proxy-nginx"
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml
index ec472992d..718e2717c 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/cleanup-conjur-identity/playbook.yml
@@ -2,7 +2,7 @@
- name: Configuring Conjur identity on remote hosts
hosts: testapp
roles:
- - role: "cyberark.conjur.conjur-host-identity"
+ - role: "cyberark.conjur.conjur_host_identity"
conjur_account: "{{lookup('env', 'CONJUR_ACCOUNT')}}"
conjur_appliance_url: "{{lookup('env', 'CONJUR_APPLIANCE_URL')}}"
conjur_host_factory_token: "{{lookup('env', 'HFTOKEN')}}"
@@ -13,5 +13,5 @@
- name: Revoke Conjur identity from remote hosts
hosts: testapp
roles:
- - role: "cyberark.conjur.conjur-host-identity"
+ - role: "cyberark.conjur.conjur_host_identity"
state: absent
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml
index 7309d21b6..44fa6e2e4 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/configure-conjur-identity/playbook.yml
@@ -2,7 +2,7 @@
- name: Configuring Conjur identity on remote hosts
hosts: testapp
roles:
- - role: "cyberark.conjur.conjur-host-identity"
+ - role: "cyberark.conjur.conjur_host_identity"
conjur_account: "{{lookup('env', 'CONJUR_ACCOUNT')}}"
conjur_appliance_url: "{{lookup('env', 'CONJUR_APPLIANCE_URL')}}"
conjur_host_factory_token: "{{lookup('env', 'HFTOKEN')}}"
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml
index 96f31c7d0..d09af37c1 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/misconfig-conjur-identity/playbook.yml
@@ -5,7 +5,7 @@
- name: Attempt to configure Conjur identity
block:
- import_role:
- name: "cyberark.conjur.conjur-host-identity"
+ name: "cyberark.conjur.conjur_host_identity"
vars:
conjur_account: cucumber
# conjur_appliance_url: "https://conjur-proxy-nginx"
diff --git a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml
index 9175cfbbb..658beee54 100644
--- a/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml
+++ b/ansible_collections/cyberark/conjur/roles/conjur_host_identity/tests/test_cases/not-conjurized/playbook.yml
@@ -13,7 +13,7 @@
- name: Attempt to configure Conjur identity
block:
- import_role:
- name: "cyberark.conjur.conjur-host-identity"
+ name: "cyberark.conjur.conjur_host_identity"
vars:
conjur_account: "{{lookup('env', 'CONJUR_ACCOUNT')}}"
conjur_appliance_url: "{{lookup('env', 'CONJUR_APPLIANCE_URL')}}"
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable
index bc362ea43..78767685b 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.476" timestamp="2023-09-28T17:28:54.180417" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.450" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.480" timestamp="2024-05-24T10:40:05.446831" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.450" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-cert-path b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-cert-path
index 7d7b7b4a7..26ee4675a 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-cert-path
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-cert-path
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.381" timestamp="2023-09-28T17:28:57.025234" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-bad-cert-path.tests.test_default" name="test_retrieval_failed[docker://dev-ansible-1]" time="0.357" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.381" timestamp="2024-05-24T10:40:08.220541" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-bad-cert-path.tests.test_default" name="test_retrieval_failed[docker://dev-ansible-1]" time="0.351" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-certs b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-certs
index 684fdb89d..09efed3d7 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-certs
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-bad-certs
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.372" timestamp="2023-09-28T17:28:59.744035" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-bad-certs.tests.test_default" name="test_retrieval_failed[docker://dev-ansible-1]" time="0.348" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.408" timestamp="2024-05-24T10:40:10.871829" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-bad-certs.tests.test_default" name="test_retrieval_failed[docker://dev-ansible-1]" time="0.376" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-disable-verify-certs b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-disable-verify-certs
index 7cd98843b..2466fcc8a 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-disable-verify-certs
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-disable-verify-certs
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.484" timestamp="2023-09-28T17:29:03.245129" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-disable-verify-certs.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.458" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.473" timestamp="2024-05-24T10:40:14.267839" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-disable-verify-certs.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.441" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-into-file b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-into-file
index c9082babb..255bc72ab 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-into-file
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-into-file
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.721" timestamp="2023-09-28T17:29:06.557606" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-into-file.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.695" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.694" timestamp="2024-05-24T10:40:17.441882" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-into-file.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.666" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-no-cert-provided b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-no-cert-provided
index 34fd1fa03..901d73d5f 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-no-cert-provided
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-no-cert-provided
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.373" timestamp="2023-09-28T17:29:09.688977" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-no-cert-provided.tests.test_default" name="test_retrieval_failed[docker://dev-ansible-1]" time="0.349" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.398" timestamp="2024-05-24T10:40:20.482341" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-no-cert-provided.tests.test_default" name="test_retrieval_failed[docker://dev-ansible-1]" time="0.369" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token
index c3f0e5978..155af8480 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.585" timestamp="2023-09-28T17:29:12.796986" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-with-authn-token.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.552" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.485" timestamp="2024-05-24T10:40:23.597487" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-with-authn-token.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.453" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert
index db2705ced..cfba55757 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-authn-token-bad-cert
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="1.129" timestamp="2023-09-28T17:29:16.832828" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-with-authn-token-bad-cert.tests.test_default" name="test_retrieve_secret_failed[docker://dev-ansible-1]" time="1.046" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.681" timestamp="2024-05-24T10:40:26.802196" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-with-authn-token-bad-cert.tests.test_default" name="test_retrieve_secret_failed[docker://dev-ansible-1]" time="0.633" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-spaces-secret b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-spaces-secret
index 7747d9fd7..af4ac7fde 100644
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-spaces-secret
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/junit/retrieve-variable-with-spaces-secret
@@ -1 +1 @@
-<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.746" timestamp="2023-09-28T17:29:22.880901" hostname="3bd1d1ebca35"><testcase classname="test_cases.retrieve-variable-with-spaces-secret.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.695" /></testsuite></testsuites> \ No newline at end of file
+<?xml version="1.0" encoding="utf-8"?><testsuites><testsuite name="pytest" errors="0" failures="0" skipped="0" tests="1" time="0.573" timestamp="2024-05-24T10:40:31.821871" hostname="c471efefee8a"><testcase classname="test_cases.retrieve-variable-with-spaces-secret.tests.test_default" name="test_retrieved_secret[docker://dev-ansible-1]" time="0.538" /></testsuite></testsuites> \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/conjur_variable/test.sh b/ansible_collections/cyberark/conjur/tests/conjur_variable/test.sh
index c82810054..6983c7522 100755
--- a/ansible_collections/cyberark/conjur/tests/conjur_variable/test.sh
+++ b/ansible_collections/cyberark/conjur/tests/conjur_variable/test.sh
@@ -26,7 +26,8 @@ function run_test_case {
fi
# You can add -vvvvv here for debugging
- ansible-playbook 'test_cases/${test_case}/playbook.yml'
+ export SAMPLE_KEY='set_in_env'
+ ansible-playbook --extra-vars 'sample_key=set_in_extravars' 'test_cases/${test_case}/playbook.yml'
py.test --junitxml='./junit/${test_case}' \
--connection docker \
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.10.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.10.txt
index a81ded000..adfba63eb 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.10.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.10.txt
@@ -1,9 +1,3 @@
-dev/start.sh shebang
Jenkinsfile shebang
tests/conjur_variable/policy/root.yml yamllint:unparsable-with-libyaml
roles/conjur_host_identity/tests/policy/root.yml yamllint:unparsable-with-libyaml # File loaded by summon utility (in Jenkinsfile), not via Python
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang
-secrets.yml yamllint:unparsable-with-libyaml # File loaded by Conjur server, not via Python
-dev/policy/root.yml yamllint:unparsable-with-libyaml \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.11.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.11.txt
index b643480a2..adfba63eb 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.11.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.11.txt
@@ -1,9 +1,3 @@
Jenkinsfile shebang
-dev/start.sh shebang
tests/conjur_variable/policy/root.yml yamllint:unparsable-with-libyaml
roles/conjur_host_identity/tests/policy/root.yml yamllint:unparsable-with-libyaml # File loaded by summon utility (in Jenkinsfile), not via Python
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang
-secrets.yml yamllint:unparsable-with-libyaml # File loaded by Conjur server, not via Python
-dev/policy/root.yml yamllint:unparsable-with-libyaml \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.12.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.12.txt
index 6615526dc..c5b63bd70 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.12.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.12.txt
@@ -1,9 +1,3 @@
Jenkinsfile shebang
-dev/start.sh shebang
tests/conjur_variable/policy/root.yml yamllint:unparsable-with-libyaml # File loaded by Conjur server, not via Python
roles/conjur_host_identity/tests/policy/root.yml yamllint:unparsable-with-libyaml # File loaded by Conjur server, not via Python
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang
-secrets.yml yamllint:unparsable-with-libyaml # File loaded by Summon utility (in Jenkinsfile), not via Python
-dev/policy/root.yml yamllint:unparsable-with-libyaml \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.13.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.13.txt
index e456a5ff7..4144f5b48 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.13.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.13.txt
@@ -1,10 +1,2 @@
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang
-dev/policy/root.yml yamllint:unparsable-with-libyaml
-dev/start.sh shebang
-dev/stop.sh shebang
-dev/util.sh shebang
Jenkinsfile shebang
plugins/lookup/conjur_variable.py validate-modules:version-added-must-be-major-or-minor
-secrets.yml yamllint:unparsable-with-libyaml
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.14.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.14.txt
index e456a5ff7..4144f5b48 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.14.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.14.txt
@@ -1,10 +1,2 @@
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang
-dev/policy/root.yml yamllint:unparsable-with-libyaml
-dev/start.sh shebang
-dev/stop.sh shebang
-dev/util.sh shebang
Jenkinsfile shebang
plugins/lookup/conjur_variable.py validate-modules:version-added-must-be-major-or-minor
-secrets.yml yamllint:unparsable-with-libyaml
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.15.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.15.txt
index e456a5ff7..4144f5b48 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.15.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.15.txt
@@ -1,10 +1,2 @@
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang
-dev/policy/root.yml yamllint:unparsable-with-libyaml
-dev/start.sh shebang
-dev/stop.sh shebang
-dev/util.sh shebang
Jenkinsfile shebang
plugins/lookup/conjur_variable.py validate-modules:version-added-must-be-major-or-minor
-secrets.yml yamllint:unparsable-with-libyaml
diff --git a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.9.txt b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.9.txt
index 1ee255c73..e3a0cf635 100644
--- a/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.9.txt
+++ b/ansible_collections/cyberark/conjur/tests/sanity/ignore-2.9.txt
@@ -1,7 +1,3 @@
Jenkinsfile shebang
-dev/start.sh shebang
tests/conjur_variable/test.sh shebang
roles/conjur_host_identity/tests/test.sh shebang
-ci/build_release shebang
-ci/publish_to_galaxy shebang
-ci/test.sh shebang \ No newline at end of file
diff --git a/ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/test_conjur_variable.py b/ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/test_conjur_variable.py
index 995adf2da..c78704e8b 100644
--- a/ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/test_conjur_variable.py
+++ b/ansible_collections/cyberark/conjur/tests/unit/plugins/lookup/test_conjur_variable.py
@@ -84,6 +84,22 @@ class TestConjurLookup(TestCase):
@patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_variable')
@patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_token')
+ def test_run_with_ansible_vars(self, mock_fetch_conjur_token, mock_fetch_conjur_variable):
+ mock_fetch_conjur_token.return_value = "token"
+ mock_fetch_conjur_variable.return_value = ["conjur_variable"]
+
+ variables = {'conjur_account': 'fakeaccount',
+ 'conjur_appliance_url': 'https://conjur-fake',
+ 'conjur_cert_file': './conjurfake.pem',
+ 'conjur_authn_login': 'host/ansible/ansible-fake',
+ 'conjur_authn_api_key': 'fakekey'}
+ terms = ['ansible/fake-secret']
+
+ output = self.lookup.run(terms, variables)
+ self.assertEqual(output, ["conjur_variable"])
+
+ @patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_variable')
+ @patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_token')
@patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._merge_dictionaries')
def test_retrieve_to_file(self, mock_merge_dictionaries, mock_fetch_conjur_token, mock_fetch_conjur_variable):
mock_fetch_conjur_token.return_value = "token"
@@ -116,10 +132,11 @@ class TestConjurLookup(TestCase):
kwargs = {'as_file': False, 'conf_file': 'conf_file', 'validate_certs': True}
with self.assertRaises(AnsibleError) as context:
self.lookup.run(terms, **kwargs)
- self.assertEqual(
- context.exception.message,
- "Configuration file on the controlling host must define `account` and `appliance_url` entries or they should be environment variables"
- )
+
+ self.assertIn(
+ "Configuration must define options `conjur_account` and `conjur_appliance_url`",
+ context.exception.message,
+ )
# Withhold 'id' and 'api_key' fields
mock_merge_dictionaries.side_effect = [
@@ -129,11 +146,11 @@ class TestConjurLookup(TestCase):
with self.assertRaises(AnsibleError) as context:
self.lookup.run(terms, **kwargs)
- self.assertEqual(
- context.exception.message,
- ("Identity file on the controlling host must contain `login` and `password` "
- "entries for Conjur appliance URL or they should be environment variables")
- )
+
+ self.assertIn(
+ "Configuration must define options `conjur_authn_login` and `conjur_authn_api_key`",
+ context.exception.message,
+ )
@patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._merge_dictionaries')
def test_run_bad_cert_path(self, mock_merge_dictionaries):
@@ -152,8 +169,48 @@ class TestConjurLookup(TestCase):
with self.assertRaises(AnsibleError) as context:
self.lookup.run([], **kwargs)
- self.assertEqual(context.exception.message, "Invalid secret path: no secret path provided.")
+
+ self.assertEqual(context.exception.message, "Invalid secret path: no secret path provided.")
with self.assertRaises(AnsibleError) as context:
self.lookup.run([''], **kwargs)
- self.assertEqual(context.exception.message, "Invalid secret path: empty secret path not accepted.")
+
+ self.assertEqual(context.exception.message, "Invalid secret path: empty secret path not accepted.")
+
+ @patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_variable')
+ @patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_token')
+ def test_run_missing_account(self, mock_fetch_conjur_token, mock_fetch_conjur_variable):
+ mock_fetch_conjur_token.return_value = "token"
+ mock_fetch_conjur_variable.return_value = ["conjur_variable"]
+
+ variables = {'conjur_cert_file': './conjurfake.pem',
+ 'conjur_authn_login': 'host/ansible/ansible-fake',
+ 'conjur_authn_api_key': 'fakekey'}
+ terms = ['ansible/fake-secret']
+
+ with self.assertRaises(AnsibleError) as context:
+ self.lookup.run(terms, variables)
+
+ self.assertIn(
+ "Configuration must define options `conjur_account` and `conjur_appliance_url`",
+ context.exception.message
+ )
+
+ @patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_variable')
+ @patch('ansible_collections.cyberark.conjur.plugins.lookup.conjur_variable._fetch_conjur_token')
+ def test_run_missing_login(self, mock_fetch_conjur_token, mock_fetch_conjur_variable):
+ mock_fetch_conjur_token.return_value = "token"
+ mock_fetch_conjur_variable.return_value = ["conjur_variable"]
+
+ variables = {'conjur_account': 'fakeaccount',
+ 'conjur_appliance_url': 'https://conjur-fake',
+ 'conjur_cert_file': './conjurfake.pem'}
+ terms = ['ansible/fake-secret']
+
+ with self.assertRaises(AnsibleError) as context:
+ self.lookup.run(terms, variables)
+
+ self.assertIn(
+ "Configuration must define options `conjur_authn_login` and `conjur_authn_api_key`",
+ context.exception.message
+ )
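The variables dictionaries these new tests pass to LookupModule.run() mirror what a playbook author could set as ordinary inventory variables, since each option's DOCUMENTATION entry lists a vars: source. A hypothetical group_vars file carrying the same settings (values copied from the unit-test fixtures, so they are deliberately fake):

    # group_vars/all.yml -- illustrative only; values match the unit-test fixtures above
    conjur_account: fakeaccount
    conjur_appliance_url: https://conjur-fake
    conjur_cert_file: ./conjurfake.pem
    conjur_authn_login: host/ansible/ansible-fake
    conjur_authn_api_key: fakekey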
diff --git a/ansible_collections/dellemc/openmanage/.github/CODEOWNERS b/ansible_collections/dellemc/openmanage/.github/CODEOWNERS
index c19af9b2e..d131d6f34 100644
--- a/ansible_collections/dellemc/openmanage/.github/CODEOWNERS
+++ b/ansible_collections/dellemc/openmanage/.github/CODEOWNERS
@@ -7,6 +7,7 @@
# be requested for review when someone opens a pull request.
# order is alphabetical for easier maintenance.
#
+# Abhishek Kumar Sinha (Abhishek.Sinha10@Dell.com)
# Anirudh Kumar (Anirudh_Kumar1@Dell.com)
# Anooja Vardhineni (Anooja_Vardhineni@Dellteam.com)
# Chris Clonch (cacack)
@@ -16,13 +17,16 @@
# Husniya Hameed (husniya-hameed)
# Jagadeesh N V (jagadeeshnv)
# Jaya Gupta (Jaya_Gupta@Dell.com)
+# Kritika Bhateja (Kritika.Bhateja@Dell.com)
+# Lovepreet Singh (Lovepreet.Singh1@dell.com)
# Mario Lenz (mariolenz)
# Rajeev Arakkal (rajeevarakkal)
# Rajshekar P (rajshekarp87)
# Sachin Apagundi (sachin-apa)
# Sajna N Shetty (Sajna-Shetty)
# Sachin Kumar (Sachin_Kumar12@Dell.com)
+# Shivam Sharma (Shivam.Sharma3@Dell.com)
# Vasanth Ds (Vasanth_Sathyanaraya@Dell.com)
# for all files:
-* @sachin-apa @jagadeeshnv @felixs88
+* @sachin-apa @felixs88 @ABHISHEK-SINHA10 @Kritika-Bhateja-03 @rajshekarp87 @ShivamSh3 @singh-lovepreet1
diff --git a/ansible_collections/dellemc/openmanage/CHANGELOG.rst b/ansible_collections/dellemc/openmanage/CHANGELOG.rst
index 1620e6974..6a6f070c0 100644
--- a/ansible_collections/dellemc/openmanage/CHANGELOG.rst
+++ b/ansible_collections/dellemc/openmanage/CHANGELOG.rst
@@ -4,6 +4,49 @@ Dell OpenManage Ansible Modules Release Notes
.. contents:: Topics
+v9.3.0
+======
+
+Release Summary
+---------------
+
+- The ``ome_session`` module is added to allow you to create and delete the sessions on OpenManage Enterprise and OpenManage Enterprise Modular.
+- Added support to use session ID for authentication of iDRAC, OpenManage Enterprise and OpenManage Enterprise Modular modules.
+- Added time_to_wait option in ``idrac_storage_volume`` module.
+- Added support for Python 3.12.
+
+Major Changes
+-------------
+
+- Added support to use session ID for authentication of iDRAC, OpenManage Enterprise and OpenManage Enterprise Modular.
+- ome_session - This module allows you to create and delete the sessions on OpenManage Enterprise and OpenManage Enterprise Modular.
+
+Minor Changes
+-------------
+
+- Added support for Python 3.12.
+- Added time_to_wait option in ``idrac_storage_volume`` module.
+
+Bugfixes
+--------
+
+- Resolved the issue in ``idrac_certificates`` module where subject_alt_name parameter was only accepting first item in list. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/584)
+- Resolved the issue in ``idrac_virtual_media`` module where the Authorization request header was included in the request. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/612)
+- Resolved the issue in ``ome_application_certificate`` module related to a padding error in generated CSR file. (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/370)
+
+Known Issues
+------------
+
+- idrac_diagnostics - Issue(285322) - This module doesn't support export of diagnostics file to HTTP and HTTPS share via SOCKS proxy.
+- idrac_firmware - Issue(279282) - This module does not support firmware update using HTTP, HTTPS, and FTP shares with authentication on iDRAC8.
+- idrac_storage_volume - Issue(290766) - The module will report success instead of showing failure for new virtual disk creation on the BOSS-N1 controller if a virtual disk is already present on the same controller.
+- ome_diagnostics - Issue(279193) - Export of SupportAssist collection logs to the share location fails on OME version 4.0.0.
+- ome_smart_fabric_uplink - Issue(186024) - The module is supported by OpenManage Enterprise Modular, however it does not allow the creation of multiple uplinks of the same name. If an uplink is created using the same name as an existing uplink, then the existing uplink is modified.
+
+New Modules
+-----------
+
+- dellemc.openmanage.ome_session - This module allows you to create and delete sessions on OpenManage Enterprise and OpenManage Enterprise Modular.
v9.2.0
======
@@ -494,7 +537,6 @@ Release Summary
Gather facts from iDRAC and Deploy operating system is added.
- Plugin OME inventory is enhanced to support the environment variables for the input parameters.
-
Known Issues
------------
diff --git a/ansible_collections/dellemc/openmanage/FILES.json b/ansible_collections/dellemc/openmanage/FILES.json
index cd6b8725e..463de8c02 100644
--- a/ansible_collections/dellemc/openmanage/FILES.json
+++ b/ansible_collections/dellemc/openmanage/FILES.json
@@ -8,3352 +8,3268 @@
"format": 1
},
{
- "name": ".ansible-lint",
+ "name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1faa2fe6cb8f8029a0aae03332dc309e5144309c0a7ae2df24c25727e6f70cf",
+ "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
"format": 1
},
{
- "name": ".github",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6f0f045a08a6f73f1903309c1636aefca64bad5073cf5e1b3d092f0f4fc1806b",
"format": 1
},
{
- "name": ".github/CODEOWNERS",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "22c50cccda3a7ff5dcb7285a2bfebca1428e8b6b355f3f376072bffbb12f6c5b",
+ "name": "roles",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE",
+ "name": "roles/idrac_server_powerstate",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/ask_a_question.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e172d0ed215d9561ccf644c10a0f7fb5ea722a427ff13741f8634053d62a9338",
+ "name": "roles/idrac_server_powerstate/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/bug_report.yml",
+ "name": "roles/idrac_server_powerstate/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56f4cdaffe4a97636427b91eb59acb67bc92f869451d137308428771d48e6aad",
+ "chksum_sha256": "689583bd8c529dfe982c3dcdf84e968eeee49eea7da2eac65ad34f74afc2dd3c",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/config.yml",
+ "name": "roles/idrac_server_powerstate/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "155b8ff8561f87047a5e2cebac1737edfbba156c21cc733fe636d84ca222d627",
+ "chksum_sha256": "f1119ecdb9b37bfa6f3860f8c767174cca920a521a95946f04a93f5c2ca4cd0e",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/feature_request.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e15deb214aa7cb6847890eb95491e027c021b7f1ec01c7fbda8dca021d1162a0",
+ "name": "roles/idrac_server_powerstate/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/PULL_REQUEST_TEMPLATE.md",
+ "name": "roles/idrac_server_powerstate/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "186d97efed69e2211f154847a8ac52f263fa54319620bf1dcea00b59f494a969",
+ "chksum_sha256": "f92fe5299b10e093b76f7527715368bf5d9a243323f74a69b3a74bc75b172886",
"format": 1
},
{
- "name": ".github/workflows",
+ "name": "roles/idrac_server_powerstate/molecule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows/ansible-test.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d797937de9e579f1ecc980d7db21d2e3d36055f4212cad8103d4dba0d7d140cd",
- "format": 1
- },
- {
- "name": ".gitignore",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3f8be16c6a53d1a7c659b0a9803866e10395d5fa466d557f8e8089e3a26cf0c3",
+ "name": "roles/idrac_server_powerstate/molecule/nmi",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "LICENSE",
+ "name": "roles/idrac_server_powerstate/molecule/nmi/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "bindep.txt",
+ "name": "roles/idrac_server_powerstate/molecule/nmi/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76c27d1138b5331a2bfb3b8b7e92c70cc39fd1fe793a48117b54ce2649cd7b1f",
+ "chksum_sha256": "c163e9d4d92dfc89ad31c68ee2172162ce2cf9f2c8c1a65f28d5f42437bbc90d",
"format": 1
},
{
- "name": "changelogs",
+ "name": "roles/idrac_server_powerstate/molecule/on",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/.plugin-cache.yaml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a52aa92b1346f5c730d603464c8f20375eaead759c3524f64cd876eea4f3ecc4",
- "format": 1
- },
- {
- "name": "changelogs/config.yaml",
+ "name": "roles/idrac_server_powerstate/molecule/on/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c00012f627317aec0a7b0b4db3da8c43eb7eb63188eb3f7ee8c2319692aab2d7",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
+ "name": "roles/idrac_server_powerstate/molecule/on/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "743a69fffd94987fc63c58c728271c10cd363eff555161aef6d6a45d70300b3d",
+ "chksum_sha256": "d71c25f25f9e3ba08d70dd9f7dc33d304e69950e6245781aeb8ed71ac00d23d1",
"format": 1
},
{
- "name": "docs",
+ "name": "roles/idrac_server_powerstate/molecule/forcerestart",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/ADDITIONAL_INFORMATION.md",
+ "name": "roles/idrac_server_powerstate/molecule/forcerestart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83c4a24d8d137a954c891ffcc1c75a8157d7957204a59797c5fd4a9dfd3326da",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/BRANCHING.md",
+ "name": "roles/idrac_server_powerstate/molecule/forcerestart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "347e9f1d9850cbb807f3ac5680a4db0145a90f6714ee23c2d9ea3618806c8609",
+ "chksum_sha256": "1fc6007dac86a8108d44f5d57cafbbedd7f31ec317258a090cdd20ca7338d5d7",
"format": 1
},
{
- "name": "docs/CODE_OF_CONDUCT.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b115034e51a649fda55572de32880b12224311bc35517d0534de26223683230a",
+ "name": "roles/idrac_server_powerstate/molecule/gracefulrestart",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/COMMITTER_GUIDE.md",
+ "name": "roles/idrac_server_powerstate/molecule/gracefulrestart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9faa99978e41d71c8f84fb1e764ab495075c57e81c8a26edcf0bd41bdedda326",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/CONTRIBUTING.md",
+ "name": "roles/idrac_server_powerstate/molecule/gracefulrestart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ffe89c3652d21757f689570ac1a14eaa4dc6e35bc99793f48e6eef21d2b84d54",
+ "chksum_sha256": "495911d67e2d1421a451dc2ecc4d52705486791f8be17321134fd2d91fb4aa9b",
"format": 1
},
{
- "name": "docs/DEBUG.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9b2ce4f68fc99226b34a0ebced6fd1919f514f0a33c877b377566b146972d4b6",
+ "name": "roles/idrac_server_powerstate/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/DOCUMENTATION.md",
+ "name": "roles/idrac_server_powerstate/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d79a78be68eeb1be24c29a4d65bb253a1c90f43eaefc661d402e3bab0b5b37f3",
+ "chksum_sha256": "f3f5fdbd0243581c13e09c4a9347c3197712b89de7ccf5c19bf040002a8e0967",
"format": 1
},
{
- "name": "docs/ISSUE_TRIAGE.md",
+ "name": "roles/idrac_server_powerstate/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25dd3183365794c152fe138272727efcded3456422c5dc16c18f2b31a8f2ab16",
+ "chksum_sha256": "8790919530e9b792ac0a0bb5aa7f39756369e469fdcdead5ed849377be400f55",
"format": 1
},
{
- "name": "docs/MAINTAINERS.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "480717a42cbfbb59ee6afb30d1df16f4e6f586b1375a262ba2c87032811fb929",
+ "name": "roles/idrac_server_powerstate/molecule/forceoff",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/SECURITY.md",
+ "name": "roles/idrac_server_powerstate/molecule/forceoff/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4bcf55efb2913e91247bcbc9e2944cd6bdca0c80a2f6314dbd62802b167f3c0",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/SUPPORT.md",
+ "name": "roles/idrac_server_powerstate/molecule/forceoff/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd2fd536176f16d1722ee4fb035b8c8a7b92d77395a8fd6b74f56a9dc5cb3d86",
+ "chksum_sha256": "8d81e28400e1ecd4d4f63e3880ad12efcad626f1b482aea24e3781db13463872",
"format": 1
},
{
- "name": "docs/modules",
+ "name": "roles/idrac_server_powerstate/molecule/gracefulshutdown",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/dellemc_configure_idrac_eventing.rst",
+ "name": "roles/idrac_server_powerstate/molecule/gracefulshutdown/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "15a235619160acc869ef11202c81b3d702762fe22e7796a8f7dd2f09f8fe2036",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/modules/dellemc_configure_idrac_services.rst",
+ "name": "roles/idrac_server_powerstate/molecule/gracefulshutdown/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5da2d61f8d536f4835cad6c0b11619759d47177e8121c5f52a605ae6698239d",
+ "chksum_sha256": "fb25fbf9767297093fc2c5fcb74c38fdae2de2870dd4a7f1d264e32ab3fd2f34",
"format": 1
},
{
- "name": "docs/modules/dellemc_idrac_lc_attributes.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0be40c9b7e8fe75321a78edeac8f5d8e38c425ea93b85a2da535de9e6615a851",
+ "name": "roles/idrac_server_powerstate/molecule/powercycle",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/dellemc_idrac_storage_volume.rst",
+ "name": "roles/idrac_server_powerstate/molecule/powercycle/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c37ba05337b65851c5996643e77bf4c4e29165693f840e39c8b4e837c70ca1b",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/modules/dellemc_system_lockdown_mode.rst",
+ "name": "roles/idrac_server_powerstate/molecule/powercycle/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2868bb81a87c50614180c0615c9b3203ba6fdbca3401b447e6feee7f55e78ca4",
+ "chksum_sha256": "be3e16e9141db31d632e532d034f33ed6d72a9151f1c3194080d16b1ab4961f2",
"format": 1
},
{
- "name": "docs/modules/idrac_attributes.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "595a9dd30f4ba4e0a770c5be1df3a46031bf16435b7b3c402d192d2fee03f9dd",
+ "name": "roles/idrac_server_powerstate/molecule/pushpowerbutton",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_bios.rst",
+ "name": "roles/idrac_server_powerstate/molecule/pushpowerbutton/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6f031a7525fb6bd815beb1b5046f439b8d77b0fc196ea5883ee750b625f9839",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/modules/idrac_boot.rst",
+ "name": "roles/idrac_server_powerstate/molecule/pushpowerbutton/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecd08d41158980bb9feb767b6bb8c1ec198d18835d2f740be86ba0509810c864",
+ "chksum_sha256": "e4566f9dc07e3d658986932a9190fadcd98950e65e54829810aac38898b43e77",
"format": 1
},
{
- "name": "docs/modules/idrac_certificates.rst",
+ "name": "roles/idrac_server_powerstate/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10f8fcb9881c004e3fb57910fc5abb181c4b64b10a6c517ad71fc9f5fcdf1989",
+ "chksum_sha256": "241acc485326568db77e4c7c6aa738605779ea7949337b58425a58cd2d26bfc5",
"format": 1
},
{
- "name": "docs/modules/idrac_firmware.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "cdab71f3484273cc4cb5102504a66c4e192315bbbdae56f49f62a56a956e4330",
+ "name": "roles/idrac_server_powerstate/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_firmware_info.rst",
+ "name": "roles/idrac_server_powerstate/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7721e7d37e42c9c633a151701459650ffea7ec5473fb60cab1baea0dabd5cbb3",
+ "chksum_sha256": "016ff520beef55bf26d90d19c4ff8187fcf861cc6697999b71d4c756e8cd1af4",
"format": 1
},
{
- "name": "docs/modules/idrac_license.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bf8bb22e1c12abed8dfe74e3eb68d33ff2f5dfadde8c3336e00272e6920078c7",
+ "name": "roles/idrac_server_powerstate/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_lifecycle_controller_job_status_info.rst",
+ "name": "roles/idrac_server_powerstate/tasks/init.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acc28fe56cb9951a42e2a509ed99904697406a3040d4b40e87e5aa87b0a64ee8",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/modules/idrac_lifecycle_controller_jobs.rst",
+ "name": "roles/idrac_server_powerstate/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb9ae6a91089f33685ff3774ee97ace1e4cff6eb393d9456666a35f19db3d142",
+ "chksum_sha256": "28a5dd2d4dd0c53d832afbc13fc43dd71180b4d8579e5e37d62073e2fa51ce07",
"format": 1
},
{
- "name": "docs/modules/idrac_lifecycle_controller_logs.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2da2f71d6dd23d5feed5e67935caf3de9fc1f37fbec3f390ce09c04a01be3082",
+ "name": "roles/idrac_server_powerstate/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_lifecycle_controller_status_info.rst",
+ "name": "roles/idrac_server_powerstate/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "68af30f4831b0eda2e449b16c60c11b93c542432bf1745ee3a77c320f61e56c4",
+ "chksum_sha256": "8fb9435cbde9641256ffb3cad4e82e8169969ecf0faebed24c6e1eca315a17c6",
"format": 1
},
{
- "name": "docs/modules/idrac_network.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0d0088371f009e142aec401b6e9233e200ac68c2d5986e2c2369e8eb8424a306",
+ "name": "roles/idrac_server_powerstate/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_network_attributes.rst",
+ "name": "roles/idrac_server_powerstate/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd8281ba848e85640dfb98c566358c8d258168c999098c457ecca0d1c045ad32",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "docs/modules/idrac_os_deployment.rst",
+ "name": "roles/idrac_server_powerstate/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b7ddf54fe21917b967f0ffd6bd81e05f799ce8ae356e150dd4c317a65b22d23",
+ "chksum_sha256": "a60392a56123662801eb3957572a4c57d858026c0c0222d8e7a3b2c04a24c5f1",
"format": 1
},
{
- "name": "docs/modules/idrac_redfish_storage_controller.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "cd01363e3a17bbc7a0210fd2c7402ce4cc66ec6a0d786d9a0569daf9bfa877c3",
+ "name": "roles/idrac_user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_session.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b76827433fbcff22ddf95bc5632868b72cf395f419840c4e12d5373142c0f72c",
+ "name": "roles/idrac_user/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_server_config_profile.rst",
+ "name": "roles/idrac_user/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3163bc9948b760abb5c2eb6e5af336417c5a1fb2296f795d1c46db4676de674c",
+ "chksum_sha256": "be066c11750b38f72617e0d31f8c107f945bb65448118a94bdb923a80babb2c0",
"format": 1
},
{
- "name": "docs/modules/idrac_syslog.rst",
+ "name": "roles/idrac_user/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7230d44ecb0bb2901ff2e42191b6a4a59e471c258f0cd930ae98065bd2f61123",
+ "chksum_sha256": "36cbc64418daec070e7d63dcecf4ec4e7be341ef8a7661b9c5487e8903a48c2c",
"format": 1
},
{
- "name": "docs/modules/idrac_system_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c69ff9e0600ac8b497816051f7ff2c59e434b15cd89c8456a30065923900380a",
+ "name": "roles/idrac_user/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_timezone_ntp.rst",
+ "name": "roles/idrac_user/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02646c6841f8212f8869ac5c125e9daf54038c78ef86388946023ab1479bfe9",
+ "chksum_sha256": "71a5ee7c5ff8f63dcdb4c815fd75a8694be391c701903e8297b8e3eecf83d12d",
"format": 1
},
{
- "name": "docs/modules/idrac_user.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9282d3b21e78c2282d625e7ef011ccbf10f988855c5bf0e58408a179135360e2",
+ "name": "roles/idrac_user/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_user_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dbfd54f283705a64cc6a84da5dbfba7673d8d12e0a1a49cfa01f8dbfd376958b",
+ "name": "roles/idrac_user/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/idrac_virtual_media.rst",
+ "name": "roles/idrac_user/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7a27e6acb73cf51ab56187d2d1f10810fb5c9721ec1faae4d3d003fd72244e4",
+ "chksum_sha256": "13fc1cf138058346df47e0863e038de0d6a3fe59cf9ce35d2af2d872f237444c",
"format": 1
},
{
- "name": "docs/modules/ome_active_directory.rst",
+ "name": "roles/idrac_user/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "550f341e2ef2ec6078ca235bde879963c7bf97185824f9b7d21e7034ff6177ee",
+ "chksum_sha256": "6b58bbd18e562ced8fbaccd5a23d479b69b4bec8659d246d62de5c135b291dcc",
"format": 1
},
{
- "name": "docs/modules/ome_alert_policies.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "cc865e63694540cecbd0212211fdd061a39bef5eb1405087e5e091dba8955289",
+ "name": "roles/idrac_user/molecule/TC-152148",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_alert_policies_actions_info.rst",
+ "name": "roles/idrac_user/molecule/TC-152148/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4d6f52e980cb1836d1ca7c5352bf17c04acc5462fbd949bdd46b0f80b575509",
+ "chksum_sha256": "06c0f8c4fb2670cc81d9f1cfbd7d0664c88249ec23d5d0c35bc4177da0fd296c",
"format": 1
},
{
- "name": "docs/modules/ome_alert_policies_category_info.rst",
+ "name": "roles/idrac_user/molecule/TC-152148/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4b8d6a58969d476a4699531df99e0a2137437a65f8564b722e99618d7f74fc3",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
"format": 1
},
{
- "name": "docs/modules/ome_alert_policies_info.rst",
+ "name": "roles/idrac_user/molecule/TC-152148/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a202372a436597ad619380b934da694ad1a1d175566ebfcfb03e177ae9a8d142",
+ "chksum_sha256": "38da55bda10bc1de8eb098d795abe2588f27159b217e6c3157c5396273916e02",
"format": 1
},
{
- "name": "docs/modules/ome_alert_policies_message_id_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "428d10ae4bd13d877782495244086220166d69a939d99b112ac09c03bd787eaf",
+ "name": "roles/idrac_user/molecule/TC-152120",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_application_alerts_smtp.rst",
+ "name": "roles/idrac_user/molecule/TC-152120/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a00fbbcff1e1de06909d434ad763b6f1497e9acaf617041a9f5de38f0c44a21b",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
"format": 1
},
{
- "name": "docs/modules/ome_application_alerts_syslog.rst",
+ "name": "roles/idrac_user/molecule/TC-152120/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18605643bc8d76ca3804e19686f96f0450abe023a0914e587be8434812d5c2e6",
+ "chksum_sha256": "600419dfff67b4f0e13aceef503059c3573db9322dc487db74440b6489e93177",
"format": 1
},
{
- "name": "docs/modules/ome_application_certificate.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "788a9fd904afaa8824f07b0668d0b5d04acb05db3ad81c47fe3f671dc2405592",
+ "name": "roles/idrac_user/molecule/TC-152150",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_device_local_access_configuration.rst",
+ "name": "roles/idrac_user/molecule/TC-152150/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1166a89877d55ff1b942027f2027cd5509c8270f62ba4f5f1824317789f90b8",
+ "chksum_sha256": "f5f0a568105643945bdd6b58bf10914d1a66c46130e7ee8f4ffa7b70f0c387c7",
"format": 1
},
{
- "name": "docs/modules/ome_application_network_address.rst",
+ "name": "roles/idrac_user/molecule/TC-152150/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9b03c1fa75d1bb56a672dd5b59d0736a33580f5e6c51f4d0707f1cb27812ce6",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
"format": 1
},
{
- "name": "docs/modules/ome_application_network_proxy.rst",
+ "name": "roles/idrac_user/molecule/TC-152150/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3ad1795dd6d386e0ec41d6a30af7f35d6abff67c25f84a938b22d5cb56de4d3",
+ "chksum_sha256": "6220849df8cee2230b06defb296fc7569be79bff4e9f2e53dfc4c0ff4f108708",
"format": 1
},
{
- "name": "docs/modules/ome_application_network_settings.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "913d8eb24f87c9a6067713fa4de41b6b48f9961831feb38e5829370f84ce6998",
+ "name": "roles/idrac_user/molecule/TC-152146",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_application_network_time.rst",
+ "name": "roles/idrac_user/molecule/TC-152146/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f774a1362558799886f5b90831219e7136a0286a335d9c3f2a9924c687b9a56",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
"format": 1
},
{
- "name": "docs/modules/ome_application_network_webserver.rst",
+ "name": "roles/idrac_user/molecule/TC-152146/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2fae7e588d022a7ecbc50be03fbf84eb38e41f49933bb1d5fdd1fc6a398a76a",
+ "chksum_sha256": "c4fbf2ba3ab40583ae88f9b5b63744498b858acb41a33bb75a882f0974b47b3d",
"format": 1
},
{
- "name": "docs/modules/ome_application_security_settings.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4ea4e898cab891699e9006122e6a8ba9a70d20c299da60742ff397a6916137f4",
+ "name": "roles/idrac_user/molecule/TC-152147",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_chassis_slots.rst",
+ "name": "roles/idrac_user/molecule/TC-152147/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c4d578297728a799abe5ffd7dba313380895513a565107438a17a55ee431866",
+ "chksum_sha256": "13fc1cf138058346df47e0863e038de0d6a3fe59cf9ce35d2af2d872f237444c",
"format": 1
},
{
- "name": "docs/modules/ome_configuration_compliance_baseline.rst",
+ "name": "roles/idrac_user/molecule/TC-152147/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7715d2c934990d7048614a5fcd3319e79bedcb9a0cbbc3632b2f56fff566dba1",
+ "chksum_sha256": "1096107ae7f0281cbc4f43477562d8c9709ceb968ee5058b5880afb035cfac59",
"format": 1
},
{
- "name": "docs/modules/ome_configuration_compliance_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3354f92e90c63162cd9e77c0d125a47e9c831b056ec44dfb2056a305e93a0891",
+ "name": "roles/idrac_user/molecule/resources",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_device_group.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "558d22aaf32ef3df83d9da91a3e6c0b4d3f07d2ceeedd5e3879156dcfe1a0d55",
+ "name": "roles/idrac_user/molecule/resources/idrac_user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_device_info.rst",
+ "name": "roles/idrac_user/molecule/resources/idrac_user/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "053b25f1b3afd23790b7f8fdbfe965422848f7da3fd28a864e0b63e469daa386",
+ "chksum_sha256": "291fe02f4f127b148c6bc9a2325d06c7e6497f4c853152a7fc42f3c5ccf479e5",
"format": 1
},
{
- "name": "docs/modules/ome_devices.rst",
+ "name": "roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83328a82404890d15c1d5b6bdd0e8bfac5c9aca355fa3df468bdc744622fc439",
+ "chksum_sha256": "6e080f7169503086fc1e8fc4e4fa95e56a0dd39403fe183f086ad9770ded41e2",
"format": 1
},
{
- "name": "docs/modules/ome_device_location.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ef6e85e39bfc4de890bcf9168816ec761aaa2692b850f634e1286a329ccef81d",
+ "name": "roles/idrac_user/molecule/TC-152149",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_device_mgmt_network.rst",
+ "name": "roles/idrac_user/molecule/TC-152149/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b887414a02438629a3acc83ecd225f6573293cf81aff71215c9e7a47b7a83fb",
+ "chksum_sha256": "6018b3098204f927d9412e33103fb6cc93847ba09cd25a01df104efe45037753",
"format": 1
},
{
- "name": "docs/modules/ome_device_network_services.rst",
+ "name": "roles/idrac_user/molecule/TC-152149/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61099f10bb3e80e56332bacf60f4f6dda3be03a0d5bae8c9945de89b2ae98ef1",
+ "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
"format": 1
},
{
- "name": "docs/modules/ome_device_power_settings.rst",
+ "name": "roles/idrac_user/molecule/TC-152149/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "515913e4e4658681b8cc5cf7da7fd70112d64a74acacb1ed3539a10b0e3dc34e",
+ "chksum_sha256": "20d79a5f4a9dafe606d8b6cde2accf782af12b9f1781f5df0682b93fcdf16ddf",
"format": 1
},
{
- "name": "docs/modules/ome_device_quick_deploy.rst",
+ "name": "roles/idrac_user/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87e3c3bc99c687465681a9ab0001ab2fae508969f750fb1cd3fc475db7fd8491",
+ "chksum_sha256": "369f4ae5faa868c85788d6df11bf9446ae9fb5ed5d3c9f2808ed9e84cbc06c1a",
"format": 1
},
{
- "name": "docs/modules/idrac_reset.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3fd35275a4b04236ac8166c50d34316ac0568d857e2a031478f31700458bb6c0",
+ "name": "roles/idrac_user/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_diagnostics.rst",
+ "name": "roles/idrac_user/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6ac0f361b89eb6beb02b3288b1b11dbd263d6a2ef624cd1d564872a1df96433",
+ "chksum_sha256": "6ab3a9e6149fca242c0436f5630a97a2063f6232f42a9df4482b3977471d0be3",
"format": 1
},
{
- "name": "docs/modules/ome_discovery.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fb562eaadab71e87fedaf360f12cfb9b5676b43fdd5584fd61d53ec61c38280c",
+ "name": "roles/idrac_user/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_domain_user_groups.rst",
+ "name": "roles/idrac_user/tasks/present.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6f655e0ac9dfe1fafaf7ea9e9e9cdbebe2c2efa5c314852427376c3b1775f7e",
+ "chksum_sha256": "31094d92ad1e121dfb3f27d6dc0e8fdf471ee01b2168ba2cfbd66078dd0054c0",
"format": 1
},
{
- "name": "docs/modules/ome_firmware.rst",
+ "name": "roles/idrac_user/tasks/absent.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ddee2a12586396c8678b11cc1ae1e1dfe2a04c87e43f4c31325d16ddd78d4423",
+ "chksum_sha256": "6fcebf5c4162fe906c18e2babb948835808726952abe198cc14caaaee1454546",
"format": 1
},
{
- "name": "docs/modules/ome_firmware_baseline.rst",
+ "name": "roles/idrac_user/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4d5b803ac5016911fd4bc3ad926861e93bc73eee282a40bea3dd2a55158c15d",
+ "chksum_sha256": "cdf0aed407cb791aaabd992919f0d1449b6199b9fe04fe1ccdee43c7e8a3ef67",
"format": 1
},
{
- "name": "docs/modules/ome_firmware_baseline_compliance_info.rst",
+ "name": "roles/idrac_user/tasks/get_user.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0811710e22209c9e995c81a9d263cacf9639d33c3ea3f8c2d9bb0fe40e90f102",
+ "chksum_sha256": "7250bb2b6723ad29148ec605badbcc828f6656088ceaa7f4ad02b46dc4aa25dc",
"format": 1
},
{
- "name": "docs/modules/ome_firmware_baseline_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "735740831ade31895dbfa49a19cddf12d2d82d3836fc35bfa6e295c34a4f4170",
+ "name": "roles/idrac_user/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_firmware_catalog.rst",
+ "name": "roles/idrac_user/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04484a34dec1e25ef3444e0258b612f71aab29bbbee70c07145740c00ac760fe",
+ "chksum_sha256": "00a6e4df8f9fe8c7b2be5ed666c8a779836b8bdd3a57736d81729e080d52d27b",
"format": 1
},
{
- "name": "docs/modules/ome_groups.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "aaaf374a259564074eced1eefc734fd65c19803f8a4557262f78aefc30adcc41",
+ "name": "roles/idrac_user/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_identity_pool.rst",
+ "name": "roles/idrac_user/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a0a40cc5aefcdd5d4683474015e5bfac0a7902b8fffd1cf79ad1f1f45f105b2",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "docs/modules/ome_job_info.rst",
+ "name": "roles/idrac_user/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58c1431a6b58bf1e0c14fe1c461b71887456679aa307a47b9afa7530f024d3c8",
+ "chksum_sha256": "594b1a865eae9e2fc2a8f2f9daf0872a5c4a6b697af7167eadfb52df1d5009be",
"format": 1
},
{
- "name": "docs/modules/ome_network_port_breakout.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2dbfb63ba6660e46088091c3b606befd0a32b9ccacc1455861d973afea3c605c",
+ "name": "roles/idrac_job_queue",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_network_vlan.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b586d89ff753eb1590f1b320739791d08b5fdb84f2cd641e91e56d9d275dded1",
+ "name": "roles/idrac_job_queue/templates",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_network_vlan_info.rst",
+ "name": "roles/idrac_job_queue/templates/idrac_delete_job_queue_force.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "051c30fcff66703db65f1affd6b5d0694b1624f201ba31fc360d75c66d175a9f",
+ "chksum_sha256": "b777b52ecc380bc79f8a903a8bd2fb8613e98ea665d41e01efa2532fc9f9ef5b",
"format": 1
},
{
- "name": "docs/modules/ome_powerstate.rst",
+ "name": "roles/idrac_job_queue/templates/idrac_delete_job_queue.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1e7bd216d6245c298ff31b9e9f09ed3a86f925da0ccd4ffefcbd4435bb3331e",
+ "chksum_sha256": "f83008a98424af87bd8817bb29a82a175e175a8459146267c86e7cbd1895c31b",
"format": 1
},
{
- "name": "docs/modules/ome_profile.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8d4b6451dc04d7e0759759d53e112f0542e380b9ece9756ee59f95f9ee38d511",
+ "name": "roles/idrac_job_queue/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_profile_info.rst",
+ "name": "roles/idrac_job_queue/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0f891ac994a3dc6f86b8a1cf20bd7f7dca9e59ffbc5a34cdd2980f668e27e36",
+ "chksum_sha256": "e63d726e475ad59773fa26fe3918bd6cb5b042b508834e49c8c344fbf4945ccd",
"format": 1
},
{
- "name": "docs/modules/ome_server_interface_profile_info.rst",
+ "name": "roles/idrac_job_queue/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a81231f6eb702cad2b4dadc9e328f09207493c95a3c4ddcc26d2bfac6b1a7cdb",
+ "chksum_sha256": "081550bbea995e8776778cb69f4f3003f5dc9ef2148439bb0ab441f28fd67949",
"format": 1
},
{
- "name": "docs/modules/ome_server_interface_profiles.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "965b12325e46ccebc57a9e35846325c0abfc8cacc179c09b94c0f2ecd15942df",
+ "name": "roles/idrac_job_queue/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_smart_fabric.rst",
+ "name": "roles/idrac_job_queue/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d0647d3498526246714b4f6baeef18576f44a9cb422b5b4bae3febddc1025fcc",
+ "chksum_sha256": "12bd53e6ac521d02cf9d16a83ef864467aa1ca7398b0b20e15836de87c74f9cb",
"format": 1
},
{
- "name": "docs/modules/ome_smart_fabric_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e83b11d41e04c07eb26b4cc125a461fee36c5f88dd67224ce52f13eeab1e280e",
+ "name": "roles/idrac_job_queue/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_smart_fabric_uplink.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9587cc3af635051d2bd261dbfbbefcb7e818fe00b24323cc4795b3dad314fe66",
+ "name": "roles/idrac_job_queue/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_smart_fabric_uplink_info.rst",
+ "name": "roles/idrac_job_queue/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc8261cc3a57c431a71cafca0737970c2e56d41b56f3d686563506ed4fc9a9cf",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "docs/modules/ome_template.rst",
+ "name": "roles/idrac_job_queue/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8755b2b7ea80ec6d275153e325ab764ba585c9056a4fe3c4868ce55c55270ab4",
+ "chksum_sha256": "e0df09c4e425f51cef7082d2a84ab880b9c9b8ab3af909589cbd5234afbb5534",
"format": 1
},
{
- "name": "docs/modules/ome_template_identity_pool.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "33e0114359d664f81f2dd3740f0cfc1d9a2647a16180993eafe2a2a3138c31dc",
+ "name": "roles/idrac_job_queue/molecule/delete_job",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_template_info.rst",
+ "name": "roles/idrac_job_queue/molecule/delete_job/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5f082648ad3106f0ff53750f2d0eb4fe802d55898d9229207883428428e4af47",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "docs/modules/ome_template_network_vlan.rst",
+ "name": "roles/idrac_job_queue/molecule/delete_job/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d3ac25a1114e76392bb80b3f515c88c5ea32681b2328d65c26e6090c9c048b4",
+ "chksum_sha256": "15a3cff421befeb73dc678bd03f4ff1ee685ccd48e25ba743b924735955d300c",
"format": 1
},
{
- "name": "docs/modules/ome_template_network_vlan_info.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7c2564dcba6d877a3f84518295f5b8819d22a4871b73773069e0490bf70c2c7c",
+ "name": "roles/idrac_job_queue/molecule/clear_job_queue",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/ome_user.rst",
+ "name": "roles/idrac_job_queue/molecule/clear_job_queue/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b3bc47a7e05896ce44fa54a0bc32aa9a7caf371fccbbb592883423775ca7864",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "docs/modules/ome_user_info.rst",
+ "name": "roles/idrac_job_queue/molecule/clear_job_queue/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d771343ae8d467df5ba57726a24cc02ed665fb3635c45acf89bc15cd761009b",
+ "chksum_sha256": "f040659df832a82b82f060fe657dcefd0e06f397600604b86b65ba6964e9e338",
"format": 1
},
{
- "name": "docs/modules/redfish_event_subscription.rst",
+ "name": "roles/idrac_job_queue/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5598c90fd6fc719915c94c66508e658504044cb929a7176af7812a6a581689b",
+ "chksum_sha256": "e3e961b7f75abd408cc8ab2d76fab6a3175b32ddca1f3e87c64b009ff8e53daf",
"format": 1
},
{
- "name": "docs/modules/redfish_firmware.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "147b4be1cb5993f3419161982de26ae9a74a1d1e2dcccca3e6d470b002733f72",
+ "name": "roles/idrac_job_queue/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/redfish_firmware_rollback.rst",
+ "name": "roles/idrac_job_queue/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "948db5ad72847ed27ab771e8c879021d9e791a70e6296646fc773baf93f54ec2",
+ "chksum_sha256": "cf4f24cabcc7843106afbcf1ad295d130bc31c4b9431d8e149ef9611ec66f0e4",
"format": 1
},
{
- "name": "docs/modules/redfish_powerstate.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d4dbb35bf868492afe99c72b8efe328f4cc916b37169ceddc2f7b1fbb893b332",
+ "name": "roles/idrac_job_queue/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "docs/modules/redfish_storage_volume.rst",
+ "name": "roles/idrac_job_queue/tasks/get_idrac_firmware_version.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f08222e870ac987aa36b3b1f57b848ad595e876eee376b98bb9450f9a723ab67",
+ "chksum_sha256": "a193b7f5af28f17972f0da7e52e5fc0ecd34f4b8ae6398bd1b04769a7cffa453",
"format": 1
},
{
- "name": "docs/modules/idrac_diagnostics.rst",
+ "name": "roles/idrac_job_queue/tasks/clear_jobs_with_wsman.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dbabe7fd2ab37c6d7b1ced5ff8d6786bacd77162473c5064ddab94b060378c6b",
+ "chksum_sha256": "fc5e2cbcef3dffe45fbae0ccdb4310621cde5b940b81f9051ec1039126e894a0",
"format": 1
},
{
- "name": "docs/modules/idrac_storage_volume.rst",
+ "name": "roles/idrac_job_queue/tasks/delete_job_with_id.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d9f99982eda9f2b90ce1ee0eb805a56bc6d97eaa6935efdefefd20584a181ef3",
+ "chksum_sha256": "5aec3e437d0a24ee7b8a41737abd2db6889a1c4a0c0543943212573d06a2fc83",
"format": 1
},
{
- "name": "docs/modules/ome_application_console_preferences.rst",
+ "name": "roles/idrac_job_queue/tasks/clear_jobs_with_api.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b64935ad594dde5a7496c90891053dbc8b32e21a5d582532bee85dcaf3fea42e",
+ "chksum_sha256": "32e48bc5f0ceedcf0077e0ab7f6931e5dc1f4c34324aef0957ef440b44f69369",
"format": 1
},
{
- "name": "docs/ATTRIBUTION.md",
+ "name": "roles/idrac_job_queue/tasks/validate_input_data.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e55990cf8c6c469b4c35f0cf9ba6dd92d89f39d36e8d22f45a939a6edd06c20",
+ "chksum_sha256": "0c862aa3048f9061f02fff2a55ec1324b7b1c81b6175617d3e739eeda382d7ee",
"format": 1
},
{
- "name": "docs/EXECUTION_ENVIRONMENT.md",
+ "name": "roles/idrac_job_queue/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c853f1c80c075bc775ec977562ae868177567256bef9d69543f6519cfd7b852a",
+ "chksum_sha256": "edba2b2d2854747abff9b2b993b2ac119117eaa60533c517b5d7586af869d784",
"format": 1
},
{
- "name": "docs/README.md",
+ "name": "roles/idrac_job_queue/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_job_queue/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e931cceff02aadb24f0f4afa56b4e1283505d82cbd41d1f535aabd3a58bf8c9",
+ "chksum_sha256": "2ae7c77fc2f339f8db094a30043ad551606c6bca9e5bdfbd9e34a79563f7334c",
"format": 1
},
{
- "name": "meta",
+ "name": "roles/idrac_job_queue/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "meta/execution-environment.yml",
+ "name": "roles/idrac_job_queue/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "565e8d831c58bf2a1f80168b5ee92f526fa5fe48b2ec71a9387371c0d50ca9a9",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "roles/idrac_job_queue/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dac8ac908fdd6b7bffe4a091c3f443b45716b1bc591e80cae03270731ef62598",
+ "chksum_sha256": "8898537e0e2c4d23b06938b4e5181cc005b3eedad5c6f3c848c78a8a93f7ad21",
"format": 1
},
{
- "name": "playbooks",
+ "name": "roles/idrac_attributes",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac",
+ "name": "roles/idrac_attributes/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/deprecated",
+ "name": "roles/idrac_attributes/meta/argument_specs.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f061ffd4298bd768dcb142d03cbdc74c96c5b3e6db84ca86c663f04c4ed083bc",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_attributes/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0a7e4d3e398ea14415197ac551e718bfc71cce0229df4f4f61a35981096dc47e",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_attributes/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/deprecated/dellemc_configure_idrac_eventing.yml",
+ "name": "roles/idrac_attributes/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "333f32a604a8273f9048d333fb835e9717dd561b4583360503885f3ef49cfa41",
+ "chksum_sha256": "e55a77ecab73769ee48b7cee96d383945f3b74732b244fae6e9b02e0aad3713f",
"format": 1
},
{
- "name": "playbooks/idrac/deprecated/dellemc_configure_idrac_services.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "82a62b3e894b89054dcc4d01e56614212501a76ed120d0b98c310bb609ef9836",
+ "name": "roles/idrac_attributes/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/deprecated/dellemc_idrac_lc_attributes.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "965bcf06720fc6b878fed5cc6d77e64b4830027670406a0de451fb519277b842",
+ "name": "roles/idrac_attributes/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/deprecated/dellemc_system_lockdown_mode.yml",
+ "name": "roles/idrac_attributes/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8ec2bdb41b68c6c16783cab302a56739474ba2db2a144340906d83d411d8549",
+ "chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
"format": 1
},
{
- "name": "playbooks/idrac/deprecated/idrac_network.yml",
+ "name": "roles/idrac_attributes/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "75b64d730751d3543b829f7baa60bfe506b5c36a068aad0f52db166c7c6b7f15",
+ "chksum_sha256": "90489136185f945470e64689324a856f15d9c0584887d89572ced2fda43d7888",
"format": 1
},
{
- "name": "playbooks/idrac/deprecated/idrac_timezone_ntp.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7fbea4b5c1db843d6a0091e3b19d96062a724fa1059bbb23651692663386b13d",
+ "name": "roles/idrac_attributes/molecule/system_attr",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_attributes.yml",
+ "name": "roles/idrac_attributes/molecule/system_attr/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "335fab9fb27b80e9bb72c6b1b312dc5b344d2f1ae9b6614255f854bc5ca6b3a8",
+ "chksum_sha256": "2eaa72c7f6eb429b320d8c6340ed73e43509fb9b6396c73001e924be3f4b7183",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_bios.yml",
+ "name": "roles/idrac_attributes/molecule/system_attr/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f4892fed0e96b2bd7532645fe590d2ae4456bc1de316b405dceb9ef2c10c8fa",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_boot.yml",
+ "name": "roles/idrac_attributes/molecule/system_attr/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a3f384d9f351aca517327d735422ff3a7d7e48fd0a8370f9d41dd1da6dd1295b",
+ "chksum_sha256": "3eef0d294a285ae534ef217c2b07156e28ce8017c57ac02dd3b7d8c71db471bf",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_boot_virtual_media_workflow.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "542c406f1a42e6130c8c4a75b97a2ea5fbb8f0c0cfeea41e724972855457e486",
+ "name": "roles/idrac_attributes/molecule/idrac_attr",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_certificates.yml",
+ "name": "roles/idrac_attributes/molecule/idrac_attr/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "304bead36bc953c79bdb064a6c3372945953d0b6d8addbdc4be45d0e59c51fcc",
+ "chksum_sha256": "2eaa72c7f6eb429b320d8c6340ed73e43509fb9b6396c73001e924be3f4b7183",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_firmware.yml",
+ "name": "roles/idrac_attributes/molecule/idrac_attr/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "971813db1ea19f8f3e3b9d513f7f15ab2e8015c202323836cb43a3075d0d1b90",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_firmware_info.yml",
+ "name": "roles/idrac_attributes/molecule/idrac_attr/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eaa3b675ac75e62eead40e8ccab34205de55128cbfdeddac97cff7c1ad7dbbec",
+ "chksum_sha256": "a7bdba9a94a875c15ec6f83690f3d06fe6c078a228c5c2fecd3c220b344f2cc1",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_license.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "de01f4b83a795770f2b12c9c3d5fbadeb6f9351334083f5fc21bef71d29609b9",
+ "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_lifecycle_controller_job_status_info.yml",
+ "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "453984f1feb8ed3332755cd58dd3b3d13eacf365db73f0a4231e929c28913ec1",
+ "chksum_sha256": "2eaa72c7f6eb429b320d8c6340ed73e43509fb9b6396c73001e924be3f4b7183",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_lifecycle_controller_jobs.yml",
+ "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a3912a6f0afe6778d57a2eed40df6183a3ed2838400a16cfdc2eaa4755ea14d",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_lifecycle_controller_logs.yml",
+ "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65432bf367476a0dddc30fbfa2ad9bc77a5423daf5dad6518447fc1bea124cd2",
+ "chksum_sha256": "cf6b840896de4b786ee1a8c174e9c6330883bb9a29f5fad3116c88652328c937",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_lifecycle_controller_status_info.yml",
+ "name": "roles/idrac_attributes/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "faff447c9a135cfba87c04161408d2f9491d42fe85da06223c821a30cd3c611f",
+ "chksum_sha256": "245dd1c52d38db33b91d948c3a4403bb0739cf172ffd4e63dbe444477c395902",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_network_attributes.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b7dd0ba1946e68eacb3df926df9a2f7d8fdc9f922ea4044b304de3945b0af77d",
+ "name": "roles/idrac_attributes/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_os_deployment.yml",
+ "name": "roles/idrac_attributes/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42fc0b056a6282757e5b0be577e896fc6b62ab4ce0ce4c673eec2f99012d12d6",
+ "chksum_sha256": "ccd23c69bb78d5c4da4314139c4f34ec24adb35d0bdfa16c096e2b5c20f9a8c5",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_redfish_storage_controller.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0f12fa63252f024245e3644b89527a023e718107110a8580cc332c8b57c19b22",
+ "name": "roles/idrac_attributes/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_redfish_storage_controller_job_tracking.yml",
+ "name": "roles/idrac_attributes/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a417f1624b45912c41ce6f116140e65bdd241cb60473269ee25efc428d5c2cc9",
+ "chksum_sha256": "0c2023b600b957bbafd3a31e5ec79d21662dffa1fc218927b65078b2b3de54a3",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_reset_result_tracking.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1065ba2ac114119031a719e466be0b1cf3d70a05590d7a021855502dc9877f08",
+ "name": "roles/idrac_attributes/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_server_config_profile.yml",
+ "name": "roles/idrac_attributes/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95fbf44d10f20da1cb0a866ec7578e2546e215e535c094b0b0270317eefa8bcb",
+ "chksum_sha256": "4efd533c8f0a3c63e2ad84713dbb875c27953ade61183e3b98526db8816bbf52",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_syslog.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "be1eeae26000a909eb88c2f7ae2f45d5c492a0716cca5d56ce8f9e709aefaac9",
+ "name": "roles/idrac_attributes/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_system_info.yml",
+ "name": "roles/idrac_attributes/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e385033356b74133e34ecdd3aa63f6ad1d134cc5857ad48bd847f4fc6766c350",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_user.yml",
+ "name": "roles/idrac_attributes/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3aedae3410210ff0893afd6e160929f8e7581a5337928f5ee00f0b89a81a9f9",
+ "chksum_sha256": "50d768c3b61c1deb820db1bda56cf4a916b4232f9ed448da92035eca8e654b93",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_user_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5068eb579dea93aca8ef6edab4e4254bc341c00d2ef373cfdffb50a34a64c755",
+ "name": "roles/redfish_firmware",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_virtual_media.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "466e5e23ac6ac17de823878df10bc89f90762170e96ca4682626b4ef95d028bc",
+ "name": "roles/redfish_firmware/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_diagnostics.yml",
+ "name": "roles/redfish_firmware/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f14fa78ada0d1c85ecd18419f0ec1940cbd387af3fd35e78da2e676228ed897e",
+ "chksum_sha256": "f8553c37be4eecc702220849c497ff5cb9dc0712567fc85d3cbed123c450f89f",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_storage_volume.yml",
+ "name": "roles/redfish_firmware/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "739c488df021adb4d9cf2f8843dd4314bdad00f4076e22c2e0580d2d8b08ba7d",
+ "chksum_sha256": "669ae8e69c647cf300677acbc0990147d93ded846aa44fd40c45664768c9d6c6",
"format": 1
},
{
- "name": "playbooks/idrac/idrac_reset.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "de8fe2c4ad4c761b39cd3f2f26a33de198d13764460cd4ae48ddc41e3055c129",
+ "name": "roles/redfish_firmware/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/idrac/idrac_session.yml",
+ "name": "roles/redfish_firmware/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b0440a47587c5d5ddb9912b53731727e93d0b889c12241594343f919d22f5bda",
+ "chksum_sha256": "95897ec515cd6dc828104d2b67e61b8842802aec9e73eac05462c52cd111f653",
"format": 1
},
{
- "name": "playbooks/ome",
+ "name": "roles/redfish_firmware/molecule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/alert_policy",
+ "name": "roles/redfish_firmware/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/alert_policy/ome_alert_policies.yml",
+ "name": "roles/redfish_firmware/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d0d6be7aa3a27a5adaa464911c691ec919d592384a57a651a23fd377f28df60d",
+ "chksum_sha256": "f2a39596d4b1e4313eca1a4958921f7d8c57b84115d3f81080704d742edf30b1",
"format": 1
},
{
- "name": "playbooks/ome/alert_policy/ome_alert_policies_actions_info.yml",
+ "name": "roles/redfish_firmware/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d26bc85e086d2fe2560c3b15a0cef97d58380ffa4a670cd86d1521746250d55a",
+ "chksum_sha256": "12286deeca54ad309b8ad6898fbe72fecbcfd6c35885753b2e8a63de6d2666c1",
"format": 1
},
{
- "name": "playbooks/ome/alert_policy/ome_alert_policies_category_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "cdfb33544a84e3accabf9b330d92733266c8747b56a4a89d0fdfc14d531d9f75",
+ "name": "roles/redfish_firmware/molecule/negative",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/alert_policy/ome_alert_policies_info.yml",
+ "name": "roles/redfish_firmware/molecule/negative/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ed4df04af14ae90aec17feffb5fb5ce93617856f43b270072e0501328580397",
+ "chksum_sha256": "f2a39596d4b1e4313eca1a4958921f7d8c57b84115d3f81080704d742edf30b1",
"format": 1
},
{
- "name": "playbooks/ome/alert_policy/ome_alert_policies_message_id_info.yml",
+ "name": "roles/redfish_firmware/molecule/negative/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b98604773e4979bb9807e2652fa6217e3ce2c7414597998a84be4592c9d078c4",
+ "chksum_sha256": "ca7ab87d800e87cded135f5d6b008e71d7784794dca7aed1c67e87db1cb3d1ad",
"format": 1
},
{
- "name": "playbooks/ome/application",
+ "name": "roles/redfish_firmware/molecule/resources",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_alerts_smtp.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d2eb95a8fcea5239a320840fc4b4ff2321d071a06638ae5d5860651e1b3a4587",
- "format": 1
- },
- {
- "name": "playbooks/ome/application/ome_application_alerts_syslog.yml",
+ "name": "roles/redfish_firmware/molecule/resources/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56af4d077ccfe2b4bdb46de7587fbfb03609ff7484108df4840b69d4118d0c2c",
+ "chksum_sha256": "e0599c04f5f5203a69b80ed580e6a03f0586e029a482936479beef09f03ef165",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_certificate.yml",
+ "name": "roles/redfish_firmware/molecule/resources/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "451395c662a84f8d3a030be78a6c1cb7bc32dd5223c2f0c21c4ee6a6433e7d56",
+ "chksum_sha256": "70be84b8715d7ea5e18f87fbfd34763293a17ebfa3210f7bd4aa767e0b3ab8fc",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_console_preferences.yml",
+ "name": "roles/redfish_firmware/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d69507043f5d3ab6e177a969f319b3dcbf8edfb5678e58e348653c8f6b8556b2",
+ "chksum_sha256": "e5c84a2ed15c9a447689ca3a576997db0b740caf3318d6c53f5f4bd94ee8ad26",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_address.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2941e928032f4e93b02e7f56db09bb784702d1782e1e3c87c094a8b2e68bceef",
+ "name": "roles/redfish_firmware/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_address_with_job_tracking.yml",
+ "name": "roles/redfish_firmware/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3375ed92eb8f5143b01108debff662d6f83c98483fd340328315618aa4e801fd",
+ "chksum_sha256": "db1cab86f255f2f7829eb72d7f45c792a3c1560d5c23fc42076c70893f4dd784",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_proxy.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "afbcdc3fd81f79fec948b8898ed212e48d494d521af66e2c3cfa91538ebfeaa7",
+ "name": "roles/redfish_firmware/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_settings.yml",
+ "name": "roles/redfish_firmware/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d61531249118fb36496a87f9bfc892ae34483d6997ff5cb09155f0269a7ae329",
+ "chksum_sha256": "5c256607b253027b52ff6b9ffa117ac4ee5adf9f6fd3a0adfe23251aedcade66",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_time.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5d54fa4f854c3871c631114f2bd1a70e33a6f6b87a593529ac4b7bc2a620bb48",
+ "name": "roles/redfish_firmware/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_time_zone_info.yml",
+ "name": "roles/redfish_firmware/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b66d01170f512a566b92b06b43aae3faa1755f0e453d3ef0110521b68e995889",
+ "chksum_sha256": "c039d1e5934a6dac58d3fa5dae336ebd5cf878c9649b6acbba4206f6ee0e3e0d",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_webserver.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2e2e19608508da9365570746e726a36e5a5403e16f0563c2de3e6d775b411216",
+ "name": "roles/redfish_firmware/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml",
+ "name": "roles/redfish_firmware/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d060d4922337f1b9357e6196bc5dc2dae238f126c76481d65d46e00b3b1d3d79",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "playbooks/ome/application/ome_application_security_settings.yml",
+ "name": "roles/redfish_firmware/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e423185bb24ce6d7641db2cec93e76029728c4631a0b84c307da0a375a247526",
+ "chksum_sha256": "ec798a2b3ebd1096273bc1776f157f23a7b4a7e33d45669856dd185fb9f6b064",
"format": 1
},
{
- "name": "playbooks/ome/compliance",
+ "name": "roles/idrac_certificate",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/compliance/ome_configuration_compliance_baseline.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fc3a2176d0d655f312c52f97bfbbbefe8fc5dd45e88b32301aaa56388a52076d",
+ "name": "roles/idrac_certificate/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/compliance/ome_configuration_compliance_info.yml",
+ "name": "roles/idrac_certificate/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e248d9baf3d9a443dd968b7dea92c70aba87d542b52a34d692daf7c3f595587e",
+ "chksum_sha256": "0bcfa9cc16612b7726b60e9091be9e85bff3ea73991cbde070e977c434eeb827",
"format": 1
},
{
- "name": "playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml",
+ "name": "roles/idrac_certificate/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bc42c37b1f2c9492b5b297c8245fc6836f4ae89dcdfebeaf958bce0847df6f73",
+ "chksum_sha256": "b2a5d6be0d2bb806d5549884a7ac88cd841662629e2b15a9a7ab67177250f7bf",
"format": 1
},
{
- "name": "playbooks/ome/firmware",
+ "name": "roles/idrac_certificate/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_certificate/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a35cfc80bcacd3b412a6505f8c5e599a8eee9487788a4dd7c8b69a6882b07c59",
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/component_reports_filtering",
+ "name": "roles/idrac_certificate/molecule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dbc8e4f4a6f5581fc0479f3fccef4984fb26ef93650a41fa0ff2efc04e741a09",
+ "name": "roles/idrac_certificate/molecule/CSC",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml",
+ "name": "roles/idrac_certificate/molecule/CSC/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d2d40aaca55f42d88a21c9556b08ddfb60ee692a2ff95c2674e33ecaed3a291",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline.yml",
+ "name": "roles/idrac_certificate/molecule/CSC/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b80747cd3904de0e8d86046cc233e411bc4083f44e2c6f6f8a875ade32dd4c6",
+ "chksum_sha256": "99431629f17cbe04456ef0f839377cb71a7ae2b7c541deba9a4b769bba4c06f4",
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1fec86960e36907b5574822b9a60531ad5812bc465a5473cc57ccd3054f076de",
+ "name": "roles/idrac_certificate/molecule/generateCSR",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml",
+ "name": "roles/idrac_certificate/molecule/generateCSR/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ee87a539547297b73866e1d075f7c81fc3ad2147b443c6fbf3b2454367d6b47",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml",
+ "name": "roles/idrac_certificate/molecule/generateCSR/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b297907849be26643ed73ca919c987c17057759b307c99eaa2176c8502d8294",
+ "chksum_sha256": "57033a2a72d8b711a5c6c2e46131a5b1b1ce8068b43a81f0a51dd44d022bfe36",
"format": 1
},
{
- "name": "playbooks/ome/firmware/catalog",
+ "name": "roles/idrac_certificate/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/firmware/catalog/ome_firmware_catalog.yml",
+ "name": "roles/idrac_certificate/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d6919243b6c8339e13e6fba8d3b97d0205734da51d8b7858bbb450ebe017af45",
+ "chksum_sha256": "5920dd34fb2a64497c3b854bc9df7118c521bbfec7955a27494f53dfba5334c6",
"format": 1
},
{
- "name": "playbooks/ome/firmware/ome_firmware.yml",
+ "name": "roles/idrac_certificate/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d25058ce25ee9d8ebc654d51279c54cd41fba51abe357c0221d3f41606180a0",
+ "chksum_sha256": "ccae2acde670d513bc24ecadedee7051c324df0de0fcb98668f78d697dc5231d",
"format": 1
},
{
- "name": "playbooks/ome/firmware/ome_firmware_with_job_tracking.yml",
+ "name": "roles/idrac_certificate/molecule/__delete_directory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a466f3d8237f5442bd13e312ca31d2a6482d63f0f7ca22f38c826422a1ddbca",
+ "chksum_sha256": "5acaf39dad31445f79924aadfd59f40857cce066a2bd4ef3dc1c3fd9ed2de0c7",
"format": 1
},
{
- "name": "playbooks/ome/inventory",
+ "name": "roles/idrac_certificate/molecule/__extract_firmware_version.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d2c1e0fbf65e8b1c4ecd02182ada8dbbd320fd9828f0e50eea57450892e9c5c5",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_certificate/molecule/CustomCertificate",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/inventory/ome_inventory_all_groups.yml",
+ "name": "roles/idrac_certificate/molecule/CustomCertificate/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea301f8125ffc2fb7889cc791e0558f8f7e85b3516e3d6d6fcdd4b8d83557988",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/inventory/ome_inventory_by_group_with_host_vars.yml",
+ "name": "roles/idrac_certificate/molecule/CustomCertificate/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6864ace739760c7c3d8bbf3efe1f34c77fa57b7562817febf7f8d0926b7a2f4c",
+ "chksum_sha256": "65da72677ef3b2e1c383087b86fda3d45434287ce1cf2ddb4968b0a2ff0bf7c7",
"format": 1
},
{
- "name": "playbooks/ome/inventory/ome_inventory_by_groups.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "72904dae71c2171978e02bef29d887e94c4f588177cffdac576c67df93ac3b97",
+ "name": "roles/idrac_certificate/molecule/reset",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/inventory/ome_inventory_groups_with_host_and_group_vars.yml",
+ "name": "roles/idrac_certificate/molecule/reset/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a36a617cbe7d9292a04383d2bf0a5716fc645f69c1c7959eca82ceda5d541ab9",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/ome_active_directory.yml",
+ "name": "roles/idrac_certificate/molecule/reset/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dd4d2026942aca6b2fad68425dbb9c6795ba3017fbffe2dd1fd14e9c72ee0d81",
+ "chksum_sha256": "5b646c45281197bec65800696292b5e6d62ba1aa36be2149c7de10b439ddba55",
"format": 1
},
{
- "name": "playbooks/ome/ome_chassis_slots.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fb87fe3c86cd4506226de40728064dcba9b759220b566342da9c2f0b2e40331e",
+ "name": "roles/idrac_certificate/molecule/CA",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_device_group.yml",
+ "name": "roles/idrac_certificate/molecule/CA/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5669a05a743b069ef53e1dadcf80954feb963fe8fd56932676fb92ddda9ca35e",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/ome_device_info.yml",
+ "name": "roles/idrac_certificate/molecule/CA/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef61ae932b9e642e8db5552525670b6a9645a52fb39de9add50551fcd721190a",
+ "chksum_sha256": "5bf154879b4a34b326240ccb33a490a5f8bc7f228248db93c02baaaa0869d09e",
"format": 1
},
{
- "name": "playbooks/ome/ome_device_local_access_configuration.yml",
+ "name": "roles/idrac_certificate/molecule/__get_helper.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb3e4d71a9422a25d8bbe4cf2ca376df39d5a5f922de2cfb9facc8f5721c5a88",
+ "chksum_sha256": "d510c89d7a712cd198e199f8b885057154f906a27dc18f90734362ad192b783a",
"format": 1
},
{
- "name": "playbooks/ome/ome_device_location.yml",
+ "name": "roles/idrac_certificate/molecule/__get_ssl_key.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "497ed8e7f6b757cfbc60ccfcecaec04e742b47a6ed80861b58339f79f2f85adf",
+ "chksum_sha256": "7367f06df38c4c2fe8b9a280dfda2ac001eae5e3c8d7c62daec8856e9dd8d55e",
"format": 1
},
{
- "name": "playbooks/ome/ome_device_mgmt_network.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "89748790b471a573b3921ff6e0fff48422b97ec16fb2a1aeae568ccb3fcb013e",
+ "name": "roles/idrac_certificate/molecule/HTTPS",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_device_network_services.yml",
+ "name": "roles/idrac_certificate/molecule/HTTPS/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2fbe1308921f98e05468b4e40687fb1954e365d4c140d73bce5ae550874d95cd",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/ome_device_power_settings.yml",
+ "name": "roles/idrac_certificate/molecule/HTTPS/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "18029724ae0804c84143000e04c892cb042fdca99d8cb70eebe301bb964eb9ca",
+ "chksum_sha256": "f06ff94029cdaceaf9865f0299fc6013b0fea5193ddbd07d078f543eb146d27f",
"format": 1
},
{
- "name": "playbooks/ome/ome_device_quick_deploy.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "acb2a7a5c7e73052bced88fbe4ba9dcffab7505be19212d9a11bd7aed261a5e8",
+ "name": "roles/idrac_certificate/molecule/CTC",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_devices.yml",
+ "name": "roles/idrac_certificate/molecule/CTC/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1331fd531251cd1d82f77bbff149fccae42ed7ad035cb01f3b61afbd76cb508",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/ome_diagnostics.yml",
+ "name": "roles/idrac_certificate/molecule/CTC/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b67e463419986332c846988c6c8ba7a877e12fb42b4e15ac9354f1fffac788eb",
+ "chksum_sha256": "ecd35a2c8d4a1223c5a37a4e80a2f9a950f633357f2eb2655f6f20ca30198c5c",
"format": 1
},
{
- "name": "playbooks/ome/ome_discovery.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d38b03a955441fecd26fcf8f01f75e65d7d459927ac58040d041299009fab7aa",
+ "name": "roles/idrac_certificate/molecule/SSLKEY",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_domain_user_groups.yml",
+ "name": "roles/idrac_certificate/molecule/SSLKEY/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ec28a9e158a70907e3da5f2a56894028e75daad9e3393666ceb54d5baa96b37",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "playbooks/ome/ome_group_device_action.yml",
+ "name": "roles/idrac_certificate/molecule/SSLKEY/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e28a69eee5cc5383903d9c96e9c6b12a76cf76a8a92d7c9dae9457456bb774a4",
+ "chksum_sha256": "0f8fc730b66884b45530be3fdbdbed659d79387466637b2fb129573fbc74cbee",
"format": 1
},
{
- "name": "playbooks/ome/ome_groups.yml",
+ "name": "roles/idrac_certificate/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8af511d1623d2d4cf697b1e3480fede2228f9f0759bc73af8c88c72d20b6cd8",
+ "chksum_sha256": "8c9a1ba53346f348b656ce43ae836d01e341d173aa793536b7b83d39a478664b",
"format": 1
},
{
- "name": "playbooks/ome/ome_identity_pool.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "371d65813bfc3a77962caccfc53aa30ab4767402de617f35d2db5734803b29e2",
+ "name": "roles/idrac_certificate/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_job_info.yml",
+ "name": "roles/idrac_certificate/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a159c605c87d21e18f581b789ec99a5ec0431f22d4cd223e20483708175b814",
+ "chksum_sha256": "6cf00eee21d159b121fb581e5c186c22803b7605acdd260c54e5d58969a94520",
"format": 1
},
{
- "name": "playbooks/ome/ome_network_port_breakout.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "39bd40122da135d913272454af6d830ba40307d59e66e487aa709a9d1272afec",
+ "name": "roles/idrac_certificate/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_network_port_breakout_job_traking.yml",
+ "name": "roles/idrac_certificate/tasks/generate_csr.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "111f9977f82b1ca2241b5307535fe6833e70c115871c3e856e15c98f2f507eaa",
+ "chksum_sha256": "aef02f22f523b789c3a397b9f9b7c39c166e754b978f4920177bcbc53067e211",
"format": 1
},
{
- "name": "playbooks/ome/ome_network_vlan.yml",
+ "name": "roles/idrac_certificate/tasks/import.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fc6914c37f5a6b280047c45bc51dde5156fc7f17fcf6ea7b31b4fb52902e867",
+ "chksum_sha256": "1a6a2edb98266da8d17726347dc0b3a0c5c270dd44bc0dadeca28a493163b6cc",
"format": 1
},
{
- "name": "playbooks/ome/ome_network_vlan_info.yml",
+ "name": "roles/idrac_certificate/tasks/export.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de7ce01fbf0f07f87edbbd56adbc85904b327ab25c3568904a85df7c1635c77c",
+ "chksum_sha256": "80cffca71ccf99a8cbbdcc7e4cad320663e0064ac47faf66b1a62ab2a751d177",
"format": 1
},
{
- "name": "playbooks/ome/ome_server_interface_profile_info.yml",
+ "name": "roles/idrac_certificate/tasks/reset.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af58749f1aa4451723ccb7fde4f31313eea771ce6818b56df7b4d3450b432509",
+ "chksum_sha256": "82b70c0d2bb2c126a73c04c392513fbfa84f4626a6d5f2c855e45bb1bd5e0df7",
"format": 1
},
{
- "name": "playbooks/ome/ome_server_interface_profile_workflow.yml",
+ "name": "roles/idrac_certificate/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "88b5f582aba465eac8f1ab17c7e40865721921bb7864af0d07f83511879928af",
+ "chksum_sha256": "8500b36628049770984d5c5737419aa9e2e209cda3eae776b981ab8bb70885b5",
"format": 1
},
{
- "name": "playbooks/ome/ome_server_interface_profiles.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d0e8826dc3911a89aa5733c42495dd1fa40690d391c65c5c4c48bea10716a56f",
+ "name": "roles/idrac_certificate/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_smart_fabric.yml",
+ "name": "roles/idrac_certificate/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "03c70e63d7713fe24c11cfb6400669fbe422459b3695ccb6214f5ae5656e4f30",
+ "chksum_sha256": "66d821f9940918fd71847d8db4b05eb937a6de3f457ad7b9b6412762d51d1f41",
"format": 1
},
{
- "name": "playbooks/ome/ome_smart_fabric_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ecaca37de3625197151d6684472b54c9b3bc712bc9c47d055beb8a1ecdc401e0",
+ "name": "roles/idrac_certificate/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/ome_smart_fabric_uplink.yml",
+ "name": "roles/idrac_certificate/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e9980c7e8d795f73469a974aa6a19c09f998081fe4c40163f9525b5db5596216",
+ "chksum_sha256": "d906aecb61d076a967d9ffe8821c7b04b063f72df9d9e35b33ef36b1c0d98f16",
"format": 1
},
{
- "name": "playbooks/ome/ome_smart_fabric_uplink_info.yml",
+ "name": "roles/idrac_certificate/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9fda088244750b408b164dcf07d55bbbdf3168b0eb27627c4f938afd0a8658fb",
+ "chksum_sha256": "05252420e22b201c04db4b4e79f650a9bbb561aea0caec3d3e253d825637f11d",
"format": 1
},
{
- "name": "playbooks/ome/ome_template_identity_pool.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "97ec21aeab7e209f22107f692b67ed54bc80e0d0205561d2a2b73d1f494e0fba",
+ "name": "roles/idrac_gather_facts",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/powerstate",
+ "name": "roles/idrac_gather_facts/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/powerstate/ome_powerstate.yml",
+ "name": "roles/idrac_gather_facts/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecddfa8e0d0c89d732131128cdb5b5eb2796f2bafc908751866dac67d8896c66",
+ "chksum_sha256": "75d820d28ed29f3ab34caf04022a9d5b3023f9179806d13cb88deb25bbf1d58c",
"format": 1
},
{
- "name": "playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e7ef89910b84426e2251e5e8900139b87e539f5edc8c2866664dc41be18a3ec",
+ "chksum_sha256": "3bf6b5dc34c2b73a1e1db79a12c608502254e34ec556ba2c261fe607fbfc6ad1",
"format": 1
},
{
- "name": "playbooks/ome/profile",
+ "name": "roles/idrac_gather_facts/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/profile/ome_profile.yml",
+ "name": "roles/idrac_gather_facts/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6ceb1d703cab609a550730151e605a3e85f690e9459efa1c467fbd92e223cb30",
+ "chksum_sha256": "330b6e0ccfb1e2805e8fb61b43ecd5170d98d1fdffaadd905de5ffb2c4339d09",
"format": 1
},
{
- "name": "playbooks/ome/profile/ome_profile_assign_job_tracking.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "09dd5305a95e0c59fd985365a2f96c6f929603f973794bded1405b9316d9c6db",
+ "name": "roles/idrac_gather_facts/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/profile/ome_profile_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ee5cb574348141c0f41436b8c267660f94a0df9bcf76b67fb7bc555b1b45b687",
+ "name": "roles/idrac_gather_facts/molecule/passensor",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/profile/ome_profile_migrate_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/passensor/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "832510e01a20607c99b212a65d194fd6c87b0b6d135f395215c50bd4ef2e50d7",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/ome/profile/ome_profile_unassign_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/passensor/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "91cdfc259bfb71732f8ecc5b64fc69ae06d5053f886f9136aaf78b43d5c9539a",
+ "chksum_sha256": "a2e6fd5b9b1651a697658ee4571079476cf67112ff63dbd6663a086bb096ff53",
"format": 1
},
{
- "name": "playbooks/ome/template",
+ "name": "roles/idrac_gather_facts/molecule/hostnic",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template.yml",
+ "name": "roles/idrac_gather_facts/molecule/hostnic/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c90c9b41ff444b1bf174211b0ce260bd7bdd80c7a1a012c51314d03ade5400f",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_create_modify_lcd_display.yml",
+ "name": "roles/idrac_gather_facts/molecule/hostnic/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5abdf1b033e3f22ba9c5d532fccb5e960bd9f04c6fffa4fbae799207e754a877",
+ "chksum_sha256": "e75317b8512ae920138e3c7241d6b4582626d6e236b36128733b92f856543c53",
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7606d5cfee0c932ccc67a090a8a58df3fb85d370de5a727df937d84b871f93e3",
+ "name": "roles/idrac_gather_facts/molecule/sensorsvoltage",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_info_with_filter.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b38315b410882eb04f6a19bb0676ae5ddb146831b571572f5d15ff70d430fe2",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_lcd_display_string_deploy.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c802f28d538ce55438af15f296e755c1a5f08872f5d55b582e0eb776446975f",
+ "chksum_sha256": "1dffcb2b8b2a5fb2b557d0961eaee2b1cbbd94f1f2aa26d998ad06fe6f314f0b",
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_network_vlan.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d72b4eadc6cdf60f554394e325a86d8ffee38bb3e38ae3f38b0945e25864770d",
+ "name": "roles/idrac_gather_facts/molecule/enclosure",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_network_vlan_info.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosure/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf7ebd240aced312384fd6700dcee5f4572c189b18878dd3f2ad1a600a41241c",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/ome/template/ome_template_with_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosure/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28cb65035fb452f9d1b688b11f2a9152f010e6502cdf980fdb1fa674da1c613a",
+ "chksum_sha256": "cf1906e1ced1cb774e276f108b46c8fcf7c47db2fd45f9abc75f98b15d5d6661",
"format": 1
},
{
- "name": "playbooks/ome/user",
+ "name": "roles/idrac_gather_facts/molecule/bios",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/ome/user/ome_user.yml",
+ "name": "roles/idrac_gather_facts/molecule/bios/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cab96264060d057d6bd286f21e5ba810a6c9747014fb5c6ef4fa12118adb88e1",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/ome/user/ome_user_info.yml",
+ "name": "roles/idrac_gather_facts/molecule/bios/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed38db95276274d797ccc43cec2d8b50d53be9bd1e5e092c9ec6f030b7f5c6fc",
+ "chksum_sha256": "6aeb01343669a588a39fb5c079282e515ea89873d6e7bcc036e053f58092ae62",
"format": 1
},
{
- "name": "playbooks/redfish",
+ "name": "roles/idrac_gather_facts/molecule/license",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/redfish/firmware",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_gather_facts/molecule/license/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/redfish/firmware/redfish_firmware.yml",
+ "name": "roles/idrac_gather_facts/molecule/license/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e9831056b86afc4319cb850d6ed4a723e08ee7a5e2406e178d060bcc0c669796",
+ "chksum_sha256": "555ad87a256a73804d979ffca0fb14349aa5ce521c463fc7daa32d2a6d394a4d",
"format": 1
},
{
- "name": "playbooks/redfish/firmware/redfish_firmware_from_http_jobtracking.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7e1785959ab3ca10bc07a64de60a1fa8faad059df4b1b7a68a493be98b18d0f0",
+ "name": "roles/idrac_gather_facts/molecule/enclosureemm",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/redfish/firmware/redfish_firmware_from_local_jobtracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosureemm/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da71c6817f0338f16b2efceec75919579ab4783695d256eb50050e50dd732ace",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/redfish/firmware/redfish_firmware_rollback.yml",
+ "name": "roles/idrac_gather_facts/molecule/enclosureemm/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67bedde1fa194d706233c12d6aeae0504214b2598a4f848373e31eddcdc571c0",
+ "chksum_sha256": "0283a9aae3c3d8ba622a804822089373823a9f1494266ed068d65766055922d1",
"format": 1
},
{
- "name": "playbooks/redfish/redfish_event_subscription.yml",
+ "name": "roles/idrac_gather_facts/molecule/physicaldisk",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_gather_facts/molecule/physicaldisk/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f901dc08010a5c76bf0fbc8c8f1ba60c1eff5d075878469b3fe1197ad2dc4589",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/redfish/redfish_powerstate.yml",
+ "name": "roles/idrac_gather_facts/molecule/physicaldisk/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "92461b5efd3384e8da363a9a091a18eb762680e516407b78ad55c1139d03c6e6",
+ "chksum_sha256": "7b5798bbd2b702d1564c81aa06945a02e73740939d596f4feb7c3e5a6de27f96",
"format": 1
},
{
- "name": "playbooks/redfish/storage",
+ "name": "roles/idrac_gather_facts/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/redfish/storage/redfish_storage_volume.yml",
+ "name": "roles/idrac_gather_facts/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "071ebfb5602ec9046db2ed9209b8697ff9e9cbc66069236b875bbf65a59af625",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/redfish/storage/redfish_storage_volume_create_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "180a6c686410517af84f74f13ee71d1451ea7a736855e521355da7f7b7a36948",
+ "chksum_sha256": "cff3d242b9a5abe65abd87b232d6678b0952aea01ac388e894489e47577dfea3",
"format": 1
},
{
- "name": "playbooks/redfish/storage/redfish_storage_volume_delete_job_tracking.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "54dad57ad6a79cf3d1613917157698b84eadf3f10e456776ce23fd4a1a8c4e61",
+ "name": "roles/idrac_gather_facts/molecule/nic",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/redfish/storage/redfish_storage_volume_initialize_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/nic/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c4a6b9df13e5c02c6f74324453b3e2e93607f4fe6cc723a1a2d3337883ea21e",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/redfish/storage/redfish_storage_volume_modify_job_tracking.yml",
+ "name": "roles/idrac_gather_facts/molecule/nic/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96a4103655813d28c473885a73cdba3b48eafaf01521931f7761f89de461c91e",
+ "chksum_sha256": "88018366757580a7dd6c975481cf2098d8e6add7a9400aae149886c98cec2241",
"format": 1
},
{
- "name": "playbooks/roles",
+ "name": "roles/idrac_gather_facts/molecule/negative",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/README.md",
+ "name": "roles/idrac_gather_facts/molecule/negative/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abd9f8c450fda2dfbff41f9ee2bbc613150d94e027e8a560e48d3ec14aad97fd",
- "format": 1
- },
- {
- "name": "playbooks/roles/idrac_attributes",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_attributes/idrac_attributes.yml",
+ "name": "roles/idrac_gather_facts/molecule/negative/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b558f4d845b9f870ad3c89c34e3361c41acfcef9437f83bdcf342ce3e7c111d0",
+ "chksum_sha256": "abcc6362b778c7dd7c367130c0f52564cb65a37a314aa41817ae19252f020ff7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_bios",
+ "name": "roles/idrac_gather_facts/molecule/secureboot",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_bios/idrac_bios.yml",
+ "name": "roles/idrac_gather_facts/molecule/secureboot/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46eb1addcc8166ef1e320678c15b5c0bcb494e68166b3a4cea65341764af3832",
- "format": 1
- },
- {
- "name": "playbooks/roles/idrac_certificate",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_certificate/idrac_certificate.yml",
+ "name": "roles/idrac_gather_facts/molecule/secureboot/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc5d65aad27029c224cfbe3945b27725caac02a8bc20db57837283efbac05c85",
+ "chksum_sha256": "90a6628784d8f22ff412233df90b45f4ca670001c322ad02347933ebaac6a04c",
"format": 1
},
{
- "name": "playbooks/roles/idrac_export_server_config_profile",
+ "name": "roles/idrac_gather_facts/molecule/idrac",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_export_server_config_profile/idrac_export_server_config_profile.yml",
+ "name": "roles/idrac_gather_facts/molecule/idrac/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5a5df7a2c70cdfc5b97c9a752dddf82b134ba6b5358ab6216097b097c72a533",
- "format": 1
- },
- {
- "name": "playbooks/roles/idrac_firmware",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_firmware/idrac_firmware.yml",
+ "name": "roles/idrac_gather_facts/molecule/idrac/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce0d0ab244e8a7831b43abafdcc2e610b3b4507ff0a8785000792d94769e71c0",
+ "chksum_sha256": "1793fd4df60558619a9c94aef7599243482429d81c8a28e776b91f850221b59a",
"format": 1
},
{
- "name": "playbooks/roles/idrac_gather_facts",
+ "name": "roles/idrac_gather_facts/molecule/powersupply",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_gather_facts/idrac_gather_facts.yml",
+ "name": "roles/idrac_gather_facts/molecule/powersupply/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99a54fcb95b0805ac8ca5f2ce85541caf9dd9666ee3ae0cd1401c90a3a3dbcf8",
- "format": 1
- },
- {
- "name": "playbooks/roles/idrac_import_server_config_profile",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_import_server_config_profile/idrac_import_server_config_profile.yml",
+ "name": "roles/idrac_gather_facts/molecule/powersupply/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac1ed229206d33006e498e8c0ec6f46ea190dcbcb34698b3560ac7c45859e645",
+ "chksum_sha256": "878678ae878a6dee2d2ab99da3be52a80525d37b57daef898a1deb4e922ea000",
"format": 1
},
{
- "name": "playbooks/roles/idrac_job_queue",
+ "name": "roles/idrac_gather_facts/molecule/fan",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_job_queue/idrac_job_queue.yml",
+ "name": "roles/idrac_gather_facts/molecule/fan/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7495a4784f344f1557f1f65ea5c51f56b31d025e304f66ddbb9ebbaffb2991e",
- "format": 1
- },
- {
- "name": "playbooks/roles/idrac_os_deployment",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_os_deployment/idrac_os_deployment.yml",
+ "name": "roles/idrac_gather_facts/molecule/fan/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "484bf0716912aac6c405531874feb47643b82e5ae5bb4048e492adb7269328f1",
+ "chksum_sha256": "3847cf869806ef54d8df586fea8862e60cdcf70de9f90598cd1f3417be3b2aef",
"format": 1
},
{
- "name": "playbooks/roles/idrac_reset",
+ "name": "roles/idrac_gather_facts/molecule/backplane",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_reset/idrac_reset.yml",
+ "name": "roles/idrac_gather_facts/molecule/backplane/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7260501d8b6889b34b43524d75c1bf06a6457f44ea303b36aacb2190ff8fa4e",
- "format": 1
- },
- {
- "name": "playbooks/roles/idrac_server_powerstate",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/idrac_server_powerstate/idrac_server_powerstate.yml",
+ "name": "roles/idrac_gather_facts/molecule/backplane/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b27bab99e8b95ed92bddd14d59e53341a3cfa8b3828cfce59af8bed8328ac52",
+ "chksum_sha256": "6802c82e9c0138284ebfd5da6982b3ae8865a2f19ad9df2aef7ad552f07e149f",
"format": 1
},
{
- "name": "playbooks/roles/idrac_storage_controller",
+ "name": "roles/idrac_gather_facts/molecule/memory",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_storage_controller/idrac_storage_controller.yml",
+ "name": "roles/idrac_gather_facts/molecule/memory/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0792768055d63117233dbbca23e2e7f434da81c3e77eaeb67554602d17ea7fd3",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/inventory",
+ "name": "roles/idrac_gather_facts/molecule/memory/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f78483b3cf76c53e1539f5c47bcae79a21890faa859bc5f0cdea52cbeedc046e",
+ "chksum_sha256": "0a937be08af8078010259968d6dc6ef8836ed89caea61e997db31fec54b7f7b5",
"format": 1
},
{
- "name": "playbooks/roles/redfish_firmware",
+ "name": "roles/idrac_gather_facts/molecule/controller",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/redfish_firmware/redfish_firmware.yml",
+ "name": "roles/idrac_gather_facts/molecule/controller/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b40bf1bf5fa84bfd210377c89e4b4566076bac113347918c06dcca053c463a8",
- "format": 1
- },
- {
- "name": "playbooks/roles/redfish_storage_volume",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/redfish_storage_volume/redfish_storage_volume.yml",
+ "name": "roles/idrac_gather_facts/molecule/controller/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c8d35ac12005d8a06ca9fbb891edd16c926b4740b7f1a4743df7fcace3404da",
+ "chksum_sha256": "fd6e6417f014ec03b13867ac110beb0328e8d4e421d71e534002153a29e99b8a",
"format": 1
},
{
- "name": "playbooks/roles/vars_files",
+ "name": "roles/idrac_gather_facts/molecule/sensorsintrusion",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/vars_files/attributes.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e760a0cb6126d76b49613aac9e612f88ae7e27925160a52b2e0b255b5f47bb2",
- "format": 1
- },
- {
- "name": "playbooks/roles/vars_files/bios.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d5c75310a93c6d513c747db70e9bea927c1eb5dd6ef75ba5fa766d4c2baaf9e",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/certificates.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "91099951ff4f500c14e95ce2a6950ddc012521daf34c1253221cd774e75647ed",
+ "chksum_sha256": "87970ed0a6773bd5ab9c8a78c1555ec0f61228282b314573c33bd30da7102a8d",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/credentials.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "959f184f2bd6eec8a6302784bfc6518c10a712a496e92b6f20be4f5e166be41a",
+ "name": "roles/idrac_gather_facts/molecule/cpu",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/vars_files/export.yml",
+ "name": "roles/idrac_gather_facts/molecule/cpu/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa812c14b39721cf11488674c9a0a93b8ca8e94406f335f7722a58b20475a08d",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/firmware.yml",
+ "name": "roles/idrac_gather_facts/molecule/cpu/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ddbe55c1ad9eaa8f33cc617ed5ceeaaf99efd7d43bb5f33d60ea98fd966241c",
+ "chksum_sha256": "03f5f6bec9178116f17d4a1f7e49aa0607b3b34751e35ee4d86f26d4f9912383",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/import.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "90d3589259f331152d96d8ae0c773ed3958c7ef24bb9604387bbfa040a20bb6e",
+ "name": "roles/idrac_gather_facts/molecule/systemmetrics",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/vars_files/osd.yml",
+ "name": "roles/idrac_gather_facts/molecule/systemmetrics/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1e6e58675cb4af4c9f800681df1be3ea613b4ee09aa6f8c5eb2e8f05f4b2df9",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/reset.yml",
+ "name": "roles/idrac_gather_facts/molecule/systemmetrics/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "690bd2b370bbff4b2d9fc3e158f7a85276e343e12167d724d7a30ae7fd7ae7b5",
+ "chksum_sha256": "9e4fd431a9be18f2b24b56f737f3a5c934f8110f11e2aedb0f0152c03589746e",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/storage.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "65e7a405ea04245a99ced19727310b2bdf295578fe6bc2869ee764eeb2c4235d",
+ "name": "roles/idrac_gather_facts/molecule/virtualdisk",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/vars_files/storage_controller.yml",
+ "name": "roles/idrac_gather_facts/molecule/virtualdisk/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ccc017e74bede8274235bc7358cfa6192c7c7430876016ddd07c827657f16e8",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "playbooks/roles/vars_files/user.yml",
+ "name": "roles/idrac_gather_facts/molecule/virtualdisk/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4dbddb5c3dedb08b2a42e3b2131cb515bca4ae9d83ae4a28a17ff4919993b587",
+ "chksum_sha256": "d7f64f87068ebc765d1151c2994688505ddf9d29b5a49749565d813a7fd71da9",
"format": 1
},
{
- "name": "playbooks/roles/idrac_user",
+ "name": "roles/idrac_gather_facts/molecule/sensorsbattery",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "playbooks/roles/idrac_user/idrac_user.yml",
+ "name": "roles/idrac_gather_facts/molecule/sensorsbattery/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44346ffd0acd7bc28a4b0760772385552686673dc6421947e1b6d2a916b6e5b2",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "plugins",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "678657b674bd8d79ae67af9a8ebca26b47e40fcaf6f958e66a15e1517b6b3cdb",
"format": 1
},
{
- "name": "plugins/doc_fragments",
+ "name": "roles/idrac_gather_facts/molecule/pciedevice",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/doc_fragments/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "plugins/doc_fragments/idrac_auth_options.py",
+ "name": "roles/idrac_gather_facts/molecule/pciedevice/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4559c52de6583c9d98de9aa9d7ba905a3d1ce3dcaf349e2b3ae8f841037af07a",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "plugins/doc_fragments/network_share_options.py",
+ "name": "roles/idrac_gather_facts/molecule/pciedevice/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9aef12b8caa7df49c02dde445a8923c811228558bd61893c720ef185b2e7c182",
+ "chksum_sha256": "5c0c515a34feab6e6b68e051513ac03a3ac94d3cc8cba176aaed27996ad0287e",
"format": 1
},
{
- "name": "plugins/doc_fragments/ome_auth_options.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "add3bfb55a976a99dbcd7a2ef87cdc2547991eb4e52e9fd97e0d456341dbde8b",
+ "name": "roles/idrac_gather_facts/molecule/firmware",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/doc_fragments/omem_auth_options.py",
+ "name": "roles/idrac_gather_facts/molecule/firmware/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee12d1210af2e46053f3f0b1dc5156395c45aaa30303833b5a14a5c788a89062",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "plugins/doc_fragments/oment_auth_options.py",
+ "name": "roles/idrac_gather_facts/molecule/firmware/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b5cfb79f913eaa3a1515e9fc0ea060f2c5a27bc4face2fa3a0506fc866a516c",
+ "chksum_sha256": "b9077637b7c957f46e1351f3e3f4e16e925c81a62e3381dd24169a3f0a7b4079",
"format": 1
},
{
- "name": "plugins/doc_fragments/redfish_auth_options.py",
+ "name": "roles/idrac_gather_facts/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "437dc8e47ff125164d7a46c2ac329cd99bef1d30d6253ef13ff7cabcb28ffdf0",
+ "chksum_sha256": "babcf9ea882157e54c32df007487cbb8432e214d7c3d5ebbba6631241792cbc3",
"format": 1
},
{
- "name": "plugins/inventory",
+ "name": "roles/idrac_gather_facts/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/inventory/ome_inventory.py",
+ "name": "roles/idrac_gather_facts/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb2597106d2e3822ce9f74ae0551904f6f6ccd6cde597bd3ddccf96509bd1e3c",
+ "chksum_sha256": "0e9f5e643ff9212b823aab7dfeb2658e1b1743ee6e296fe7cec64b64c814dd1f",
"format": 1
},
{
- "name": "plugins/module_utils",
+ "name": "roles/idrac_gather_facts/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/__init__.py",
+ "name": "roles/idrac_gather_facts/tasks/get_host_nic_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "cbe6b638f4f6bf98263cc35f2ab8e9dc9c20d4f4781179ea84b25dc837940f49",
"format": 1
},
{
- "name": "plugins/module_utils/idrac_redfish.py",
+ "name": "roles/idrac_gather_facts/tasks/get_physical_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f58eacbdb2501466181f9fd4e48bc1f9ffc599113e2f558c85a6b1a11d03a45c",
+ "chksum_sha256": "c08ebd517f4809864571641b9bbd54e720e54d3eb9f608b86c2de0e21d5e9154",
"format": 1
},
{
- "name": "plugins/module_utils/ome.py",
+ "name": "roles/idrac_gather_facts/tasks/get_pcie_device_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2b26551a3677c43e45b54a110bbd75fa9bba0108b2ffc5cbc71962d926160c2",
+ "chksum_sha256": "09ded655e39f0510b2432d9e26f24bbcc03d147dae5aef9a4c53ad85b2f8468b",
"format": 1
},
{
- "name": "plugins/module_utils/redfish.py",
+ "name": "roles/idrac_gather_facts/tasks/get_intrusion_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "edd7f7b228d441288a51ef80a2d760a3aa5b6458bb9830fe5c2148d16e1799e2",
+ "chksum_sha256": "bfa01c92ff7a53762840eac467d93619b0191e4fe771715be349eef092e96b64",
"format": 1
},
{
- "name": "plugins/module_utils/dellemc_idrac.py",
+ "name": "roles/idrac_gather_facts/tasks/get_fan_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e60a6661de248b2be11330274cecf6af56989f04388f210ed3a6cf28753b57ee",
+ "chksum_sha256": "9bb1df958e92876346cb1fffead2be0436ca155725be6326174f6be124812afb",
"format": 1
},
{
- "name": "plugins/module_utils/utils.py",
+ "name": "roles/idrac_gather_facts/tasks/get_metrics_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5b4aa1d4f6bcdf22373061d2dd5d1f873f6cf277c3caded38c75c69d97f6556",
+ "chksum_sha256": "a5bf5239cfc7ea06a7bb797c84272d5ca0f814a1ad8d0150299384670cbd2a8a",
"format": 1
},
{
- "name": "plugins/module_utils/session_utils.py",
+ "name": "roles/idrac_gather_facts/tasks/get_secure_boot_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70a20d812a16708506bdd018e08bd378ed96d3906aa627b3f43c45c5ab1e6a55",
+ "chksum_sha256": "3cdb9d35aed51e1bb8f4626028b87fed75d52df4a29e933a096eb6e1e526b47a",
"format": 1
},
{
- "name": "plugins/modules",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_gather_facts/tasks/get_enclosure_emm_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "39af5a227279476db905f227e7c4321756537f807b697dfd02fe16e5af40ec82",
"format": 1
},
{
- "name": "plugins/modules/__init__.py",
+ "name": "roles/idrac_gather_facts/tasks/get_battery_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "148c547f8686797aba487fb562ac318ae933609914899419bc7b08c45f92f76d",
"format": 1
},
{
- "name": "plugins/modules/dellemc_configure_idrac_eventing.py",
+ "name": "roles/idrac_gather_facts/tasks/get_memory_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8972622a58eb34b314304c1891c770b0cfa0c35a766303f5d2fb4614f2bc9ca9",
+ "chksum_sha256": "0f1db24485b5870b7a722e07807e8e0df8bebdd162cbb67e12167cbb2b35a3a9",
"format": 1
},
{
- "name": "plugins/modules/dellemc_configure_idrac_services.py",
+ "name": "roles/idrac_gather_facts/tasks/get_virtual_disk_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "244953d637c27180cf67aec39005498a0abe58688d3a2b05b1655a6df81a8db9",
+ "chksum_sha256": "91690948737d7d0249d914032490bc42d6260226a1b8fd55922653a1b46b2a61",
"format": 1
},
{
- "name": "plugins/modules/dellemc_idrac_lc_attributes.py",
+ "name": "roles/idrac_gather_facts/tasks/get_system_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79c874570d8c94e5373a26c30e4e1b64576a54a14f62f4499b674c7b2f99bb8d",
+ "chksum_sha256": "4e94f83a0bcae476b8005195648d8fd69b894b0f96d24fa3f837f021f46537d7",
"format": 1
},
{
- "name": "plugins/modules/redfish_storage_volume.py",
+ "name": "roles/idrac_gather_facts/tasks/get_firmware_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c385c1f8206967b4b3a955e4cbe107df5895ee36a7653b87b41c07ac152f634",
+ "chksum_sha256": "024fd4797f5075b0030f378f4b23696e23f193eee0ae3d286793028ac54f45f9",
"format": 1
},
{
- "name": "plugins/modules/dellemc_system_lockdown_mode.py",
+ "name": "roles/idrac_gather_facts/tasks/get_nic_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fafecf1bd67f65c03480e16c097a0dab930c2bfaff25a4efda57f46a90f27b5c",
+ "chksum_sha256": "bfc885a21855a4c875dcd2d8d30decb7c970dedeb5c4912a549edf88e9fb9a99",
"format": 1
},
{
- "name": "plugins/modules/idrac_attributes.py",
+ "name": "roles/idrac_gather_facts/tasks/get_voltage_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "22cfae5ac4ac90520b412fb9377f72d891920470dffed5f722f0ac8b159b4f51",
+ "chksum_sha256": "baf9e3a82d2f19ccc21fa8721c09004ef99ab237f790d5f676bc533aaa523e0e",
"format": 1
},
{
- "name": "plugins/modules/idrac_bios.py",
+ "name": "roles/idrac_gather_facts/tasks/get_power_supply_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "314547ca6619f5e0e397f85dedf7f43e4c7b33bd9db81b8f361c5f05f6844968",
+ "chksum_sha256": "b768e6b3a6d27240d49d04a0f59493047f2052f2d9ba94ede986198682525f77",
"format": 1
},
{
- "name": "plugins/modules/idrac_boot.py",
+ "name": "roles/idrac_gather_facts/tasks/get_license_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0052eaeedf12e24462e6564f2b767e26450b4f01f1c1a3d09bca324154a1682a",
+ "chksum_sha256": "61c857b5a12886ad7dde1dee0dec583d999cc435615dd4ffa211ea52e11cea56",
"format": 1
},
{
- "name": "plugins/modules/idrac_certificates.py",
+ "name": "roles/idrac_gather_facts/tasks/get_resource_id.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49d033ed9292e6644411b0e252f792c4d7f767470cabbdb2983147e0fa238cd8",
+ "chksum_sha256": "71b9b902e3e06e3effa5e21acd8a6f8be1d0dbe49500ff72d7d8cb27def8555c",
"format": 1
},
{
- "name": "plugins/modules/idrac_firmware.py",
+ "name": "roles/idrac_gather_facts/tasks/get_attributes_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7cabfda75d3c291689c0c2fee932be51f3021e862f692bcb25ed00a053a1e2c6",
+ "chksum_sha256": "8e4ddc302cfdb0520c6fa9e0a73708a3491412bea218571b184b94926c695030",
"format": 1
},
{
- "name": "plugins/modules/idrac_firmware_info.py",
+ "name": "roles/idrac_gather_facts/tasks/get_bios_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a040502c70727b44937a478d5e33d78ecc90ad8c08873325b5217e80b3e69fab",
+ "chksum_sha256": "2d625b06d054c58b3434ee76881384abc724a66b58fe7ecf40fdf870c2e7b0b3",
"format": 1
},
{
- "name": "plugins/modules/idrac_license.py",
+ "name": "roles/idrac_gather_facts/tasks/get_backplane_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "66af69170bff339b9c17c072f987cb6dc74d454e5aa060498aac61ecce1a65de",
+ "chksum_sha256": "d1ee6473d831ef5fa2b080fbd7e6c545f65cd0b8ddd75af862f3fa314ccd8b71",
"format": 1
},
{
- "name": "plugins/modules/idrac_lifecycle_controller_job_status_info.py",
+ "name": "roles/idrac_gather_facts/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0030947d9d1bc3f70ed13147d429fa4696627321dc9f75a467f00df10adb8402",
+ "chksum_sha256": "f082f14db62efdca0aa691bfe928fedb660df3fc98f6e32f367cb8dc068335c9",
"format": 1
},
{
- "name": "plugins/modules/idrac_lifecycle_controller_jobs.py",
+ "name": "roles/idrac_gather_facts/tasks/get_cpu_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b57b0f86959fee2a4919d83ef943dd5856fd3b137842fb700478d07159b403f",
+ "chksum_sha256": "151ae93ca668eb1a8900addcfabe7ae999e670e821ad4ced7f06fc95735bd51a",
"format": 1
},
{
- "name": "plugins/modules/idrac_lifecycle_controller_logs.py",
+ "name": "roles/idrac_gather_facts/tasks/get_enclosure_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b374487c9c0f19acad5da9f3fb8fe24fa98b672e2df72f92b9b5eaa2d43ed865",
+ "chksum_sha256": "deb7f669cdd9686dc3c1e2d129ce062ea6ac39582fb1c4d1ebadb9d33aaa0672",
"format": 1
},
{
- "name": "plugins/modules/idrac_lifecycle_controller_status_info.py",
+ "name": "roles/idrac_gather_facts/tasks/get_controller_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "158d02c1d50ef175e26c7ffa66831575034e077e233ddd992c39d594e004fa38",
+ "chksum_sha256": "9ef30c6af08535b3ec7ae2485bdd9d4aa79675e2b1bbb6b07be726ac7c36651c",
"format": 1
},
{
- "name": "plugins/modules/idrac_network.py",
+ "name": "roles/idrac_gather_facts/tasks/get_pas_sensor_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c1ea93a690a1400cba57600c7ffe9378813c502057a7d76f593ec2b126e5d85",
+ "chksum_sha256": "8e9d20d3c5dcfbd9ba045bdd03395fb421c1b6174f3b4bf17a442855b28b3b84",
"format": 1
},
{
- "name": "plugins/modules/idrac_network_attributes.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "670868f9e7533d5019975faf0e3cf3119187248c7e255eeebefdbe59c59b45a6",
+ "name": "roles/idrac_gather_facts/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/idrac_os_deployment.py",
+ "name": "roles/idrac_gather_facts/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fde92ac2396a616812df831e2c576a591bb0b1e0f38e86e8f99cff8f5c75df3d",
+ "chksum_sha256": "f872a794379898660d17bfd9e1ae91bb13f8725a1cf4b16b901d80919192caba",
"format": 1
},
{
- "name": "plugins/modules/idrac_redfish_storage_controller.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "187f4af6b3da0bd0350f782bbb930c30d6e5926b0c64495b7466f209bd37469d",
+ "name": "roles/idrac_gather_facts/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/idrac_session.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "70dedb6e3721bd47cdd4d1d8c058eb0db343c2217c6f5e742b40cea43939e40c",
+ "name": "roles/idrac_gather_facts/tests/asserts",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/idrac_syslog.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a79f5615815e37ffd0e0dd8544a2df2782ba792bb5e283df134c3403b8d4f8c",
+ "chksum_sha256": "2ed3d03bb619d4f8f1e6427e36644de8c77977f32a09ef8b233c0c4ff776ccf2",
"format": 1
},
{
- "name": "plugins/modules/idrac_system_info.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/manager_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba0422032d21f80ccf2e138499b8dd10821b58ea7e09a4d470446f7929cc5695",
+ "chksum_sha256": "60be5b10f57f9ec1bfaa913610ef448320b476d27cd33b122a7037ea31a95c21",
"format": 1
},
{
- "name": "plugins/modules/idrac_timezone_ntp.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/hostnic_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "540b6fec19d767e940128d79991a7517716052293091e1a98caca550de5d0d8a",
+ "chksum_sha256": "e84c587ac238b74f292b0382d465ee29ecfdb92a53099410bb0691878aa8990b",
"format": 1
},
{
- "name": "plugins/modules/idrac_user.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/firmware_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6670badc421ded85d316b969b60a24b54c0f93dab90d388f62ac82b12f127ca1",
+ "chksum_sha256": "3dc966c3a3c7ec1c7dd3a3bfb2b4e3a97c10ae9f279c4a56d3ccab5313857afe",
"format": 1
},
{
- "name": "plugins/modules/idrac_user_info.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/memory_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e93386626a7c28ae94efea17aa6af84e7e59966a59e84136af01e19f6d84aa4",
+ "chksum_sha256": "6b16f28108610ba17f95350a75e72ee33ba0db357cc239e74a018e7aebc81057",
"format": 1
},
{
- "name": "plugins/modules/idrac_virtual_media.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/license_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1616e52d126a4c25b6cb7a2a47939a6345e5ae673e975e8b89a9b1eb8c7a0c14",
+ "chksum_sha256": "4d3e30cd29d21fa947ac9f1b43c3f4d73c60c85187d0d9d01025faa1cd969792",
"format": 1
},
{
- "name": "plugins/modules/ome_active_directory.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "47a38b69a0862bf550972a674a37d877b2f70d82a27a1bf68ac5b54c2f0678bf",
+ "chksum_sha256": "a2c959692893fd1c864029693d3f818e5a65f7e339c7c0c672c7d35e2f418481",
"format": 1
},
{
- "name": "plugins/modules/ome_alert_policies.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/system_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "744da973ab0686c1f55b81676c86837a846276b5736b0dc364a927b9254f3f28",
+ "chksum_sha256": "78c885b55767b51ba6c4be7998d67e9f9599f9e7518ec683f9bb1b7bd3f337b2",
"format": 1
},
{
- "name": "plugins/modules/ome_alert_policies_actions_info.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/cpu_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "55e49caeb47cec5c51c53fb0b7cf50570eed92d6fc2fceae4b4e8eee2be6611d",
+ "chksum_sha256": "0e8af65eb0a9b3e627d4d8b0a352de3cb82218bae97998be2101290194825c25",
"format": 1
},
{
- "name": "plugins/modules/ome_alert_policies_category_info.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/nic_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b83f7a82cb0fdf713db87623567512b70b3199ddf55b737bbc4ce7f6484b2de3",
+ "chksum_sha256": "bb3184e49419f93cf390a53562c01203db04037606af82aeab51783e21a5fbb1",
"format": 1
},
{
- "name": "plugins/modules/ome_alert_policies_info.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/controller_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd3cb7cf50c1d71743a3340912db5a58676728727d83c2a91a139dc1776de4db",
+ "chksum_sha256": "48bffad0e6952ab27d7721095ef84c4f13fdc6ab07ee859f36a380409ef6e594",
"format": 1
},
{
- "name": "plugins/modules/ome_alert_policies_message_id_info.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/sensorsvoltage_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff3bb7fa4f1edfb5025cb6ab0040dc8ee03f79cd20d00dbdd3a5951e5d5a28a1",
+ "chksum_sha256": "e26f5f551fe9d943743159b24c9a01c3c4c804543311330b12ff87ed2f0b7079",
"format": 1
},
{
- "name": "plugins/modules/ome_application_alerts_smtp.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/secureboot_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de78ff29cce75aa6d57f5e84beed3ca0dd2e91a06ddff572bd9a2bae93aaed85",
+ "chksum_sha256": "e6876b8b8892d421dd0ddd59dcc1986d54361e6c376d8239f42d91a0db707fff",
"format": 1
},
{
- "name": "plugins/modules/ome_application_alerts_syslog.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/fan_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "01e7c7432aafaf16db12d49ac5276f15cf75d6f878c304ad4658ae8e3fc9abeb",
+ "chksum_sha256": "92924ad83dc9f5fe1b6801d09dced4bb70c1209ede2394ca0ced2d25c3c72eba",
"format": 1
},
{
- "name": "plugins/modules/ome_application_certificate.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/pciedevice_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "17abd0bae13c93b2d2100f3150f2dc3e865fd2e7f7c941727c13e553d0c886fe",
+ "chksum_sha256": "1f4bfc03967189c3fa4c2752b75174294075ad6a2649d4c7e0f6c82dc0b0bd55",
"format": 1
},
{
- "name": "plugins/modules/ome_device_local_access_configuration.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/enclosure_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d3d5e42de5fdb980d19d4f943d725b75145cc112ed42a2524209c65cd66148d",
+ "chksum_sha256": "763ba0a9850c0b62d5224e40cfa49bb970e86f3c49991117ba21254211d11c24",
"format": 1
},
{
- "name": "plugins/modules/ome_application_network_address.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/virtualdisk_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9844f1c4d958fee67122018e0e1b7d651ee52f747708e6c8f12f5c455e90242",
+ "chksum_sha256": "2e8bd9e08382df2ea34a9b9830b92086569ca4f16e453286cb92c1705f69aa7e",
"format": 1
},
{
- "name": "plugins/modules/ome_application_network_proxy.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/enclosureemm_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1366d581d27bbd7ae40ea96506bc8d6a066adbc3222ff66974733b666f2633c5",
+ "chksum_sha256": "b409927ca4278e7eea565b40cd36ecc0e212901effc08ea13f8264e323224bf0",
"format": 1
},
{
- "name": "plugins/modules/ome_application_network_settings.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/psmetrics_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9bbaaa51f9acf57b58fae95988ed0afde5172cb4610b8c81d3ad56288f0a5bf4",
+ "chksum_sha256": "5ade4267e18da8e945add979e001b40d832e77f7b3435cc3efd55f29bd4647b6",
"format": 1
},
{
- "name": "plugins/modules/ome_application_network_time.py",
+ "name": "roles/idrac_gather_facts/tests/asserts/lc_assert.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b8fbc8e986b8a44703a5901f6a596b24491313dffabe4c9368ef3f331dd1234",
+ "chksum_sha256": "170dcd03bdb5031bb82a24cc19cc70e1815865a2572e4f14b892221324ef95b5",
"format": 1
},
{
- "name": "plugins/modules/ome_application_network_webserver.py",
+ "name": "roles/idrac_gather_facts/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f16ad3c4fd7f81bf6dc43c2131bffb2989392efbd7cbabb0d145c0cb08598a82",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "plugins/modules/ome_application_security_settings.py",
+ "name": "roles/idrac_gather_facts/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce4ea923efc1aad0fc2b22d8bc4039ddbf6dbe4a34424d844b22bfd141fc128d",
+ "chksum_sha256": "71250fe59fab062631b04fe173573783fcd89b77e8f5ae2c042720d10cd2f266",
"format": 1
},
{
- "name": "plugins/modules/ome_chassis_slots.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fe72d30c858c612064ba9118b3d7f1e97cdcb908c5e2005e74026f5707681e81",
+ "name": "roles/idrac_export_server_config_profile",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_configuration_compliance_baseline.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6aafa9fc0f313c12a464ea9c69047b8e9026749bebc709d93319a71887ebf99b",
+ "name": "roles/idrac_export_server_config_profile/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_configuration_compliance_info.py",
+ "name": "roles/idrac_export_server_config_profile/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f291a981ee898107c4cef6dab34a8140f703f1cd36914665175566ede35cca2",
+ "chksum_sha256": "7df7a3c628dee71fa4ce697d0da28ea42f1eebf667853e84124e2971af446c42",
"format": 1
},
{
- "name": "plugins/modules/ome_device_group.py",
+ "name": "roles/idrac_export_server_config_profile/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1287863b5aea15224cd308bb4f327cfb0f2a198c4415c6d248d25db7ba11728e",
+ "chksum_sha256": "2a7dec7c1e8eb54a57fc78788b66c44e036b676ad62bd998114d7b87fd32e431",
"format": 1
},
{
- "name": "plugins/modules/ome_device_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "89d586c756522fa223e28b30ac9efa5f18d9dfd17ed6ac95a6478b6121e0529f",
+ "name": "roles/idrac_export_server_config_profile/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_device_quick_deploy.py",
+ "name": "roles/idrac_export_server_config_profile/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65d68eb6d1902ba5c818a0eab913cea192c68d807fd455bd74303fbfe31e7757",
+ "chksum_sha256": "ac68a419593778d913a88752aa73cf05946ca0438db7f4584d6174b04c0c400a",
"format": 1
},
{
- "name": "plugins/modules/ome_device_location.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3bf02a20d37644dcab75df8f862d01e26b309468b8b611e3bc4832411eb1a415",
+ "name": "roles/idrac_export_server_config_profile/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_device_mgmt_network.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b56771c54c1de0cd75fae19fc36c0fe07d3511c815941acfb32836025f0a69be",
+ "name": "roles/idrac_export_server_config_profile/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_device_network_services.py",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a49b72402fbc42fab67786768dff6975d5fe81eb0c0418e975ed8536552d8d63",
+ "chksum_sha256": "cd932a799fefa81f0aafeb32b788e887e555c076f2b5fedb3ea75e81eb7f707b",
"format": 1
},
{
- "name": "plugins/modules/ome_device_power_settings.py",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/verify.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc6cabcd711d6117ff88bc5e58a8a24c721952671f47bfcf242ab12c9690c3d5",
+ "chksum_sha256": "6674f5a55b2b9311c78b61713e305a7b6419f02de40cabd4fdb337f234cd88fa",
"format": 1
},
{
- "name": "plugins/modules/ome_devices.py",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c94ff76fa306c5469bc5930c58f64b4e2423a76c835ed19cd315173e673601ee",
+ "chksum_sha256": "6c0d5bf979a0ad1541b496f173165b11f0ad14283391efde2c86ee35c477eb43",
"format": 1
},
{
- "name": "plugins/modules/idrac_reset.py",
+ "name": "roles/idrac_export_server_config_profile/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0bb2201ed119d6e26aa583427820f4d9b440c4489ad354a8508ed3ae96edee30",
+ "chksum_sha256": "621eb2a293781a0408900a6df0884360a7ed673e610b0cbdfe67db3221f62b16",
"format": 1
},
{
- "name": "plugins/modules/ome_diagnostics.py",
+ "name": "roles/idrac_export_server_config_profile/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f37335945247b3e94a4402fb36530d7505c1d3dbafa35553d17e444abc8ae5d7",
+ "chksum_sha256": "36902b75877782020af54e7c969630ae84f2ee9872c4ec4350a6ae79e06c930b",
"format": 1
},
{
- "name": "plugins/modules/ome_discovery.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b8ca1eb7df50d781df18ecbdcb179bee2903a7e23fbbad9ac2b0044d60c251a1",
+ "name": "roles/idrac_export_server_config_profile/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_domain_user_groups.py",
+ "name": "roles/idrac_export_server_config_profile/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c099a1fbe9767fff6644351cf7e728c2fd5413b20ed3a0428a61b2f02133d768",
+ "chksum_sha256": "35809f9a6cc33e2577870d883ad657e720b46ee3a2a0777f895d490ffa845ca1",
"format": 1
},
{
- "name": "plugins/modules/ome_firmware.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d95825cb26dabcc64c1d153652c2466ef5eda18960cb9ee90270202d3d3c2f11",
+ "name": "roles/idrac_export_server_config_profile/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_firmware_baseline.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/scp_export_local.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af5f2e984223b1ffb9d78648bd8ab8c18b627d81b0dd0900f05a4f16989e451a",
+ "chksum_sha256": "3d2296ccd8268dbd87cd0dfb3f82bacd1b93cba1ebe898797ac01d681f30f1e5",
"format": 1
},
{
- "name": "plugins/modules/ome_firmware_baseline_compliance_info.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/scp_export_nfs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0d08bc596d09894a2d95b76871108de4c87b825115b9b237206981289a672ff",
+ "chksum_sha256": "ce2b8a6305affda2aaac530edf3e05695c8ed2ff994799fe4e84b77050a33617",
"format": 1
},
{
- "name": "plugins/modules/ome_firmware_baseline_info.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/pre_req.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b59977ca8d111c3d82f1315a4a5f1d9fdd77ffb18c6d00a049f60e8f77ed859d",
+ "chksum_sha256": "121b8bc546708066ec1bd268af42e70cb396f1d2a99450d89ecb3eebbacf3c29",
"format": 1
},
{
- "name": "plugins/modules/ome_firmware_catalog.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/scp_export_cifs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3e965fd3781e330c127f0b0c861a2f21b3dfb4b65d0900274c8c0ff7e9682f4",
+ "chksum_sha256": "ee2d348b52f7e2711260348db0ff8ad9e0f7c4eb4a391c1de53da94e1002a406",
"format": 1
},
{
- "name": "plugins/modules/ome_groups.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/scp_export_https.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2794ed60e06fc533af5b610c6bebbd71a932eeea6be4a446e429c7a07ee33d49",
+ "chksum_sha256": "3ef7408260c8297ac606633989ae2a26f24a15c493dfbdb7f38ae0c4e20abb84",
"format": 1
},
{
- "name": "plugins/modules/ome_identity_pool.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce87d1090d90ff57b3d3a527bd2efe95776eb91c3e80640ae6428f4dbaab0835",
+ "chksum_sha256": "1558146a88520a40c77e451c4a7ab0f54fc2cf83de382bf80b3d608b9eae29a9",
"format": 1
},
{
- "name": "plugins/modules/ome_job_info.py",
+ "name": "roles/idrac_export_server_config_profile/tasks/scp_export_http.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e575ebfd1a2b6247abdd51e73efe2082f0f22e607f26104c43b6d77dd4ced21",
+ "chksum_sha256": "b8ec53a81eacd4b32bb0b7f5e209d85c386b001c541f581d51c17e38f7095836",
"format": 1
},
{
- "name": "plugins/modules/ome_network_port_breakout.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "aab67ab9d418614cf47102f6883afb629ab72b98be540c904e4e2432888789bf",
+ "name": "roles/idrac_export_server_config_profile/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_network_vlan.py",
+ "name": "roles/idrac_export_server_config_profile/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9fe22c4fa7055e0b395315234e4f87e556c6495d13ea43437158f4ab91c3627c",
+ "chksum_sha256": "1bf8b1e4781f050ca3d526439f50cead1680b8fbebfe4ba140b8f05c648ccbf7",
"format": 1
},
{
- "name": "plugins/modules/ome_network_vlan_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f41caea42efc7ab9a4ae7144182986992d37e7490dceccd1ec262c2369650096",
+ "name": "roles/idrac_export_server_config_profile/tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_powerstate.py",
+ "name": "roles/idrac_export_server_config_profile/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b15c98e5a0722cc06a6d060275cd527526536bb3a0fb4b1aad0d463676afaf75",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "plugins/modules/ome_profile.py",
+ "name": "roles/idrac_export_server_config_profile/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1668387d3adaf9a68264089ef46e011601502512ffeb72aec08c4c0d91264c8e",
+ "chksum_sha256": "6d5634aec64528ab58532686cef5828500fbfcd3b729e0333e20b63365f4a080",
"format": 1
},
{
- "name": "plugins/modules/ome_profile_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e95d312e668311e09b210ea9e21b0b63d46cb5b6cd1fa0e7e2f306ab507a489b",
+ "name": "roles/idrac_import_server_config_profile",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_server_interface_profile_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7828e3370b2228cebd3f442916fe81c26a43fa4c5239c186c98f3df7d39455d7",
+ "name": "roles/idrac_import_server_config_profile/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_server_interface_profiles.py",
+ "name": "roles/idrac_import_server_config_profile/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c27dedba93da5122c64cb09bbad215adf60f6ca640adafaf11cebafb93b853e5",
+ "chksum_sha256": "b1daf5cd34934f57365ef41dd1bdb8eadf73c96faee44442d8b607d3770921a6",
"format": 1
},
{
- "name": "plugins/modules/ome_smart_fabric.py",
+ "name": "roles/idrac_import_server_config_profile/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f55fad4eb0a37b26c225016ce0a62f14289987e8447f60a13d750e724ab3067e",
+ "chksum_sha256": "7a77ddd7746819905a9dd4511b45439de974b04c6628f8f89c5b89490bc9bcc7",
"format": 1
},
{
- "name": "plugins/modules/ome_smart_fabric_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9aa3f9bfa28ec74952f45d6929b8941e467fc72d593d781d4b9dfd4b07573925",
+ "name": "roles/idrac_import_server_config_profile/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_smart_fabric_uplink.py",
+ "name": "roles/idrac_import_server_config_profile/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b6ec3f94eb2c2926d269de9b35f4f693311a3261e9025b3a08ec665ac2be65b",
+ "chksum_sha256": "a072ea3135c9e0109ae66ae42d86a3947cd27c8f7fde4ea1d62bf14a75ff7b6a",
"format": 1
},
{
- "name": "plugins/modules/ome_smart_fabric_uplink_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0fc404675b2d5f4800e7fe19053556cdaf9e62c415b6867a17a9fc696846655d",
+ "name": "roles/idrac_import_server_config_profile/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_template.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e91802d2fd041c04f346ce43ab803596ec548cc29c63c403bf6cb9553903dc38",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_template_identity_pool.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b15832e64fb39bba8d419fda96a0f0d893b7dd3c3f182a51816a5a09dfd9ef41",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/ome_template_info.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b1a8cc5c499d76538aa4321ea545ecee0413733526c75cceeebe6e676ef5cc5",
+ "chksum_sha256": "c6440bd7d1c2e2fe63da11cf6345307f8d08d2ff2287a3007e3cb99eea47d7c4",
"format": 1
},
{
- "name": "plugins/modules/ome_template_network_vlan.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dc39292571dca59e1370eff4f3fabaa5a7767029617a24a14b21477207b6408a",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ome_template_network_vlan_info.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd0ea6e5dbb418ace51af5af341b7a704b3cf0ff05163935f7ab1b4eac0a993e",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "plugins/modules/ome_user.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a91a7adb4966965349d38badd708bab4713dc69b7f4007d1393d08c6605902c",
+ "chksum_sha256": "e3fb9baf2fda4da08a609e07a9c56bca74569c01a0081e3630e9635295b31f0a",
"format": 1
},
{
- "name": "plugins/modules/ome_user_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6609af8ddab5e1073d5041d40038cd70f8eed65f5d1d3285f77523b6a34126e9",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/redfish_event_subscription.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "675a96365b20bf6bf8808916b467f6810e418271a4c914565c79f41155af8f49",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "plugins/modules/redfish_firmware.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "380f9d2772004466c6e4c8fc33481b52f0bb05a3dfc3635660f9d8e5b447182c",
+ "chksum_sha256": "2f555e872a266977d5f0acb356ed20321dcf984564e9471d2fdc29b841d5a120",
"format": 1
},
{
- "name": "plugins/modules/redfish_firmware_rollback.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "644c7c2972589c5aa58cd7a909119acc7b3d8a4650f8c9f726d6fedea3a9f1ef",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/redfish_powerstate.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c511e178a205942312cbb1523270a27bf4bb8506b999ad29e8c86a0d9e44b976",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "plugins/modules/idrac_diagnostics.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cc28820b11d8e6622f429ef77484778b0b9fa9a5d6d4fe559a58fba978fe724f",
+ "chksum_sha256": "4a9673edc4a062a85a757483b1587a1ebea5ec8545b6ec20cdf861afab9b38e5",
"format": 1
},
{
- "name": "plugins/modules/dellemc_idrac_storage_volume.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4e9530ae54ec5e0e70142ae0c6585cd78af2ce511f24dd829affb0ba719424b3",
+ "name": "roles/idrac_import_server_config_profile/molecule/default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/idrac_server_config_profile.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2158f3770b7aea8dfffc46bcea4a960857805a25df674e584c0c3c863dd1c04b",
+ "chksum_sha256": "f3f5fdbd0243581c13e09c4a9347c3197712b89de7ccf5c19bf040002a8e0967",
"format": 1
},
{
- "name": "plugins/modules/idrac_storage_volume.py",
+ "name": "roles/idrac_import_server_config_profile/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b94ebb86f37b60cfcc1d06f924079bdb5502583c63b10dfc9b8eb6683fd02551",
+ "chksum_sha256": "2f2a440f3da11b5d518f74f7fe44fb76709607382cb11bd47fd424767eb8e7da",
"format": 1
},
{
- "name": "plugins/modules/ome_application_console_preferences.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1eb0dd33e5b833a1688dee170db6e84abaebaea1b38f73908013fd2ca74817a8",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/README.md",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf8697057933ae95e9172b8fb6da9907557f5f086ed7e91da850a2fb573fcf9d",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "requirements.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/http_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32d916481c121c551a11659f3e0ba3b2e38c580ef5dea95cd1047560f3df514b",
+ "chksum_sha256": "05ee90c1a034c6a7c4f02120c05554526172311af76c5a61767cab3705b114c0",
"format": 1
},
{
- "name": "roles",
+ "name": "roles/idrac_import_server_config_profile/molecule/nfs_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/README.md",
+ "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8091502909248e459a4b18ea05ca7bf36c1022419c31b23e4765684315129c22",
- "format": 1
- },
- {
- "name": "roles/idrac_attributes",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_attributes/README.md",
+ "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "245dd1c52d38db33b91d948c3a4403bb0739cf172ffd4e63dbe444477c395902",
+ "chksum_sha256": "c825c7c600c1dccac10930371fb8da29b38df7dfee460ab885266bf6e3006bd1",
"format": 1
},
{
- "name": "roles/idrac_attributes/defaults",
+ "name": "roles/idrac_import_server_config_profile/molecule/resources",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/defaults/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ccd23c69bb78d5c4da4314139c4f34ec24adb35d0bdfa16c096e2b5c20f9a8c5",
- "format": 1
- },
- {
- "name": "roles/idrac_attributes/handlers",
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/handlers/main.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4efd533c8f0a3c63e2ad84713dbb875c27953ade61183e3b98526db8816bbf52",
+ "chksum_sha256": "3985d5b2ca2f11661982e372f5c9e677144651dd2f7f167efd7e0e4b3d2c9231",
"format": 1
},
{
- "name": "roles/idrac_attributes/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b48f7002029c826a3402b360aaabe1a9301d34e249a49718aef9454d6dc9a557",
"format": 1
},
{
- "name": "roles/idrac_attributes/meta/argument_specs.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f061ffd4298bd768dcb142d03cbdc74c96c5b3e6db84ca86c663f04c4ed083bc",
+ "chksum_sha256": "6706025009360630843898df27085681a613507370fb0bb91a2c40fd03a2e8c7",
"format": 1
},
{
- "name": "roles/idrac_attributes/meta/main.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a7e4d3e398ea14415197ac551e718bfc71cce0229df4f4f61a35981096dc47e",
+ "chksum_sha256": "8a4f3a78e9d259c668ae748e224e36007b91372281024e1f514ad6aaaae72606",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f247488b64a9ccaffe3f0240f0cf3a6e527b3ac952a786bcc715c436b397a00d",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/default",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/default/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c58427eeb90ebf651a050a72f1a592dcb67808c53b5460041c67a8b9db98d3e0",
+ "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/default/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/idrac_attr",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "57802a2bfa00428e20c2baf45add16b9cb643b46bb5d0c325190699432473e86",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/idrac_attr/cleanup.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2eaa72c7f6eb429b320d8c6340ed73e43509fb9b6396c73001e924be3f4b7183",
+ "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/idrac_attr/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7bdba9a94a875c15ec6f83690f3d06fe6c078a228c5c2fecd3c220b344f2cc1",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/idrac_attr/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "7e73dc08b0b670f9047ad969ec153670bae08fb4baf7db258216251c4160c083",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr/cleanup.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2eaa72c7f6eb429b320d8c6340ed73e43509fb9b6396c73001e924be3f4b7183",
- "format": 1
- },
- {
- "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf6b840896de4b786ee1a8c174e9c6330883bb9a29f5fad3116c88652328c937",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/lifecycle_controller_attr/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/https_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "70c1949f3736465e786717cb6a2311c4dfcc92861212161f8957ca6c932e5d6c",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/system_attr",
+ "name": "roles/idrac_import_server_config_profile/molecule/cifs_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/system_attr/cleanup.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2eaa72c7f6eb429b320d8c6340ed73e43509fb9b6396c73001e924be3f4b7183",
+ "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/system_attr/converge.yml",
+ "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3eef0d294a285ae534ef217c2b07156e28ce8017c57ac02dd3b7d8c71db471bf",
+ "chksum_sha256": "2deadcf076f47e066b749446ece0948b1fc0be171ab883f67a32c64de9a9a7bd",
"format": 1
},
{
- "name": "roles/idrac_attributes/molecule/system_attr/molecule.yml",
+ "name": "roles/idrac_import_server_config_profile/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "67521b0bf1ceb30e3ca348cd5f07d4c54aee0c56d7b3935c542fd920c11df288",
"format": 1
},
{
- "name": "roles/idrac_attributes/tasks",
+ "name": "roles/idrac_import_server_config_profile/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/tasks/main.yml",
+ "name": "roles/idrac_import_server_config_profile/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c2023b600b957bbafd3a31e5ec79d21662dffa1fc218927b65078b2b3de54a3",
+ "chksum_sha256": "d569d3374f9ba9960023d769bdc7245afb9f635656bfdb48ac1ad61d0279a53d",
"format": 1
},
{
- "name": "roles/idrac_attributes/tests",
+ "name": "roles/idrac_import_server_config_profile/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/tests/inventory",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
- "format": 1
- },
- {
- "name": "roles/idrac_attributes/tests/test.yml",
+ "name": "roles/idrac_import_server_config_profile/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50d768c3b61c1deb820db1bda56cf4a916b4232f9ed448da92035eca8e654b93",
+ "chksum_sha256": "8a329bb5b5a0c7ce2855fddb6eb3b16ab43524ad86e7f12defe55577933fb636",
"format": 1
},
{
- "name": "roles/idrac_attributes/vars",
+ "name": "roles/idrac_import_server_config_profile/handlers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_attributes/vars/main.yml",
+ "name": "roles/idrac_import_server_config_profile/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e55a77ecab73769ee48b7cee96d383945f3b74732b244fae6e9b02e0aad3713f",
+ "chksum_sha256": "c31a046bb009c7b68dd25c11a096f2b08aad4e49903648580a05dfbec4da9d2f",
"format": 1
},
{
- "name": "roles/idrac_bios",
+ "name": "roles/idrac_import_server_config_profile/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/README.md",
+ "name": "roles/idrac_import_server_config_profile/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6c25e672dc7aff51b545c0b96e4660ec49c095f46b7de93843c22e37a2da95e",
- "format": 1
- },
- {
- "name": "roles/idrac_bios/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "roles/idrac_bios/defaults/main.yml",
+ "name": "roles/idrac_import_server_config_profile/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "568c7c8e3c076cbb645325bfe8fbac4651420942e7b58902f3001b4472f12cb2",
+ "chksum_sha256": "4acfc541204ebf3d6c37f48c79e48f8f273d016aa7a2f6af7fef715905245eba",
"format": 1
},
{
- "name": "roles/idrac_bios/handlers",
+ "name": "roles/idrac_bios",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/handlers/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "aa8e7c5541c5df8b0c7580807b182ff8d7e5d5cca42e3bbbf3c1aff1ab9cd6b7",
- "format": 1
- },
- {
"name": "roles/idrac_bios/meta",
"ftype": "dir",
"chksum_type": null,
@@ -3375,87 +3291,108 @@
"format": 1
},
{
- "name": "roles/idrac_bios/molecule",
+ "name": "roles/idrac_bios/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/__get_data.yml",
+ "name": "roles/idrac_bios/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28a6d3f13871bd19937f4b4d446f9262917979e0a6ccf754b2acd47ed40b3e19",
+ "chksum_sha256": "0f5bc4cf4b6dd8b6d33de490d471c654413d7a5dbe292d025a58e779e14020cf",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/clear_pending_attributes",
+ "name": "roles/idrac_bios/molecule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/clear_pending_attributes/converge.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_with_maintenance_window",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_bios/molecule/update_attributes_with_maintenance_window/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "833e9d4a796e3d6e80f7ef1091590baf9263cfe66623e468326bb99c12a9c7f9",
+ "chksum_sha256": "dae48a016bbbe31b14341b09c78f16078af041ad7d83f232072bbf07f960fab9",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/clear_pending_attributes/molecule.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_with_maintenance_window/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "8884874fc4b2a0445cf1e5cea84a17ece3dbf7e3455dc17691d8f52c91e418d1",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_immediate_with_jobwait",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_bios/molecule/update_attributes_immediate_with_jobwait/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0916dfb0e05312c73d1f5e8a7cfc822cfbc0a16e9ce0a837df4110347690994a",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_bios/molecule/update_attributes_immediate_with_jobwait/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "789337cedb7a0e5f33484b00e7a4c9cbe40cb133841e1656f18cff08f042d7af",
+ "chksum_sha256": "416f65beecc2e8e53ad7abf57e03a7f089401dbbd0f4eb16789de510950706c9",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/default",
+ "name": "roles/idrac_bios/molecule/update_attributes_on_reset_with_maintenance_window",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/default/converge.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_on_reset_with_maintenance_window/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4780273b956a4ca22a04c2c52fe1342f40dec5d42ad58389353f061e0f2cc0d9",
+ "chksum_sha256": "dae48a016bbbe31b14341b09c78f16078af041ad7d83f232072bbf07f960fab9",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/default/molecule.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_on_reset_with_maintenance_window/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
+ "chksum_sha256": "3f53e8d4b80c2dac745f033695febdb4f0c2dd222b7b8ec447164ffb3b44f210",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window",
+ "name": "roles/idrac_bios/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/molecule.yml",
+ "name": "roles/idrac_bios/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml",
+ "name": "roles/idrac_bios/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9d44e3bf2e9c3dd4a24b59e4b33228d23fca8428f4060d6ace4a7e884fe469e",
+ "chksum_sha256": "4780273b956a4ca22a04c2c52fe1342f40dec5d42ad58389353f061e0f2cc0d9",
"format": 1
},
{
@@ -3466,6 +3403,13 @@
"format": 1
},
{
+ "name": "roles/idrac_bios/molecule/reset_bios/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
+ "format": 1
+ },
+ {
"name": "roles/idrac_bios/molecule/reset_bios/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -3473,10 +3417,10 @@
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/reset_bios/molecule.yml",
+ "name": "roles/idrac_bios/molecule/__get_data.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
+ "chksum_sha256": "28a6d3f13871bd19937f4b4d446f9262917979e0a6ccf754b2acd47ed40b3e19",
"format": 1
},
{
@@ -3487,157 +3431,178 @@
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/reset_bios_with_reset_type_as_force_restart/converge.yml",
+ "name": "roles/idrac_bios/molecule/reset_bios_with_reset_type_as_force_restart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39dea23cd0f22d83a336b801858f334e655c683729059bab6526419e3c023f66",
+ "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/reset_bios_with_reset_type_as_force_restart/molecule.yml",
+ "name": "roles/idrac_bios/molecule/reset_bios_with_reset_type_as_force_restart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
+ "chksum_sha256": "39dea23cd0f22d83a336b801858f334e655c683729059bab6526419e3c023f66",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/resources",
+ "name": "roles/idrac_bios/molecule/update_attributes_on_reset",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/resources/cleanup.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_on_reset/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9fa5332de56ac39992c0dfabb7ca55ba96d75b8856879b1d7feaa53b6c06c63c",
+ "chksum_sha256": "dae48a016bbbe31b14341b09c78f16078af041ad7d83f232072bbf07f960fab9",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/resources/prepare.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_on_reset/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1c3c3706f74819eef75c5a6f7fc7a3c8adb78b0ec008d4ef41e382b779b19ef",
+ "chksum_sha256": "223982198fa78ae74e7e1cbbaa1fad058036fc89c58918ccf42009d51ca54d56",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_immediate",
+ "name": "roles/idrac_bios/molecule/resources",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_immediate/converge.yml",
+ "name": "roles/idrac_bios/molecule/resources/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c3a6e4124abf9d96581688f23ba05ed99f6e6d49a67dc43e0e2f3477e12f39f4",
+ "chksum_sha256": "9fa5332de56ac39992c0dfabb7ca55ba96d75b8856879b1d7feaa53b6c06c63c",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_immediate/molecule.yml",
+ "name": "roles/idrac_bios/molecule/resources/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0916dfb0e05312c73d1f5e8a7cfc822cfbc0a16e9ce0a837df4110347690994a",
+ "chksum_sha256": "f1c3c3706f74819eef75c5a6f7fc7a3c8adb78b0ec008d4ef41e382b779b19ef",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_immediate_with_jobwait",
+ "name": "roles/idrac_bios/molecule/clear_pending_attributes",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_immediate_with_jobwait/converge.yml",
+ "name": "roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "416f65beecc2e8e53ad7abf57e03a7f089401dbbd0f4eb16789de510950706c9",
+ "chksum_sha256": "f55f93da3bad79fbdc7b2d5fb5b53ad33442e439ff5f3d6b3d742a610fe0d903",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_immediate_with_jobwait/molecule.yml",
+ "name": "roles/idrac_bios/molecule/clear_pending_attributes/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0916dfb0e05312c73d1f5e8a7cfc822cfbc0a16e9ce0a837df4110347690994a",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_on_reset",
+ "name": "roles/idrac_bios/molecule/clear_pending_attributes/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "833e9d4a796e3d6e80f7ef1091590baf9263cfe66623e468326bb99c12a9c7f9",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_bios/molecule/update_attributes_immediate",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_on_reset/converge.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_immediate/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "223982198fa78ae74e7e1cbbaa1fad058036fc89c58918ccf42009d51ca54d56",
+ "chksum_sha256": "0916dfb0e05312c73d1f5e8a7cfc822cfbc0a16e9ce0a837df4110347690994a",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_on_reset/molecule.yml",
+ "name": "roles/idrac_bios/molecule/update_attributes_immediate/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dae48a016bbbe31b14341b09c78f16078af041ad7d83f232072bbf07f960fab9",
+ "chksum_sha256": "c3a6e4124abf9d96581688f23ba05ed99f6e6d49a67dc43e0e2f3477e12f39f4",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_on_reset_with_maintenance_window",
+ "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_on_reset_with_maintenance_window/converge.yml",
+ "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f53e8d4b80c2dac745f033695febdb4f0c2dd222b7b8ec447164ffb3b44f210",
+ "chksum_sha256": "1c3ff5ada33af88f7d89035e74a24e9f7ebd0bd1ce9aea711a11e456babcedeb",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_on_reset_with_maintenance_window/molecule.yml",
+ "name": "roles/idrac_bios/molecule/negative_scenarios_with_maintenance_window/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dae48a016bbbe31b14341b09c78f16078af041ad7d83f232072bbf07f960fab9",
+ "chksum_sha256": "b9d44e3bf2e9c3dd4a24b59e4b33228d23fca8428f4060d6ace4a7e884fe469e",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_with_maintenance_window",
+ "name": "roles/idrac_bios/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f6c25e672dc7aff51b545c0b96e4660ec49c095f46b7de93843c22e37a2da95e",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_bios/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_with_maintenance_window/converge.yml",
+ "name": "roles/idrac_bios/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8884874fc4b2a0445cf1e5cea84a17ece3dbf7e3455dc17691d8f52c91e418d1",
+ "chksum_sha256": "568c7c8e3c076cbb645325bfe8fbac4651420942e7b58902f3001b4472f12cb2",
"format": 1
},
{
- "name": "roles/idrac_bios/molecule/update_attributes_with_maintenance_window/molecule.yml",
+ "name": "roles/idrac_bios/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_bios/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dae48a016bbbe31b14341b09c78f16078af041ad7d83f232072bbf07f960fab9",
+ "chksum_sha256": "b45790ebf23ed966d7e99a7ba15314ed882cd366284db9f773386e96687f71e3",
"format": 1
},
{
- "name": "roles/idrac_bios/tasks",
+ "name": "roles/idrac_bios/handlers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/tasks/main.yml",
+ "name": "roles/idrac_bios/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b45790ebf23ed966d7e99a7ba15314ed882cd366284db9f773386e96687f71e3",
+ "chksum_sha256": "aa8e7c5541c5df8b0c7580807b182ff8d7e5d5cca42e3bbbf3c1aff1ab9cd6b7",
"format": 1
},
{
@@ -3662,4693 +3627,4742 @@
"format": 1
},
{
- "name": "roles/idrac_bios/vars",
+ "name": "roles/idrac_reset",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_bios/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0f5bc4cf4b6dd8b6d33de490d471c654413d7a5dbe292d025a58e779e14020cf",
- "format": 1
- },
- {
- "name": "roles/idrac_boot",
+ "name": "roles/idrac_reset/templates",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/README.md",
+ "name": "roles/idrac_reset/templates/idrac_lifecycle_controller_status.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "92a0c9cdb08215dd0131c2f5e91d125e2c58a1e9771093c40bf71097f035559a",
+ "chksum_sha256": "cacc1f259dc7dbef66e2f715f138d3a75ffd5852ad56a2fa4e0f17358f30b4ab",
"format": 1
},
{
- "name": "roles/idrac_boot/defaults",
+ "name": "roles/idrac_reset/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/defaults/main.yml",
+ "name": "roles/idrac_reset/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbc348c1aaf27aaa035a1b34a9e496b64dc515388e9a543689c28dc50a597bb2",
- "format": 1
- },
- {
- "name": "roles/idrac_boot/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "6af174177901d928f439e83c69caad0fccc601a710ac5b434af7bf5240f70418",
"format": 1
},
{
- "name": "roles/idrac_boot/handlers/main.yml",
+ "name": "roles/idrac_reset/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "867b8c424c2b712712af162203358ba5061e926389d7ad4d8a3ee40a0d5b3920",
+ "chksum_sha256": "0195a66c4e4ac7e78a39c4a8fa223ee0b2d049a09c398eab91080868422c8049",
"format": 1
},
{
- "name": "roles/idrac_boot/meta",
+ "name": "roles/idrac_reset/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/meta/argument_specs.yml",
+ "name": "roles/idrac_reset/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86cced715db95d2ee43d4cf20d223992c4f5aaf08f8c38b60267fde2f11572e5",
+ "chksum_sha256": "9e4a4575c14328832a6976cc4aed6b19c07dbfe5997a10ae733b99e50702fa0e",
"format": 1
},
{
- "name": "roles/idrac_boot/meta/main.yml",
+ "name": "roles/idrac_reset/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e247f0476f74edd90842e7e2507e0efc3f21427f80e411e0d740c74292c55c6f",
+ "chksum_sha256": "00e77ec0ddaa418973cd9cad99137bcef33738171102464f50d3f49f5cf2b3f4",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule",
+ "name": "roles/idrac_reset/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true",
+ "name": "roles/idrac_reset/defaults/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9cd98c8b578501e6b24cec9c004bc5963e0f479ca4435aabc58ddfa7e63033a0",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_reset/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/molecule.yml",
+ "name": "roles/idrac_reset/tasks/lcstatus_check.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "377ab984113ad5d61f3164c7578677c42d727d35eb35c79f61275b794bdb970e",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml",
+ "name": "roles/idrac_reset/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab30d143cf010145f94f14c05cab91120f717be0cffadc9f348daffa7ac0e8ff",
+ "chksum_sha256": "17feb74dd2b4bbec2b65ad4044f5af9e8f9660c66a9594cce342a79e178c1225",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false",
+ "name": "roles/idrac_reset/handlers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/molecule.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
- "format": 1
- },
- {
- "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml",
+ "name": "roles/idrac_reset/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "105480778b833d51453b2b22e7ac419eb3865b523bd5f979789e66feaa46c4db",
+ "chksum_sha256": "0ec20499ab70254c4498ed1783b01bff5fdde39aca5f81f9c7e3f979ba8b384a",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart",
+ "name": "roles/idrac_reset/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/molecule.yml",
+ "name": "roles/idrac_reset/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml",
+ "name": "roles/idrac_reset/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "808af97a5a234940ab3c38a93c54f4a164b7cb52ee47107137cc4555b53e9a1d",
+ "chksum_sha256": "43fa08ab4acc6414c37f0fd53406310d4aa81f970d122feeb5604b40251317d5",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart",
+ "name": "roles/idrac_os_deployment",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/molecule.yml",
+ "name": "roles/idrac_os_deployment/templates",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/templates/RHEL_8.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "c1b54a6b4b5d2ccfeca7376a758389d0de0d5cb2c489189f2690616909b0e6ab",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml",
+ "name": "roles/idrac_os_deployment/templates/RHEL_9.j2",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9fb0c40a0d88c86aa9056a949b626eadceadaf9379731fd83045b2cb1c12aa14",
+ "chksum_sha256": "54d4812cce2199771afaaeedf6fc8d813152f824097195dfb76284bbdebfadb4",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none",
+ "name": "roles/idrac_os_deployment/templates/ESXI_8.j2",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "998e80a725d3e21a3b68e6368decadd5c66196afdc74dc9947ff2282dcec144f",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml",
+ "name": "roles/idrac_os_deployment/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "038690fb11c33de166dc94bf35d151639978151731963af7ec44234ced12eb06",
+ "chksum_sha256": "9e69ff8ddd86e522acf91f9ca4efd9099947ecb857dd1138e8c7aef4793d816a",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml",
+ "name": "roles/idrac_os_deployment/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
+ "chksum_sha256": "db297ffe635bcee661eaee0844312d69cc364d67cc427bab545865674401c9b9",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none",
+ "name": "roles/idrac_os_deployment/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml",
+ "name": "roles/idrac_os_deployment/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1eb678af41caaa30a1a302cbf75849714d9e0e804ae64c8145ff3a8f6518660",
+ "chksum_sha256": "a6d9f6ecca54f00e015debd5807eecd29aee9c3118d6a710768074c6eb625b9b",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/molecule.yml",
+ "name": "roles/idrac_os_deployment/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "e800016c9724604f5c12210c94b830bbc27bd4996205fcd74a31768f754b0a1f",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none",
+ "name": "roles/idrac_os_deployment/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/molecule.yml",
+ "name": "roles/idrac_os_deployment/defaults/main",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/defaults/main/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "759f139407f609b8655150fe9df73e69025bbaadc01b6c69e07729867539d583",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml",
+ "name": "roles/idrac_os_deployment/defaults/main/rhel.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "272d65772e3c0602b288c8afc69a647482a57d9572e0d971aa9c9a35f5944b79",
+ "chksum_sha256": "e2a5308265f0edf9cdb641063bbf2c2e877a9693154b04cf3a4b9a784f3934c0",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false",
+ "name": "roles/idrac_os_deployment/defaults/main/esxi.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5393035ed221c2ced358b9d585fd1881817a8b31b0dd030ba32e81ecd8a7f4df",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/molecule.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "name": "roles/idrac_os_deployment/tasks/tracking",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml",
+ "name": "roles/idrac_os_deployment/tasks/tracking/track_for_os_deployment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78a4eaa877f5633e31f590a372723c137736dd11b9a113d36e198435755eb54c",
+ "chksum_sha256": "df751495e4839501919602cebe8c24677e11d191394353b13e5b6448af44459e",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id",
+ "name": "roles/idrac_os_deployment/tasks/clean_up",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/molecule.yml",
+ "name": "roles/idrac_os_deployment/tasks/clean_up/clean_up_working_directory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "6cb3708d332825d39b859aecd1df938ef1b3bc63b92213c1195432e7ffa9d0aa",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml",
+ "name": "roles/idrac_os_deployment/tasks/clean_up/clean_up_destinations.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2b06a3776dfd89429ebc65b6e626e1caa9c3bb2c3210c208add9cad25e7b169",
+ "chksum_sha256": "d9a4258e490a7d6f72372a85f128243594c709f26020b61761ff2064d6837d46",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/default",
+ "name": "roles/idrac_os_deployment/tasks/rhel",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/default/converge.yml",
+ "name": "roles/idrac_os_deployment/tasks/rhel/compile_iso.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ef1e42f64b6b1036298447aa4188460ae395dc4af25ab0d0b139d15fbe47df4",
+ "chksum_sha256": "8d4b130336f4349cd1c3bd96c1f52f1fcd81615bcead7e9fa39d46883ebf41f6",
"format": 1
},
{
- "name": "roles/idrac_boot/molecule/default/molecule.yml",
+ "name": "roles/idrac_os_deployment/tasks/idrac",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/tasks/idrac/attach_iso_to_virtual_media.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7c031cbadc0f08f5dfacd846de58729e2900c340492871c191e4ac6b504ddc9",
+ "chksum_sha256": "2d7ed30e4c808fc88df7f59a669b8bb1b561bf002c332b3a8296c73e19d6c660",
"format": 1
},
{
- "name": "roles/idrac_boot/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_os_deployment/tasks/idrac/set_boot_mode_and_restart.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f0502bbe2d8e5411fda7e5e87820654f14f2641a500eeab7c8a5a5636892cce2",
"format": 1
},
{
- "name": "roles/idrac_boot/tasks/main.yml",
+ "name": "roles/idrac_os_deployment/tasks/idrac/clean_up_virtual_media_slot.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "15e5c2cab8d295c1d6957b2fef3bdbc2222f003b023a0aba83dadb606f11bd03",
+ "chksum_sha256": "f03831c4aa6b588e969bccdce38b75d49969eae2f5543b8ba9e94510cb18eb29",
"format": 1
},
{
- "name": "roles/idrac_boot/tests",
+ "name": "roles/idrac_os_deployment/tasks/iso",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/tests/inventory",
+ "name": "roles/idrac_os_deployment/tasks/iso/extract_iso.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "09edb456108a3a29a94573ac4568446c5b401930b1f7bf8b051502fd2d064a7b",
"format": 1
},
{
- "name": "roles/idrac_boot/tests/test.yml",
+ "name": "roles/idrac_os_deployment/tasks/iso/generate_kickstart_file.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "365ea1cad2e4ffb62f1a086752e377bc9a3fa010685b7c15211be1dec90813c4",
+ "chksum_sha256": "aa2a8cd54823f99a1f61b0453048b228e440bdf1c4e274b0f0ae7635990d083e",
"format": 1
},
{
- "name": "roles/idrac_boot/vars",
+ "name": "roles/idrac_os_deployment/tasks/esxi",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_boot/vars/main.yml",
+ "name": "roles/idrac_os_deployment/tasks/esxi/compile_iso.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50ca20875f667975a478f6fd4cf78866cdfbfc89891bbceede99a1150aa258ff",
+ "chksum_sha256": "ea92a3796b8d00ba6e0e65146d6089201a6bd4f6fdfb01467279414ac69fd024",
"format": 1
},
{
- "name": "roles/idrac_certificate",
+ "name": "roles/idrac_os_deployment/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7414db3ca8e33e14b34126b64b5eddefc24d27ac2e3d3d0b9a33284acba29c16",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/tasks/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/README.md",
+ "name": "roles/idrac_os_deployment/tasks/common/download_or_copy_source_files.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c9a1ba53346f348b656ce43ae836d01e341d173aa793536b7b83d39a478664b",
+ "chksum_sha256": "ec7e75367105039f8833bb8bcacf522f137bd7456162c99987aa0a8ddb2761de",
"format": 1
},
{
- "name": "roles/idrac_certificate/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_os_deployment/tasks/common/copy_iso_to_destination.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "163156845f185684a862eb1ba7f0023119f51798898d9bb79209a8f15ea80b20",
"format": 1
},
{
- "name": "roles/idrac_certificate/defaults/main.yml",
+ "name": "roles/idrac_os_deployment/tasks/common/create_working_directory_path.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6cf00eee21d159b121fb581e5c186c22803b7605acdd260c54e5d58969a94520",
+ "chksum_sha256": "e5d3c043120b0788f2a4a4e5f8dcd26adc600065c47c423f92eeca3907c21d4a",
"format": 1
},
{
- "name": "roles/idrac_certificate/handlers",
+ "name": "roles/idrac_os_deployment/tasks/common/validate_inputs.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4e70fbc15dd06a835d5cbf840d4bd93f8c1f1d0d4214f6555e68cbbd4c2256e1",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_os_deployment/handlers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/handlers/main.yml",
+ "name": "roles/idrac_os_deployment/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "66d821f9940918fd71847d8db4b05eb937a6de3f457ad7b9b6412762d51d1f41",
+ "chksum_sha256": "82cb53c7b2545e1a7883e32efdebbc34bd69bbad9ccbeabd190c3028f7f05777",
"format": 1
},
{
- "name": "roles/idrac_certificate/meta",
+ "name": "roles/idrac_os_deployment/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/meta/argument_specs.yml",
+ "name": "roles/idrac_os_deployment/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0bcfa9cc16612b7726b60e9091be9e85bff3ea73991cbde070e977c434eeb827",
+ "chksum_sha256": "0647d824fd28557a0b72bcdc079671c07571c1ee008efd92728e1291b7ca3eac",
"format": 1
},
{
- "name": "roles/idrac_certificate/meta/main.yml",
+ "name": "roles/idrac_os_deployment/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2a5d6be0d2bb806d5549884a7ac88cd841662629e2b15a9a7ab67177250f7bf",
+ "chksum_sha256": "f9d3fffd8a4834828c2dab606405d1fe11f12c5e4b14142c99221f689597c857",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule",
+ "name": "roles/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8091502909248e459a4b18ea05ca7bf36c1022419c31b23e4765684315129c22",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_firmware",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CA",
+ "name": "roles/idrac_firmware/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CA/molecule.yml",
+ "name": "roles/idrac_firmware/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "7f0cf3892bdc532a0bd6dd0bc448f090139919f4fc9e1500bafe47d2fcda9aca",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CA/converge.yml",
+ "name": "roles/idrac_firmware/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5bf154879b4a34b326240ccb33a490a5f8bc7f228248db93c02baaaa0869d09e",
+ "chksum_sha256": "c8afa3f929d14d3f681ce7379d32b9ac1f64d820d03d79b272a083deb6676017",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CSC",
+ "name": "roles/idrac_firmware/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CSC/molecule.yml",
+ "name": "roles/idrac_firmware/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "76a25139790ece31962351e91c95883e5233a87e63d93e6c044dbf5a0c0604ae",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CSC/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "99431629f17cbe04456ef0f839377cb71a7ae2b7c541deba9a4b769bba4c06f4",
+ "name": "roles/idrac_firmware/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CTC",
+ "name": "roles/idrac_firmware/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CTC/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CTC/converge.yml",
+ "name": "roles/idrac_firmware/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecd35a2c8d4a1223c5a37a4e80a2f9a950f633357f2eb2655f6f20ca30198c5c",
+ "chksum_sha256": "b8f562b0f8963d6fb1653344be6a0391d63b54ed70b7f23e3086a030fc14463b",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CustomCertificate",
+ "name": "roles/idrac_firmware/molecule/http_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CustomCertificate/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/http_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/CustomCertificate/converge.yml",
+ "name": "roles/idrac_firmware/molecule/http_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65da72677ef3b2e1c383087b86fda3d45434287ce1cf2ddb4968b0a2ff0bf7c7",
+ "chksum_sha256": "e78dce79455ba2ae773abf7ef878883286686923dbd95b7f157ac006200ca670",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/HTTPS",
+ "name": "roles/idrac_firmware/molecule/nfs_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/HTTPS/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/nfs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/HTTPS/converge.yml",
+ "name": "roles/idrac_firmware/molecule/nfs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f06ff94029cdaceaf9865f0299fc6013b0fea5193ddbd07d078f543eb146d27f",
+ "chksum_sha256": "31631bf50bdb9a5b035dcaa97b4c1cba71139e886468ee04666191f8418a65ec",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/SSLKEY",
+ "name": "roles/idrac_firmware/molecule/httpsproxy_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/SSLKEY/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/httpsproxy_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/SSLKEY/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0f8fc730b66884b45530be3fdbdbed659d79387466637b2fb129573fbc74cbee",
- "format": 1
- },
- {
- "name": "roles/idrac_certificate/molecule/__delete_directory.yml",
+ "name": "roles/idrac_firmware/molecule/httpsproxy_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5acaf39dad31445f79924aadfd59f40857cce066a2bd4ef3dc1c3fd9ed2de0c7",
+ "chksum_sha256": "d362f33905add12d47dd5b02d798d55eb29ada085cda85e8cd37ba04f97cde87",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/__extract_firmware_version.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d2c1e0fbf65e8b1c4ecd02182ada8dbbd320fd9828f0e50eea57450892e9c5c5",
+ "name": "roles/idrac_firmware/molecule/negative_scenarios",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/__get_helper.yml",
+ "name": "roles/idrac_firmware/molecule/negative_scenarios/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d510c89d7a712cd198e199f8b885057154f906a27dc18f90734362ad192b783a",
+ "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/__get_ssl_key.yml",
+ "name": "roles/idrac_firmware/molecule/negative_scenarios/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7367f06df38c4c2fe8b9a280dfda2ac001eae5e3c8d7c62daec8856e9dd8d55e",
+ "chksum_sha256": "c65a450270cb2725ea0c0d77439695d6fdf0721f6711d35265baab3f8aacbf9e",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/default",
+ "name": "roles/idrac_firmware/molecule/ftp_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/default/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/ftp_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/default/converge.yml",
+ "name": "roles/idrac_firmware/molecule/ftp_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b8b6811b2776890e59fdddbf667a5d2f8e25207cb478cf865c5c97140ac586a",
+ "chksum_sha256": "a031dc6f65a6c3a7b75af9a1cff7206ccbd32d061c2738fd180aee6d86368a4e",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/generateCSR",
+ "name": "roles/idrac_firmware/molecule/https_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/generateCSR/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/https_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/generateCSR/converge.yml",
+ "name": "roles/idrac_firmware/molecule/https_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "57033a2a72d8b711a5c6c2e46131a5b1b1ce8068b43a81f0a51dd44d022bfe36",
+ "chksum_sha256": "23232705312d495854c47436c1481e826373b541e19ec964ed6ad07a9fdd9208",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/reset",
+ "name": "roles/idrac_firmware/molecule/cifs_share",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/reset/molecule.yml",
+ "name": "roles/idrac_firmware/molecule/cifs_share/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_certificate/molecule/reset/converge.yml",
+ "name": "roles/idrac_firmware/molecule/cifs_share/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b646c45281197bec65800696292b5e6d62ba1aa36be2149c7de10b439ddba55",
+ "chksum_sha256": "e9cba7b8006136cf795ca98cca97e649fb9965988a7c5b4669c6fa77919693b9",
"format": 1
},
{
- "name": "roles/idrac_certificate/tasks",
+ "name": "roles/idrac_firmware/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2aa85fe4ddb50b123a7922cbdc78e9b8b479816d298a5f77b462c00c4f0a2d0d",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_firmware/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/tasks/export.yml",
+ "name": "roles/idrac_firmware/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80cffca71ccf99a8cbbdcc7e4cad320663e0064ac47faf66b1a62ab2a751d177",
+ "chksum_sha256": "7d35d5e950afdd8a3f36385d957aef5f2348add525025b7b6e786070e4d1f2a9",
"format": 1
},
{
- "name": "roles/idrac_certificate/tasks/generate_csr.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "aef02f22f523b789c3a397b9f9b7c39c166e754b978f4920177bcbc53067e211",
+ "name": "roles/idrac_firmware/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/tasks/import.yml",
+ "name": "roles/idrac_firmware/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1a6a2edb98266da8d17726347dc0b3a0c5c270dd44bc0dadeca28a493163b6cc",
+ "chksum_sha256": "9878250caee484ccd585db63e5c38597e606587c475e3e993431329b1d14440d",
"format": 1
},
{
- "name": "roles/idrac_certificate/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8500b36628049770984d5c5737419aa9e2e209cda3eae776b981ab8bb70885b5",
+ "name": "roles/idrac_firmware/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/tasks/reset.yml",
+ "name": "roles/idrac_firmware/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82b70c0d2bb2c126a73c04c392513fbfa84f4626a6d5f2c855e45bb1bd5e0df7",
+ "chksum_sha256": "dc76fe5e14c723905484243dd4f75b8158ae7ad1c7c85f21c275ad1f3aafe886",
"format": 1
},
{
- "name": "roles/idrac_certificate/tests",
+ "name": "roles/idrac_firmware/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/tests/inventory",
+ "name": "roles/idrac_firmware/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d906aecb61d076a967d9ffe8821c7b04b063f72df9d9e35b33ef36b1c0d98f16",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "roles/idrac_certificate/tests/test.yml",
+ "name": "roles/idrac_firmware/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05252420e22b201c04db4b4e79f650a9bbb561aea0caec3d3e253d825637f11d",
+ "chksum_sha256": "892ec6c42a76c0f6ba5fc628f5455f78700271bffabdbafde5eed32df1699d92",
"format": 1
},
{
- "name": "roles/idrac_certificate/vars",
+ "name": "roles/idrac_storage_controller",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_certificate/vars/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a35cfc80bcacd3b412a6505f8c5e599a8eee9487788a4dd7c8b69a6882b07c59",
- "format": 1
- },
- {
- "name": "roles/idrac_export_server_config_profile",
+ "name": "roles/idrac_storage_controller/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/README.md",
+ "name": "roles/idrac_storage_controller/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "36902b75877782020af54e7c969630ae84f2ee9872c4ec4350a6ae79e06c930b",
+ "chksum_sha256": "57c999b43cebe8874baf8ce6a3fbe7a603c69bc0101577c73d7eb1d8720fa066",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/defaults",
+ "name": "roles/idrac_storage_controller/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3eda64bf454dd351cebf7063807750d690c44072601bae3af6144460a5323e6b",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_storage_controller/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/defaults/main.yml",
+ "name": "roles/idrac_storage_controller/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35809f9a6cc33e2577870d883ad657e720b46ee3a2a0777f895d490ffa845ca1",
+ "chksum_sha256": "981bc0f2d638983323e5081845acf0ac97ddf1cd5e603cf517051f69661a9cec",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/handlers",
+ "name": "roles/idrac_storage_controller/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "64f1956a2b0638551ece563c4f072e2c27553b76e4f381c22316d9e18d47d6b7",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_storage_controller/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/handlers/main.yml",
+ "name": "roles/idrac_storage_controller/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1bf8b1e4781f050ca3d526439f50cead1680b8fbebfe4ba140b8f05c648ccbf7",
+ "chksum_sha256": "15213c1a91c203930ede7c03d11efa362d86d0f94e6822268e714e8e7a98b14d",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/meta",
+ "name": "roles/idrac_storage_controller/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/meta/argument_specs.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7df7a3c628dee71fa4ce697d0da28ea42f1eebf667853e84124e2971af446c42",
- "format": 1
- },
- {
- "name": "roles/idrac_export_server_config_profile/meta/main.yml",
+ "name": "roles/idrac_storage_controller/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a7dec7c1e8eb54a57fc78788b66c44e036b676ad62bd998114d7b87fd32e431",
+ "chksum_sha256": "ea1e319497a59c9d00d925cec6f1bcaf7c672b1b3e6d9a671c95a1c9e89c9f77",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule",
+ "name": "roles/idrac_storage_controller/handlers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default",
+ "name": "roles/idrac_storage_controller/handlers/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "527bd186562deac98b997410a770da9d22afee10005f965d699859e2a4df5ce0",
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_storage_controller/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/molecule.yml",
+ "name": "roles/idrac_storage_controller/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c0d5bf979a0ad1541b496f173165b11f0ad14283391efde2c86ee35c477eb43",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/cleanup.yml",
+ "name": "roles/idrac_storage_controller/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd932a799fefa81f0aafeb32b788e887e555c076f2b5fedb3ea75e81eb7f707b",
+ "chksum_sha256": "6992dd1f8393638377d59efa1ddbd6e728bffc188f6997d3b5c98208ce710853",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/converge.yml",
+ "name": "roles/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "621eb2a293781a0408900a6df0884360a7ed673e610b0cbdfe67db3221f62b16",
+ "chksum_sha256": "a681a11158030f89670466b71c1cc02663571a4b551b2e3a7924ad23096e48cd",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/molecule/default/verify.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6674f5a55b2b9311c78b61713e305a7b6419f02de40cabd4fdb337f234cd88fa",
+ "name": "roles/redfish_storage_volume",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks",
+ "name": "roles/redfish_storage_volume/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/main.yml",
+ "name": "roles/redfish_storage_volume/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1558146a88520a40c77e451c4a7ab0f54fc2cf83de382bf80b3d608b9eae29a9",
+ "chksum_sha256": "8098e64b110bba2ed1aaf28aa39abc87c5085f84149271719bb97376d1181aa3",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/pre_req.yml",
+ "name": "roles/redfish_storage_volume/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "121b8bc546708066ec1bd268af42e70cb396f1d2a99450d89ecb3eebbacf3c29",
+ "chksum_sha256": "310a9f23099629e5643b4dfdfa22af40cd4f78f3037fe1a5f6915c322e12feb0",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/scp_export_cifs.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ee2d348b52f7e2711260348db0ff8ad9e0f7c4eb4a391c1de53da94e1002a406",
+ "name": "roles/redfish_storage_volume/vars",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/scp_export_http.yml",
+ "name": "roles/redfish_storage_volume/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8ec53a81eacd4b32bb0b7f5e209d85c386b001c541f581d51c17e38f7095836",
+ "chksum_sha256": "5d396acd98a3478d4a32965b818a75ebfddf7cc5725e8a0fac6093830ef34ad9",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/scp_export_https.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3ef7408260c8297ac606633989ae2a26f24a15c493dfbdb7f38ae0c4e20abb84",
+ "name": "roles/redfish_storage_volume/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/scp_export_local.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_immediate",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/redfish_storage_volume/molecule/apply_time_immediate/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d2296ccd8268dbd87cd0dfb3f82bacd1b93cba1ebe898797ac01d681f30f1e5",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tasks/scp_export_nfs.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_immediate/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce2b8a6305affda2aaac530edf3e05695c8ed2ff994799fe4e84b77050a33617",
+ "chksum_sha256": "defd2475ab056ad28cf76397ae6f42ac3f7440c041546b6ecf0c488517fbd2d4",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tests",
+ "name": "roles/redfish_storage_volume/molecule/RAID60",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tests/inventory",
+ "name": "roles/redfish_storage_volume/molecule/RAID60/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/tests/test.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID60/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d5634aec64528ab58532686cef5828500fbfcd3b729e0333e20b63365f4a080",
+ "chksum_sha256": "d33d97618ffcc391d674d3345e4701a2ce217eaf2676694346cc76ad008da05e",
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/vars",
+ "name": "roles/redfish_storage_volume/molecule/RAID1",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_export_server_config_profile/vars/main.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID1/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac68a419593778d913a88752aa73cf05946ca0438db7f4584d6174b04c0c400a",
- "format": 1
- },
- {
- "name": "roles/idrac_firmware",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/README.md",
+ "name": "roles/redfish_storage_volume/molecule/RAID1/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2aa85fe4ddb50b123a7922cbdc78e9b8b479816d298a5f77b462c00c4f0a2d0d",
+ "chksum_sha256": "bf3b39d9fc6450c880a0e5ab61e904844fb9c1e1b112d72be49da7401eef7ba9",
"format": 1
},
{
- "name": "roles/idrac_firmware/defaults",
+ "name": "roles/redfish_storage_volume/molecule/RAID0",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/defaults/main.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID0/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d35d5e950afdd8a3f36385d957aef5f2348add525025b7b6e786070e4d1f2a9",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/redfish_storage_volume/molecule/RAID0/converge.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79d8cf155feafc68b8d2d03b79c6a04b5c66e9e5f9e87741ac71c57f54a1c089",
"format": 1
},
{
- "name": "roles/idrac_firmware/handlers/main.yml",
+ "name": "roles/redfish_storage_volume/molecule/__extract_storage.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc76fe5e14c723905484243dd4f75b8158ae7ad1c7c85f21c275ad1f3aafe886",
+ "chksum_sha256": "971c811200c00ba90bf9bd999d45ec7750ba63d9f5e2d75c3929ae89f82b9f72",
"format": 1
},
{
- "name": "roles/idrac_firmware/meta",
+ "name": "roles/redfish_storage_volume/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/meta/argument_specs.yml",
+ "name": "roles/redfish_storage_volume/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7f0cf3892bdc532a0bd6dd0bc448f090139919f4fc9e1500bafe47d2fcda9aca",
+ "chksum_sha256": "37fbd5cff4f3f926cee6902730801fa0b2b9058379389c6180f7a1224eaaf09c",
"format": 1
},
{
- "name": "roles/idrac_firmware/meta/main.yml",
+ "name": "roles/redfish_storage_volume/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c8afa3f929d14d3f681ce7379d32b9ac1f64d820d03d79b272a083deb6676017",
+ "chksum_sha256": "5d047f2576796d6cd610997fdf03d7febf5095f3695627deeb87f663150f84b4",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule",
+ "name": "roles/redfish_storage_volume/molecule/RAID10",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/cifs_share",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/redfish_storage_volume/molecule/RAID10/molecule.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/cifs_share/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID10/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "c443aef978e1312010f754d04cb5c6da231557f5a044f4aa77e6832ae9526908",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/cifs_share/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/__lc_status.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e9cba7b8006136cf795ca98cca97e649fb9965988a7c5b4669c6fa77919693b9",
+ "chksum_sha256": "602b7c37a4a50ca9f143caa2479ff5c737e7592d1c802f8f6ffadbc7579077b1",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/default",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/default/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/default/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8f562b0f8963d6fb1653344be6a0391d63b54ed70b7f23e3086a030fc14463b",
+ "chksum_sha256": "65331bfaa443cd7ff715e673d30abb2be2e3acad8ff0c0161cc002b4df3ef030",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/ftp_share",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true_force_reboot_true",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/ftp_share/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true_force_reboot_true/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/ftp_share/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true_force_reboot_true/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a031dc6f65a6c3a7b75af9a1cff7206ccbd32d061c2738fd180aee6d86368a4e",
+ "chksum_sha256": "ec06873a01c77fa0cfe3d4e85c647f5c41ee33675b9b254a7cc697451a8c2a87",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/http_share",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/http_share/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/http_share/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e78dce79455ba2ae773abf7ef878883286686923dbd95b7f157ac006200ca670",
+ "chksum_sha256": "0a14046372f6e7f33908931a7edeb09e73dc4ec5bde6e1778b61fe23054a2735",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/https_share",
+ "name": "roles/redfish_storage_volume/molecule/__job_track.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4ac8080105eab85b5b077ac94669ff8fc03600675b5d86258ee27ca26e6ceebd",
+ "format": 1
+ },
+ {
+ "name": "roles/redfish_storage_volume/molecule/initialization",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/https_share/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/initialization/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/https_share/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/initialization/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23232705312d495854c47436c1481e826373b541e19ec964ed6ad07a9fdd9208",
- "format": 1
- },
- {
- "name": "roles/idrac_firmware/molecule/httpsproxy_share",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "d2204316b3f71e879600ede864aaa0e5b53ac0b5cc5422b4766a6e789a0d9dfd",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/httpsproxy_share/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/__get_helper.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "2107089195728fc04d99dd63d9f0d8f61b2d7e519f5127627921ef7ee4139df2",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/httpsproxy_share/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/__delete_virtual_drive.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d362f33905add12d47dd5b02d798d55eb29ada085cda85e8cd37ba04f97cde87",
+ "chksum_sha256": "8d167fa3a1ec0e778272338f9d77af0bb416be6789875be061dcab8375103df6",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/negative_scenarios",
+ "name": "roles/redfish_storage_volume/molecule/RAID5",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/negative_scenarios/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID5/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/negative_scenarios/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID5/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c65a450270cb2725ea0c0d77439695d6fdf0721f6711d35265baab3f8aacbf9e",
+ "chksum_sha256": "2aa8df2c045cd181b56ee21f6bc7c7f3460797cf3d989de75fafd7025f741ca5",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/nfs_share",
+ "name": "roles/redfish_storage_volume/molecule/RAID6",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/nfs_share/molecule.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID6/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_firmware/molecule/nfs_share/converge.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID6/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31631bf50bdb9a5b035dcaa97b4c1cba71139e886468ee04666191f8418a65ec",
+ "chksum_sha256": "fd8a43244b35a395e735e9f9662d01d52a0c2b626e3f11b044c0e2f4d599ecf1",
"format": 1
},
{
- "name": "roles/idrac_firmware/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/redfish_storage_volume/molecule/__create_virtual_drive.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5ff523743f2c992be084d96d3e0bebf811c50fd09bad3e9b3bdf346c093a3914",
"format": 1
},
{
- "name": "roles/idrac_firmware/tasks/main.yml",
+ "name": "roles/redfish_storage_volume/molecule/__idrac_reset.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9878250caee484ccd585db63e5c38597e606587c475e3e993431329b1d14440d",
+ "chksum_sha256": "5c004e1ee7b85b34bde8daf3ce4b82d794013745abff33fd2c27801dbf89bf4a",
"format": 1
},
{
- "name": "roles/idrac_firmware/tests",
+ "name": "roles/redfish_storage_volume/molecule/RAID50",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_firmware/tests/inventory",
+ "name": "roles/redfish_storage_volume/molecule/RAID50/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_firmware/tests/test.yml",
+ "name": "roles/redfish_storage_volume/molecule/RAID50/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "892ec6c42a76c0f6ba5fc628f5455f78700271bffabdbafde5eed32df1699d92",
- "format": 1
- },
- {
- "name": "roles/idrac_firmware/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "86b09f71d7e2238b359eb8ec3be7860f5f531dd8cba020281d8760b6101e8c24",
"format": 1
},
{
- "name": "roles/idrac_firmware/vars/main.yml",
+ "name": "roles/redfish_storage_volume/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76a25139790ece31962351e91c95883e5233a87e63d93e6c044dbf5a0c0604ae",
+ "chksum_sha256": "efb82c5ab08b6d604fa2dfa92911904119f33b20799fca3cccbc0ed9aa9633a8",
"format": 1
},
{
- "name": "roles/idrac_gather_facts",
+ "name": "roles/redfish_storage_volume/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/README.md",
+ "name": "roles/redfish_storage_volume/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "babcf9ea882157e54c32df007487cbb8432e214d7c3d5ebbba6631241792cbc3",
+ "chksum_sha256": "0a5a221d2fa9a6fc8985007dca4b7b46ea33c4fee921a405ff021bb576740473",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/defaults",
+ "name": "roles/redfish_storage_volume/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/defaults/main.yml",
+ "name": "roles/redfish_storage_volume/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e9f5e643ff9212b823aab7dfeb2658e1b1743ee6e296fe7cec64b64c814dd1f",
+ "chksum_sha256": "d504045131a1a6e693af5e3be831c677039d54c70765cb14bb5ab1de3d8eb054",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/handlers",
+ "name": "roles/redfish_storage_volume/handlers",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/handlers/main.yml",
+ "name": "roles/redfish_storage_volume/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f872a794379898660d17bfd9e1ae91bb13f8725a1cf4b16b901d80919192caba",
+ "chksum_sha256": "6877ee1583a2e33f3b3baf52a4a633c1f0bddea5bafda3e91241952bb5f2f19f",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/meta",
+ "name": "roles/redfish_storage_volume/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/meta/argument_specs.yml",
+ "name": "roles/redfish_storage_volume/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "75d820d28ed29f3ab34caf04022a9d5b3023f9179806d13cb88deb25bbf1d58c",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/meta/main.yml",
+ "name": "roles/redfish_storage_volume/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3bf6b5dc34c2b73a1e1db79a12c608502254e34ec556ba2c261fe607fbfc6ad1",
+ "chksum_sha256": "c4aa87d44c3b522744c37ca811b31c1648dfde1bf2e75a231499f5d8210396fd",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule",
+ "name": "roles/idrac_boot",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/backplane",
+ "name": "roles/idrac_boot/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/backplane/molecule.yml",
+ "name": "roles/idrac_boot/meta/argument_specs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "86cced715db95d2ee43d4cf20d223992c4f5aaf08f8c38b60267fde2f11572e5",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/backplane/converge.yml",
+ "name": "roles/idrac_boot/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a1a0a21993f25c4bc9b53a65c20dfbc128821bbf5868dad00326fae00ff82e1",
+ "chksum_sha256": "e247f0476f74edd90842e7e2507e0efc3f21427f80e411e0d740c74292c55c6f",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/bios",
+ "name": "roles/idrac_boot/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/bios/molecule.yml",
+ "name": "roles/idrac_boot/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "50ca20875f667975a478f6fd4cf78866cdfbfc89891bbceede99a1150aa258ff",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/bios/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6aeb01343669a588a39fb5c079282e515ea89873d6e7bcc036e053f58092ae62",
+ "name": "roles/idrac_boot/molecule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/controller",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/controller/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/controller/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd6e6417f014ec03b13867ac110beb0328e8d4e421d71e534002153a29e99b8a",
+ "chksum_sha256": "ccd9a6bdb969d79c36b61f0339e702d5b5032b020546c995b312fd34bb6e5741",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/cpu",
+ "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/cpu/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "03f5f6bec9178116f17d4a1f7e49aa0607b3b34751e35ee4d86f26d4f9912383",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/cpu/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_legacy_mode_force_restart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "808af97a5a234940ab3c38a93c54f4a164b7cb52ee47107137cc4555b53e9a1d",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/default",
+ "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/default/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/default/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_boot_option_reference_enabled_true/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cff3d242b9a5abe65abd87b232d6678b0952aea01ac388e894489e47577dfea3",
+ "chksum_sha256": "ab30d143cf010145f94f14c05cab91120f717be0cffadc9f348daffa7ac0e8ff",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosure",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosure/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "de8583bbc3924432cfe625f9899beb6ad7848058e61d1ecabd745ec810ee5498",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosure/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf1906e1ced1cb774e276f108b46c8fcf7c47db2fd45f9abc75f98b15d5d6661",
+ "chksum_sha256": "d93c24cf33c66d6e9b3135506a51df0a298e756eca52d0dd6be9049841ba49ed",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosureemm",
+ "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosureemm/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/enclosureemm/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_options_using_display_name_enabled_false/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0283a9aae3c3d8ba622a804822089373823a9f1494266ed068d65766055922d1",
+ "chksum_sha256": "105480778b833d51453b2b22e7ac419eb3865b523bd5f979789e66feaa46c4db",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/fan",
+ "name": "roles/idrac_boot/molecule/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/fan/molecule.yml",
+ "name": "roles/idrac_boot/molecule/default/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "c7c031cbadc0f08f5dfacd846de58729e2900c340492871c191e4ac6b504ddc9",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/fan/converge.yml",
+ "name": "roles/idrac_boot/molecule/default/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3847cf869806ef54d8df586fea8862e60cdcf70de9f90598cd1f3417be3b2aef",
+ "chksum_sha256": "2ef1e42f64b6b1036298447aa4188460ae395dc4af25ab0d0b139d15fbe47df4",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/firmware",
+ "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/firmware/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/firmware/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_order_using_uefi_mode_graceful_restart/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9077637b7c957f46e1351f3e3f4e16e925c81a62e3381dd24169a3f0a7b4079",
+ "chksum_sha256": "9fb0c40a0d88c86aa9056a949b626eadceadaf9379731fd83045b2cb1c12aa14",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/hostnic",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/hostnic/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/hostnic/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_uefi_with_resource_id/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e75317b8512ae920138e3c7241d6b4582626d6e236b36128733b92f856543c53",
+ "chksum_sha256": "b2b06a3776dfd89429ebc65b6e626e1caa9c3bb2c3210c208add9cad25e7b169",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/idrac",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/idrac/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/idrac/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1793fd4df60558619a9c94aef7599243482429d81c8a28e776b91f850221b59a",
+ "chksum_sha256": "fc69b86c7931abdfc30fa69d8263ee624da0f82e3bd622fabee4ed427cdfad12",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/license",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/license/molecule.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/molecule.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/license/converge.yml",
+ "name": "roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "555ad87a256a73804d979ffca0fb14349aa5ce521c463fc7daa32d2a6d394a4d",
+ "chksum_sha256": "aafef16f967f4d86b1630a197754ef56b4c23f6bfdb6bac4df181aa1b9c694ed",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/memory",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "roles/idrac_boot/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "92a0c9cdb08215dd0131c2f5e91d125e2c58a1e9771093c40bf71097f035559a",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/memory/molecule.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "name": "roles/idrac_boot/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/memory/converge.yml",
+ "name": "roles/idrac_boot/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a937be08af8078010259968d6dc6ef8836ed89caea61e997db31fec54b7f7b5",
+ "chksum_sha256": "fbc348c1aaf27aaa035a1b34a9e496b64dc515388e9a543689c28dc50a597bb2",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/negative",
+ "name": "roles/idrac_boot/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/negative/molecule.yml",
+ "name": "roles/idrac_boot/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "15e5c2cab8d295c1d6957b2fef3bdbc2222f003b023a0aba83dadb606f11bd03",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/negative/converge.yml",
+ "name": "roles/idrac_boot/handlers",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "roles/idrac_boot/handlers/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abcc6362b778c7dd7c367130c0f52564cb65a37a314aa41817ae19252f020ff7",
+ "chksum_sha256": "867b8c424c2b712712af162203358ba5061e926389d7ad4d8a3ee40a0d5b3920",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/nic",
+ "name": "roles/idrac_boot/tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/nic/molecule.yml",
+ "name": "roles/idrac_boot/tests/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/nic/converge.yml",
+ "name": "roles/idrac_boot/tests/test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "88018366757580a7dd6c975481cf2098d8e6add7a9400aae149886c98cec2241",
+ "chksum_sha256": "365ea1cad2e4ffb62f1a086752e377bc9a3fa010685b7c15211be1dec90813c4",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/passensor",
+ "name": "bindep.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "76c27d1138b5331a2bfb3b8b7e92c70cc39fd1fe793a48117b54ce2649cd7b1f",
+ "format": 1
+ },
+ {
+ "name": "meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/passensor/molecule.yml",
+ "name": "meta/execution-environment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "565e8d831c58bf2a1f80168b5ee92f526fa5fe48b2ec71a9387371c0d50ca9a9",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/passensor/converge.yml",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85eac7dc208c39b391e4f286622829eb99c1f1331cd575f808b374512aed978e",
+ "chksum_sha256": "dac8ac908fdd6b7bffe4a091c3f443b45716b1bc591e80cae03270731ef62598",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/pciedevice",
+ "name": ".ansible-lint",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b1faa2fe6cb8f8029a0aae03332dc309e5144309c0a7ae2df24c25727e6f70cf",
+ "format": 1
+ },
+ {
+ "name": ".github",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/pciedevice/molecule.yml",
+ "name": ".github/workflows",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "d797937de9e579f1ecc980d7db21d2e3d36055f4212cad8103d4dba0d7d140cd",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/pciedevice/converge.yml",
+ "name": ".github/PULL_REQUEST_TEMPLATE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c0c515a34feab6e6b68e051513ac03a3ac94d3cc8cba176aaed27996ad0287e",
+ "chksum_sha256": "186d97efed69e2211f154847a8ac52f263fa54319620bf1dcea00b59f494a969",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/physicaldisk",
+ "name": ".github/ISSUE_TEMPLATE",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/physicaldisk/molecule.yml",
+ "name": ".github/ISSUE_TEMPLATE/bug_report.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "56f4cdaffe4a97636427b91eb59acb67bc92f869451d137308428771d48e6aad",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/physicaldisk/converge.yml",
+ "name": ".github/ISSUE_TEMPLATE/ask_a_question.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f842fce3a08c9578c56eb1bea950a9f2aef3952b50bb7ebcc55992f0ff089004",
- "format": 1
- },
- {
- "name": "roles/idrac_gather_facts/molecule/powersupply",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "e172d0ed215d9561ccf644c10a0f7fb5ea722a427ff13741f8634053d62a9338",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/powersupply/molecule.yml",
+ "name": ".github/ISSUE_TEMPLATE/config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "155b8ff8561f87047a5e2cebac1737edfbba156c21cc733fe636d84ca222d627",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/powersupply/converge.yml",
+ "name": ".github/ISSUE_TEMPLATE/feature_request.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b887541028b49abb21f65fbdcf0505d0d9e06681956366b31119264422c2b155",
+ "chksum_sha256": "e15deb214aa7cb6847890eb95491e027c021b7f1ec01c7fbda8dca021d1162a0",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/secureboot",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": ".github/CODEOWNERS",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a9904179b2b13e66820070b147f7de5623efaecb1e9d0b68c47a5abb28b7fe02",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/secureboot/molecule.yml",
+ "name": ".ansible-lint-ignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "9259c9753c32d36dfca9f4c9858b16b12e03ab2cd5eeece7f53ee9ad7bb2b2aa",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/secureboot/converge.yml",
+ "name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "90a6628784d8f22ff412233df90b45f4ca670001c322ad02347933ebaac6a04c",
+ "chksum_sha256": "316651748c6045de666490a0b9c4d05d50afaeb02129263d9f3a0729314b5f3b",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsbattery",
+ "name": "changelogs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsbattery/molecule.yml",
+ "name": "changelogs/.plugin-cache.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "a52aa92b1346f5c730d603464c8f20375eaead759c3524f64cd876eea4f3ecc4",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsbattery/converge.yml",
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "678657b674bd8d79ae67af9a8ebca26b47e40fcaf6f958e66a15e1517b6b3cdb",
+ "chksum_sha256": "c00012f627317aec0a7b0b4db3da8c43eb7eb63188eb3f7ee8c2319692aab2d7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsintrusion",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "changelogs/changelog.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "080988706ff47cba87e24719247d11d510a12127f99266711c33b0258c90d107",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/molecule.yml",
+ "name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "3f8be16c6a53d1a7c659b0a9803866e10395d5fa466d557f8e8089e3a26cf0c3",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsintrusion/converge.yml",
+ "name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87970ed0a6773bd5ab9c8a78c1555ec0f61228282b314573c33bd30da7102a8d",
+ "chksum_sha256": "a308267b76306e4836ea65de6f0bd853b75b036e1e9822cb365ac9b2bd0d58cb",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsvoltage",
+ "name": "plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/molecule.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "name": "plugins/module_utils",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/sensorsvoltage/converge.yml",
+ "name": "plugins/module_utils/idrac_redfish.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1dffcb2b8b2a5fb2b557d0961eaee2b1cbbd94f1f2aa26d998ad06fe6f314f0b",
+ "chksum_sha256": "8da0c32e39942e6ae943277542fc4395cca78c17958cde82d509876a44e5af12",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/systemmetrics",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/dellemc_idrac.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e60a6661de248b2be11330274cecf6af56989f04388f210ed3a6cf28753b57ee",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/systemmetrics/molecule.yml",
+ "name": "plugins/module_utils/redfish.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "989e518c95b741c49045727dc7ac92dc5440e08d7a7cc9c9de7b83481e651dd5",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/systemmetrics/converge.yml",
+ "name": "plugins/module_utils/session_utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54d9f77025d049c9b6572899b5bccc51968e0183e840768d2e3f3c0521aa7ce2",
+ "chksum_sha256": "ae5f22fc431069c53aa88137d6d8c613ab8c22112fc31cf3b4ad0662db497ea3",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/virtualdisk",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/virtualdisk/molecule.yml",
+ "name": "plugins/module_utils/ome.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "e78a8b5e1560c08d8e7655f567bf2c53817ff01f0f621419153a0cbd083e050f",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/molecule/virtualdisk/converge.yml",
+ "name": "plugins/module_utils/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7f64f87068ebc765d1151c2994688505ddf9d29b5a49749565d813a7fd71da9",
+ "chksum_sha256": "f5b4aa1d4f6bcdf22373061d2dd5d1f873f6cf277c3caded38c75c69d97f6556",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks",
+ "name": "plugins/doc_fragments",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_attributes_info.yml",
+ "name": "plugins/doc_fragments/idrac_x_auth_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e4ddc302cfdb0520c6fa9e0a73708a3491412bea218571b184b94926c695030",
+ "chksum_sha256": "e9882f55285d0809089b6b5ed37332873dcd57ef87b3ae1c85d4fecbb7dfb6db",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_backplane_info.yml",
+ "name": "plugins/doc_fragments/ome_auth_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1ee6473d831ef5fa2b080fbd7e6c545f65cd0b8ddd75af862f3fa314ccd8b71",
+ "chksum_sha256": "d16534ca24a8d7324d239d9478df3ea42223f583eb1d1d591d62a44ce8246eed",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_battery_info.yml",
+ "name": "plugins/doc_fragments/omem_auth_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "148c547f8686797aba487fb562ac318ae933609914899419bc7b08c45f92f76d",
+ "chksum_sha256": "ff858349aeccc4f1794127890f51291fb7f7664a4812598a0f4b6f9580d674e0",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_bios_info.yml",
+ "name": "plugins/doc_fragments/redfish_auth_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d625b06d054c58b3434ee76881384abc724a66b58fe7ecf40fdf870c2e7b0b3",
+ "chksum_sha256": "7a5bc959218fa69373726a7a322b82e566ee3ec74d915ce133f810b6cb2935b1",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_controller_info.yml",
+ "name": "plugins/doc_fragments/idrac_auth_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ef30c6af08535b3ec7ae2485bdd9d4aa79675e2b1bbb6b07be726ac7c36651c",
+ "chksum_sha256": "09f31bebb6c31d2b5551642f672527679065b89ca380e82cf43866cb14f5bdb7",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_cpu_info.yml",
+ "name": "plugins/doc_fragments/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "151ae93ca668eb1a8900addcfabe7ae999e670e821ad4ced7f06fc95735bd51a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_enclosure_emm_info.yml",
+ "name": "plugins/doc_fragments/network_share_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39af5a227279476db905f227e7c4321756537f807b697dfd02fe16e5af40ec82",
+ "chksum_sha256": "9aef12b8caa7df49c02dde445a8923c811228558bd61893c720ef185b2e7c182",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_enclosure_info.yml",
+ "name": "plugins/doc_fragments/oment_auth_options.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "deb7f669cdd9686dc3c1e2d129ce062ea6ac39582fb1c4d1ebadb9d33aaa0672",
+ "chksum_sha256": "8bd8699fdcc71eebfb96c08e07c5c39705ab5145afec7f084528bf8ce48cc5c9",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_fan_info.yml",
+ "name": "plugins/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9bb1df958e92876346cb1fffead2be0436ca155725be6326174f6be124812afb",
+ "chksum_sha256": "bf8697057933ae95e9172b8fb6da9907557f5f086ed7e91da850a2fb573fcf9d",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_firmware_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "024fd4797f5075b0030f378f4b23696e23f193eee0ae3d286793028ac54f45f9",
+ "name": "plugins/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_host_nic_info.yml",
+ "name": "plugins/inventory/ome_inventory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cbe6b638f4f6bf98263cc35f2ab8e9dc9c20d4f4781179ea84b25dc837940f49",
+ "chksum_sha256": "cb2597106d2e3822ce9f74ae0551904f6f6ccd6cde597bd3ddccf96509bd1e3c",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_intrusion_info.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bfa01c92ff7a53762840eac467d93619b0191e4fe771715be349eef092e96b64",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_license_info.yml",
+ "name": "plugins/modules/idrac_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61c857b5a12886ad7dde1dee0dec583d999cc435615dd4ffa211ea52e11cea56",
+ "chksum_sha256": "cd9d20cd8d7240570fd5441b944b8fafbb49125e6dd0da0d7a54f118dde25605",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_memory_info.yml",
+ "name": "plugins/modules/ome_device_location.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f1db24485b5870b7a722e07807e8e0df8bebdd162cbb67e12167cbb2b35a3a9",
+ "chksum_sha256": "fabe70ccd78905c1059c2a44aa3a5f5705ffcf26055fe3c78d07aa7ba472462a",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_metrics_info.yml",
+ "name": "plugins/modules/idrac_session.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5bf5239cfc7ea06a7bb797c84272d5ca0f814a1ad8d0150299384670cbd2a8a",
+ "chksum_sha256": "a899a46fe702c2bd147f8572071ac8b95c4dc6ea3fe87eb92d87f44bedb580b0",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_nic_info.yml",
+ "name": "plugins/modules/idrac_license.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bfc885a21855a4c875dcd2d8d30decb7c970dedeb5c4912a549edf88e9fb9a99",
+ "chksum_sha256": "3525e7237c2cf1c5c80396e01e0db5286ef82e8f16d96eec0855482a28f250fd",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_pas_sensor_info.yml",
+ "name": "plugins/modules/ome_alert_policies.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e9d20d3c5dcfbd9ba045bdd03395fb421c1b6174f3b4bf17a442855b28b3b84",
+ "chksum_sha256": "a66dce2ed3c4bc8c74d70af2061ebbb49f74a6f0dac89601f00a2be2e553eafe",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_pcie_device_info.yml",
+ "name": "plugins/modules/ome_template_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "09ded655e39f0510b2432d9e26f24bbcc03d147dae5aef9a4c53ad85b2f8468b",
+ "chksum_sha256": "903d530c5c3b8fbc8df9fefea5edc1a8cc1d7db06a217ec1d786d6e9b7e7c891",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_physical_info.yml",
+ "name": "plugins/modules/ome_template_network_vlan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c08ebd517f4809864571641b9bbd54e720e54d3eb9f608b86c2de0e21d5e9154",
+ "chksum_sha256": "0981694020ae6e29639c6aae494ab18a6584de43d01b7d06737352fe75be1407",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_power_supply_info.yml",
+ "name": "plugins/modules/redfish_event_subscription.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b768e6b3a6d27240d49d04a0f59493047f2052f2d9ba94ede986198682525f77",
+ "chksum_sha256": "08b2849adce995c599089ba4e11b4e50a42846a47d418391f79e9a0b084757e1",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_resource_id.yml",
+ "name": "plugins/modules/dellemc_configure_idrac_eventing.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71b9b902e3e06e3effa5e21acd8a6f8be1d0dbe49500ff72d7d8cb27def8555c",
+ "chksum_sha256": "8972622a58eb34b314304c1891c770b0cfa0c35a766303f5d2fb4614f2bc9ca9",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_secure_boot_info.yml",
+ "name": "plugins/modules/ome_firmware_baseline.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3cdb9d35aed51e1bb8f4626028b87fed75d52df4a29e933a096eb6e1e526b47a",
+ "chksum_sha256": "db3dff4b11a58099af5de57b103d8989183b3dc53e650cb35e0db801c0486e81",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_system_info.yml",
+ "name": "plugins/modules/idrac_firmware.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4e94f83a0bcae476b8005195648d8fd69b894b0f96d24fa3f837f021f46537d7",
+ "chksum_sha256": "7cabfda75d3c291689c0c2fee932be51f3021e862f692bcb25ed00a053a1e2c6",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_virtual_disk_info.yml",
+ "name": "plugins/modules/ome_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "91690948737d7d0249d914032490bc42d6260226a1b8fd55922653a1b46b2a61",
+ "chksum_sha256": "bfc07fd709855ef17af210715f8d2bf8a1e9f7c06a9558547d572d7332a91307",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/get_voltage_info.yml",
+ "name": "plugins/modules/idrac_attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "baf9e3a82d2f19ccc21fa8721c09004ef99ab237f790d5f676bc533aaa523e0e",
+ "chksum_sha256": "6e96008e016bfda9fc804bbb3e23287d2027a2608a780627ce1c57e46031db5c",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tasks/main.yml",
+ "name": "plugins/modules/ome_application_certificate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f082f14db62efdca0aa691bfe928fedb660df3fc98f6e32f367cb8dc068335c9",
+ "chksum_sha256": "0ac42af776d7288acebfea2e88044a37caeff29b4d99840e5c7c9e55fccaf5ce",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/dellemc_idrac_storage_volume.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4e9530ae54ec5e0e70142ae0c6585cd78af2ce511f24dd829affb0ba719424b3",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_lifecycle_controller_job_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0030947d9d1bc3f70ed13147d429fa4696627321dc9f75a467f00df10adb8402",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/controller_assert.yml",
+ "name": "plugins/modules/idrac_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "48bffad0e6952ab27d7721095ef84c4f13fdc6ab07ee859f36a380409ef6e594",
+ "chksum_sha256": "4c1ea93a690a1400cba57600c7ffe9378813c502057a7d76f593ec2b126e5d85",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/cpu_assert.yml",
+ "name": "plugins/modules/ome_template_identity_pool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e8af65eb0a9b3e627d4d8b0a352de3cb82218bae97998be2101290194825c25",
+ "chksum_sha256": "984fc429fcdd64a329f1589f29f621db7c3d21508a2079bb3b75b817c0269fec",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/enclosure_assert.yml",
+ "name": "plugins/modules/idrac_system_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "763ba0a9850c0b62d5224e40cfa49bb970e86f3c49991117ba21254211d11c24",
+ "chksum_sha256": "ba0422032d21f80ccf2e138499b8dd10821b58ea7e09a4d470446f7929cc5695",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/enclosureemm_assert.yml",
+ "name": "plugins/modules/ome_alert_policies_actions_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b409927ca4278e7eea565b40cd36ecc0e212901effc08ea13f8264e323224bf0",
+ "chksum_sha256": "cdb5a8e28858301ad0652c5e99115ba303fd9a4a470f12fa5fc7179359350ada",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/fan_assert.yml",
+ "name": "plugins/modules/redfish_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "92924ad83dc9f5fe1b6801d09dced4bb70c1209ede2394ca0ced2d25c3c72eba",
+ "chksum_sha256": "e00729a66bdf4bd718f003564319e6a3ee9770d88c5c5396c5d708a4097653bf",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/firmware_assert.yml",
+ "name": "plugins/modules/ome_firmware.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3dc966c3a3c7ec1c7dd3a3bfb2b4e3a97c10ae9f279c4a56d3ccab5313857afe",
+ "chksum_sha256": "77b1055a4806445a3e18188e0d9afa8316259bd900e67404dc8e41c9b809e4b2",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/hostnic_assert.yml",
+ "name": "plugins/modules/ome_application_console_preferences.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e84c587ac238b74f292b0382d465ee29ecfdb92a53099410bb0691878aa8990b",
+ "chksum_sha256": "6455bc7bbda3c9a4d75ff8a99202fe5a133431cace49bbfc40b031a4babde7f9",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/lc_assert.yml",
+ "name": "plugins/modules/idrac_server_config_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "170dcd03bdb5031bb82a24cc19cc70e1815865a2572e4f14b892221324ef95b5",
+ "chksum_sha256": "86406d06c9edf0cd03923ac68641c70669469fac2452137499817ef32b92252f",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/license_assert.yml",
+ "name": "plugins/modules/ome_alert_policies_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d3e30cd29d21fa947ac9f1b43c3f4d73c60c85187d0d9d01025faa1cd969792",
+ "chksum_sha256": "515db8633f523ccacb1fd5182d2cad629ed9862f0c967518883001d84b7a84c6",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/manager_assert.yml",
+ "name": "plugins/modules/idrac_user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "60be5b10f57f9ec1bfaa913610ef448320b476d27cd33b122a7037ea31a95c21",
+ "chksum_sha256": "3b2f2c92c4a6ff9c21456fe512349546af2da8c1a46013e147d16c280cd1821b",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/memory_assert.yml",
+ "name": "plugins/modules/idrac_diagnostics.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b16f28108610ba17f95350a75e72ee33ba0db357cc239e74a018e7aebc81057",
+ "chksum_sha256": "2afe54778e0b96a26936ae0221f9ec92e0c324ddfa25ee484b0264a565b9ca11",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml",
+ "name": "plugins/modules/idrac_os_deployment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28c0563664fa0a63bc2654edf6bb78c89208920d7414fc9bf2c9eb7408835cf0",
+ "chksum_sha256": "fde92ac2396a616812df831e2c576a591bb0b1e0f38e86e8f99cff8f5c75df3d",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/nic_assert.yml",
+ "name": "plugins/modules/ome_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb3184e49419f93cf390a53562c01203db04037606af82aeab51783e21a5fbb1",
+ "chksum_sha256": "5a9d906f53b99ba6449684729cea638bda648b536476e9f2095a918b9f4fbfeb",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/passensor_assert.yml",
+ "name": "plugins/modules/idrac_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2796df243ae251d9a6d0358aabd93e12fbc56aaff0208243fc71be0e20bb4182",
+ "chksum_sha256": "877573fda46c5e009e86b371fb09f9a111b48b2bd0c3a4646111ce854d0b4f28",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/pciedevice_assert.yml",
+ "name": "plugins/modules/ome_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f4bfc03967189c3fa4c2752b75174294075ad6a2649d4c7e0f6c82dc0b0bd55",
+ "chksum_sha256": "fc8c762332517a5f607b0ebc8ef6f3ef07ad5cfc77f9023878cf8dac1e647664",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/powersupply_assert.yml",
+ "name": "plugins/modules/ome_server_interface_profiles.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d0c8c6212e46587f25b7366001ec10fbd5a9884b53339505815abdee19164c78",
+ "chksum_sha256": "413f27c5e233980f9f7225a9ee56ac386bcff74507f9afdf40bfe02a7235ed48",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/psmetrics_assert.yml",
+ "name": "plugins/modules/ome_firmware_baseline_compliance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ade4267e18da8e945add979e001b40d832e77f7b3435cc3efd55f29bd4647b6",
+ "chksum_sha256": "db5f4acfe40ae4ab2df5ba9de0c1900a1994c4ee679600efddc310ee1047b3fb",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/secureboot_assert.yml",
+ "name": "plugins/modules/ome_device_network_services.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e6876b8b8892d421dd0ddd59dcc1986d54361e6c376d8239f42d91a0db707fff",
+ "chksum_sha256": "83b91b59c6953bc76ed3c98cfce20963c59278e1e440eaa6674dbe1e55b7dd4d",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/sensorsvoltage_assert.yml",
+ "name": "plugins/modules/idrac_firmware_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e26f5f551fe9d943743159b24c9a01c3c4c804543311330b12ff87ed2f0b7079",
+ "chksum_sha256": "a040502c70727b44937a478d5e33d78ecc90ad8c08873325b5217e80b3e69fab",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/system_assert.yml",
+ "name": "plugins/modules/idrac_lifecycle_controller_logs.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78c885b55767b51ba6c4be7998d67e9f9599f9e7518ec683f9bb1b7bd3f337b2",
+ "chksum_sha256": "b374487c9c0f19acad5da9f3fb8fe24fa98b672e2df72f92b9b5eaa2d43ed865",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml",
+ "name": "plugins/modules/ome_devices.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7cd404817eae0e42ab4a29329d93ecf2226b7084464c1613dcd85464b83ddd4e",
+ "chksum_sha256": "d835188d595160323aa61ae1e2a2d58b49e85db7d26de7d7a2654392cc06fb32",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/asserts/virtualdisk_assert.yml",
+ "name": "plugins/modules/idrac_network_attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e8bd9e08382df2ea34a9b9830b92086569ca4f16e453286cb92c1705f69aa7e",
+ "chksum_sha256": "59cc76b19a3f933d43bcf6fcfeb1fdfd7e4012182a2e25a8bd89724762c48e89",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/inventory",
+ "name": "plugins/modules/ome_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "c3029139c4f6c9a31134f9ef617bb9cf6f9da6c5114c79d32747a42cc1221e93",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/tests/test.yml",
+ "name": "plugins/modules/ome_active_directory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71250fe59fab062631b04fe173573783fcd89b77e8f5ae2c042720d10cd2f266",
+ "chksum_sha256": "86178a7cabd672f2d53f9407faed0a06b8a91ae698870fdb033b8649c258b460",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_syslog.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8a79f5615815e37ffd0e0dd8544a2df2782ba792bb5e283df134c3403b8d4f8c",
"format": 1
},
{
- "name": "roles/idrac_gather_facts/vars/main.yml",
+ "name": "plugins/modules/ome_network_vlan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "330b6e0ccfb1e2805e8fb61b43ecd5170d98d1fdffaadd905de5ffb2c4339d09",
+ "chksum_sha256": "026ecd2bd7f6cf17fa96599fc95b188fa8b867dd47e1de03276ac37f596a1426",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_configuration_compliance_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1b1ed854d7a4570a185369dc8b9d8f1485c8c4aa6a79289da6e654934087279a",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/README.md",
+ "name": "plugins/modules/redfish_firmware_rollback.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67521b0bf1ceb30e3ca348cd5f07d4c54aee0c56d7b3935c542fd920c11df288",
+ "chksum_sha256": "e8ab7816a03db36b4c8b7929daeff49fe37bff9bccaad1436b79d49d5f5bfe58",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_configuration_compliance_baseline.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1f994c2f1c0ffeb9c9eca96a8291b3cb2fa1bafd54a5ee36c6ccc2222ba0ebe0",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/defaults/main.yml",
+ "name": "plugins/modules/ome_application_security_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d569d3374f9ba9960023d769bdc7245afb9f635656bfdb48ac1ad61d0279a53d",
+ "chksum_sha256": "fcc5d6a663ee2f69579f10293417070805a9730c6bc5fa45f298d256fac210c6",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_smart_fabric_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "31f3b247e21bd57be875f645ba0a3c05410e5a382a40c323fb7eda9eae7e82fb",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/handlers/main.yml",
+ "name": "plugins/modules/idrac_lifecycle_controller_jobs.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c31a046bb009c7b68dd25c11a096f2b08aad4e49903648580a05dfbec4da9d2f",
+ "chksum_sha256": "8b57b0f86959fee2a4919d83ef943dd5856fd3b137842fb700478d07159b403f",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/redfish_powerstate.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be425ea056cbebe68fc6cb28f13535147c4c776bc133dbb94e45df525a2c5357",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/meta/argument_specs.yml",
+ "name": "plugins/modules/ome_network_vlan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1daf5cd34934f57365ef41dd1bdb8eadf73c96faee44442d8b607d3770921a6",
+ "chksum_sha256": "0dee87e880e9d493f08a698de3eea053271579d3b288956c334ec908c66490eb",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/meta/main.yml",
+ "name": "plugins/modules/ome_user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7a77ddd7746819905a9dd4511b45439de974b04c6628f8f89c5b89490bc9bcc7",
+ "chksum_sha256": "3d4a7a6a62d34ddc9be106675f3da29c96c2c76e27301f5faa85113be3e9986b",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_lifecycle_controller_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "158d02c1d50ef175e26c7ffa66831575034e077e233ddd992c39d594e004fa38",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/cifs_share",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_application_network_address.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "11e8b445e0071012a7b2132caba06415b25e9b81259cbcbeee9ed4975f8268cf",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/molecule.yml",
+ "name": "plugins/modules/ome_alert_policies_message_id_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "2cf9d3fa3e158437de239eea9cb136b07449ff0e6841e9eb2bce6e0fb83bb263",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/cifs_share/converge.yml",
+ "name": "plugins/modules/ome_powerstate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2deadcf076f47e066b749446ece0948b1fc0be171ab883f67a32c64de9a9a7bd",
+ "chksum_sha256": "efa7f838ef4c600fe74d1060a4fb58d621c71b3a2c1112d0788cc771087dc5de",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_timezone_ntp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "540b6fec19d767e940128d79991a7517716052293091e1a98caca550de5d0d8a",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/default/molecule.yml",
+ "name": "plugins/modules/idrac_reset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3f5fdbd0243581c13e09c4a9347c3197712b89de7ccf5c19bf040002a8e0967",
+ "chksum_sha256": "e4421c071a7cfc7dec17eae265fa717a35c10f6b818a091dcf32f839e9e90e8f",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/default/converge.yml",
+ "name": "plugins/modules/ome_smart_fabric_uplink_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f2a440f3da11b5d518f74f7fe44fb76709607382cb11bd47fd424767eb8e7da",
+ "chksum_sha256": "bd524cb73b25626201e68c3d48b32116eacd935d05de27a5d8ec959862acc7da",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_discovery.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b4aece5f3d5201dc25862e443aa169dcfdf6c70618b48d631fd345e420693ccc",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share/molecule.yml",
+ "name": "plugins/modules/ome_application_network_webserver.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "a6bb503e6708f2a551196d9dd14958da526365857e91543b18444119c4f1ffc8",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share/converge.yml",
+ "name": "plugins/modules/ome_chassis_slots.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "05ee90c1a034c6a7c4f02120c05554526172311af76c5a61767cab3705b114c0",
+ "chksum_sha256": "ea0c912d561821887d1a42279234a35659d8fbb43789ca2dbff6fa3601338c7a",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_virtual_media.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d1078caa3ff3f2fcff08357dd06b9f64d1050efe2cf6145115e33a5fd02065cc",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/molecule.yml",
+ "name": "plugins/modules/ome_server_interface_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "a0bef2c7aadb96532d48b22e533218209b0a6c2e68a61a41799f3e1f1d2a32a9",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_proxy_parameters/converge.yml",
+ "name": "plugins/modules/ome_application_alerts_smtp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4a9673edc4a062a85a757483b1587a1ebea5ec8545b6ec20cdf861afab9b38e5",
+ "chksum_sha256": "c9e288755e7b359671d4651125a77a649c829540bc7faa58dd2424c1932c8fa0",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_firmware_baseline_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6146b516cd0a758fa8113ee196fca5233ea279b7934eed092ae4b1767801825d",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/molecule.yml",
+ "name": "plugins/modules/dellemc_idrac_lc_attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "79c874570d8c94e5373a26c30e4e1b64576a54a14f62f4499b674c7b2f99bb8d",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/http_share_with_showerror_certificate_warning/converge.yml",
+ "name": "plugins/modules/ome_smart_fabric.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c6440bd7d1c2e2fe63da11cf6345307f8d08d2ff2287a3007e3cb99eea47d7c4",
+ "chksum_sha256": "978a641f8c545ff7c675985cc01fb2ce755eba9d1de6a33b26ae44db93a105c8",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_device_group.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eb9e953ebb5e83d77c066c64167e6ca55e5c3801ebc2cf8446c9ec7ba3375c91",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share/molecule.yml",
+ "name": "plugins/modules/ome_device_quick_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "bdad7f9ca49947777ea54855b4a878190711a40e954462bab66c53c468d1d8ac",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share/converge.yml",
+ "name": "plugins/modules/ome_application_network_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70c1949f3736465e786717cb6a2311c4dfcc92861212161f8957ca6c932e5d6c",
+ "chksum_sha256": "2f476f2600e706c5ccea4ffb37ef1c6cd5eeed491f2983422c6a4abd5d0aab75",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_application_network_time.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "672aeec4d5f1013efd76e8499104cd0cccfcc0287537550f794054a483b76007",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/molecule.yml",
+ "name": "plugins/modules/ome_application_network_proxy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "14671aeae28183b563fc43e7ae3740172f69b7a55d304b393c47601b2f68a18c",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/https_share_with_proxy_parameters/converge.yml",
+ "name": "plugins/modules/ome_device_mgmt_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f555e872a266977d5f0acb356ed20321dcf984564e9471d2fdc29b841d5a120",
+ "chksum_sha256": "14aa9b8f1645c7231b723b4836b13c24899d59fcf826198672aff0a93f638f95",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_network_port_breakout.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6c73b4ce57b32f2afe6537b5eee8456263a2c0a226b3d6418d1c7df26fef8e0a",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/molecule.yml",
+ "name": "plugins/modules/ome_domain_user_groups.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "72aed38354e90870ae0fcc2e6695fa3a36b8caf0bd2195ca3bce19be9d8b46aa",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/prepare.yml",
+ "name": "plugins/modules/redfish_firmware.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "6349cdbda830d1cec8dc00c488a85d8b44b35333bb6ee23f411ac755c8761956",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_json/converge.yml",
+ "name": "plugins/modules/ome_device_power_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e73dc08b0b670f9047ad969ec153670bae08fb4baf7db258216251c4160c083",
+ "chksum_sha256": "aa48c7a06f1d8a0e3d73b7d7b708dc3e8a91a95ff0cebb5ceab7faf7c81cb847",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_identity_pool.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6e160cdaf6357d914322f860688cbfc78bacade3e6abbd53725b4acaa716d8b4",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/molecule.yml",
+ "name": "plugins/modules/idrac_boot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d25592dedc580847e8eaea5ae2d9ce73909234ef212dc97881077a587642147",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/prepare.yml",
+ "name": "plugins/modules/dellemc_system_lockdown_mode.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "49b649577157352b249d241cab5c9f05d2f14e72c6b886ef809b1ec006a6eb0b",
+ "chksum_sha256": "fafecf1bd67f65c03480e16c097a0dab930c2bfaff25a4efda57f46a90f27b5c",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_buffer_xml/converge.yml",
+ "name": "plugins/modules/ome_smart_fabric_uplink.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "57802a2bfa00428e20c2baf45add16b9cb643b46bb5d0c325190699432473e86",
+ "chksum_sha256": "3a955bec818da06c3a19e8a9eb2c0da5618f1740535a831a28db41b1bc839410",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_bios.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "314547ca6619f5e0e397f85dedf7f43e4c7b33bd9db81b8f361c5f05f6844968",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/molecule.yml",
+ "name": "plugins/modules/ome_session.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "2534bef038d21213010240f0493f380550e4bfd307066f0a4eb567e51d0cd998",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/import_multiple_target/converge.yml",
+ "name": "plugins/modules/ome_device_local_access_configuration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3fb9baf2fda4da08a609e07a9c56bca74569c01a0081e3630e9635295b31f0a",
+ "chksum_sha256": "8bd1dcade5df034272f51fe091061d0032aa559833ba2b58503b1290c625f9c1",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/nfs_share",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/idrac_certificates.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d30ac54367b5df7f43157a407de8b3ebc2a729b2643b1504585f6675780fbe09",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/molecule.yml",
+ "name": "plugins/modules/ome_alert_policies_category_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d55af04f5706218c384613a4393a6e01ab1ccea2438a1b1cc6ea2b403272225",
+ "chksum_sha256": "eebfc28d1cc523796a2357e3d5b03c21fe4a9f30ab40f504241ae7c640c67b0d",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/nfs_share/converge.yml",
+ "name": "plugins/modules/ome_firmware_catalog.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c825c7c600c1dccac10930371fb8da29b38df7dfee460ab885266bf6e3006bd1",
+ "chksum_sha256": "1c439cf8efb07f813cbc64b1de0113ddbcfce19293d68133a537e68d5f7df856",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_application_alerts_syslog.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2c21624c78bc8dd29d77504c1f96964d9748c971815ea7ab1a04c93fe2035f36",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_profile_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4e281235b6a83ff06114a92ce302dabb6e455fb84d2fbf6c262d6d6363f5c355",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/nic_helper.yml",
+ "name": "plugins/modules/ome_diagnostics.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f247488b64a9ccaffe3f0240f0cf3a6e527b3ac952a786bcc715c436b397a00d",
+ "chksum_sha256": "7763f1443e013bc330d4cf66280fae08d1c5beee21b664f0ce03a9f2195a2ace",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/raid_helper.yml",
+ "name": "plugins/modules/ome_groups.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6706025009360630843898df27085681a613507370fb0bb91a2c40fd03a2e8c7",
+ "chksum_sha256": "3faaf1de9dd04f8c0757e60c81c608ad5c7f7fd7f615c1d5b92302973a1ad503",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/cleanup.yml",
+ "name": "plugins/modules/dellemc_configure_idrac_services.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3985d5b2ca2f11661982e372f5c9e677144651dd2f7f167efd7e0e4b3d2c9231",
+ "chksum_sha256": "244953d637c27180cf67aec39005498a0abe58688d3a2b05b1655a6df81a8db9",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/export.yml",
+ "name": "plugins/modules/idrac_redfish_storage_controller.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a4f3a78e9d259c668ae748e224e36007b91372281024e1f514ad6aaaae72606",
+ "chksum_sha256": "fd937628d1466bb8588f017b9f6a90e8bf1d7a3376942c0e15e3b84f72c67432",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/molecule/resources/tests/prepare.yml",
+ "name": "plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b48f7002029c826a3402b360aaabe1a9301d34e249a49718aef9454d6dc9a557",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ome_job_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "307aa1d9a37868e417542a78e29a2128b98199bc3f041929e7e8edcdd5a7d8a6",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/tasks/main.yml",
+ "name": "plugins/modules/ome_template_network_vlan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a329bb5b5a0c7ce2855fddb6eb3b16ab43524ad86e7f12defe55577933fb636",
+ "chksum_sha256": "de15c6f3694b636ec1413057f165328ed3f0f4712f8063e98f1029872b0f9ff7",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/tests",
+ "name": "docs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/tests/inventory",
+ "name": "docs/COMMITTER_GUIDE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "9faa99978e41d71c8f84fb1e764ab495075c57e81c8a26edcf0bd41bdedda326",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/tests/test.yml",
+ "name": "docs/BRANCHING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4acfc541204ebf3d6c37f48c79e48f8f273d016aa7a2f6af7fef715905245eba",
+ "chksum_sha256": "347e9f1d9850cbb807f3ac5680a4db0145a90f6714ee23c2d9ea3618806c8609",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/CODE_OF_CONDUCT.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b115034e51a649fda55572de32880b12224311bc35517d0534de26223683230a",
"format": 1
},
{
- "name": "roles/idrac_import_server_config_profile/vars/main.yml",
+ "name": "docs/SECURITY.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a072ea3135c9e0109ae66ae42d86a3947cd27c8f7fde4ea1d62bf14a75ff7b6a",
+ "chksum_sha256": "b4bcf55efb2913e91247bcbc9e2944cd6bdca0c80a2f6314dbd62802b167f3c0",
"format": 1
},
{
- "name": "roles/idrac_job_queue",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/ATTRIBUTION.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0e55990cf8c6c469b4c35f0cf9ba6dd92d89f39d36e8d22f45a939a6edd06c20",
"format": 1
},
{
- "name": "roles/idrac_job_queue/README.md",
+ "name": "docs/ADDITIONAL_INFORMATION.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3e961b7f75abd408cc8ab2d76fab6a3175b32ddca1f3e87c64b009ff8e53daf",
+ "chksum_sha256": "83c4a24d8d137a954c891ffcc1c75a8157d7957204a59797c5fd4a9dfd3326da",
"format": 1
},
{
- "name": "roles/idrac_job_queue/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/SUPPORT.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cd2fd536176f16d1722ee4fb035b8c8a7b92d77395a8fd6b74f56a9dc5cb3d86",
"format": 1
},
{
- "name": "roles/idrac_job_queue/defaults/main.yml",
+ "name": "docs/DOCUMENTATION.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf4f24cabcc7843106afbcf1ad295d130bc31c4b9431d8e149ef9611ec66f0e4",
+ "chksum_sha256": "d79a78be68eeb1be24c29a4d65bb253a1c90f43eaefc661d402e3bab0b5b37f3",
"format": 1
},
{
- "name": "roles/idrac_job_queue/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/MAINTAINERS.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "480717a42cbfbb59ee6afb30d1df16f4e6f586b1375a262ba2c87032811fb929",
"format": 1
},
{
- "name": "roles/idrac_job_queue/handlers/main.yml",
+ "name": "docs/CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ae7c77fc2f339f8db094a30043ad551606c6bca9e5bdfbd9e34a79563f7334c",
+ "chksum_sha256": "ffe89c3652d21757f689570ac1a14eaa4dc6e35bc99793f48e6eef21d2b84d54",
"format": 1
},
{
- "name": "roles/idrac_job_queue/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/EXECUTION_ENVIRONMENT.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c853f1c80c075bc775ec977562ae868177567256bef9d69543f6519cfd7b852a",
"format": 1
},
{
- "name": "roles/idrac_job_queue/meta/argument_specs.yml",
+ "name": "docs/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e63d726e475ad59773fa26fe3918bd6cb5b042b508834e49c8c344fbf4945ccd",
+ "chksum_sha256": "7e931cceff02aadb24f0f4afa56b4e1283505d82cbd41d1f535aabd3a58bf8c9",
"format": 1
},
{
- "name": "roles/idrac_job_queue/meta/main.yml",
+ "name": "docs/ISSUE_TRIAGE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "081550bbea995e8776778cb69f4f3003f5dc9ef2148439bb0ab441f28fd67949",
+ "chksum_sha256": "25dd3183365794c152fe138272727efcded3456422c5dc16c18f2b31a8f2ab16",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/DEBUG.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9b2ce4f68fc99226b34a0ebced6fd1919f514f0a33c877b377566b146972d4b6",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/clear_job_queue",
+ "name": "docs/modules",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/clear_job_queue/molecule.yml",
+ "name": "docs/modules/ome_alert_policies_message_id_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "f8d66036646c2cfffd36018236fc6ec18f879d90bd3622d142bb867b952b35e8",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/clear_job_queue/converge.yml",
+ "name": "docs/modules/ome_configuration_compliance_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f040659df832a82b82f060fe657dcefd0e06f397600604b86b65ba6964e9e338",
+ "chksum_sha256": "848d7c9d9e61c5c8c97c35a9733e03334148653d0aca06a0c22fe6e794a2fc10",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/redfish_powerstate.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9e3f38feb33e73aa3be84121fbd8746e69d5d0437efd179b6720a12318841428",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/default/molecule.yml",
+ "name": "docs/modules/ome_application_network_proxy.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "c23edd851d595fe884ac6bd6f9a470d2e9729b9b61f5fbe8f6f6615bda6bf394",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/default/converge.yml",
+ "name": "docs/modules/idrac_network.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0424782236b3a6a800a4a192f73f528f65b87c135f3a53203547d640e6cde330",
+ "chksum_sha256": "37cc7f880f97091f5e0c7e89d640122bd626a46f7581800355c4af98b12d9cb4",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/delete_job",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_template.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d940fbcd82901b91da31f2af54dc997ed744d66fb339798d5df36eddc43f90f3",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/delete_job/molecule.yml",
+ "name": "docs/modules/ome_discovery.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6128fe631d272bed3adb4c98223a2b04d554093dc05a2c499eeb9e6d80c9ac7",
+ "chksum_sha256": "456b287079a08bb118e95154e247110f5b85c0716428b5762014a23af904df6e",
"format": 1
},
{
- "name": "roles/idrac_job_queue/molecule/delete_job/converge.yml",
+ "name": "docs/modules/ome_network_port_breakout.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f17ecfe1a83f4d34185ba19886855028b67ea7665f4a24b712ad741ee6d1e0fc",
+ "chksum_sha256": "fbb030907564880d8e31d25eacfa0ed90eda9265f3acde489863a77e51fbb497",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_configuration_compliance_baseline.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "071d48887ac78876fe56f6df99d6bf9d8e00793943600ee8bacb4bda35020f1b",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks/clear_jobs_with_api.yml",
+ "name": "docs/modules/idrac_session.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32e48bc5f0ceedcf0077e0ab7f6931e5dc1f4c34324aef0957ef440b44f69369",
+ "chksum_sha256": "daf45ea7d1c992590d3d54de993d95a5549bec0f5f09baae86f05e8af73f23f3",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks/clear_jobs_with_wsman.yml",
+ "name": "docs/modules/idrac_bios.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc5e2cbcef3dffe45fbae0ccdb4310621cde5b940b81f9051ec1039126e894a0",
+ "chksum_sha256": "876e2511b34de5e945314102ae16688b32161821e05e0dc2eb8b9a7611337bc3",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks/delete_job_with_id.yml",
+ "name": "docs/modules/ome_profile_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5aec3e437d0a24ee7b8a41737abd2db6889a1c4a0c0543943212573d06a2fc83",
+ "chksum_sha256": "0c329fec5f6e78bf10236c018391c92792e6bee7dbbe8245a37c6c4f62c31c6c",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks/get_idrac_firmware_version.yml",
+ "name": "docs/modules/ome_network_vlan.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a193b7f5af28f17972f0da7e52e5fc0ecd34f4b8ae6398bd1b04769a7cffa453",
+ "chksum_sha256": "a1988d55e769b2dc54585dccfae8329bdf0258b0b11e88004ff038784f844b76",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks/main.yml",
+ "name": "docs/modules/ome_chassis_slots.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "edba2b2d2854747abff9b2b993b2ac119117eaa60533c517b5d7586af869d784",
+ "chksum_sha256": "adc0e0e23bcd34fdab97a05ef1a57422e7ea982593101791a1f49207000d9b09",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tasks/validate_input_data.yml",
+ "name": "docs/modules/idrac_attributes.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c862aa3048f9061f02fff2a55ec1324b7b1c81b6175617d3e739eeda382d7ee",
+ "chksum_sha256": "0f5add96d08322ab923f657ff2b569bef8974fd286e2d0cd562c177b493c1af8",
"format": 1
},
{
- "name": "roles/idrac_job_queue/templates",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/dellemc_configure_idrac_services.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c5da2d61f8d536f4835cad6c0b11619759d47177e8121c5f52a605ae6698239d",
"format": 1
},
{
- "name": "roles/idrac_job_queue/templates/idrac_delete_job_queue.j2",
+ "name": "docs/modules/dellemc_idrac_storage_volume.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f83008a98424af87bd8817bb29a82a175e175a8459146267c86e7cbd1895c31b",
+ "chksum_sha256": "8c37ba05337b65851c5996643e77bf4c4e29165693f840e39c8b4e837c70ca1b",
"format": 1
},
{
- "name": "roles/idrac_job_queue/templates/idrac_delete_job_queue_force.j2",
+ "name": "docs/modules/ome_application_network_time.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b777b52ecc380bc79f8a903a8bd2fb8613e98ea665d41e01efa2532fc9f9ef5b",
+ "chksum_sha256": "9df4886749f7d4afe394f0fc587421dd9c54ba98eb1ddd3a262dd04170c7b188",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_user_info.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1bbd394ae4cb4461fdc7104b64eeecd4014457b646439b0616d77de7b64a07d4",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tests/inventory",
+ "name": "docs/modules/ome_server_interface_profiles.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "28933d624342c6800cfa7664b74c0f967d0efe97371b52cef884ed426462d2e0",
"format": 1
},
{
- "name": "roles/idrac_job_queue/tests/test.yml",
+ "name": "docs/modules/redfish_firmware_rollback.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8898537e0e2c4d23b06938b4e5181cc005b3eedad5c6f3c848c78a8a93f7ad21",
+ "chksum_sha256": "a4c57e3744ea281d1162044f12956e2c73a86c014a555dfadfc55ff88f1cb754",
"format": 1
},
{
- "name": "roles/idrac_job_queue/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_firmware_baseline.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6721b19fe4192107c6cefcaabb6592fae9045980e6560ce432d215b87edd131d",
"format": 1
},
{
- "name": "roles/idrac_job_queue/vars/main.yml",
+ "name": "docs/modules/ome_firmware_baseline_compliance_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12bd53e6ac521d02cf9d16a83ef864467aa1ca7398b0b20e15836de87c74f9cb",
+ "chksum_sha256": "20c12253eb1fee5aab877300d5e93d70548061bb0fcec984d8c67a4061248879",
"format": 1
},
{
- "name": "roles/idrac_os_deployment",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_smart_fabric.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "047eed044d7abd2a4f0afa9f194608325799603c0e79db850199316dacf52c93",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/README.md",
+ "name": "docs/modules/ome_diagnostics.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e800016c9724604f5c12210c94b830bbc27bd4996205fcd74a31768f754b0a1f",
+ "chksum_sha256": "4ee631e78019ef817608cdf0bdef6d1ad186663fc5dcb665838e42e6fae044dc",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_firmware.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "030134a6b0cd3023cbfba3e91dc5db447e5be1009ba2e4c540b1cb8e7f247181",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/defaults/main",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_network_vlan_info.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5a9a3d0470553733c5b9871a7d8242e0273cf63f41653f6cda7b46dee9b42580",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/defaults/main/esxi.yml",
+ "name": "docs/modules/ome_device_local_access_configuration.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5393035ed221c2ced358b9d585fd1881817a8b31b0dd030ba32e81ecd8a7f4df",
+ "chksum_sha256": "5e0a535a51887b24547dc3f0a143a8fdc526fe8aa6439d43ea5705b6e13fc45a",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/defaults/main/main.yml",
+ "name": "docs/modules/ome_alert_policies_category_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "759f139407f609b8655150fe9df73e69025bbaadc01b6c69e07729867539d583",
+ "chksum_sha256": "9bd6d149cd343cf5350acb9ca1fd72866442cfd1fbafea6e12abf8d3e579be8e",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/defaults/main/rhel.yml",
+ "name": "docs/modules/idrac_lifecycle_controller_jobs.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e2a5308265f0edf9cdb641063bbf2c2e877a9693154b04cf3a4b9a784f3934c0",
+ "chksum_sha256": "df32077f463558a26fb05faf7c5d614763dc81ceeaf36e9c1425446a686840bf",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_user_info.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c5c8daa9a295d1d7eed88142829b1f5bd374c93bc917b88d07ca894dcd5b9ee6",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/handlers/main.yml",
+ "name": "docs/modules/ome_device_network_services.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82cb53c7b2545e1a7883e32efdebbc34bd69bbad9ccbeabd190c3028f7f05777",
+ "chksum_sha256": "ebe843a8d5c20d24b72e63e1ef8095a185d4cf11f6af7c09d35669a68d2a2a58",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_smart_fabric_uplink.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bae22448e52f0d31385c31b0764ed3fd0fee20dd6aee2def06651c666685e21d",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/meta/argument_specs.yml",
+ "name": "docs/modules/ome_template_network_vlan_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e69ff8ddd86e522acf91f9ca4efd9099947ecb857dd1138e8c7aef4793d816a",
+ "chksum_sha256": "5005543155f59599f813a2a1bd67a97ab14a749024d979e0a2bc9da0ab1666e9",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/meta/main.yml",
+ "name": "docs/modules/ome_active_directory.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db297ffe635bcee661eaee0844312d69cc364d67cc427bab545865674401c9b9",
+ "chksum_sha256": "4ce5a6dacde5757e14c22347eec7d7ae32768318424b42f5f73971053482b55d",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_server_config_profile.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eeb3208bc71ad29f0dfd425a0c30c791310b7072094ae290ccb68335164e5818",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/clean_up",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_device_power_settings.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6e03448b0741bb436788f1b56d7f6796c22b2754081689f4a09e82f415796977",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/clean_up/clean_up_destinations.yml",
+ "name": "docs/modules/idrac_lifecycle_controller_logs.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d9a4258e490a7d6f72372a85f128243594c709f26020b61761ff2064d6837d46",
+ "chksum_sha256": "9d4d6cb107ccb5031d4d2fcbcd08f80456b0587745b4990e752cbccb638ed689",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/clean_up/clean_up_working_directory.yml",
+ "name": "docs/modules/ome_device_group.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6cb3708d332825d39b859aecd1df938ef1b3bc63b92213c1195432e7ffa9d0aa",
+ "chksum_sha256": "347f004bbb1f406f6458be14294f11cb78424e68b8f4ec8cd82657cc78669480",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/common",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/redfish_storage_volume.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0ce0a96661c3276e70d93afd3e0bc61646db6d17541a044eecd969d0954b6321",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/common/copy_iso_to_destination.yml",
+ "name": "docs/modules/ome_application_network_webserver.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "163156845f185684a862eb1ba7f0023119f51798898d9bb79209a8f15ea80b20",
+ "chksum_sha256": "d7db8c65b7938549a1c0a6b81c556245c7bcf106b133148ea9e744042735aed7",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/common/create_working_directory_path.yml",
+ "name": "docs/modules/ome_groups.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5d3c043120b0788f2a4a4e5f8dcd26adc600065c47c423f92eeca3907c21d4a",
+ "chksum_sha256": "49f25cd54bb29daf2e4bf821e70eae9a36a9bd0bad650d84183a635c36173e19",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/common/download_or_copy_source_files.yml",
+ "name": "docs/modules/idrac_lifecycle_controller_job_status_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec7e75367105039f8833bb8bcacf522f137bd7456162c99987aa0a8ddb2761de",
+ "chksum_sha256": "c32793f384d344b0713ede1ebf841cbbc928c837244123e09e6388874109c45a",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/common/validate_inputs.yml",
+ "name": "docs/modules/ome_firmware_baseline_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4e70fbc15dd06a835d5cbf840d4bd93f8c1f1d0d4214f6555e68cbbd4c2256e1",
+ "chksum_sha256": "c086a7474a9e0f49073027326df33a8e9993739489dfc4bc3643043e94bdecf0",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/esxi",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_firmware.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "072801f3145a2152fe1d16705142146b17a96cb8691ad13a34ed5998fbeb1cd7",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/esxi/compile_iso.yml",
+ "name": "docs/modules/idrac_boot.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea92a3796b8d00ba6e0e65146d6089201a6bd4f6fdfb01467279414ac69fd024",
+ "chksum_sha256": "90795661dc6d6ea275a2d7230d9f5dfbb8ed33d851b7121daf912969eaa69e65",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/idrac",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_domain_user_groups.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "76c7947459bbb9638ed412c611ab594ee192862d15baf99490c56ee0f46584e8",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/idrac/attach_iso_to_virtual_media.yml",
+ "name": "docs/modules/ome_powerstate.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d7ed30e4c808fc88df7f59a669b8bb1b561bf002c332b3a8296c73e19d6c660",
+ "chksum_sha256": "0e3ca639cc60095bd21f564dadc75aeec6f465b92493ba80d59043993ac4752b",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/idrac/clean_up_virtual_media_slot.yml",
+ "name": "docs/modules/idrac_os_deployment.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f03831c4aa6b588e969bccdce38b75d49969eae2f5543b8ba9e94510cb18eb29",
+ "chksum_sha256": "689e1bb13ae21d66693214fd75c59b8c67fc22532ddd2175961d2e48f165c1c5",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/idrac/set_boot_mode_and_restart.yml",
+ "name": "docs/modules/ome_job_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f0502bbe2d8e5411fda7e5e87820654f14f2641a500eeab7c8a5a5636892cce2",
+ "chksum_sha256": "8f73985a066380ccf1cbe407f336ca1a3e0663c213ce6a9766d6f98e1350330f",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/iso",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_lifecycle_controller_status_info.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "05e28f1bdbc85a6955d8d4a9167e4ea77c7b8304dceb5dfc7bc0b6407b2451e3",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/iso/extract_iso.yml",
+ "name": "docs/modules/idrac_firmware_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "09edb456108a3a29a94573ac4568446c5b401930b1f7bf8b051502fd2d064a7b",
+ "chksum_sha256": "aa2c5aa0262ea0a4cffb710871fc51c50af268d729018eab32126a1a5e434cef",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/iso/generate_kickstart_file.yml",
+ "name": "docs/modules/ome_application_alerts_syslog.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aa2a8cd54823f99a1f61b0453048b228e440bdf1c4e274b0f0ae7635990d083e",
+ "chksum_sha256": "403735d99edf39433b21366ba7340e2ac14ccb6cf5dc995773f949386ef88f44",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/main.yml",
+ "name": "docs/modules/ome_device_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7414db3ca8e33e14b34126b64b5eddefc24d27ac2e3d3d0b9a33284acba29c16",
+ "chksum_sha256": "418d5ed4d1f124744c5d2fb90d3bb857b53ce3d466f3f3f60177213321616712",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/rhel",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_device_location.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f2423423b018612ea934fb545ebcb28cfef6fe96d0734cb4f9230839902cb9b1",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/rhel/compile_iso.yml",
+ "name": "docs/modules/ome_devices.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d4b130336f4349cd1c3bd96c1f52f1fcd81615bcead7e9fa39d46883ebf41f6",
+ "chksum_sha256": "fcc2e9b7350ec5db29e644f44fe46637d15244826551b89a23183b8265ad5559",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/tracking",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_template_identity_pool.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b12712f00923d44af672c624132d113dac1e9b78c0eaa7c1140a1cf1bd031fa7",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tasks/tracking/track_for_os_deployment.yml",
+ "name": "docs/modules/ome_application_security_settings.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df751495e4839501919602cebe8c24677e11d191394353b13e5b6448af44459e",
+ "chksum_sha256": "4a3b9f5c516af482d286247e659da9dca38c7568ac497b45fc470312565ffc2e",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/templates",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_application_network_address.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9e53ca365382a3ad3d31bc47588bc10c0f0b53e8b87903e5d7105c926cf0c5ea",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/templates/ESXI_8.j2",
+ "name": "docs/modules/ome_alert_policies.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "998e80a725d3e21a3b68e6368decadd5c66196afdc74dc9947ff2282dcec144f",
+ "chksum_sha256": "e4be3fa0c0a9ebbf9582fddb91a35e631c15c978e557e3f4d64ddd2380a94fd7",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/templates/RHEL_8.j2",
+ "name": "docs/modules/ome_application_console_preferences.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1b54a6b4b5d2ccfeca7376a758389d0de0d5cb2c489189f2690616909b0e6ab",
+ "chksum_sha256": "0946954fea93136cd7d3107e45fafdaa42435b45ed325017d6df712dc11f5f79",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/templates/RHEL_9.j2",
+ "name": "docs/modules/idrac_user.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54d4812cce2199771afaaeedf6fc8d813152f824097195dfb76284bbdebfadb4",
+ "chksum_sha256": "ff53c38d06a284c0cbea9cc4d0169eff46d744868cad3191165dc6692f04a3b5",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_device_mgmt_network.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5b1ea044e5dc5e43f519d5e63061c80e348eb71c2bfd71e8cfa99bead301f5b5",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tests/inventory",
+ "name": "docs/modules/idrac_diagnostics.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0647d824fd28557a0b72bcdc079671c07571c1ee008efd92728e1291b7ca3eac",
+ "chksum_sha256": "bc6e3f9926798690930f371007c66576061d0d6acf8186f494f58812ca10aa51",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/tests/test.yml",
+ "name": "docs/modules/ome_smart_fabric_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9d3fffd8a4834828c2dab606405d1fe11f12c5e4b14142c99221f689597c857",
+ "chksum_sha256": "f090dc150e8e80d3819dc35cee1b5b37748d3527fb8a001cbfbc8bf83f25bda9",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_firmware_catalog.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "061d78e77b60d446fbcf94b76105df0b8af72ed2cef85522d207fbd3afe39091",
"format": 1
},
{
- "name": "roles/idrac_os_deployment/vars/main.yml",
+ "name": "docs/modules/redfish_firmware.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a6d9f6ecca54f00e015debd5807eecd29aee9c3118d6a710768074c6eb625b9b",
+ "chksum_sha256": "47f46d78fb9b6bc6c5255d679440fce23af9c8717cc1d6192e44882a94ccc3db",
"format": 1
},
{
- "name": "roles/idrac_reset",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_application_network_settings.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "890dacb57ef47d75e4b10aea918d26e69474a5604fbd159ce2c18b89eebe7b76",
"format": 1
},
{
- "name": "roles/idrac_reset/README.md",
+ "name": "docs/modules/ome_template_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00e77ec0ddaa418973cd9cad99137bcef33738171102464f50d3f49f5cf2b3f4",
+ "chksum_sha256": "a3028bb6f9c65f7c3e5314ef121653291a00b7f8d29c847309952facd67c8b68",
"format": 1
},
{
- "name": "roles/idrac_reset/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_virtual_media.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db0471c8a0ea1b732918b78a036b531d59db90eaa7cfbc0fc66664f0f20a0f97",
"format": 1
},
{
- "name": "roles/idrac_reset/defaults/main.yml",
+ "name": "docs/modules/idrac_syslog.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9cd98c8b578501e6b24cec9c004bc5963e0f479ca4435aabc58ddfa7e63033a0",
+ "chksum_sha256": "687900acee641629277d0638e553dc60dc429aaf13ad2d0ea72b0e6117800fc1",
"format": 1
},
{
- "name": "roles/idrac_reset/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/dellemc_configure_idrac_eventing.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "15a235619160acc869ef11202c81b3d702762fe22e7796a8f7dd2f09f8fe2036",
"format": 1
},
{
- "name": "roles/idrac_reset/handlers/main.yml",
+ "name": "docs/modules/ome_profile.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ec20499ab70254c4498ed1783b01bff5fdde39aca5f81f9c7e3f979ba8b384a",
+ "chksum_sha256": "60493fff0865e3f6c4dad14362a9d309f3382f876b9abccad987b0f12e2b17f4",
"format": 1
},
{
- "name": "roles/idrac_reset/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_identity_pool.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "65a6256b4338dfa694afdf2e4ed3fe517cac466e22b936b60e98f868e1b271de",
"format": 1
},
{
- "name": "roles/idrac_reset/meta/argument_specs.yml",
+ "name": "docs/modules/idrac_license.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6af174177901d928f439e83c69caad0fccc601a710ac5b434af7bf5240f70418",
+ "chksum_sha256": "f730b26296ef7a95f41c36a32ef71764ed48a7cef6dbb9dad1e5fce3371afe9b",
"format": 1
},
{
- "name": "roles/idrac_reset/meta/main.yml",
+ "name": "docs/modules/idrac_storage_volume.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0195a66c4e4ac7e78a39c4a8fa223ee0b2d049a09c398eab91080868422c8049",
+ "chksum_sha256": "480a539da1885ef09b84152bd29a3606257306d96660ecbc4e04fafd73505994",
"format": 1
},
{
- "name": "roles/idrac_reset/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_smart_fabric_uplink_info.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dd352d0bbad655ee40d19f3c5d5d78fd4a5adedca123d8e58cd6d4a429c6bcd9",
"format": 1
},
{
- "name": "roles/idrac_reset/tasks/lcstatus_check.yml",
+ "name": "docs/modules/idrac_system_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "377ab984113ad5d61f3164c7578677c42d727d35eb35c79f61275b794bdb970e",
+ "chksum_sha256": "b330714479ce873966c6c3a6285f859f653aa2c05d7e97f26a192b19b4861c65",
"format": 1
},
{
- "name": "roles/idrac_reset/tasks/main.yml",
+ "name": "docs/modules/ome_session.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "17feb74dd2b4bbec2b65ad4044f5af9e8f9660c66a9594cce342a79e178c1225",
+ "chksum_sha256": "95a2659b9930891280bf478d7d257f5b1e96c612b5734e5a56f9c123a7d37034",
"format": 1
},
{
- "name": "roles/idrac_reset/templates",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/dellemc_system_lockdown_mode.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2868bb81a87c50614180c0615c9b3203ba6fdbca3401b447e6feee7f55e78ca4",
"format": 1
},
{
- "name": "roles/idrac_reset/templates/idrac_lifecycle_controller_status.j2",
+ "name": "docs/modules/ome_alert_policies_actions_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cacc1f259dc7dbef66e2f715f138d3a75ffd5852ad56a2fa4e0f17358f30b4ab",
+ "chksum_sha256": "edff59a28ced2f64b9e9e450dc04350c2fe1e3fa787d545b949629ed327a18dd",
"format": 1
},
{
- "name": "roles/idrac_reset/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_reset.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f513a60d623f8deb39294534d1d8f47e4c5d68a0864ef618832f7533e3de14c2",
"format": 1
},
{
- "name": "roles/idrac_reset/tests/inventory",
+ "name": "docs/modules/ome_alert_policies_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "1e9df83b5853255bf511e51a123d385d2b9338fa7eb2593691711f84c1b3120c",
"format": 1
},
{
- "name": "roles/idrac_reset/tests/test.yml",
+ "name": "docs/modules/ome_template_network_vlan.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "43fa08ab4acc6414c37f0fd53406310d4aa81f970d122feeb5604b40251317d5",
+ "chksum_sha256": "a5246aecab609ef52c03d9aa586c891780eadce33bd691a7694127430d474bc0",
"format": 1
},
{
- "name": "roles/idrac_reset/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_redfish_storage_controller.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "21b934146055a3488b07061723c4a700136f42c745e33832f81ea58c99f1463a",
"format": 1
},
{
- "name": "roles/idrac_reset/vars/main.yml",
+ "name": "docs/modules/redfish_event_subscription.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9e4a4575c14328832a6976cc4aed6b19c07dbfe5997a10ae733b99e50702fa0e",
+ "chksum_sha256": "52d39d898363e14508228f97aab3489ddca05e938e722c7d8938876074d6ddeb",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_user.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "54acdf02a55c4ae9fb98e9474939c7fd296c45a0fecee45ccbd87afdbd64052f",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/README.md",
+ "name": "docs/modules/ome_server_interface_profile_info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "241acc485326568db77e4c7c6aa738605779ea7949337b58425a58cd2d26bfc5",
+ "chksum_sha256": "20f24755213bbc6ac0b215d258b71c5a5d43a003b791aaf6908fdf7cfa326e2c",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/dellemc_idrac_lc_attributes.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0be40c9b7e8fe75321a78edeac8f5d8e38c425ea93b85a2da535de9e6615a851",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/defaults/main.yml",
+ "name": "docs/modules/ome_application_alerts_smtp.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "016ff520beef55bf26d90d19c4ff8187fcf861cc6697999b71d4c756e8cd1af4",
+ "chksum_sha256": "9a0eb93827023a6fe75d0f8b6aa89511ff7b132d7f236973c2e367bc9845061f",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/idrac_network_attributes.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1d139c1ac7ffc1da524ac4087f0972a6545327e254611fef00fb2e5853eba73",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/handlers/main.yml",
+ "name": "docs/modules/idrac_certificates.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8fb9435cbde9641256ffb3cad4e82e8169969ecf0faebed24c6e1eca315a17c6",
+ "chksum_sha256": "059c080471428b321ff474a8e804f69794cfdff2c2608f858df4ea97e6c10a86",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "docs/modules/ome_device_quick_deploy.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a1bf2906d12158918026bcf4feebd4aed8207491e68f38febec3a6ba5d7926eb",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/meta/argument_specs.yml",
+ "name": "docs/modules/idrac_timezone_ntp.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "689583bd8c529dfe982c3dcdf84e968eeee49eea7da2eac65ad34f74afc2dd3c",
+ "chksum_sha256": "b0e50c42077beb1d71809c3a59bd51b77e68e0545dd195a2bd112e0c39cc12f5",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/meta/main.yml",
+ "name": "docs/modules/ome_application_certificate.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1119ecdb9b37bfa6f3860f8c767174cca920a521a95946f04a93f5c2ca4cd0e",
+ "chksum_sha256": "4dc78e6882651932dff408bc22298f8255b772bfc3213d13188ac3d7710230e1",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule",
+ "name": "playbooks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/default",
+ "name": "playbooks/roles",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/default/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8790919530e9b792ac0a0bb5aa7f39756369e469fdcdead5ed849377be400f55",
+ "name": "playbooks/roles/idrac_server_powerstate",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/default/molecule.yml",
+ "name": "playbooks/roles/idrac_server_powerstate/idrac_server_powerstate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3f5fdbd0243581c13e09c4a9347c3197712b89de7ccf5c19bf040002a8e0967",
+ "chksum_sha256": "2b27bab99e8b95ed92bddd14d59e53341a3cfa8b3828cfce59af8bed8328ac52",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/forceoff",
+ "name": "playbooks/roles/idrac_user",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/forceoff/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8d81e28400e1ecd4d4f63e3880ad12efcad626f1b482aea24e3781db13463872",
- "format": 1
- },
- {
- "name": "roles/idrac_server_powerstate/molecule/forceoff/molecule.yml",
+ "name": "playbooks/roles/idrac_user/idrac_user.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "44346ffd0acd7bc28a4b0760772385552686673dc6421947e1b6d2a916b6e5b2",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/forcerestart",
+ "name": "playbooks/roles/idrac_job_queue",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/forcerestart/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1fc6007dac86a8108d44f5d57cafbbedd7f31ec317258a090cdd20ca7338d5d7",
- "format": 1
- },
- {
- "name": "roles/idrac_server_powerstate/molecule/forcerestart/molecule.yml",
+ "name": "playbooks/roles/idrac_job_queue/idrac_job_queue.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "a7495a4784f344f1557f1f65ea5c51f56b31d025e304f66ddbb9ebbaffb2991e",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/gracefulrestart",
+ "name": "playbooks/roles/idrac_attributes",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/gracefulrestart/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "495911d67e2d1421a451dc2ecc4d52705486791f8be17321134fd2d91fb4aa9b",
- "format": 1
- },
- {
- "name": "roles/idrac_server_powerstate/molecule/gracefulrestart/molecule.yml",
+ "name": "playbooks/roles/idrac_attributes/idrac_attributes.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "b558f4d845b9f870ad3c89c34e3361c41acfcef9437f83bdcf342ce3e7c111d0",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/gracefulshutdown",
+ "name": "playbooks/roles/redfish_firmware",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/gracefulshutdown/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "fb25fbf9767297093fc2c5fcb74c38fdae2de2870dd4a7f1d264e32ab3fd2f34",
- "format": 1
- },
- {
- "name": "roles/idrac_server_powerstate/molecule/gracefulshutdown/molecule.yml",
+ "name": "playbooks/roles/redfish_firmware/redfish_firmware.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "3b40bf1bf5fa84bfd210377c89e4b4566076bac113347918c06dcca053c463a8",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/nmi",
+ "name": "playbooks/roles/idrac_certificate",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/nmi/converge.yml",
+ "name": "playbooks/roles/idrac_certificate/idrac_certificate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c163e9d4d92dfc89ad31c68ee2172162ce2cf9f2c8c1a65f28d5f42437bbc90d",
+ "chksum_sha256": "fc5d65aad27029c224cfbe3945b27725caac02a8bc20db57837283efbac05c85",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/nmi/molecule.yml",
+ "name": "playbooks/roles/idrac_gather_facts",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "playbooks/roles/idrac_gather_facts/idrac_gather_facts.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "99a54fcb95b0805ac8ca5f2ce85541caf9dd9666ee3ae0cd1401c90a3a3dbcf8",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/on",
+ "name": "playbooks/roles/vars_files",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/on/converge.yml",
+ "name": "playbooks/roles/vars_files/certificates.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d71c25f25f9e3ba08d70dd9f7dc33d304e69950e6245781aeb8ed71ac00d23d1",
+ "chksum_sha256": "91099951ff4f500c14e95ce2a6950ddc012521daf34c1253221cd774e75647ed",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/on/molecule.yml",
+ "name": "playbooks/roles/vars_files/attributes.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "roles/idrac_server_powerstate/molecule/powercycle",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "1e760a0cb6126d76b49613aac9e612f88ae7e27925160a52b2e0b255b5f47bb2",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/powercycle/converge.yml",
+ "name": "playbooks/roles/vars_files/storage.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be3e16e9141db31d632e532d034f33ed6d72a9151f1c3194080d16b1ab4961f2",
+ "chksum_sha256": "65e7a405ea04245a99ced19727310b2bdf295578fe6bc2869ee764eeb2c4235d",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/powercycle/molecule.yml",
+ "name": "playbooks/roles/vars_files/storage_controller.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "roles/idrac_server_powerstate/molecule/pushpowerbutton",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "9ccc017e74bede8274235bc7358cfa6192c7c7430876016ddd07c827657f16e8",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/pushpowerbutton/converge.yml",
+ "name": "playbooks/roles/vars_files/import.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4566f9dc07e3d658986932a9190fadcd98950e65e54829810aac38898b43e77",
+ "chksum_sha256": "90d3589259f331152d96d8ae0c773ed3958c7ef24bb9604387bbfa040a20bb6e",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/molecule/pushpowerbutton/molecule.yml",
+ "name": "playbooks/roles/vars_files/export.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "aa812c14b39721cf11488674c9a0a93b8ca8e94406f335f7722a58b20475a08d",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/roles/vars_files/firmware.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8ddbe55c1ad9eaa8f33cc617ed5ceeaaf99efd7d43bb5f33d60ea98fd966241c",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/tasks/init.yml",
+ "name": "playbooks/roles/vars_files/bios.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5d5c75310a93c6d513c747db70e9bea927c1eb5dd6ef75ba5fa766d4c2baaf9e",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/tasks/main.yml",
+ "name": "playbooks/roles/vars_files/osd.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28a5dd2d4dd0c53d832afbc13fc43dd71180b4d8579e5e37d62073e2fa51ce07",
+ "chksum_sha256": "b1e6e58675cb4af4c9f800681df1be3ea613b4ee09aa6f8c5eb2e8f05f4b2df9",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/roles/vars_files/user.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4dbddb5c3dedb08b2a42e3b2131cb515bca4ae9d83ae4a28a17ff4919993b587",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/tests/inventory",
+ "name": "playbooks/roles/vars_files/credentials.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "959f184f2bd6eec8a6302784bfc6518c10a712a496e92b6f20be4f5e166be41a",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/tests/test.yml",
+ "name": "playbooks/roles/vars_files/reset.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a60392a56123662801eb3957572a4c57d858026c0c0222d8e7a3b2c04a24c5f1",
+ "chksum_sha256": "690bd2b370bbff4b2d9fc3e158f7a85276e343e12167d724d7a30ae7fd7ae7b5",
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/vars",
+ "name": "playbooks/roles/idrac_export_server_config_profile",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_server_powerstate/vars/main.yml",
+ "name": "playbooks/roles/idrac_export_server_config_profile/idrac_export_server_config_profile.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f92fe5299b10e093b76f7527715368bf5d9a243323f74a69b3a74bc75b172886",
+ "chksum_sha256": "e5a5df7a2c70cdfc5b97c9a752dddf82b134ba6b5358ab6216097b097c72a533",
"format": 1
},
{
- "name": "roles/idrac_storage_controller",
+ "name": "playbooks/roles/idrac_import_server_config_profile",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/README.md",
+ "name": "playbooks/roles/idrac_import_server_config_profile/idrac_import_server_config_profile.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64f1956a2b0638551ece563c4f072e2c27553b76e4f381c22316d9e18d47d6b7",
+ "chksum_sha256": "ac1ed229206d33006e498e8c0ec6f46ea190dcbcb34698b3560ac7c45859e645",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/defaults",
+ "name": "playbooks/roles/idrac_bios",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/defaults/main.yml",
+ "name": "playbooks/roles/idrac_bios/idrac_bios.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "15213c1a91c203930ede7c03d11efa362d86d0f94e6822268e714e8e7a98b14d",
+ "chksum_sha256": "46eb1addcc8166ef1e320678c15b5c0bcb494e68166b3a4cea65341764af3832",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/handlers",
+ "name": "playbooks/roles/idrac_reset",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/handlers/main.yml",
+ "name": "playbooks/roles/idrac_reset/idrac_reset.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "527bd186562deac98b997410a770da9d22afee10005f965d699859e2a4df5ce0",
+ "chksum_sha256": "d7260501d8b6889b34b43524d75c1bf06a6457f44ea303b36aacb2190ff8fa4e",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/meta",
+ "name": "playbooks/roles/idrac_os_deployment",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/meta/argument_specs.yml",
+ "name": "playbooks/roles/idrac_os_deployment/idrac_os_deployment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "57c999b43cebe8874baf8ce6a3fbe7a603c69bc0101577c73d7eb1d8720fa066",
+ "chksum_sha256": "484bf0716912aac6c405531874feb47643b82e5ae5bb4048e492adb7269328f1",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/meta/main.yml",
+ "name": "playbooks/roles/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3eda64bf454dd351cebf7063807750d690c44072601bae3af6144460a5323e6b",
+ "chksum_sha256": "abd9f8c450fda2dfbff41f9ee2bbc613150d94e027e8a560e48d3ec14aad97fd",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/tasks",
+ "name": "playbooks/roles/idrac_firmware",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/tasks/main.yml",
+ "name": "playbooks/roles/idrac_firmware/idrac_firmware.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea1e319497a59c9d00d925cec6f1bcaf7c672b1b3e6d9a671c95a1c9e89c9f77",
+ "chksum_sha256": "ce0d0ab244e8a7831b43abafdcc2e610b3b4507ff0a8785000792d94769e71c0",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/tests",
+ "name": "playbooks/roles/idrac_storage_controller",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/tests/inventory",
+ "name": "playbooks/roles/idrac_storage_controller/idrac_storage_controller.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "0792768055d63117233dbbca23e2e7f434da81c3e77eaeb67554602d17ea7fd3",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/tests/test.yml",
+ "name": "playbooks/roles/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6992dd1f8393638377d59efa1ddbd6e728bffc188f6997d3b5c98208ce710853",
+ "chksum_sha256": "f78483b3cf76c53e1539f5c47bcae79a21890faa859bc5f0cdea52cbeedc046e",
"format": 1
},
{
- "name": "roles/idrac_storage_controller/vars",
+ "name": "playbooks/roles/redfish_storage_volume",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_storage_controller/vars/main.yml",
+ "name": "playbooks/roles/redfish_storage_volume/redfish_storage_volume.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "981bc0f2d638983323e5081845acf0ac97ddf1cd5e603cf517051f69661a9cec",
+ "chksum_sha256": "7c8d35ac12005d8a06ca9fbb891edd16c926b4740b7f1a4743df7fcace3404da",
"format": 1
},
{
- "name": "roles/molecule.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a681a11158030f89670466b71c1cc02663571a4b551b2e3a7924ad23096e48cd",
+ "name": "playbooks/redfish",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_firmware",
+ "name": "playbooks/redfish/storage",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_firmware/README.md",
+ "name": "playbooks/redfish/storage/redfish_storage_volume_delete_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5c84a2ed15c9a447689ca3a576997db0b740caf3318d6c53f5f4bd94ee8ad26",
- "format": 1
- },
- {
- "name": "roles/redfish_firmware/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "54dad57ad6a79cf3d1613917157698b84eadf3f10e456776ce23fd4a1a8c4e61",
"format": 1
},
{
- "name": "roles/redfish_firmware/defaults/main.yml",
+ "name": "playbooks/redfish/storage/redfish_storage_volume_modify_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db1cab86f255f2f7829eb72d7f45c792a3c1560d5c23fc42076c70893f4dd784",
+ "chksum_sha256": "96a4103655813d28c473885a73cdba3b48eafaf01521931f7761f89de461c91e",
"format": 1
},
{
- "name": "roles/redfish_firmware/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/redfish/storage/redfish_storage_volume_create_job_tracking.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "180a6c686410517af84f74f13ee71d1451ea7a736855e521355da7f7b7a36948",
"format": 1
},
{
- "name": "roles/redfish_firmware/handlers/main.yml",
+ "name": "playbooks/redfish/storage/redfish_storage_volume_initialize_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c039d1e5934a6dac58d3fa5dae336ebd5cf878c9649b6acbba4206f6ee0e3e0d",
+ "chksum_sha256": "2c4a6b9df13e5c02c6f74324453b3e2e93607f4fe6cc723a1a2d3337883ea21e",
"format": 1
},
{
- "name": "roles/redfish_firmware/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/redfish/storage/redfish_storage_volume.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "071ebfb5602ec9046db2ed9209b8697ff9e9cbc66069236b875bbf65a59af625",
"format": 1
},
{
- "name": "roles/redfish_firmware/meta/argument_specs.yml",
+ "name": "playbooks/redfish/redfish_event_subscription.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8553c37be4eecc702220849c497ff5cb9dc0712567fc85d3cbed123c450f89f",
+ "chksum_sha256": "f901dc08010a5c76bf0fbc8c8f1ba60c1eff5d075878469b3fe1197ad2dc4589",
"format": 1
},
{
- "name": "roles/redfish_firmware/meta/main.yml",
+ "name": "playbooks/redfish/redfish_powerstate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "669ae8e69c647cf300677acbc0990147d93ded846aa44fd40c45664768c9d6c6",
+ "chksum_sha256": "92461b5efd3384e8da363a9a091a18eb762680e516407b78ad55c1139d03c6e6",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule",
+ "name": "playbooks/redfish/firmware",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/redfish/firmware/redfish_firmware.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e9831056b86afc4319cb850d6ed4a723e08ee7a5e2406e178d060bcc0c669796",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/default/converge.yml",
+ "name": "playbooks/redfish/firmware/redfish_firmware_rollback.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12286deeca54ad309b8ad6898fbe72fecbcfd6c35885753b2e8a63de6d2666c1",
+ "chksum_sha256": "67bedde1fa194d706233c12d6aeae0504214b2598a4f848373e31eddcdc571c0",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/default/molecule.yml",
+ "name": "playbooks/redfish/firmware/redfish_firmware_from_local_jobtracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2a39596d4b1e4313eca1a4958921f7d8c57b84115d3f81080704d742edf30b1",
+ "chksum_sha256": "da71c6817f0338f16b2efceec75919579ab4783695d256eb50050e50dd732ace",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/negative",
+ "name": "playbooks/redfish/firmware/redfish_firmware_from_http_jobtracking.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7e1785959ab3ca10bc07a64de60a1fa8faad059df4b1b7a68a493be98b18d0f0",
+ "format": 1
+ },
+ {
+ "name": "playbooks/idrac",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/negative/converge.yml",
+ "name": "playbooks/idrac/idrac_reset_result_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca7ab87d800e87cded135f5d6b008e71d7784794dca7aed1c67e87db1cb3d1ad",
+ "chksum_sha256": "1065ba2ac114119031a719e466be0b1cf3d70a05590d7a021855502dc9877f08",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/negative/molecule.yml",
+ "name": "playbooks/idrac/idrac_lifecycle_controller_logs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2a39596d4b1e4313eca1a4958921f7d8c57b84115d3f81080704d742edf30b1",
+ "chksum_sha256": "65432bf367476a0dddc30fbfa2ad9bc77a5423daf5dad6518447fc1bea124cd2",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/resources",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_user.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d3aedae3410210ff0893afd6e160929f8e7581a5337928f5ee00f0b89a81a9f9",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/resources/cleanup.yml",
+ "name": "playbooks/idrac/idrac_os_deployment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e0599c04f5f5203a69b80ed580e6a03f0586e029a482936479beef09f03ef165",
+ "chksum_sha256": "42fc0b056a6282757e5b0be577e896fc6b62ab4ce0ce4c673eec2f99012d12d6",
"format": 1
},
{
- "name": "roles/redfish_firmware/molecule/resources/prepare.yml",
+ "name": "playbooks/idrac/idrac_boot_virtual_media_workflow.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70be84b8715d7ea5e18f87fbfd34763293a17ebfa3210f7bd4aa767e0b3ab8fc",
+ "chksum_sha256": "542c406f1a42e6130c8c4a75b97a2ea5fbb8f0c0cfeea41e724972855457e486",
"format": 1
},
{
- "name": "roles/redfish_firmware/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_network_attributes.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b7dd0ba1946e68eacb3df926df9a2f7d8fdc9f922ea4044b304de3945b0af77d",
"format": 1
},
{
- "name": "roles/redfish_firmware/tasks/main.yml",
+ "name": "playbooks/idrac/idrac_session.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c256607b253027b52ff6b9ffa117ac4ee5adf9f6fd3a0adfe23251aedcade66",
+ "chksum_sha256": "b0440a47587c5d5ddb9912b53731727e93d0b889c12241594343f919d22f5bda",
"format": 1
},
{
- "name": "roles/redfish_firmware/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_reset.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "de8fe2c4ad4c761b39cd3f2f26a33de198d13764460cd4ae48ddc41e3055c129",
"format": 1
},
{
- "name": "roles/redfish_firmware/tests/inventory",
+ "name": "playbooks/idrac/idrac_virtual_media.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "466e5e23ac6ac17de823878df10bc89f90762170e96ca4682626b4ef95d028bc",
"format": 1
},
{
- "name": "roles/redfish_firmware/tests/test.yml",
+ "name": "playbooks/idrac/idrac_diagnostics.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec798a2b3ebd1096273bc1776f157f23a7b4a7e33d45669856dd185fb9f6b064",
+ "chksum_sha256": "f14fa78ada0d1c85ecd18419f0ec1940cbd387af3fd35e78da2e676228ed897e",
"format": 1
},
{
- "name": "roles/redfish_firmware/vars",
+ "name": "playbooks/idrac/deprecated",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_firmware/vars/main.yml",
+ "name": "playbooks/idrac/deprecated/dellemc_configure_idrac_services.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95897ec515cd6dc828104d2b67e61b8842802aec9e73eac05462c52cd111f653",
+ "chksum_sha256": "82a62b3e894b89054dcc4d01e56614212501a76ed120d0b98c310bb609ef9836",
"format": 1
},
{
- "name": "roles/redfish_storage_volume",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/deprecated/idrac_network.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "75b64d730751d3543b829f7baa60bfe506b5c36a068aad0f52db166c7c6b7f15",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/README.md",
+ "name": "playbooks/idrac/deprecated/dellemc_idrac_lc_attributes.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "efb82c5ab08b6d604fa2dfa92911904119f33b20799fca3cccbc0ed9aa9633a8",
+ "chksum_sha256": "965bcf06720fc6b878fed5cc6d77e64b4830027670406a0de451fb519277b842",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/deprecated/dellemc_configure_idrac_eventing.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "333f32a604a8273f9048d333fb835e9717dd561b4583360503885f3ef49cfa41",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/defaults/main.yml",
+ "name": "playbooks/idrac/deprecated/dellemc_system_lockdown_mode.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a5a221d2fa9a6fc8985007dca4b7b46ea33c4fee921a405ff021bb576740473",
+ "chksum_sha256": "d8ec2bdb41b68c6c16783cab302a56739474ba2db2a144340906d83d411d8549",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/deprecated/idrac_timezone_ntp.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7fbea4b5c1db843d6a0091e3b19d96062a724fa1059bbb23651692663386b13d",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/handlers/main.yml",
+ "name": "playbooks/idrac/idrac_lifecycle_controller_status_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6877ee1583a2e33f3b3baf52a4a633c1f0bddea5bafda3e91241952bb5f2f19f",
+ "chksum_sha256": "faff447c9a135cfba87c04161408d2f9491d42fe85da06223c821a30cd3c611f",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_certificates.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "304bead36bc953c79bdb064a6c3372945953d0b6d8addbdc4be45d0e59c51fcc",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/meta/argument_specs.yml",
+ "name": "playbooks/idrac/idrac_storage_volume.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8098e64b110bba2ed1aaf28aa39abc87c5085f84149271719bb97376d1181aa3",
+ "chksum_sha256": "739c488df021adb4d9cf2f8843dd4314bdad00f4076e22c2e0580d2d8b08ba7d",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/meta/main.yml",
+ "name": "playbooks/idrac/idrac_firmware_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "310a9f23099629e5643b4dfdfa22af40cd4f78f3037fe1a5f6915c322e12feb0",
+ "chksum_sha256": "eaa3b675ac75e62eead40e8ccab34205de55128cbfdeddac97cff7c1ad7dbbec",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_server_config_profile.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "95fbf44d10f20da1cb0a866ec7578e2546e215e535c094b0b0270317eefa8bcb",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID0",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_lifecycle_controller_job_status_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "453984f1feb8ed3332755cd58dd3b3d13eacf365db73f0a4231e929c28913ec1",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID0/converge.yml",
+ "name": "playbooks/idrac/idrac_system_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79d8cf155feafc68b8d2d03b79c6a04b5c66e9e5f9e87741ac71c57f54a1c089",
+ "chksum_sha256": "e385033356b74133e34ecdd3aa63f6ad1d134cc5857ad48bd847f4fc6766c350",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID0/molecule.yml",
+ "name": "playbooks/idrac/idrac_bios.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "2f4892fed0e96b2bd7532645fe590d2ae4456bc1de316b405dceb9ef2c10c8fa",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID1",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_boot.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a3f384d9f351aca517327d735422ff3a7d7e48fd0a8370f9d41dd1da6dd1295b",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID1/converge.yml",
+ "name": "playbooks/idrac/idrac_syslog.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf3b39d9fc6450c880a0e5ab61e904844fb9c1e1b112d72be49da7401eef7ba9",
+ "chksum_sha256": "be1eeae26000a909eb88c2f7ae2f45d5c492a0716cca5d56ce8f9e709aefaac9",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID1/molecule.yml",
+ "name": "playbooks/idrac/idrac_redfish_storage_controller_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "a417f1624b45912c41ce6f116140e65bdd241cb60473269ee25efc428d5c2cc9",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID10",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_redfish_storage_controller.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0f12fa63252f024245e3644b89527a023e718107110a8580cc332c8b57c19b22",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID10/converge.yml",
+ "name": "playbooks/idrac/idrac_attributes.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c443aef978e1312010f754d04cb5c6da231557f5a044f4aa77e6832ae9526908",
+ "chksum_sha256": "335fab9fb27b80e9bb72c6b1b312dc5b344d2f1ae9b6614255f854bc5ca6b3a8",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID10/molecule.yml",
+ "name": "playbooks/idrac/idrac_license.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "de01f4b83a795770f2b12c9c3d5fbadeb6f9351334083f5fc21bef71d29609b9",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID5",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/idrac/idrac_user_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5068eb579dea93aca8ef6edab4e4254bc341c00d2ef373cfdffb50a34a64c755",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID5/converge.yml",
+ "name": "playbooks/idrac/idrac_lifecycle_controller_jobs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2aa8df2c045cd181b56ee21f6bc7c7f3460797cf3d989de75fafd7025f741ca5",
+ "chksum_sha256": "2a3912a6f0afe6778d57a2eed40df6183a3ed2838400a16cfdc2eaa4755ea14d",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID5/molecule.yml",
+ "name": "playbooks/idrac/idrac_firmware.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "971813db1ea19f8f3e3b9d513f7f15ab2e8015c202323836cb43a3075d0d1b90",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID50",
+ "name": "playbooks/ome",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID50/converge.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "86b09f71d7e2238b359eb8ec3be7860f5f531dd8cba020281d8760b6101e8c24",
- "format": 1
- },
- {
- "name": "roles/redfish_storage_volume/molecule/RAID50/molecule.yml",
+ "name": "playbooks/ome/ome_device_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "ef61ae932b9e642e8db5552525670b6a9645a52fb39de9add50551fcd721190a",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID6",
+ "name": "playbooks/ome/profile",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID6/converge.yml",
+ "name": "playbooks/ome/profile/ome_profile_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd8a43244b35a395e735e9f9662d01d52a0c2b626e3f11b044c0e2f4d599ecf1",
+ "chksum_sha256": "ee5cb574348141c0f41436b8c267660f94a0df9bcf76b67fb7bc555b1b45b687",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID6/molecule.yml",
+ "name": "playbooks/ome/profile/ome_profile_unassign_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "91cdfc259bfb71732f8ecc5b64fc69ae06d5053f886f9136aaf78b43d5c9539a",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID60",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/profile/ome_profile_migrate_job_tracking.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "832510e01a20607c99b212a65d194fd6c87b0b6d135f395215c50bd4ef2e50d7",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID60/converge.yml",
+ "name": "playbooks/ome/profile/ome_profile_assign_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d33d97618ffcc391d674d3345e4701a2ce217eaf2676694346cc76ad008da05e",
+ "chksum_sha256": "09dd5305a95e0c59fd985365a2f96c6f929603f973794bded1405b9316d9c6db",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/RAID60/molecule.yml",
+ "name": "playbooks/ome/profile/ome_profile.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "6ceb1d703cab609a550730151e605a3e85f690e9459efa1c467fbd92e223cb30",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__delete_virtual_drive.yml",
+ "name": "playbooks/ome/ome_device_mgmt_network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8d167fa3a1ec0e778272338f9d77af0bb416be6789875be061dcab8375103df6",
+ "chksum_sha256": "89748790b471a573b3921ff6e0fff48422b97ec16fb2a1aeae568ccb3fcb013e",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__extract_storage.yml",
+ "name": "playbooks/ome/ome_device_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "971c811200c00ba90bf9bd999d45ec7750ba63d9f5e2d75c3929ae89f82b9f72",
+ "chksum_sha256": "5669a05a743b069ef53e1dadcf80954feb963fe8fd56932676fb92ddda9ca35e",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__get_helper.yml",
+ "name": "playbooks/ome/ome_device_location.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2107089195728fc04d99dd63d9f0d8f61b2d7e519f5127627921ef7ee4139df2",
+ "chksum_sha256": "497ed8e7f6b757cfbc60ccfcecaec04e742b47a6ed80861b58339f79f2f85adf",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__idrac_reset.yml",
+ "name": "playbooks/ome/alert_policy",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "playbooks/ome/alert_policy/ome_alert_policies.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c004e1ee7b85b34bde8daf3ce4b82d794013745abff33fd2c27801dbf89bf4a",
+ "chksum_sha256": "d0d6be7aa3a27a5adaa464911c691ec919d592384a57a651a23fd377f28df60d",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__lc_status.yml",
+ "name": "playbooks/ome/alert_policy/ome_alert_policies_category_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "602b7c37a4a50ca9f143caa2479ff5c737e7592d1c802f8f6ffadbc7579077b1",
+ "chksum_sha256": "cdfb33544a84e3accabf9b330d92733266c8747b56a4a89d0fdfc14d531d9f75",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/alert_policy/ome_alert_policies_message_id_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b98604773e4979bb9807e2652fa6217e3ce2c7414597998a84be4592c9d078c4",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_default/converge.yml",
+ "name": "playbooks/ome/alert_policy/ome_alert_policies_actions_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65331bfaa443cd7ff715e673d30abb2be2e3acad8ff0c0161cc002b4df3ef030",
+ "chksum_sha256": "d26bc85e086d2fe2560c3b15a0cef97d58380ffa4a670cd86d1521746250d55a",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_default/molecule.yml",
+ "name": "playbooks/ome/alert_policy/ome_alert_policies_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "4ed4df04af14ae90aec17feffb5fb5ce93617856f43b270072e0501328580397",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_immediate",
+ "name": "playbooks/ome/user",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_immediate/converge.yml",
+ "name": "playbooks/ome/user/ome_user_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "defd2475ab056ad28cf76397ae6f42ac3f7440c041546b6ecf0c488517fbd2d4",
+ "chksum_sha256": "ed38db95276274d797ccc43cec2d8b50d53be9bd1e5e092c9ec6f030b7f5c6fc",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_immediate/molecule.yml",
+ "name": "playbooks/ome/user/ome_user.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "cab96264060d057d6bd286f21e5ba810a6c9747014fb5c6ef4fa12118adb88e1",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true",
+ "name": "playbooks/ome/ome_session.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "69cd94bccd08b319a360e5f0fd22ce388e1d4dc8f7b4d285fb1e3d2cbee87b6e",
+ "format": 1
+ },
+ {
+ "name": "playbooks/ome/ome_discovery.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d38b03a955441fecd26fcf8f01f75e65d7d459927ac58040d041299009fab7aa",
+ "format": 1
+ },
+ {
+ "name": "playbooks/ome/application",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true/converge.yml",
+ "name": "playbooks/ome/application/ome_application_network_address.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a14046372f6e7f33908931a7edeb09e73dc4ec5bde6e1778b61fe23054a2735",
+ "chksum_sha256": "2941e928032f4e93b02e7f56db09bb784702d1782e1e3c87c094a8b2e68bceef",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true/molecule.yml",
+ "name": "playbooks/ome/application/ome_application_network_time.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "5d54fa4f854c3871c631114f2bd1a70e33a6f6b87a593529ac4b7bc2a620bb48",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true_force_reboot_true",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/application/ome_application_certificate.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "451395c662a84f8d3a030be78a6c1cb7bc32dd5223c2f0c21c4ee6a6433e7d56",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true_force_reboot_true/converge.yml",
+ "name": "playbooks/ome/application/ome_application_network_proxy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec06873a01c77fa0cfe3d4e85c647f5c41ee33675b9b254a7cc697451a8c2a87",
+ "chksum_sha256": "afbcdc3fd81f79fec948b8898ed212e48d494d521af66e2c3cfa91538ebfeaa7",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/apply_time_onreset_reboot_server_true_force_reboot_true/molecule.yml",
+ "name": "playbooks/ome/application/ome_application_console_preferences.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f52d711103d50a437830c6fbcd04fb4bab49a0f82f6d26d1c791c6e8488dd090",
+ "chksum_sha256": "d69507043f5d3ab6e177a969f319b3dcbf8edfb5678e58e348653c8f6b8556b2",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/application/ome_application_alerts_smtp.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d2eb95a8fcea5239a320840fc4b4ff2321d071a06638ae5d5860651e1b3a4587",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/default/molecule.yml",
+ "name": "playbooks/ome/application/ome_application_network_settings.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "d61531249118fb36496a87f9bfc892ae34483d6997ff5cb09155f0269a7ae329",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/default/converge.yml",
+ "name": "playbooks/ome/application/ome_application_network_address_with_job_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "171f69d2607d6cf5d8088b2e7a9231406cbf90c2bf74d40e8997aced0f0f08ce",
+ "chksum_sha256": "3375ed92eb8f5143b01108debff662d6f83c98483fd340328315618aa4e801fd",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/initialization",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/application/ome_application_network_time_zone_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b66d01170f512a566b92b06b43aae3faa1755f0e453d3ef0110521b68e995889",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/initialization/molecule.yml",
+ "name": "playbooks/ome/application/ome_application_network_webserver_port_changed_tracking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "d060d4922337f1b9357e6196bc5dc2dae238f126c76481d65d46e00b3b1d3d79",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/initialization/converge.yml",
+ "name": "playbooks/ome/application/ome_application_network_webserver.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2204316b3f71e879600ede864aaa0e5b53ac0b5cc5422b4766a6e789a0d9dfd",
+ "chksum_sha256": "2e2e19608508da9365570746e726a36e5a5403e16f0563c2de3e6d775b411216",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__create_virtual_drive.yml",
+ "name": "playbooks/ome/application/ome_application_security_settings.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ff523743f2c992be084d96d3e0bebf811c50fd09bad3e9b3bdf346c093a3914",
+ "chksum_sha256": "e423185bb24ce6d7641db2cec93e76029728c4631a0b84c307da0a375a247526",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/molecule/__job_track.yml",
+ "name": "playbooks/ome/application/ome_application_alerts_syslog.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ac8080105eab85b5b077ac94669ff8fc03600675b5d86258ee27ca26e6ceebd",
+ "chksum_sha256": "56af4d077ccfe2b4bdb46de7587fbfb03609ff7484108df4840b69d4118d0c2c",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/ome_chassis_slots.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fb87fe3c86cd4506226de40728064dcba9b759220b566342da9c2f0b2e40331e",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/tasks/main.yml",
+ "name": "playbooks/ome/ome_smart_fabric.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d504045131a1a6e693af5e3be831c677039d54c70765cb14bb5ab1de3d8eb054",
+ "chksum_sha256": "03c70e63d7713fe24c11cfb6400669fbe422459b3695ccb6214f5ae5656e4f30",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/ome_device_local_access_configuration.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cb3e4d71a9422a25d8bbe4cf2ca376df39d5a5f922de2cfb9facc8f5721c5a88",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/tests/inventory",
+ "name": "playbooks/ome/ome_template_identity_pool.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "97ec21aeab7e209f22107f692b67ed54bc80e0d0205561d2a2b73d1f494e0fba",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/tests/test.yml",
+ "name": "playbooks/ome/ome_domain_user_groups.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4aa87d44c3b522744c37ca811b31c1648dfde1bf2e75a231499f5d8210396fd",
+ "chksum_sha256": "1ec28a9e158a70907e3da5f2a56894028e75daad9e3393666ceb54d5baa96b37",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/ome_group_device_action.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e28a69eee5cc5383903d9c96e9c6b12a76cf76a8a92d7c9dae9457456bb774a4",
"format": 1
},
{
- "name": "roles/redfish_storage_volume/vars/main.yml",
+ "name": "playbooks/ome/ome_device_network_services.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d396acd98a3478d4a32965b818a75ebfddf7cc5725e8a0fac6093830ef34ad9",
+ "chksum_sha256": "2fbe1308921f98e05468b4e40687fb1954e365d4c140d73bce5ae550874d95cd",
"format": 1
},
{
- "name": "roles/idrac_user",
+ "name": "playbooks/ome/powerstate",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/README.md",
+ "name": "playbooks/ome/powerstate/ome_powerstate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "369f4ae5faa868c85788d6df11bf9446ae9fb5ed5d3c9f2808ed9e84cbc06c1a",
+ "chksum_sha256": "ecddfa8e0d0c89d732131128cdb5b5eb2796f2bafc908751866dac67d8896c66",
"format": 1
},
{
- "name": "roles/idrac_user/defaults",
+ "name": "playbooks/ome/powerstate/ome_powerstate_with_job_tracking.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e7ef89910b84426e2251e5e8900139b87e539f5edc8c2866664dc41be18a3ec",
+ "format": 1
+ },
+ {
+ "name": "playbooks/ome/template",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/defaults/main.yml",
+ "name": "playbooks/ome/template/ome_template_info_with_filter.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6ab3a9e6149fca242c0436f5630a97a2063f6232f42a9df4482b3977471d0be3",
+ "chksum_sha256": "9b38315b410882eb04f6a19bb0676ae5ddb146831b571572f5d15ff70d430fe2",
"format": 1
},
{
- "name": "roles/idrac_user/handlers",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/template/ome_template_network_vlan_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bf7ebd240aced312384fd6700dcee5f4572c189b18878dd3f2ad1a600a41241c",
"format": 1
},
{
- "name": "roles/idrac_user/handlers/main.yml",
+ "name": "playbooks/ome/template/ome_template_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00a6e4df8f9fe8c7b2be5ed666c8a779836b8bdd3a57736d81729e080d52d27b",
+ "chksum_sha256": "7606d5cfee0c932ccc67a090a8a58df3fb85d370de5a727df937d84b871f93e3",
"format": 1
},
{
- "name": "roles/idrac_user/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/template/ome_template_with_job_tracking.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "28cb65035fb452f9d1b688b11f2a9152f010e6502cdf980fdb1fa674da1c613a",
"format": 1
},
{
- "name": "roles/idrac_user/meta/argument_specs.yml",
+ "name": "playbooks/ome/template/ome_template_lcd_display_string_deploy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be066c11750b38f72617e0d31f8c107f945bb65448118a94bdb923a80babb2c0",
+ "chksum_sha256": "4c802f28d538ce55438af15f296e755c1a5f08872f5d55b582e0eb776446975f",
"format": 1
},
{
- "name": "roles/idrac_user/meta/main.yml",
+ "name": "playbooks/ome/template/ome_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "36cbc64418daec070e7d63dcecf4ec4e7be341ef8a7661b9c5487e8903a48c2c",
+ "chksum_sha256": "3c90c9b41ff444b1bf174211b0ce260bd7bdd80c7a1a012c51314d03ade5400f",
"format": 1
},
{
- "name": "roles/idrac_user/molecule",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/template/ome_template_create_modify_lcd_display.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5abdf1b033e3f22ba9c5d532fccb5e960bd9f04c6fffa4fbae799207e754a877",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152120",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/template/ome_template_network_vlan.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d72b4eadc6cdf60f554394e325a86d8ffee38bb3e38ae3f38b0945e25864770d",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152120/converge.yml",
+ "name": "playbooks/ome/ome_network_vlan_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "600419dfff67b4f0e13aceef503059c3573db9322dc487db74440b6489e93177",
+ "chksum_sha256": "de7ce01fbf0f07f87edbbd56adbc85904b327ab25c3568904a85df7c1635c77c",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152120/molecule.yml",
+ "name": "playbooks/ome/ome_network_port_breakout.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "chksum_sha256": "39bd40122da135d913272454af6d830ba40307d59e66e487aa709a9d1272afec",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152146",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/ome_identity_pool.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "371d65813bfc3a77962caccfc53aa30ab4767402de617f35d2db5734803b29e2",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152146/converge.yml",
+ "name": "playbooks/ome/ome_device_quick_deploy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4fbf2ba3ab40583ae88f9b5b63744498b858acb41a33bb75a882f0974b47b3d",
+ "chksum_sha256": "acb2a7a5c7e73052bced88fbe4ba9dcffab7505be19212d9a11bd7aed261a5e8",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152146/molecule.yml",
+ "name": "playbooks/ome/ome_server_interface_profile_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "chksum_sha256": "af58749f1aa4451723ccb7fde4f31313eea771ce6818b56df7b4d3450b432509",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152147",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/ome_device_power_settings.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "18029724ae0804c84143000e04c892cb042fdca99d8cb70eebe301bb964eb9ca",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152147/converge.yml",
+ "name": "playbooks/ome/ome_active_directory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1096107ae7f0281cbc4f43477562d8c9709ceb968ee5058b5880afb035cfac59",
+ "chksum_sha256": "dd4d2026942aca6b2fad68425dbb9c6795ba3017fbffe2dd1fd14e9c72ee0d81",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152147/molecule.yml",
+ "name": "playbooks/ome/ome_network_vlan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13fc1cf138058346df47e0863e038de0d6a3fe59cf9ce35d2af2d872f237444c",
+ "chksum_sha256": "5fc6914c37f5a6b280047c45bc51dde5156fc7f17fcf6ea7b31b4fb52902e867",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152148",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/ome_devices.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1331fd531251cd1d82f77bbff149fccae42ed7ad035cb01f3b61afbd76cb508",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152148/converge.yml",
+ "name": "playbooks/ome/ome_smart_fabric_uplink.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "38da55bda10bc1de8eb098d795abe2588f27159b217e6c3157c5396273916e02",
+ "chksum_sha256": "e9980c7e8d795f73469a974aa6a19c09f998081fe4c40163f9525b5db5596216",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152148/molecule.yml",
+ "name": "playbooks/ome/ome_server_interface_profile_workflow.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "chksum_sha256": "88b5f582aba465eac8f1ab17c7e40865721921bb7864af0d07f83511879928af",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152148/prepare.yml",
+ "name": "playbooks/ome/ome_server_interface_profiles.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06c0f8c4fb2670cc81d9f1cfbd7d0664c88249ec23d5d0c35bc4177da0fd296c",
+ "chksum_sha256": "d0e8826dc3911a89aa5733c42495dd1fa40690d391c65c5c4c48bea10716a56f",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152149",
+ "name": "playbooks/ome/compliance",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152149/converge.yml",
+ "name": "playbooks/ome/compliance/ome_configuration_compliance_baseline.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "20d79a5f4a9dafe606d8b6cde2accf782af12b9f1781f5df0682b93fcdf16ddf",
+ "chksum_sha256": "fc3a2176d0d655f312c52f97bfbbbefe8fc5dd45e88b32301aaa56388a52076d",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152149/molecule.yml",
+ "name": "playbooks/ome/compliance/ome_configuration_compliance_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "chksum_sha256": "e248d9baf3d9a443dd968b7dea92c70aba87d542b52a34d692daf7c3f595587e",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152149/prepare.yml",
+ "name": "playbooks/ome/compliance/ome_configuration_compliance_baseline_workflow.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6018b3098204f927d9412e33103fb6cc93847ba09cd25a01df104efe45037753",
+ "chksum_sha256": "bc42c37b1f2c9492b5b297c8245fc6836f4ae89dcdfebeaf958bce0847df6f73",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152150",
+ "name": "playbooks/ome/ome_diagnostics.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b67e463419986332c846988c6c8ba7a877e12fb42b4e15ac9354f1fffac788eb",
+ "format": 1
+ },
+ {
+ "name": "playbooks/ome/ome_groups.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c8af511d1623d2d4cf697b1e3480fede2228f9f0759bc73af8c88c72d20b6cd8",
+ "format": 1
+ },
+ {
+ "name": "playbooks/ome/inventory",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152150/converge.yml",
+ "name": "playbooks/ome/inventory/ome_inventory_by_groups.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6220849df8cee2230b06defb296fc7569be79bff4e9f2e53dfc4c0ff4f108708",
+ "chksum_sha256": "72904dae71c2171978e02bef29d887e94c4f588177cffdac576c67df93ac3b97",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152150/molecule.yml",
+ "name": "playbooks/ome/inventory/ome_inventory_all_groups.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ffd7e1cf3dfebbc59c5e0ee4062434ecf4f0c76969a060fc207ea06905e906",
+ "chksum_sha256": "ea301f8125ffc2fb7889cc791e0558f8f7e85b3516e3d6d6fcdd4b8d83557988",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/TC-152150/prepare.yml",
+ "name": "playbooks/ome/inventory/ome_inventory_by_group_with_host_vars.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5f0a568105643945bdd6b58bf10914d1a66c46130e7ee8f4ffa7b70f0c387c7",
+ "chksum_sha256": "6864ace739760c7c3d8bbf3efe1f34c77fa57b7562817febf7f8d0926b7a2f4c",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/inventory/ome_inventory_groups_with_host_and_group_vars.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a36a617cbe7d9292a04383d2bf0a5716fc645f69c1c7959eca82ceda5d541ab9",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/default/converge.yml",
+ "name": "playbooks/ome/ome_network_port_breakout_job_traking.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b58bbd18e562ced8fbaccd5a23d479b69b4bec8659d246d62de5c135b291dcc",
+ "chksum_sha256": "111f9977f82b1ca2241b5307535fe6833e70c115871c3e856e15c98f2f507eaa",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/default/molecule.yml",
+ "name": "playbooks/ome/ome_smart_fabric_uplink_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13fc1cf138058346df47e0863e038de0d6a3fe59cf9ce35d2af2d872f237444c",
+ "chksum_sha256": "9fda088244750b408b164dcf07d55bbbdf3168b0eb27627c4f938afd0a8658fb",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/resources",
+ "name": "playbooks/ome/firmware",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/molecule/resources/idrac_user",
+ "name": "playbooks/ome/firmware/catalog",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/molecule/resources/idrac_user/cleanup.yml",
+ "name": "playbooks/ome/firmware/catalog/ome_firmware_catalog.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "291fe02f4f127b148c6bc9a2325d06c7e6497f4c853152a7fc42f3c5ccf479e5",
+ "chksum_sha256": "d6919243b6c8339e13e6fba8d3b97d0205734da51d8b7858bbb450ebe017af45",
"format": 1
},
{
- "name": "roles/idrac_user/molecule/resources/idrac_user/get_user_info.yml",
+ "name": "playbooks/ome/firmware/ome_firmware.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e080f7169503086fc1e8fc4e4fa95e56a0dd39403fe183f086ad9770ded41e2",
+ "chksum_sha256": "7d25058ce25ee9d8ebc654d51279c54cd41fba51abe357c0221d3f41606180a0",
"format": 1
},
{
- "name": "roles/idrac_user/tasks",
+ "name": "playbooks/ome/firmware/baseline",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/tasks/absent.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6fcebf5c4162fe906c18e2babb948835808726952abe198cc14caaaee1454546",
- "format": 1
- },
- {
- "name": "roles/idrac_user/tasks/get_user.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7250bb2b6723ad29148ec605badbcc828f6656088ceaa7f4ad02b46dc4aa25dc",
- "format": 1
- },
- {
- "name": "roles/idrac_user/tasks/main.yml",
+ "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cdf0aed407cb791aaabd992919f0d1449b6199b9fe04fe1ccdee43c7e8a3ef67",
+ "chksum_sha256": "3b297907849be26643ed73ca919c987c17057759b307c99eaa2176c8502d8294",
"format": 1
},
{
- "name": "roles/idrac_user/tasks/present.yml",
+ "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info_filters.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31094d92ad1e121dfb3f27d6dc0e8fdf471ee01b2168ba2cfbd66078dd0054c0",
+ "chksum_sha256": "1ee87a539547297b73866e1d075f7c81fc3ad2147b443c6fbf3b2454367d6b47",
"format": 1
},
{
- "name": "roles/idrac_user/tests",
+ "name": "playbooks/ome/firmware/baseline/component_reports_filtering",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "roles/idrac_user/tests/inventory",
+ "name": "playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_baseline.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e02233819b1a09844410549191813f7cc7ba360f21298578f4ba1727a27d87fc",
+ "chksum_sha256": "dbc8e4f4a6f5581fc0479f3fccef4984fb26ef93650a41fa0ff2efc04e741a09",
"format": 1
},
{
- "name": "roles/idrac_user/tests/test.yml",
+ "name": "playbooks/ome/firmware/baseline/component_reports_filtering/component_complaince_report_with_devices.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "594b1a865eae9e2fc2a8f2f9daf0872a5c4a6b697af7167eadfb52df1d5009be",
+ "chksum_sha256": "3d2d40aaca55f42d88a21c9556b08ddfb60ee692a2ff95c2674e33ecaed3a291",
"format": 1
},
{
- "name": "roles/idrac_user/vars",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline_compliance_info.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1fec86960e36907b5574822b9a60531ad5812bc465a5473cc57ccd3054f076de",
"format": 1
},
{
- "name": "roles/idrac_user/vars/main.yml",
+ "name": "playbooks/ome/firmware/baseline/ome_firmware_baseline.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71a5ee7c5ff8f63dcdb4c815fd75a8694be391c701903e8297b8e3eecf83d12d",
+ "chksum_sha256": "7b80747cd3904de0e8d86046cc233e411bc4083f44e2c6f6f8a875ade32dd4c6",
"format": 1
},
{
- "name": "tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "playbooks/ome/firmware/ome_firmware_with_job_tracking.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0a466f3d8237f5442bd13e312ca31d2a6482d63f0f7ca22f38c826422a1ddbca",
"format": 1
},
{
- "name": "tests/.gitignore",
+ "name": "playbooks/ome/ome_job_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "67b0f799f12726b149fd9677334c10b059e231bf2fa7150d55e9b4d5fd242062",
+ "chksum_sha256": "7a159c605c87d21e18f581b789ec99a5ec0431f22d4cd223e20483708175b814",
"format": 1
},
{
- "name": "tests/README.md",
+ "name": "playbooks/ome/ome_smart_fabric_info.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e37abcd3cbb5e2200f5702471e8a808aa759c9bf012137d4391f605700af12b",
+ "chksum_sha256": "ecaca37de3625197151d6684472b54c9b3bc712bc9c47d055beb8a1ecdc401e0",
"format": 1
},
{
- "name": "tests/__init__.py",
+ "name": "requirements.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "32d916481c121c551a11659f3e0ba3b2e38c580ef5dea95cd1047560f3df514b",
"format": 1
},
{
- "name": "tests/config.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5b21de52fbe6ce3aa2369ef41e5ee7fc7e2204d20d6232f2d29fe58886899f10",
+ "name": "tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
@@ -8359,10 +8373,10 @@
"format": 1
},
{
- "name": "tests/unit/__init__.py",
+ "name": "tests/unit/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "5ec603ab1d2b3071743853324fd0db34d886f78f1543c1fa700ad1c904a6fd25",
"format": 1
},
{
@@ -8373,13 +8387,6 @@
"format": 1
},
{
- "name": "tests/unit/plugins/__init__.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
"name": "tests/unit/plugins/module_utils",
"ftype": "dir",
"chksum_type": null,
@@ -8387,38 +8394,38 @@
"format": 1
},
{
- "name": "tests/unit/plugins/module_utils/__init__.py",
+ "name": "tests/unit/plugins/module_utils/test_ome.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "abdff09b79463b5bab943ab1eed8fe083290c62d5cd3f37e344cc72735687f6b",
"format": 1
},
{
- "name": "tests/unit/plugins/module_utils/test_idrac_redfish.py",
+ "name": "tests/unit/plugins/module_utils/test_session_utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "789188c5bb56dbf5925ed648e6024de2c575580961f9ea94b48817d7edb98901",
+ "chksum_sha256": "b8279ca95da6d91825fc327a48c0a90cef7e714c15d9cac0a98f75dd823191ec",
"format": 1
},
{
- "name": "tests/unit/plugins/module_utils/test_ome.py",
+ "name": "tests/unit/plugins/module_utils/test_redfish.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abdff09b79463b5bab943ab1eed8fe083290c62d5cd3f37e344cc72735687f6b",
+ "chksum_sha256": "ea6f698bd5328030a3260bf0bdd354a268e67e3b1f2ad8a51a9eed59ad36218e",
"format": 1
},
{
- "name": "tests/unit/plugins/module_utils/test_redfish.py",
+ "name": "tests/unit/plugins/module_utils/test_idrac_redfish.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea6f698bd5328030a3260bf0bdd354a268e67e3b1f2ad8a51a9eed59ad36218e",
+ "chksum_sha256": "789188c5bb56dbf5925ed648e6024de2c575580961f9ea94b48817d7edb98901",
"format": 1
},
{
- "name": "tests/unit/plugins/module_utils/test_session_utils.py",
+ "name": "tests/unit/plugins/module_utils/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8279ca95da6d91825fc327a48c0a90cef7e714c15d9cac0a98f75dd823191ec",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
@@ -8429,339 +8436,346 @@
"format": 1
},
{
- "name": "tests/unit/plugins/modules/__init__.py",
+ "name": "tests/unit/plugins/modules/test_ome_server_interface_profiles.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0f48cb24a32aabf756cb9576ba969840c1575574a63a44a705d8778776b342ff",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/common.py",
+ "name": "tests/unit/plugins/modules/test_idrac_os_deployment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f706a3caa873b5e81b6f4a3076628f2383e570cda58814a4e2d874e22989cc1",
+ "chksum_sha256": "4e13dd9169319f1f6b659c372203b2537d9702a0da6f52c1278b618f7eec9d2e",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/conftest.py",
+ "name": "tests/unit/plugins/modules/test_idrac_system_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d50a8a54e1c284f2db79461ec1b50a7555f1b2dfd71aa1d99ffa1c0271c6977",
+ "chksum_sha256": "c2dd4206fb7a8fd1b9fd0075e62ec831f85d4678370a3cf39485df2bd27fd53e",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py",
+ "name": "tests/unit/plugins/modules/test_ome_user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "51b5435f256be611f442d7e14924493b476e7e965150d8f001e2080255e1ca45",
+ "chksum_sha256": "d953dab52deacad582e4bf060b3c2e6d82d490b2e0f872f2cbec4c7eac95df81",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_certificate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8b05f177533512dc258068adbc93270d8fc076de433893f64457a782c37df7b",
+ "chksum_sha256": "2b221a80551c727c27c22e7bfebdf3968b4d5b322814e330fd3923c9558ad491",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py",
+ "name": "tests/unit/plugins/modules/test_idrac_timezone_ntp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3aa7c78efc385f8f23769186e578cfea349e4918a7039391b2dad85ad866e9c4",
+ "chksum_sha256": "8b4464785500ba413275080c10cbc08884ae3a1219ced7b374ae1d4f1125bb76",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py",
+ "name": "tests/unit/plugins/modules/test_idrac_firmware.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7172c977250e7d810c1b37c93ac2c9d9e27c778f350c5be3f323305a9ef82d8a",
+ "chksum_sha256": "ba3a0726790706f990aa56f1a75091fb47622d722d0fd58e6ac635d6e167ff54",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py",
+ "name": "tests/unit/plugins/modules/test_idrac_user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9cc17795c8921e9b46e4b8f5ceeb4b48cedeb7f1c9d0f674841c49fb98096cc7",
+ "chksum_sha256": "04bf9afbb2aea8e129c1592329ff3701c13ed332487432674d7b86a989488334",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_attributes.py",
+ "name": "tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "443eea5340887b4af7d7a796e34d3ed99c07394a8fb9ac0af804757fb790f897",
+ "chksum_sha256": "e387e2956c280621c1d898b18785bcf281ed4f00c181aeb334697c492108d2bc",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_bios.py",
+ "name": "tests/unit/plugins/modules/test_dellemc_configure_idrac_eventing.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56961e8894b7997b6bafec9e21398a444cbafe2073f2263752a82d55f794a61d",
+ "chksum_sha256": "51b5435f256be611f442d7e14924493b476e7e965150d8f001e2080255e1ca45",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_boot.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f4066f490341d728623ed7b407319e3b467f9960aeb6f4d82ce441cfdcb0dd6c",
+ "chksum_sha256": "fe679abfeb71c0791e644ac6d501639c229c1ea6c64ae06ce0058d0930079490",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_certificates.py",
+ "name": "tests/unit/plugins/modules/test_dellemc_system_lockdown_mode.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "69ddcc325468f32296935a5aba441ebd2ad2983f9b898cd8475c3e49ac545831",
+ "chksum_sha256": "9cc17795c8921e9b46e4b8f5ceeb4b48cedeb7f1c9d0f674841c49fb98096cc7",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_firmware.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_network_services.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba3a0726790706f990aa56f1a75091fb47622d722d0fd58e6ac635d6e167ff54",
+ "chksum_sha256": "01b4ac372b95cbdea27248e39999953945a4a668be10194887f924668ee5808b",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_firmware_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_devices.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5d60aee695e2ea2a27d566dff581483cf66c27cdcc1601fdd7fc3f325d2cefca",
+ "chksum_sha256": "2db61429200f99069795d059690c4fdac00d946bad828827b8f8d6a686ea3da8",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_license.py",
+ "name": "tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7efe5b4fdf172a307c837bdcd8cf55e5b911cd17feb5db80509207ff0d813923",
+ "chksum_sha256": "df154e600f856210e4847b458fff5d98def22d85724bae5e85ecb02f347fbe49",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_active_directory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c18b501718e37c52efe369dcbd0779cc522d919107ed2828bb9b3396ecf1b49d",
+ "chksum_sha256": "3f031be7fd8a4448bd473d19b67b8c12ac1f5978b7a12fb5814b7604e3c71af0",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_network_time.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "860c64f7be6696f28329e37411e69d02dd3c8ebb85f06dc41a7ef8bcfd231b97",
+ "chksum_sha256": "42985f90c0f14499d6455b3be358b826d90e443bed4aef6948dca564befe2366",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_alerts_smtp.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be3d0c6ae7c2127c97172c75dbe50d2739d7a14f07eae6b6b1d566ddddb15dae",
+ "chksum_sha256": "a0f68b19dd63014f99f993b5049322aa10c962f37abc8b01f0c1edc056e14dc2",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_job_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "45e4e9c2e94584738e18f1be859172e06bb9ff3b66b89d857529b77cfab01a57",
+ "chksum_sha256": "528679c836f439cfadd6dede9b9cb7790b32329e8ddb834dff7acee88827f529",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_network.py",
+ "name": "tests/unit/plugins/modules/test_idrac_server_config_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bff1e11af44e955de3e2b3f93de22efe39676b27e587083531fa3ad55504ccee",
+ "chksum_sha256": "e014b78fdccacfb78721b7f2955cedeb4581e2dd12e75f38751ca3855e6bd9b1",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_network_attributes.py",
+ "name": "tests/unit/plugins/modules/test_ome_groups.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10c2a11793c47a4b49723d09f7e370875bc0bc3eb9e01ed5c02a2e06683752bb",
+ "chksum_sha256": "dafbbc8ff84e5d7195c1dfa0c4aac877cfc84524318fb9b93daee52ace901fa1",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_os_deployment.py",
+ "name": "tests/unit/plugins/modules/test_idrac_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4e13dd9169319f1f6b659c372203b2537d9702a0da6f52c1278b618f7eec9d2e",
+ "chksum_sha256": "bff1e11af44e955de3e2b3f93de22efe39676b27e587083531fa3ad55504ccee",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_redfish_storage_controller.py",
+ "name": "tests/unit/plugins/modules/test_ome_firmware_baseline_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df154e600f856210e4847b458fff5d98def22d85724bae5e85ecb02f347fbe49",
+ "chksum_sha256": "2b2b8d194c62941b6912236c9e0f6781c9c2940f9c19f81f56ee0b9350115642",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_reset.py",
+ "name": "tests/unit/plugins/modules/test_redfish_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0760350d332018dc32da2f005b39f7ae94983a2061df43084332ba6c78369eb6",
+ "chksum_sha256": "8673a7d654d7eebe6b54ace65b26922d3c23c8f35563cb27ebb8b15cd68899a2",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_server_config_profile.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_security_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e014b78fdccacfb78721b7f2955cedeb4581e2dd12e75f38751ca3855e6bd9b1",
+ "chksum_sha256": "e7bf2f655672078476cc228eb5fb457beaebf1f955b34e5d624eb3ed1b0c346f",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_syslog.py",
+ "name": "tests/unit/plugins/modules/test_ome_configuration_compliance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf10fcb460fa9bc23a992d7bdfd5b2dfeb9b6ee83a918f257c5ae470c434f5d4",
+ "chksum_sha256": "2bac6fc0e5be8cd2c4841f6ee758233db54442c8eb00fb4ada5ee4a044160447",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_system_info.py",
+ "name": "tests/unit/plugins/modules/test_dellemc_idrac_lc_attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c2dd4206fb7a8fd1b9fd0075e62ec831f85d4678370a3cf39485df2bd27fd53e",
+ "chksum_sha256": "3aa7c78efc385f8f23769186e578cfea349e4918a7039391b2dad85ad866e9c4",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_timezone_ntp.py",
+ "name": "tests/unit/plugins/modules/test_ome_server_interface_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b4464785500ba413275080c10cbc08884ae3a1219ced7b374ae1d4f1125bb76",
+ "chksum_sha256": "33e27297dbf2ba70e8a54ac1dfaf6967cd25e3c5d0fa67992661561bddc3d13e",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_user.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_console_preferences.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4284c52644fd69a018da7f858f8eae06c9d3f680517d748519820cda2780a5da",
+ "chksum_sha256": "23a93ab54d65b2e1d87c70f0ffedea617f124daad04b292a91d487d5cf8d25f8",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_user_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_smart_fabric_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04bf9afbb2aea8e129c1592329ff3701c13ed332487432674d7b86a989488334",
+ "chksum_sha256": "f91e9b2df7be4c4127d990c3a63f0e2c02c201deb40fb9a3627b14cc9a05e9f1",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_virtual_media.py",
+ "name": "tests/unit/plugins/modules/test_ome_template_network_vlan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c40d6af3e3701c4578b48ecdcd4d07848ae5399b0624295f636539ee862360f5",
+ "chksum_sha256": "53f47c4f82c035207a38e1c6989461f33f39faaf7e8949aba963dd37a1cedaeb",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_active_directory.py",
+ "name": "tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f031be7fd8a4448bd473d19b67b8c12ac1f5978b7a12fb5814b7604e3c71af0",
+ "chksum_sha256": "738841207e64e35da9d00cfc93fbf21efb040fbd0fbabb10ba4f08d469a2f98a",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_alert_policies.py",
+ "name": "tests/unit/plugins/modules/test_ome_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bafaade223c90287303f473dfd824732bc557b3fd757522ea3be72e276f77881",
+ "chksum_sha256": "435c50720b4e475953395e40d1e728a5167da0efc799a2bd49d9dbc199b7c391",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_alert_policies_actions_info.py",
+ "name": "tests/unit/plugins/modules/test_dellemc_idrac_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44a90dc50b1678910dc345a2b88d1692a63a00a0d575ecc7ce9f235c9f19949b",
+ "chksum_sha256": "7172c977250e7d810c1b37c93ac2c9d9e27c778f350c5be3f323305a9ef82d8a",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_alert_policies_category_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_session.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40124ea4b56fdc618ac554ed78b05746fa9f83e0e075cabddd639e193ace14ae",
+ "chksum_sha256": "510a5f61683a6a5ebf8d7cc52326a325fe18d7206dc175db65bd96d58cbee2fe",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_alert_policies_info.py",
+ "name": "tests/unit/plugins/modules/test_idrac_attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eef7bb09da512ee64a1528dc8e639db7bbef21aa2835e20e8cca8144c6dfc967",
+ "chksum_sha256": "443eea5340887b4af7d7a796e34d3ed99c07394a8fb9ac0af804757fb790f897",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_alert_policies_message_id_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_smart_fabric.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cac02856eaad4cb77781ab7c481660e7e4efa2bc72ef8530d68c62fa4e7012d7",
+ "chksum_sha256": "050f3d3f67a9f3da7dd18e92cb31548bc26ede439bfbf27d2413a6da02cc4c9d",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_alerts_smtp.py",
+ "name": "tests/unit/plugins/modules/test_idrac_diagnostics.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0f68b19dd63014f99f993b5049322aa10c962f37abc8b01f0c1edc056e14dc2",
+ "chksum_sha256": "fc3660e232516c0e1e625ddacefbb2b17b78969d065e76f99cd97aa944eda43d",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_alerts_syslog.py",
+ "name": "tests/unit/plugins/modules/test_idrac_firmware_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1562c23b20553b15252713cd5443dd6f7ffcea445f0f0841885121af22385c06",
+ "chksum_sha256": "5d60aee695e2ea2a27d566dff581483cf66c27cdcc1601fdd7fc3f325d2cefca",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_certificate.py",
+ "name": "tests/unit/plugins/modules/test_ome_alert_policies.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4cf9865aade153e9f8263120ed885fb3d56a07f810396649be3c8fe26d6e2e3a",
+ "chksum_sha256": "bafaade223c90287303f473dfd824732bc557b3fd757522ea3be72e276f77881",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_console_preferences.py",
+ "name": "tests/unit/plugins/modules/test_redfish_firmware_rollback.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23a93ab54d65b2e1d87c70f0ffedea617f124daad04b292a91d487d5cf8d25f8",
+ "chksum_sha256": "b74f2f92f2785380b6a40972f3fe64936d8111ec990b61498d198b2bfccf6f68",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_network_address.py",
+ "name": "tests/unit/plugins/modules/test_ome_discovery.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba7257a64dfa1324277c9c8159f0a9e246162e2ef68c0a07b8dff4213c518bac",
+ "chksum_sha256": "d8d8270271f1f92ab4b4efe84f40de6e1c03fd6d434f75ccbed76699e3bd63f4",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_network_proxy.py",
+ "name": "tests/unit/plugins/modules/test_ome_alert_policies_message_id_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "523422cb1cdbc42857be9f768b501b95274c93fa4bf5b1ccba5514cc3501bd90",
+ "chksum_sha256": "cac02856eaad4cb77781ab7c481660e7e4efa2bc72ef8530d68c62fa4e7012d7",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_network_settings.py",
+ "name": "tests/unit/plugins/modules/test_dellemc_configure_idrac_services.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8ae4cb9602e2cf66fcbc81e803fcc423875deb008eeb3cfe9965ac44a5dfa1a",
+ "chksum_sha256": "e8b05f177533512dc258068adbc93270d8fc076de433893f64457a782c37df7b",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_network_time.py",
+ "name": "tests/unit/plugins/modules/test_idrac_reset.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "42985f90c0f14499d6455b3be358b826d90e443bed4aef6948dca564befe2366",
+ "chksum_sha256": "e0eda341160f4409f879446bcd4b8313a623f8c9d000438e3ec0c9d11729af62",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_network_webserver.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_location.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5dc8fbbc99361e13eb3a4617f6e260c55044b69c53c552b7e0921580e608de6",
+ "chksum_sha256": "cbd189a7cd877069342d9d55a3abedde08cc7c8982aa3d1387f256baf69c5ade",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_application_security_settings.py",
+ "name": "tests/unit/plugins/modules/test_ome_profile_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e7bf2f655672078476cc228eb5fb457beaebf1f955b34e5d624eb3ed1b0c346f",
+ "chksum_sha256": "4aea850eca33936d709e5fc967578b5ccc077388f94cc2ae7541dfaf1342ef5d",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_chassis_slots.py",
+ "name": "tests/unit/plugins/modules/test_idrac_network_attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e4668f13509305834df305adef43400dfb621032bc29580ad15b52bead4b3083",
+ "chksum_sha256": "10c2a11793c47a4b49723d09f7e370875bc0bc3eb9e01ed5c02a2e06683752bb",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/modules/test_ome_identity_pool.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "df5bb7d2ab7b18151f47ee0cd2a4304de9e6d11517015148b0d58c405f0b868f",
"format": 1
},
{
@@ -8772,38 +8786,38 @@
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_configuration_compliance_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_alerts_syslog.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2bac6fc0e5be8cd2c4841f6ee758233db54442c8eb00fb4ada5ee4a044160447",
+ "chksum_sha256": "1562c23b20553b15252713cd5443dd6f7ffcea445f0f0841885121af22385c06",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_group.py",
+ "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe679abfeb71c0791e644ac6d501639c229c1ea6c64ae06ce0058d0930079490",
+ "chksum_sha256": "45e4e9c2e94584738e18f1be859172e06bb9ff3b66b89d857529b77cfab01a57",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_info.py",
+ "name": "tests/unit/plugins/modules/test_idrac_syslog.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3427f3a4be484d1349156864ae1476badd4c88ad84a901ac5afd832df2fa724",
+ "chksum_sha256": "bf10fcb460fa9bc23a992d7bdfd5b2dfeb9b6ee83a918f257c5ae470c434f5d4",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_local_access_configuration.py",
+ "name": "tests/unit/plugins/modules/test_idrac_license.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f83c9a95759be04419109a20d5a49512a7fab40c5d655aa3057d0a0e44bad861",
+ "chksum_sha256": "95bdde1ac82063491da54968ac56f56d9be5f7364f7c7b06635573e428814b16",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_location.py",
+ "name": "tests/unit/plugins/modules/test_ome_domain_user_groups.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cbd189a7cd877069342d9d55a3abedde08cc7c8982aa3d1387f256baf69c5ade",
+ "chksum_sha256": "502d0f2033c22d39a971906066eb5e2533a915d5079d0ba4a42347a2da76eb22",
"format": 1
},
{
@@ -8814,339 +8828,346 @@
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_network_services.py",
+ "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_jobs.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "01b4ac372b95cbdea27248e39999953945a4a668be10194887f924668ee5808b",
+ "chksum_sha256": "860c64f7be6696f28329e37411e69d02dd3c8ebb85f06dc41a7ef8bcfd231b97",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_power_settings.py",
+ "name": "tests/unit/plugins/modules/test_ome_alert_policies_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c305977b799fc5acacb5a13a1bb1b724b874c15c47a51402921b4b9edda6a4c",
+ "chksum_sha256": "eef7bb09da512ee64a1528dc8e639db7bbef21aa2835e20e8cca8144c6dfc967",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_device_quick_deploy.py",
+ "name": "tests/unit/plugins/modules/test_ome_alert_policies_category_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9635a328e7d8b444de75b441e3fd8f15c7bbb407ea4b8f4ee7c970215596b695",
+ "chksum_sha256": "40124ea4b56fdc618ac554ed78b05746fa9f83e0e075cabddd639e193ace14ae",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_devices.py",
+ "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_logs.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2db61429200f99069795d059690c4fdac00d946bad828827b8f8d6a686ea3da8",
+ "chksum_sha256": "be3d0c6ae7c2127c97172c75dbe50d2739d7a14f07eae6b6b1d566ddddb15dae",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_diagnostics.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_network_webserver.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3fb5b4940be19ce98ef0a2d8290019b5f06093a5230a4f8e9b46eef8cc13a1bb",
+ "chksum_sha256": "c5dc8fbbc99361e13eb3a4617f6e260c55044b69c53c552b7e0921580e608de6",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_discovery.py",
+ "name": "tests/unit/plugins/modules/test_ome_firmware_catalog.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8d8270271f1f92ab4b4efe84f40de6e1c03fd6d434f75ccbed76699e3bd63f4",
+ "chksum_sha256": "3b16df578978033fd82dcd342f3e3286b807a480dc76723f5698dc1b5eb2b61d",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_domain_user_groups.py",
+ "name": "tests/unit/plugins/modules/test_idrac_virtual_media.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "502d0f2033c22d39a971906066eb5e2533a915d5079d0ba4a42347a2da76eb22",
+ "chksum_sha256": "c40d6af3e3701c4578b48ecdcd4d07848ae5399b0624295f636539ee862360f5",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_firmware.py",
+ "name": "tests/unit/plugins/modules/test_ome_chassis_slots.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a88df0a8140232d3e9aeeba3686515d430d1626d96b518d5f7fcb28eb8d61e1",
+ "chksum_sha256": "e4668f13509305834df305adef43400dfb621032bc29580ad15b52bead4b3083",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_firmware_baseline.py",
+ "name": "tests/unit/plugins/modules/test_ome_alert_policies_actions_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10f367578ed301729cfa4190a45191545ed5796d902248f34590c4481fdcd5eb",
+ "chksum_sha256": "44a90dc50b1678910dc345a2b88d1692a63a00a0d575ecc7ce9f235c9f19949b",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_firmware_baseline_compliance_info.py",
+ "name": "tests/unit/plugins/modules/test_idrac_boot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e387e2956c280621c1d898b18785bcf281ed4f00c181aeb334697c492108d2bc",
+ "chksum_sha256": "2246110a56d5cb49d91190e4e86ed3dcb43faa6c99c14fa5054fd64dda35bba8",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_firmware_baseline_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_network_address.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b2b8d194c62941b6912236c9e0f6781c9c2940f9c19f81f56ee0b9350115642",
+ "chksum_sha256": "ba7257a64dfa1324277c9c8159f0a9e246162e2ef68c0a07b8dff4213c518bac",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_firmware_catalog.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_network_proxy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b16df578978033fd82dcd342f3e3286b807a480dc76723f5698dc1b5eb2b61d",
+ "chksum_sha256": "523422cb1cdbc42857be9f768b501b95274c93fa4bf5b1ccba5514cc3501bd90",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_groups.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_quick_deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dafbbc8ff84e5d7195c1dfa0c4aac877cfc84524318fb9b93daee52ace901fa1",
+ "chksum_sha256": "9635a328e7d8b444de75b441e3fd8f15c7bbb407ea4b8f4ee7c970215596b695",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_identity_pool.py",
+ "name": "tests/unit/plugins/modules/test_redfish_event_subscription.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df5bb7d2ab7b18151f47ee0cd2a4304de9e6d11517015148b0d58c405f0b868f",
+ "chksum_sha256": "8b63ef2ac94d46a42ad2ff543ac70afde793e5caf967bc940566fa6ee6c289a2",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_job_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_firmware_baseline.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "528679c836f439cfadd6dede9b9cb7790b32329e8ddb834dff7acee88827f529",
+ "chksum_sha256": "10f367578ed301729cfa4190a45191545ed5796d902248f34590c4481fdcd5eb",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_network_port_breakout.py",
+ "name": "tests/unit/plugins/modules/test_idrac_certificates.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "98c509977de9377cc6fb865530149d1a58f3230edf111cea2da5fe0a8da6fc20",
+ "chksum_sha256": "f59d4a38f670810dfa34815fa07562da89e2f5833b7f52b15216a621b069a064",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_network_vlan.py",
+ "name": "tests/unit/plugins/modules/test_ome_firmware.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d764453f483885c2265e3de93cd2750af9248d6c90e3e27a82b8e67b2a03a2cf",
+ "chksum_sha256": "2a88df0a8140232d3e9aeeba3686515d430d1626d96b518d5f7fcb28eb8d61e1",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_network_vlan_info.py",
+ "name": "tests/unit/plugins/modules/test_idrac_lifecycle_controller_job_status_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e2b407402c38936eff8f9f2e20e33ca7e356f664c51d8aa6984d27dd84302f5a",
+ "chksum_sha256": "c18b501718e37c52efe369dcbd0779cc522d919107ed2828bb9b3396ecf1b49d",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_powerstate.py",
+ "name": "tests/unit/plugins/modules/test_idrac_session.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b5979e60e2a0d99df0b1a598cb28763134e0e19bbae5ebbf2b48762d4a3f860",
+ "chksum_sha256": "34236d7174336a6e3105105579812e20cb1128fe96aec567e30814df86af4ac3",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_profile.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_local_access_configuration.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ef49f3f85e0a585ae05ee9fb61322533d1863cd8853a477bb26d3f81874c7b5",
+ "chksum_sha256": "f83c9a95759be04419109a20d5a49512a7fab40c5d655aa3057d0a0e44bad861",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_profile_info.py",
+ "name": "tests/unit/plugins/modules/common.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4aea850eca33936d709e5fc967578b5ccc077388f94cc2ae7541dfaf1342ef5d",
+ "chksum_sha256": "2f706a3caa873b5e81b6f4a3076628f2383e570cda58814a4e2d874e22989cc1",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_server_interface_profile_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_network_vlan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33e27297dbf2ba70e8a54ac1dfaf6967cd25e3c5d0fa67992661561bddc3d13e",
+ "chksum_sha256": "d764453f483885c2265e3de93cd2750af9248d6c90e3e27a82b8e67b2a03a2cf",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_server_interface_profiles.py",
+ "name": "tests/unit/plugins/modules/test_idrac_storage_volume.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f48cb24a32aabf756cb9576ba969840c1575574a63a44a705d8778776b342ff",
+ "chksum_sha256": "6dc99be4366d1e9d363f5af96a2dbc2110ba8389029399b063814f6ff341fc6a",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_smart_fabric.py",
+ "name": "tests/unit/plugins/modules/test_idrac_bios.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "050f3d3f67a9f3da7dd18e92cb31548bc26ede439bfbf27d2413a6da02cc4c9d",
+ "chksum_sha256": "56961e8894b7997b6bafec9e21398a444cbafe2073f2263752a82d55f794a61d",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_smart_fabric_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_template_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f91e9b2df7be4c4127d990c3a63f0e2c02c201deb40fb9a3627b14cc9a05e9f1",
+ "chksum_sha256": "35eb0483714d6a253d32010fdcd4421c5006dd565bd4c4f5bad984ad25f7b941",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_smart_fabric_uplink.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "738841207e64e35da9d00cfc93fbf21efb040fbd0fbabb10ba4f08d469a2f98a",
+ "chksum_sha256": "f3427f3a4be484d1349156864ae1476badd4c88ad84a901ac5afd832df2fa724",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_smart_fabric_uplink_info.py",
+ "name": "tests/unit/plugins/modules/test_redfish_powerstate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be29eacdaab10d28070c5c410b3de63e6427c76dbed8f9690837847a31124cd8",
+ "chksum_sha256": "711d5df101c83c29218d70c8952a3cf185658e3de1ac4f9a737e9ba082d9bdf4",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_template.py",
+ "name": "tests/unit/plugins/modules/test_ome_template_identity_pool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "435c50720b4e475953395e40d1e728a5167da0efc799a2bd49d9dbc199b7c391",
+ "chksum_sha256": "650fc4c361e3a0d79f6a794ad3e43efac1a102a9b49ea75a1d3ae9dbd2b3cb3b",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_template_identity_pool.py",
+ "name": "tests/unit/plugins/modules/test_ome_device_power_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "650fc4c361e3a0d79f6a794ad3e43efac1a102a9b49ea75a1d3ae9dbd2b3cb3b",
+ "chksum_sha256": "5c305977b799fc5acacb5a13a1bb1b724b874c15c47a51402921b4b9edda6a4c",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_template_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35eb0483714d6a253d32010fdcd4421c5006dd565bd4c4f5bad984ad25f7b941",
+ "chksum_sha256": "72117dcb003f648d2b2e0d58612903eb64e2dc1c120eaef5f3571331a79e0f3f",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_template_network_vlan.py",
+ "name": "tests/unit/plugins/modules/test_ome_profile.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23b09e9db14b8c7c758d72b118178c18c96418478b5e276a35d69ae06d4d2553",
+ "chksum_sha256": "1ef49f3f85e0a585ae05ee9fb61322533d1863cd8853a477bb26d3f81874c7b5",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_template_network_vlan_info.py",
+ "name": "tests/unit/plugins/modules/test_redfish_firmware.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53f47c4f82c035207a38e1c6989461f33f39faaf7e8949aba963dd37a1cedaeb",
+ "chksum_sha256": "2661f131bdea868cfe15bf380f4bd2e465c15c95533f12f82f680d6fa6d67691",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_user.py",
+ "name": "tests/unit/plugins/modules/test_ome_diagnostics.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72117dcb003f648d2b2e0d58612903eb64e2dc1c120eaef5f3571331a79e0f3f",
+ "chksum_sha256": "3fb5b4940be19ce98ef0a2d8290019b5f06093a5230a4f8e9b46eef8cc13a1bb",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_ome_user_info.py",
+ "name": "tests/unit/plugins/modules/test_ome_powerstate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d953dab52deacad582e4bf060b3c2e6d82d490b2e0f872f2cbec4c7eac95df81",
+ "chksum_sha256": "3b5979e60e2a0d99df0b1a598cb28763134e0e19bbae5ebbf2b48762d4a3f860",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_redfish_event_subscription.py",
+ "name": "tests/unit/plugins/modules/test_ome_network_vlan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b63ef2ac94d46a42ad2ff543ac70afde793e5caf967bc940566fa6ee6c289a2",
+ "chksum_sha256": "e2b407402c38936eff8f9f2e20e33ca7e356f664c51d8aa6984d27dd84302f5a",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_redfish_firmware.py",
+ "name": "tests/unit/plugins/modules/test_idrac_user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2661f131bdea868cfe15bf380f4bd2e465c15c95533f12f82f680d6fa6d67691",
+ "chksum_sha256": "4284c52644fd69a018da7f858f8eae06c9d3f680517d748519820cda2780a5da",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_redfish_firmware_rollback.py",
+ "name": "tests/unit/plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b74f2f92f2785380b6a40972f3fe64936d8111ec990b61498d198b2bfccf6f68",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_redfish_powerstate.py",
+ "name": "tests/unit/plugins/modules/test_ome_smart_fabric_uplink_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "711d5df101c83c29218d70c8952a3cf185658e3de1ac4f9a737e9ba082d9bdf4",
+ "chksum_sha256": "be29eacdaab10d28070c5c410b3de63e6427c76dbed8f9690837847a31124cd8",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/utils.py",
+ "name": "tests/unit/plugins/modules/test_ome_template_network_vlan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6dd69e26e1abab9e11a3c0d8e6212b37d8619036e394b351ccc99e480976da28",
+ "chksum_sha256": "23b09e9db14b8c7c758d72b118178c18c96418478b5e276a35d69ae06d4d2553",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_diagnostics.py",
+ "name": "tests/unit/plugins/modules/test_ome_application_network_settings.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8dc7291aa625412b4b452c5e7931cb7a62e1f41e2f32fa84679242398fcd5d50",
+ "chksum_sha256": "b8ae4cb9602e2cf66fcbc81e803fcc423875deb008eeb3cfe9965ac44a5dfa1a",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_storage_volume.py",
+ "name": "tests/unit/plugins/modules/test_ome_network_port_breakout.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd47447b5e83792a73cab14880da8ba3dc5300f1d3e5b2e46f199d5de323826b",
+ "chksum_sha256": "98c509977de9377cc6fb865530149d1a58f3230edf111cea2da5fe0a8da6fc20",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_redfish_storage_volume.py",
+ "name": "tests/unit/plugins/modules/conftest.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8673a7d654d7eebe6b54ace65b26922d3c23c8f35563cb27ebb8b15cd68899a2",
+ "chksum_sha256": "1d50a8a54e1c284f2db79461ec1b50a7555f1b2dfd71aa1d99ffa1c0271c6977",
"format": 1
},
{
- "name": "tests/unit/plugins/modules/test_idrac_session.py",
+ "name": "tests/unit/plugins/modules/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2417f4e5552f889987522e92e20aa0d2bf0d042022d157b71745df9549e4a16",
+ "chksum_sha256": "6dd69e26e1abab9e11a3c0d8e6212b37d8619036e394b351ccc99e480976da28",
"format": 1
},
{
- "name": "tests/unit/requirements.txt",
+ "name": "tests/unit/plugins/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ec603ab1d2b3071743853324fd0db34d886f78f1543c1fa700ad1c904a6fd25",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": ".ansible-lint-ignore",
+ "name": "tests/unit/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9259c9753c32d36dfca9f4c9858b16b12e03ab2cd5eeece7f53ee9ad7bb2b2aa",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "requirements.txt",
+ "name": "tests/.gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f0f045a08a6f73f1903309c1636aefca64bad5073cf5e1b3d092f0f4fc1806b",
+ "chksum_sha256": "67b0f799f12726b149fd9677334c10b059e231bf2fa7150d55e9b4d5fd242062",
"format": 1
},
{
- "name": "CHANGELOG.rst",
+ "name": "tests/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c9d74f37191657712d94c31aa6fdddebc11c869e8c6c77817fd005b71bce07dd",
+ "chksum_sha256": "0f8f02e27b21fcc1c8ef2a5d315a0f671b31cebb4dd515701db329db5c76fc2e",
"format": 1
},
{
- "name": "README.md",
+ "name": "tests/config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4a32d700abcd8da5d5122d404d99655a440ea072d77a6c84b5cc02382f4887b",
+ "chksum_sha256": "5b21de52fbe6ce3aa2369ef41e5ee7fc7e2204d20d6232f2d29fe58886899f10",
+ "format": 1
+ },
+ {
+ "name": "tests/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
}
],
diff --git a/ansible_collections/dellemc/openmanage/MANIFEST.json b/ansible_collections/dellemc/openmanage/MANIFEST.json
index f1bda6107..ef0fd32ef 100644
--- a/ansible_collections/dellemc/openmanage/MANIFEST.json
+++ b/ansible_collections/dellemc/openmanage/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "dellemc",
"name": "openmanage",
- "version": "9.2.0",
+ "version": "9.3.0",
"authors": [
"Jagadeesh N V <Jagadeesh.N.V@Dell.com>",
"Felix Stephen <Felix.S@Dell.com>",
@@ -46,7 +46,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c38b92a827649678ff0be97765d225e8df0902c89362e974b355d0a24549830",
+ "chksum_sha256": "361945caf4fa9f87213384a2b146bdabe0cdc1eb05f1c50843dd8f62f4911265",
"format": 1
},
"format": 1
diff --git a/ansible_collections/dellemc/openmanage/README.md b/ansible_collections/dellemc/openmanage/README.md
index 5223d3612..817c2db90 100644
--- a/ansible_collections/dellemc/openmanage/README.md
+++ b/ansible_collections/dellemc/openmanage/README.md
@@ -27,31 +27,73 @@ OpenManage Ansible Modules simplifies and automates provisioning, deployment, an
## Supported Platforms
* iDRAC8 based Dell PowerEdge Servers with firmware versions 2.85.85.85 and above.
* iDRAC9 based Dell PowerEdge Servers with firmware versions 6.10.80.00 and above.
- * Dell OpenManage Enterprise versions 4.0.0 and 4.0.1.
+ * Dell OpenManage Enterprise versions 3.10 and 4.1.
* Dell OpenManage Enterprise Modular versions 2.10.10 and above.
-## Prerequisites
- * [Ansible Core >= 2.16.4 and 2.15.9](https://github.com/ansible/ansible)
+## Requirements
+ * [Ansible Core >= 2.16.6 and 2.15.11](https://github.com/ansible/ansible)
* Python >= 3.9.6
* To run the iDRAC modules, install OpenManage Python Software Development Kit (OMSDK)
using either ```pip install omsdk --upgrade``` or ```pip install -r requirements.txt```.
OMSDK can also be installed from [Dell OpenManage Python SDK](https://github.com/dell/omsdk)
* Operating System
- * Red Hat Enterprise Linux (RHEL) 9.3 and 8.9
+ * Red Hat Enterprise Linux (RHEL) 9.4 and 8.9
* SUSE Linux Enterprise Server (SLES) 15 SP5 and 15 SP4
- * Ubuntu 22.04.4 and 22.04.3
+ * Ubuntu 24.04 and 22.04.4
## Installation
-* From [galaxy](https://galaxy.ansible.com/dellemc/openmanage):
-```ansible-galaxy collection install dellemc.openmanage```
+* From [Galaxy](https://galaxy.ansible.com/dellemc/openmanage) or [Automation Hub](https://console.redhat.com/ansible/automation-hub/repo/published/dellemc/openmanage):
+Install the latest Ansible collection from Ansible Galaxy or Automation Hub
+ ```
+ ansible-galaxy collection install dellemc.openmanage
+ ```
- - For offline installation on the Ansible control machine, download the required tar archive version of the collection from [Dell OpenManage collection](https://galaxy.ansible.com/dellemc/openmanage) and run the command given below:
- ```ansible-galaxy collection install dellemc-openmanage-<version>.tar.gz```
+* From [GitHub](https://github.com/dell/dellemc-openmanage-ansible-modules/tree/collections):
+Install the Ansible collection from the GitHub repository using the latest commit on the 'collections' branch
+ ```
+ ansible-galaxy collection install git+https://github.com/dell/dellemc-openmanage-ansible-modules.git,collections
+ ```
-* From [github](https://github.com/dell/dellemc-openmanage-ansible-modules/tree/collections):
-Install the collection from the github repository using the latest commit on the branch 'collections'
-```ansible-galaxy collection install git+https://github.com/dell/dellemc-openmanage-ansible-modules.git,collections```
+* To Upgrade:
+Update the `dellemc.openmanage` collection to the latest version available on [Galaxy](https://galaxy.ansible.com/dellemc/openmanage) and [Automation Hub](https://console.redhat.com/ansible/automation-hub/repo/published/dellemc/openmanage)
+ ```
+ ansible-galaxy collection install dellemc.openmanage --upgrade
+ ```
-## About
+* To install a specific version:
+Install a specific version of the collection from [Galaxy](https://galaxy.ansible.com/dellemc/openmanage) or [Automation Hub](https://console.redhat.com/ansible/automation-hub/repo/published/dellemc/openmanage)
+ ```
+ ansible-galaxy collection install dellemc.openmanage:==<version>
+ ```
+
+* Offline Installation:
+For offline installation on the Ansible control machine, download the required tar archive version of the collection from [Dell OpenManage collection](https://galaxy.ansible.com/dellemc/openmanage) and run the command given below:
+ ```
+ ansible-galaxy collection install dellemc-openmanage-<version>.tar.gz
+ ```
+
+For more details, see [Using Ansible collections](https://docs.ansible.com/ansible/devel/user_guide/collections_using.html)
+
+## Use Cases
+For more information about how to use the collection, refer to [OME modules](https://github.com/dell/dellemc-openmanage-ansible-modules/tree/collections/playbooks/ome), [iDRAC modules](https://github.com/dell/dellemc-openmanage-ansible-modules/tree/collections/playbooks/idrac), [Redfish modules](https://github.com/dell/dellemc-openmanage-ansible-modules/tree/collections/playbooks/redfish) and [roles](https://github.com/dell/dellemc-openmanage-ansible-modules/tree/collections/playbooks/roles).
+
+## Testing
+
+For more information about Unit testing, see [Unit testing](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/tests/README.md).
+
+For more information about Integration testing, see [Integration testing](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/tests/integrations/README.md).
+
+## Support
+
+For support, see [SUPPORT.md](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/docs/SUPPORT.md).
+
+## Release Notes
+
+For release notes, see [CHANGELOG.rst](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/CHANGELOG.rst).
+
+## Related Information
+Refer to the [Table of Contents](https://github.com/dell/dellemc-openmanage-ansible-modules/blob/collections/README.md#table-of-contents) for any other information on the Dell OpenManage Ansible Modules documentation.
+
+## License Information
Dell OpenManage Ansible Modules is 100% open source and community-driven. All components are available under [GPL-3.0-only](https://www.gnu.org/licenses/gpl-3.0.html) on GitHub.
diff --git a/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml b/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml
index 391fa597d..f027a4bba 100644
--- a/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml
+++ b/ansible_collections/dellemc/openmanage/changelogs/changelog.yaml
@@ -1834,3 +1834,58 @@ releases:
name: idrac_session
namespace: ''
release_date: '2024-04-30'
+ 9.3.0:
+ changes:
+ release_summary: '- The ``ome_session`` module is added to allow you to
+ create and delete the sessions on OpenManage Enterprise and
+ OpenManage Enterprise Modular.
+
+ - Added support to use session ID for authentication of iDRAC,
+ OpenManage Enterprise and OpenManage Enterprise Modular modules.
+
+ - Added time_to_wait option in ``idrac_storage_volume`` module.
+
+ - Added support for Python 3.12.'
+ major_changes:
+ - ome_session - This module allows you to create and delete the
+ sessions on OpenManage Enterprise and OpenManage Enterprise Modular.
+ - Added support to use session ID for authentication of iDRAC,
+ OpenManage Enterprise and OpenManage Enterprise Modular.
+ minor_changes:
+ - Added time_to_wait option in ``idrac_storage_volume`` module.
+ - Added support for Python 3.12.
+ bugfixes:
+ - Resolved the issue in ``idrac_virtual_media`` module where the
+ Authorization request header was included in the request.
+ (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/612)
+ - Resolved the issue in ``idrac_certificates`` module where
+ subject_alt_name parameter was only accepting first item in list.
+ (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/584)
+ - Resolved the issue in ``ome_application_certificate`` module related
+ to a padding error in generated CSR file.
+ (https://github.com/dell/dellemc-openmanage-ansible-modules/issues/370)
+ known_issues:
+ - idrac_storage_volume - Issue(290766) - The module will report success
+ instead of showing failure for new virtual creation on the BOSS-N1
+ controller if a virtual disk is already present on the same
+ controller.
+ - idrac_diagnostics - Issue(285322) - This module doesn't support
+ export of diagnostics file to HTTP and HTTPS share via SOCKS
+ proxy.
+ - idrac_firmware - Issue(279282) - This module does not support
+ firmware update using HTTP, HTTPS, and FTP shares with
+ authentication on iDRAC8.
+ - ome_diagnostics - Issue(279193) - Export of SupportAssist
+ collection logs to the share location fails on OME version 4.0.0.
+ - ome_smart_fabric_uplink - Issue(186024) - The module is supported by
+ OpenManage Enterprise Modular; however, it does not allow the
+ creation of multiple uplinks with the same name. If an uplink is
+ created using the same name as an existing uplink, then the existing
+ uplink is modified.
+ modules:
+ - description:
+ This module allows you to create and delete sessions on OpenManage
+ Enterprise and OpenManage Enterprise Modular.
+ name: ome_session
+ namespace: ''
+ release_date: '2024-05-31'
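
A minimal sketch of the session workflow described in the 9.3.0 entry above is shown below. The `ome_session` option and return-value names used here are assumptions for illustration only (the module is new in this release), and the hostname and credentials are placeholders; consult the module documentation for the exact interface.

```yaml
---
# Hedged sketch: create an OME session, then delete it when done.
# Option and return-value names for ome_session are assumed, not confirmed here.
- hosts: localhost
  gather_facts: false
  tasks:
    - name: Create a session on OpenManage Enterprise (assumed options)
      dellemc.openmanage.ome_session:
        hostname: "192.168.0.1"
        username: "admin"
        password: "password"
        state: present
      register: ome_auth

    - name: Delete the session when finished (assumed options and return keys)
      dellemc.openmanage.ome_session:
        hostname: "192.168.0.1"
        state: absent
        x_auth_token: "{{ ome_auth.x_auth_token }}"
        session_id: "{{ ome_auth.session_id }}"
```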
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_attributes.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_attributes.rst
index 13ffa9fa3..c6ded6257 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_attributes.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_attributes.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,21 +28,21 @@ Parameters
----------
idrac_attributes (optional, dict, None)
- Dictionary of iDRAC attributes and value. The attributes should be part of the Integrated Dell Remote Access Controller Attribute Registry. To view the list of attributes in Attribute Registry for iDRAC9 and above, see, https://*idrac_ip*/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1 and https://*idrac_ip*/redfish/v1/Registries/ManagerAttributeRegistry.
+ Dictionary of iDRAC attributes and value. The attributes should be part of the Integrated Dell Remote Access Controller Attribute Registry. To view the list of attributes in Attribute Registry for iDRAC9 and above, see, \ https://I(idrac_ip\ /redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/iDRAC.Embedded.1) and \ https://I(idrac_ip\ /redfish/v1/Registries/ManagerAttributeRegistry).
- For iDRAC8 based servers, derive the manager attribute name from Server Configuration Profile. If the manager attribute name in Server Configuration Profile is <GroupName>.<Instance>#<AttributeName> (for Example, 'SNMP.1#AgentCommunity') then the equivalent attribute name for Redfish is <GroupName>.<Instance>.<AttributeName> (for Example, 'SNMP.1.AgentCommunity').
+ For iDRAC8 based servers, derive the manager attribute name from Server Configuration Profile. If the manager attribute name in Server Configuration Profile is \<GroupName\>.\<Instance\>#\<AttributeName\> (for Example, 'SNMP.1#AgentCommunity') then the equivalent attribute name for Redfish is \<GroupName\>.\<Instance\>.\<AttributeName\> (for Example, 'SNMP.1.AgentCommunity').
system_attributes (optional, dict, None)
- Dictionary of System attributes and value. The attributes should be part of the Integrated Dell Remote Access Controller Attribute Registry. To view the list of attributes in Attribute Registry for iDRAC9 and above, see, https://*idrac_ip*/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1 and https://*idrac_ip*/redfish/v1/Registries/ManagerAttributeRegistry.
+ Dictionary of System attributes and value. The attributes should be part of the Integrated Dell Remote Access Controller Attribute Registry. To view the list of attributes in Attribute Registry for iDRAC9 and above, see, \ https://I(idrac_ip\ /redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/System.Embedded.1) and \ https://I(idrac_ip\ /redfish/v1/Registries/ManagerAttributeRegistry).
- For iDRAC8 based servers, derive the manager attribute name from Server Configuration Profile. If the manager attribute name in Server Configuration Profile is <GroupName>.<Instance>#<AttributeName> (for Example, 'ThermalSettings.1#ThermalProfile') then the equivalent attribute name for Redfish is <GroupName>.<Instance>.<AttributeName> (for Example, 'ThermalSettings.1.ThermalProfile').
+ For iDRAC8 based servers, derive the manager attribute name from Server Configuration Profile. If the manager attribute name in Server Configuration Profile is \<GroupName\>.\<Instance\>#\<AttributeName\> (for Example, 'ThermalSettings.1#ThermalProfile') then the equivalent attribute name for Redfish is \<GroupName\>.\<Instance\>.\<AttributeName\> (for Example, 'ThermalSettings.1.ThermalProfile').
lifecycle_controller_attributes (optional, dict, None)
- Dictionary of Lifecycle Controller attributes and value. The attributes should be part of the Integrated Dell Remote Access Controller Attribute Registry.To view the list of attributes in Attribute Registry for iDRAC9 and above, see, https://*idrac_ip*/redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1 and https://*idrac_ip*/redfish/v1/Registries/ManagerAttributeRegistry.
+ Dictionary of Lifecycle Controller attributes and value. The attributes should be part of the Integrated Dell Remote Access Controller Attribute Registry.To view the list of attributes in Attribute Registry for iDRAC9 and above, see, \ https://I(idrac_ip\ /redfish/v1/Managers/iDRAC.Embedded.1/Oem/Dell/DellAttributes/LifecycleController.Embedded.1) and \ https://I(idrac_ip\ /redfish/v1/Registries/ManagerAttributeRegistry).
- For iDRAC8 based servers, derive the manager attribute name from Server Configuration Profile. If the manager attribute name in Server Configuration Profile is <GroupName>.<Instance>#<AttributeName> (for Example, 'LCAttributes.1#AutoUpdate') then the equivalent attribute name for Redfish is <GroupName>.<Instance>.<AttributeName> (for Example, 'LCAttributes.1.AutoUpdate').
+ For iDRAC8 based servers, derive the manager attribute name from Server Configuration Profile. If the manager attribute name in Server Configuration Profile is \<GroupName\>.\<Instance\>#\<AttributeName\> (for Example, 'LCAttributes.1#AutoUpdate') then the equivalent attribute name for Redfish is \<GroupName\>.\<Instance\>.\<AttributeName\> (for Example, 'LCAttributes.1.AutoUpdate').
resource_id (optional, str, None)
@@ -53,24 +53,40 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -89,7 +105,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
- For iDRAC8 based servers, the values provided for the attributes are not validated. Ensure appropriate values are passed.
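
To make the documentation change above concrete, here is a minimal task sketch that uses the newly documented `x_auth_token` option (read from the `IDRAC_X_AUTH_TOKEN` environment variable, as described above) instead of `idrac_user`/`idrac_password`. The attribute shown is the example from the parameter description; the IP address is a placeholder.

```yaml
# Sketch only: session-token authentication per the options documented above.
# The attribute name is the doc's own example; idrac_ip is a placeholder.
- name: Update an iDRAC attribute using a session token
  dellemc.openmanage.idrac_attributes:
    idrac_ip: "192.168.0.1"
    x_auth_token: "{{ lookup('env', 'IDRAC_X_AUTH_TOKEN') }}"
    validate_certs: false
    idrac_attributes:
      SNMP.1.AgentCommunity: public
```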
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_bios.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_bios.rst
index 569b36553..588e397aa 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_bios.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_bios.rst
@@ -22,8 +22,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.490
-- python >= 3.9.6
+- omsdk \>= 1.2.490
+- python \>= 3.9.6
@@ -47,31 +47,31 @@ Parameters
apply_time (optional, str, Immediate)
- Apply time of the *attributes*.
+ Apply time of the \ :emphasis:`attributes`\ .
- This is applicable only to *attributes*.
+ This is applicable only to \ :emphasis:`attributes`\ .
- ``Immediate`` Allows the user to immediately reboot the host and apply the changes. *job_wait* is applicable.
+ \ :literal:`Immediate`\ Allows the user to immediately reboot the host and apply the changes. \ :emphasis:`job\_wait`\ is applicable.
- ``OnReset`` Allows the user to apply the changes on the next reboot of the host server.
+ \ :literal:`OnReset`\ Allows the user to apply the changes on the next reboot of the host server.
- ``AtMaintenanceWindowStart`` Allows the user to apply at the start of a maintenance window as specified in *maintenance_window*. A reboot job will be scheduled.
+ \ :literal:`AtMaintenanceWindowStart`\ Allows the user to apply at the start of a maintenance window as specified in \ :emphasis:`maintenance\_window`\ . A reboot job will be scheduled.
- ``InMaintenanceWindowOnReset`` Allows to apply after a manual reset but within the maintenance window as specified in *maintenance_window*.
+ \ :literal:`InMaintenanceWindowOnReset`\ Allows to apply after a manual reset but within the maintenance window as specified in \ :emphasis:`maintenance\_window`\ .
maintenance_window (optional, dict, None)
Option to schedule the maintenance window.
- This is required when *apply_time* is ``AtMaintenanceWindowStart`` or ``InMaintenanceWindowOnReset``.
+ This is required when \ :emphasis:`apply\_time`\ is \ :literal:`AtMaintenanceWindowStart`\ or \ :literal:`InMaintenanceWindowOnReset`\ .
start_time (True, str, None)
The start time for the maintenance window to be scheduled.
- The format is YYYY-MM-DDThh:mm:ss<offset>
+ The format is YYYY-MM-DDThh:mm:ss\<offset\>
- <offset> is the time offset from UTC that the current timezone set in iDRAC in the format: +05:30 for IST.
+ \<offset\> is the time offset from UTC that the current timezone set in iDRAC in the format: +05:30 for IST.
duration (True, int, None)
@@ -80,33 +80,33 @@ Parameters
attributes (optional, dict, None)
- Dictionary of BIOS attributes and value pair. Attributes should be part of the Redfish Dell BIOS Attribute Registry. Use https://*idrac_ip*/redfish/v1/Systems/System.Embedded.1/Bios to view the Redfish URI.
+ Dictionary of BIOS attributes and value pair. Attributes should be part of the Redfish Dell BIOS Attribute Registry. Use \ https://I(idrac_ip\ /redfish/v1/Systems/System.Embedded.1/Bios) to view the Redfish URI.
- This is mutually exclusive with *boot_sources*, *clear_pending*, and *reset_bios*.
+ This is mutually exclusive with \ :emphasis:`boot\_sources`\ , \ :emphasis:`clear\_pending`\ , and \ :emphasis:`reset\_bios`\ .
boot_sources (optional, list, None)
(deprecated)List of boot devices to set the boot sources settings.
- *boot_sources* is mutually exclusive with *attributes*, *clear_pending*, and *reset_bios*.
+ \ :emphasis:`boot\_sources`\ is mutually exclusive with \ :emphasis:`attributes`\ , \ :emphasis:`clear\_pending`\ , and \ :emphasis:`reset\_bios`\ .
- *job_wait* is not applicable. The module waits till the completion of this task.
+ \ :emphasis:`job\_wait`\ is not applicable. The module waits till the completion of this task.
- This feature is deprecated, please use :ref:`dellemc.openmanage.idrac_boot <dellemc.openmanage.idrac_boot_module>` for configuring boot sources.
+ This feature is deprecated, please use \ :ref:`dellemc.openmanage.idrac\_boot <ansible_collections.dellemc.openmanage.idrac_boot_module>`\ for configuring boot sources.
clear_pending (optional, bool, None)
Allows the user to clear all pending BIOS attributes changes.
- ``true`` will discard any pending changes to bios attributes or remove job if in scheduled state.
+ \ :literal:`true`\ will discard any pending changes to bios attributes or remove job if in scheduled state.
This operation will not create any job.
- ``false`` will not perform any operation.
+ \ :literal:`false`\ will not perform any operation.
- This is mutually exclusive with *boot_sources*, *attributes*, and *reset_bios*.
+ This is mutually exclusive with \ :emphasis:`boot\_sources`\ , \ :emphasis:`attributes`\ , and \ :emphasis:`reset\_bios`\ .
- ``Note`` Any BIOS job scheduled due to boot sources configuration will not be cleared.
+ \ :literal:`Note`\ Any BIOS job scheduled due to boot sources configuration will not be cleared.
reset_bios (optional, bool, None)
@@ -116,31 +116,31 @@ Parameters
This operation will not create any job.
- ``false`` will not perform any operation.
+ \ :literal:`false`\ will not perform any operation.
- This is mutually exclusive with *boot_sources*, *attributes*, and *clear_pending*.
+ This is mutually exclusive with \ :emphasis:`boot\_sources`\ , \ :emphasis:`attributes`\ , and \ :emphasis:`clear\_pending`\ .
- When ``true``, this action will always report as changes found to be applicable.
+ When \ :literal:`true`\ , this action will always report as changes found to be applicable.
reset_type (optional, str, graceful_restart)
- ``force_restart`` Forcefully reboot the host system.
+ \ :literal:`force\_restart`\ Forcefully reboot the host system.
- ``graceful_restart`` Gracefully reboot the host system.
+ \ :literal:`graceful\_restart`\ Gracefully reboot the host system.
- This is applicable for *reset_bios*, and *attributes* when *apply_time* is ``Immediate``.
+ This is applicable for \ :emphasis:`reset\_bios`\ , and \ :emphasis:`attributes`\ when \ :emphasis:`apply\_time`\ is \ :literal:`Immediate`\ .
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This is applicable for *attributes* when *apply_time* is ``Immediate``.
+ This is applicable for \ :emphasis:`attributes`\ when \ :emphasis:`apply\_time`\ is \ :literal:`Immediate`\ .
job_wait_timeout (optional, int, 1200)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
idrac_ip (True, str, None)
@@ -150,21 +150,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -182,11 +190,11 @@ Notes
-----
.. note::
- - omsdk is required to be installed only for *boot_sources* operation.
- - This module requires 'Administrator' privilege for *idrac_user*.
+ - omsdk is required to be installed only for \ :emphasis:`boot\_sources`\ operation.
+ - This module requires 'Administrator' privilege for \ :emphasis:`idrac\_user`\ .
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
@@ -312,7 +320,7 @@ status_msg (success, str, Successfully cleared pending BIOS attributes.)
msg (success, dict, {'CompletionTime': '2020-04-20T18:50:20', 'Description': 'Job Instance', 'EndTime': None, 'Id': 'JID_873888162305', 'JobState': 'Completed', 'JobType': 'ImportConfiguration', 'Message': 'Successfully imported and applied Server Configuration Profile.', 'MessageArgs': [], 'MessageId': 'SYS053', 'Name': 'Import Configuration', 'PercentComplete': 100, 'StartTime': 'TIME_NOW', 'Status': 'Success', 'TargetSettingsURI': None, 'retval': True})
- Status of the job for *boot_sources* or status of the action performed on bios.
+ Status of the job for \ :emphasis:`boot\_sources`\ or status of the action performed on bios.
invalid_attributes (on invalid attributes or values., dict, {'PxeDev1VlanId': 'Not a valid integer.', 'AcPwrRcvryUserDelay': 'Integer out of valid range.', 'BootSeqRetry': 'Invalid value for Enumeration.', 'Proc1Brand': 'Read only Attribute cannot be modified.', 'AssetTag': 'Attribute does not exist.'})
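
For reference against the `idrac_bios` options documented above, a minimal sketch of an immediate BIOS attribute update might look like the following. The attribute name/value pair is illustrative only (valid names come from the Redfish Dell BIOS Attribute Registry), and the connection details are placeholders.

```yaml
# Sketch only: applies BIOS attributes with apply_time=Immediate and waits for
# the job, per the options documented above. The attribute is illustrative.
- name: Apply a BIOS attribute change immediately
  dellemc.openmanage.idrac_bios:
    idrac_ip: "192.168.0.1"
    idrac_user: "admin"
    idrac_password: "password"
    validate_certs: false
    apply_time: Immediate
    attributes:
      NumLock: "On"
    job_wait: true
    job_wait_timeout: 1200
```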
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_boot.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_boot.rst
index 980423ee2..121c0bdab 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_boot.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_boot.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,19 +30,19 @@ Parameters
boot_options (optional, list, None)
Options to enable or disable the boot devices.
- This is mutually exclusive with *boot_order*, *boot_source_override_mode*, *boot_source_override_enabled* *boot_source_override_target*, and *uefi_target_boot_source_override*.
+ This is mutually exclusive with \ :emphasis:`boot\_order`\ , \ :emphasis:`boot\_source\_override\_mode`\ , \ :emphasis:`boot\_source\_override\_enabled`\ \ :emphasis:`boot\_source\_override\_target`\ , and \ :emphasis:`uefi\_target\_boot\_source\_override`\ .
boot_option_reference (optional, str, None)
FQDD of the boot device.
- This is mutually exclusive with *display_name*.
+ This is mutually exclusive with \ :emphasis:`display\_name`\ .
display_name (optional, str, None)
Display name of the boot source device.
- This is mutually exclusive with *boot_option_reference*.
+ This is mutually exclusive with \ :emphasis:`boot\_option\_reference`\ .
enabled (True, bool, None)
@@ -53,85 +53,85 @@ Parameters
boot_order (optional, list, None)
This option allows to set the boot devices in the required boot order sequences.
- This is mutually exclusive with *boot_options*.
+ This is mutually exclusive with \ :emphasis:`boot\_options`\ .
boot_source_override_mode (optional, str, None)
- The BIOS boot mode (either Legacy or UEFI) to be used when *boot_source_override_target* boot source is booted from.
+ The BIOS boot mode (either Legacy or UEFI) to be used when \ :emphasis:`boot\_source\_override\_target`\ boot source is booted from.
- ``legacy`` The system boot in non-UEF*Legacy* boot mode to the *boot_source_override_target*.
+ \ :literal:`legacy`\ The system boot in non-UEFI(Legacy) boot mode to the \ :emphasis:`boot\_source\_override\_target`\ .
- ``uefi`` The system boot in UEFI boot mode to the *boot_source_override_target*.
+ \ :literal:`uefi`\ The system boot in UEFI boot mode to the \ :emphasis:`boot\_source\_override\_target`\ .
- This is mutually exclusive with *boot_options*.
+ This is mutually exclusive with \ :emphasis:`boot\_options`\ .
boot_source_override_enabled (optional, str, None)
The state of the Boot Source Override feature.
- ``disabled`` The system boots normally.
+ \ :literal:`disabled`\ The system boots normally.
- ``once`` The system boots (one time) to the *boot_source_override_target*.
+ \ :literal:`once`\ The system boots (one time) to the \ :emphasis:`boot\_source\_override\_target`\ .
- ``continuous`` The system boots to the target specified in the *boot_source_override_target* until this property is set to Disabled.
+ \ :literal:`continuous`\ The system boots to the target specified in the \ :emphasis:`boot\_source\_override\_target`\ until this property is set to Disabled.
- The state is set to ``once`` for the one-time boot override and ``continuous`` for the remain-active-until—canceled override. If the state is set ``once`` or ``continuous``, the value is reset to ``disabled`` after the *boot_source_override_target* actions have completed successfully.
+ The state is set to \ :literal:`once`\ for the one-time boot override and \ :literal:`continuous`\ for the remain-active-until—canceled override. If the state is set \ :literal:`once`\ or \ :literal:`continuous`\ , the value is reset to \ :literal:`disabled`\ after the \ :emphasis:`boot\_source\_override\_target`\ actions have completed successfully.
Changes to this options do not alter the BIOS persistent boot order configuration.
- This is mutually exclusive with *boot_options*.
+ This is mutually exclusive with \ :emphasis:`boot\_options`\ .
boot_source_override_target (optional, str, None)
The boot source override target device to use during the next boot instead of the normal boot device.
- ``pxe`` performs PXE boot from the primary NIC.
+ \ :literal:`pxe`\ performs PXE boot from the primary NIC.
- ``floppy``, ``cd``, ``hdd``, ``sd_card`` performs boot from their devices respectively.
+ \ :literal:`floppy`\ , \ :literal:`cd`\ , \ :literal:`hdd`\ , \ :literal:`sd\_card`\ performs boot from their devices respectively.
- ``bios_setup`` performs boot into the native BIOS setup.
+ \ :literal:`bios\_setup`\ performs boot into the native BIOS setup.
- ``utilities`` performs boot from the local utilities.
+ \ :literal:`utilities`\ performs boot from the local utilities.
- ``uefi_target`` performs boot from the UEFI device path found in *uefi_target_boot_source_override*.
+ \ :literal:`uefi\_target`\ performs boot from the UEFI device path found in \ :emphasis:`uefi\_target\_boot\_source\_override`\ .
- If the *boot_source_override_target* is set to a value other than ``none`` then the *boot_source_override_enabled* is automatically set to ``once``.
+ If the \ :emphasis:`boot\_source\_override\_target`\ is set to a value other than \ :literal:`none`\ then the \ :emphasis:`boot\_source\_override\_enabled`\ is automatically set to \ :literal:`once`\ .
Changes to this options do not alter the BIOS persistent boot order configuration.
- This is mutually exclusive with *boot_options*.
+ This is mutually exclusive with \ :emphasis:`boot\_options`\ .
uefi_target_boot_source_override (optional, str, None)
- The UEFI device path of the device from which to boot when *boot_source_override_target* is ``uefi_target``.
+ The UEFI device path of the device from which to boot when \ :emphasis:`boot\_source\_override\_target`\ is \ :literal:`uefi\_target`\ .
- *boot_source_override_enabled* cannot be set to c(continuous) if *boot_source_override_target* set to ``uefi_target`` because this settings is defined in UEFI as a one-time-boot setting.
+ \ :emphasis:`boot\_source\_override\_enabled`\ cannot be set to c(continuous) if \ :emphasis:`boot\_source\_override\_target`\ set to \ :literal:`uefi\_target`\ because this settings is defined in UEFI as a one-time-boot setting.
Changes to this options do not alter the BIOS persistent boot order configuration.
- This is required if *boot_source_override_target* is ``uefi_target``.
+ This is required if \ :emphasis:`boot\_source\_override\_target`\ is \ :literal:`uefi\_target`\ .
- This is mutually exclusive with *boot_options*.
+ This is mutually exclusive with \ :emphasis:`boot\_options`\ .
reset_type (optional, str, graceful_restart)
- ``none`` Host system is not rebooted and *job_wait* is not applicable.
+ \ :literal:`none`\ Host system is not rebooted and \ :emphasis:`job\_wait`\ is not applicable.
- ``force_restart`` Forcefully reboot the Host system.
+ \ :literal:`force\_restart`\ Forcefully reboot the Host system.
- ``graceful_restart`` Gracefully reboot the Host system.
+ \ :literal:`graceful\_restart`\ Gracefully reboot the Host system.
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This is applicable when *reset_type* is ``force_reset`` or ``graceful_reset``.
+ This is applicable when \ :emphasis:`reset\_type`\ is \ :literal:`force\_reset`\ or \ :literal:`graceful\_reset`\ .
job_wait_timeout (optional, int, 900)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
resource_id (optional, str, None)
@@ -142,24 +142,40 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -178,7 +194,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
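[Editor's note] For reference, a minimal sketch of the environment-variable credential fallback described in the idrac_boot parameter changes above: the credentials are exported in the environment where the module runs, and idrac_user/idrac_password are omitted from the task. The IP address and boot target value are placeholders, not taken from this diff.

    # export IDRAC_USERNAME=username
    # export IDRAC_PASSWORD=password
    - name: Override the next boot source using credentials from the environment (sketch)
      dellemc.openmanage.idrac_boot:
        idrac_ip: "192.168.0.1"
        boot_source_override_target: pxe

Per the option description above, setting boot_source_override_target to a value other than none automatically sets boot_source_override_enabled to once.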
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_certificates.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_certificates.rst
index 747f15381..202b5a1a6 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_certificates.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_certificates.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python \>= 3.8.6
+- python \>= 3.9.6
@@ -126,13 +126,29 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst
index 1a29769e0..ba5831ab7 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_diagnostics.rst
@@ -214,13 +214,29 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware.rst
index 1cbb31095..7475b06b1 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware.rst
@@ -26,8 +26,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.503
-- python >= 3.9.6
+- omsdk \>= 1.2.503
+- python \>= 3.9.6
@@ -39,7 +39,7 @@ Parameters
share_user (optional, str, None)
- Network share user in the format 'user@domain' or 'domain\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
+ Network share user in the format 'user@domain' or 'domain\\\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
share_password (optional, str, None)
@@ -57,25 +57,25 @@ Parameters
catalog_file_name (optional, str, Catalog.xml)
- Catalog file name relative to the *share_name*.
+ Catalog file name relative to the \ :emphasis:`share\_name`\ .
ignore_cert_warning (optional, bool, True)
- Specifies if certificate warnings are ignored when HTTPS share is used. If ``true`` option is set, then the certificate warnings are ignored.
+ Specifies if certificate warnings are ignored when HTTPS share is used. If \ :literal:`true`\ option is set, then the certificate warnings are ignored.
apply_update (optional, bool, True)
- If *apply_update* is set to ``true``, then the packages are applied.
+ If \ :emphasis:`apply\_update`\ is set to \ :literal:`true`\ , then the packages are applied.
- If *apply_update* is set to ``false``, no updates are applied, and a catalog report of packages is generated and returned.
+ If \ :emphasis:`apply\_update`\ is set to \ :literal:`false`\ , no updates are applied, and a catalog report of packages is generated and returned.
reboot (optional, bool, False)
Provides the option to apply the update packages immediately or in the next reboot.
- If *reboot* is set to ``true``, then the packages are applied immediately.
+ If \ :emphasis:`reboot`\ is set to \ :literal:`true`\ , then the packages are applied immediately.
- If *reboot* is set to ``false``, then the packages are staged and applied in the next reboot.
+ If \ :emphasis:`reboot`\ is set to \ :literal:`false`\ , then the packages are staged and applied in the next reboot.
Packages that do not require a reboot are applied immediately irrespective of I (reboot).
@@ -83,15 +83,15 @@ Parameters
proxy_support (optional, str, Off)
Specifies if a proxy should be used.
- Proxy parameters are applicable on ``HTTP``, ``HTTPS``, and ``FTP`` share type of repositories.
+ Proxy parameters are applicable on \ :literal:`HTTP`\ , \ :literal:`HTTPS`\ , and \ :literal:`FTP`\ share type of repositories.
- ``ParametersProxy``, sets the proxy parameters for the current firmware operation.
+ \ :literal:`ParametersProxy`\ , sets the proxy parameters for the current firmware operation.
- ``DefaultProxy``, iDRAC uses the proxy values set by default.
+ \ :literal:`DefaultProxy`\ , iDRAC uses the proxy values set by default.
- Default Proxy can be set in the Lifecycle Controller attributes using :ref:`dellemc.openmanage.idrac_attributes <dellemc.openmanage.idrac_attributes_module>`.
+ Default Proxy can be set in the Lifecycle Controller attributes using \ :ref:`dellemc.openmanage.idrac\_attributes <ansible_collections.dellemc.openmanage.idrac_attributes_module>`\ .
- ``Off``, will not use the proxy.
+ \ :literal:`Off`\ , will not use the proxy.
For iDRAC8 based servers, use proxy server with basic authentication.
@@ -101,21 +101,21 @@ Parameters
proxy_server (optional, str, None)
The IP address of the proxy server.
- This IP will not be validated. The download job will be created even for invalid *proxy_server*. Please check the results of the job for error details.
+ This IP will not be validated. The download job will be created even for invalid \ :emphasis:`proxy\_server`\ . Please check the results of the job for error details.
- This is required when *proxy_support* is ``ParametersProxy``.
+ This is required when \ :emphasis:`proxy\_support`\ is \ :literal:`ParametersProxy`\ .
proxy_port (optional, int, None)
The Port for the proxy server.
- This is required when *proxy_support* is ``ParametersProxy``.
+ This is required when \ :emphasis:`proxy\_support`\ is \ :literal:`ParametersProxy`\ .
proxy_type (optional, str, None)
The proxy type of the proxy server.
- This is required when *proxy_support* is ``ParametersProxy``.
+ This is required when \ :emphasis:`proxy\_support`\ is \ :literal:`ParametersProxy`\ .
Note: SOCKS4 proxy does not support IPv6 address.
@@ -135,21 +135,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -170,8 +178,8 @@ Notes
- Run this module from a system that has direct access to Dell iDRAC.
- Module will report success based on the iDRAC firmware update parent job status if there are no individual component jobs present.
- For server with iDRAC firmware 5.00.00.00 and later, if the repository contains unsupported packages, then the module will return success with a proper message.
- - This module supports both IPv4 and IPv6 address for *idrac_ip* and *share_name*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ and \ :emphasis:`share\_name`\ .
+ - This module supports \ :literal:`check\_mode`\ .
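[Editor's note] For reference, a minimal sketch of a staged catalog update through a parameters-based proxy, using the idrac_firmware proxy options described above. The repository URL, proxy address, and the proxy_type value (HTTP) are placeholders and assumptions, not taken from this diff.

    - name: Update firmware from an HTTPS repository via a proxy (sketch)
      dellemc.openmanage.idrac_firmware:
        idrac_ip: "192.168.0.1"
        idrac_user: "user_name"
        idrac_password: "user_password"
        share_name: "https://downloads.example.com/catalog/"
        catalog_file_name: "Catalog.xml"
        apply_update: true
        reboot: false
        proxy_support: ParametersProxy
        proxy_server: "192.168.0.2"
        proxy_port: 3128
        proxy_type: HTTP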
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware_info.rst
index 04c46a32e..9b33926a7 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_firmware_info.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -35,21 +35,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -68,8 +76,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_license.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_license.rst
index d4441a3ab..6dd16768c 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_license.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_license.rst
@@ -180,13 +180,29 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_job_status_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_job_status_info.rst
index 8cf0e133b..e2e437822 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_job_status_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_job_status_info.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -29,7 +29,7 @@ Parameters
----------
job_id (True, str, None)
- JOB ID in the format "JID_123456789012".
+ JOB ID in the format "JID\_123456789012".
idrac_ip (True, str, None)
@@ -39,21 +39,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -72,8 +80,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
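[Editor's note] For reference, a minimal sketch of querying a job by ID in the "JID_123456789012" format noted above; the address and credentials are placeholders.

    - name: Fetch Lifecycle Controller job status (sketch)
      dellemc.openmanage.idrac_lifecycle_controller_job_status_info:
        idrac_ip: "192.168.0.1"
        idrac_user: "user_name"
        idrac_password: "user_password"
        job_id: "JID_123456789012"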
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_jobs.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_jobs.rst
index 385894eb6..e89af22ae 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_jobs.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_jobs.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -41,21 +41,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -74,8 +82,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module does not support ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module does not support \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_logs.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_logs.rst
index 1b414e76e..e369d95ef 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_logs.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_logs.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -35,7 +35,7 @@ Parameters
share_user (optional, str, None)
- Network share user in the format 'user@domain' or 'domain\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
+ Network share user in the format 'user@domain' or 'domain\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
share_password (optional, str, None)
@@ -53,21 +53,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -85,11 +93,11 @@ Notes
-----
.. note::
- - This module requires 'Administrator' privilege for *idrac_user*.
+ - This module requires 'Administrator' privilege for \ :emphasis:`idrac\_user`\ .
- Exporting data to a local share is supported only on iDRAC9-based PowerEdge Servers and later.
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module does not support ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module does not support \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_status_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_status_info.rst
index 07ffe9446..63bc664ff 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_status_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_lifecycle_controller_status_info.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -35,21 +35,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -68,8 +76,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_network.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_network.rst
index c7bb593ae..3c631862b 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_network.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_network.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -35,7 +35,7 @@ Parameters
share_user (optional, str, None)
- (deprecated)Network share user name. Use the format 'user@domain' or 'domain\user' if user is part of a domain. This option is mandatory for CIFS share.
+ (deprecated)Network share user name. Use the format 'user@domain' or 'domain\\user' if user is part of a domain. This option is mandatory for CIFS share.
This option is deprecated and will be removed in the later version.
@@ -151,21 +151,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -183,10 +191,10 @@ Notes
-----
.. note::
- - This module requires 'Administrator' privilege for *idrac_user*.
+ - This module requires 'Administrator' privilege for \ :emphasis:`idrac\_user`\ .
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_network_attributes.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_network_attributes.rst
index fa0e89cae..df9fb1be0 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_network_attributes.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_network_attributes.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -40,17 +40,17 @@ Parameters
network_attributes (optional, dict, None)
- Dictionary of network attributes and value. To view the list of attributes and its structure, see the below API https://*idrac_ip*/redfish/v1/Systems/System.Embedded.1/NetworkAdapters/<network_id>/NetworkDeviceFunctions/ <network_port_id>/Settings and https://<idrac_ip>/redfish/v1/Schemas/NetworkDeviceFunction.v1_8_0.json.
+ Dictionary of network attributes and value. To view the list of attributes and its structure, see the below API \ https://I(idrac_ip\ /redfish/v1/Systems/System.Embedded.1/NetworkAdapters/\<network\_adapter\_id\>/NetworkDeviceFunctions/ \<network\_device\_function\_id\>/Settings) and \ https://%3Cidrac_ip%3E/redfish/v1/Schemas/NetworkDeviceFunction.v1_8_0.json\ .
- *network_attributes* is mutually exclusive with *oem_network_attributes*.
+ \ :emphasis:`network\_attributes`\ is mutually exclusive with \ :emphasis:`oem\_network\_attributes`\ .
oem_network_attributes (optional, dict, None)
- The attributes must be part of the Integrated Dell Remote Access Controller Attribute Registry. To view the list of attributes in Attribute Registry for iDRAC9 and newer versions. For more information, see, https://*idrac_ip*/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/<network_id>/NetworkDeviceFunctions/ <network_port_id>/Oem/Dell/DellNetworkAttributes/<network_port_id> and https://*idrac_ip*/redfish/v1/Registries/NetworkAttributesRegistry_<network_port_id>/ NetworkAttributesRegistry_network_port_id.json.
+ The attributes must be part of the Integrated Dell Remote Access Controller Attribute Registry. To view the list of attributes in Attribute Registry for iDRAC9 and newer versions. For more information, see, \ https://I(idrac_ip\ /redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/\<network\_adapter\_id\>/NetworkDeviceFunctions/ \<network\_device\_function\_id\>/Oem/Dell/DellNetworkAttributes/\<network\_device\_function\_id\>) and \ https://I(idrac_ip\ /redfish/v1/Registries/NetworkAttributesRegistry\_\<network\_device\_function\_id\>/ NetworkAttributesRegistry\_network\_port\_id.json).
For iDRAC8 based servers, derive the network attribute name from Server Configuration Profile.
- *oem_network_attributes* is mutually exclusive with *network_attributes*.
+ \ :emphasis:`oem\_network\_attributes`\ is mutually exclusive with \ :emphasis:`network\_attributes`\ .
resource_id (optional, str, None)
@@ -62,43 +62,43 @@ Parameters
clear_pending (optional, bool, False)
This parameter allows you to clear all the pending OEM network attributes changes.
- ``false`` does not perform any operation.
+ \ :literal:`false`\ does not perform any operation.
- ``true`` discards any pending changes to network attributes, or if a job is in scheduled state, removes the job.
+ \ :literal:`true`\ discards any pending changes to network attributes, or if a job is in scheduled state, removes the job.
- *apply_time* value will be ignored and will not have any impact for *clear_pending* operation.
+ \ :emphasis:`apply\_time`\ value will be ignored and will not have any impact for \ :emphasis:`clear\_pending`\ operation.
This operation is not supported for iDRAC8.
apply_time (True, str, None)
- Apply time of the *network_attributes* and *oem_network_attributes*.
+ Apply time of the \ :emphasis:`network\_attributes`\ and \ :emphasis:`oem\_network\_attributes`\ .
- This is applicable only to *network_attributes* and *oem_network_attributes*.
+ This is applicable only to \ :emphasis:`network\_attributes`\ and \ :emphasis:`oem\_network\_attributes`\ .
- ``Immediate`` allows the user to immediately reboot the host and apply the changes. *job_wait* is applicable. This is applicable for *oem_network_attributes* and *job_wait*.
+ \ :literal:`Immediate`\ allows the user to immediately reboot the host and apply the changes. \ :emphasis:`job\_wait`\ is applicable. This is applicable for \ :emphasis:`oem\_network\_attributes`\ and \ :emphasis:`job\_wait`\ .
- ``OnReset`` allows the user to apply the changes on the next reboot of the host server.
+ \ :literal:`OnReset`\ allows the user to apply the changes on the next reboot of the host server.
- ``AtMaintenanceWindowStart`` allows the user to apply at the start of a maintenance window as specified in *maintenance_window*. A reboot job is scheduled.
+ \ :literal:`AtMaintenanceWindowStart`\ allows the user to apply at the start of a maintenance window as specified in \ :emphasis:`maintenance\_window`\ . A reboot job is scheduled.
- ``InMaintenanceWindowOnReset`` allows to apply after a manual reset but within the maintenance window as specified in *maintenance_window*.
+ \ :literal:`InMaintenanceWindowOnReset`\ allows to apply after a manual reset but within the maintenance window as specified in \ :emphasis:`maintenance\_window`\ .
- This is not applicable for iDRAC8 and value will be ignored and will not have any impact for configuring *oem_network_attributes*.
+ This is not applicable for iDRAC8 and value will be ignored and will not have any impact for configuring \ :emphasis:`oem\_network\_attributes`\ .
maintenance_window (optional, dict, None)
This option allows you to schedule the maintenance window.
- This is required when *apply_time* is ``AtMaintenanceWindowStart`` or ``InMaintenanceWindowOnReset``.
+ This is required when \ :emphasis:`apply\_time`\ is \ :literal:`AtMaintenanceWindowStart`\ or \ :literal:`InMaintenanceWindowOnReset`\ .
start_time (True, str, None)
The start time for the maintenance window to be scheduled.
- The format is YYYY-MM-DDThh:mm:ss<offset>
+ The format is YYYY-MM-DDThh:mm:ss\<offset\>
- <offset> is the time offset from UTC that the current timezone set in iDRAC in the format: +05:30 for IST.
+ \<offset\> is the time offset from UTC that the current timezone set in iDRAC in the format: +05:30 for IST.
duration (True, int, None)
@@ -109,37 +109,53 @@ Parameters
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This is applicable when *apply_time* is ``Immediate`` for *oem_network_attributes*.
+ This is applicable when \ :emphasis:`apply\_time`\ is \ :literal:`Immediate`\ for \ :emphasis:`oem\_network\_attributes`\ .
job_wait_timeout (optional, int, 1200)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
idrac_ip (True, str, None)
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -159,7 +175,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- This module supports both IPv4 and IPv6 address.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -176,8 +192,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: "NIC.Integrated.1"
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: "NIC.Integrated.1"
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
oem_network_attributes:
BannerMessageTimeout: "4"
@@ -187,8 +203,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
oem_network_attributes:
BannerMessageTimeout: "4"
apply_time: OnReset
@@ -198,8 +214,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
oem_network_attributes:
BannerMessageTimeout: "4"
apply_time: AtMaintenanceWindowStart
@@ -212,8 +228,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
clear_pending: true
@@ -222,8 +238,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
clear_pending: true
oem_network_attributes:
@@ -234,8 +250,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
oem_network_attributes:
LnkSpeed: "10MbpsHalf"
@@ -249,8 +265,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: OnReset
network_attributes:
Ethernet:
@@ -262,8 +278,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
network_attributes:
Ethernet:
VLAN:
@@ -275,8 +291,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
network_attributes:
iSCSIBoot:
InitiatorIPAddress: 1.0.0.1
@@ -290,8 +306,8 @@ Examples
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
network_attributes:
Ethernet:
VLAN:
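[Editor's note] For reference, a minimal sketch combining the renamed adapter identifiers with the new token-based authentication introduced above for idrac_network_attributes. The token value is a placeholder, and an X-Auth-Token is assumed to have been obtained beforehand from an existing Redfish session.

    - name: Configure OEM network attributes using an X-Auth-Token (sketch)
      dellemc.openmanage.idrac_network_attributes:
        idrac_ip: "192.168.0.1"
        x_auth_token: "token_value"
        network_adapter_id: NIC.Integrated.1
        network_device_function_id: "NIC.Integrated.1-1-1"
        apply_time: "Immediate"
        oem_network_attributes:
          BannerMessageTimeout: "4"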
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_os_deployment.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_os_deployment.rst
index c5b526a87..5a7f2ced8 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_os_deployment.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_os_deployment.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -33,7 +33,7 @@ Parameters
share_user (optional, str, None)
- Network share user in the format 'user@domain' or 'domain\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
+ Network share user in the format 'user@domain' or 'domain\\\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
share_password (optional, str, None)
@@ -55,21 +55,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -88,8 +96,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module does not support ``check_mode``.
+ - This module supports both IPv4 and IPv6 address for \ :emphasis:`idrac\_ip`\ .
+ - This module does not support \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_redfish_storage_controller.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_redfish_storage_controller.rst
index c3592acc6..e7d372417 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_redfish_storage_controller.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_redfish_storage_controller.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,55 +30,55 @@ Parameters
command (optional, str, None)
These actions may require a system reset, depending on the capabilities of the controller.
- ``ResetConfig`` - Deletes all the virtual disks and unassigns all hot spares on physical disks. *controller_id* is required for this operation.
+ \ :literal:`ResetConfig`\ - Deletes all the virtual disks and unassigns all hot spares on physical disks. \ :emphasis:`controller\_id`\ is required for this operation.
- ``AssignSpare`` - Assigns a physical disk as a dedicated or global hot spare for a virtual disk. *target* is required for this operation.
+ \ :literal:`AssignSpare`\ - Assigns a physical disk as a dedicated or global hot spare for a virtual disk. \ :emphasis:`target`\ is required for this operation.
- ``SetControllerKey`` - Sets the key on controllers, which is used to encrypt the drives in Local Key Management(LKM). *controller_id*, *key*, and *key_id* are required for this operation.
+ \ :literal:`SetControllerKey`\ - Sets the key on controllers, which is used to encrypt the drives in Local Key Management(LKM). \ :emphasis:`controller\_id`\ , \ :emphasis:`key`\ , and \ :emphasis:`key\_id`\ are required for this operation.
- ``RemoveControllerKey`` - Deletes the encryption key on the controller. *controller_id* is required for this operation.
+ \ :literal:`RemoveControllerKey`\ - Deletes the encryption key on the controller. \ :emphasis:`controller\_id`\ is required for this operation.
- ``ReKey`` - Resets the key on the controller and it always reports as changes found when check mode is enabled. *controller_id*, *old_key*, *key_id*, and *key* is required for this operation.
+ \ :literal:`ReKey`\ - Resets the key on the controller and it always reports as changes found when check mode is enabled. \ :emphasis:`controller\_id`\ , \ :emphasis:`old\_key`\ , \ :emphasis:`key\_id`\ , and \ :emphasis:`key`\ is required for this operation.
- ``UnassignSpare`` - To unassign the Global or Dedicated hot spare. *target* is required for this operation.
+ \ :literal:`UnassignSpare`\ - To unassign the Global or Dedicated hot spare. \ :emphasis:`target`\ is required for this operation.
- ``EnableControllerEncryption`` - To enable Local Key Management (LKM) or Secure Enterprise Key Manager (SEKM) on controllers that support encryption of the drives. *controller_id*, *key*, and *key_id* are required for this operation.
+ \ :literal:`EnableControllerEncryption`\ - To enable Local Key Management (LKM) or Secure Enterprise Key Manager (SEKM) on controllers that support encryption of the drives. \ :emphasis:`controller\_id`\ , \ :emphasis:`key`\ , and \ :emphasis:`key\_id`\ are required for this operation.
- ``BlinkTarget`` - Blinks the target virtual drive or physical disk and it always reports as changes found when check mode is enabled. *target* or *volume_id* is required for this operation.
+ \ :literal:`BlinkTarget`\ - Blinks the target virtual drive or physical disk and it always reports as changes found when check mode is enabled. \ :emphasis:`target`\ or \ :emphasis:`volume\_id`\ is required for this operation.
- ``UnBlinkTarget`` - Unblink the target virtual drive or physical disk and and it always reports as changes found when check mode is enabled. *target* or *volume_id* is required for this operation.
+ \ :literal:`UnBlinkTarget`\ - Unblink the target virtual drive or physical disk and and it always reports as changes found when check mode is enabled. \ :emphasis:`target`\ or \ :emphasis:`volume\_id`\ is required for this operation.
- ``ConvertToRAID`` - Converts the disk form non-Raid to Raid. *target* is required for this operation.
+ \ :literal:`ConvertToRAID`\ - Converts the disk form non-Raid to Raid. \ :emphasis:`target`\ is required for this operation.
- ``ConvertToNonRAID`` - Converts the disk form Raid to non-Raid. *target* is required for this operation.
+ \ :literal:`ConvertToNonRAID`\ - Converts the disk form Raid to non-Raid. \ :emphasis:`target`\ is required for this operation.
- ``ChangePDStateToOnline`` - To set the disk status to online. *target* is required for this operation.
+ \ :literal:`ChangePDStateToOnline`\ - To set the disk status to online. \ :emphasis:`target`\ is required for this operation.
- ``ChangePDStateToOffline`` - To set the disk status to offline. *target* is required for this operation.
+ \ :literal:`ChangePDStateToOffline`\ - To set the disk status to offline. \ :emphasis:`target`\ is required for this operation.
- ``LockVirtualDisk`` - To encrypt the virtual disk. *volume_id* is required for this operation.
+ \ :literal:`LockVirtualDisk`\ - To encrypt the virtual disk. \ :emphasis:`volume\_id`\ is required for this operation.
- ``OnlineCapacityExpansion`` - To expand the size of virtual disk. *volume_id*, and *target* or *size* is required for this operation.
+ \ :literal:`OnlineCapacityExpansion`\ - To expand the size of virtual disk. \ :emphasis:`volume\_id`\ , and \ :emphasis:`target`\ or \ :emphasis:`size`\ is required for this operation.
target (optional, list, None)
Fully Qualified Device Descriptor (FQDD) of the target physical drive.
- This is mandatory when *command* is ``AssignSpare``, ``UnassisgnSpare``, ``ChangePDStateToOnline``, ``ChangePDStateToOffline``, ``ConvertToRAID``, or ``ConvertToNonRAID``.
+ This is mandatory when \ :emphasis:`command`\ is \ :literal:`AssignSpare`\ , \ :literal:`UnassisgnSpare`\ , \ :literal:`ChangePDStateToOnline`\ , \ :literal:`ChangePDStateToOffline`\ , \ :literal:`ConvertToRAID`\ , or \ :literal:`ConvertToNonRAID`\ .
- If *volume_id* is not specified or empty, this physical drive will be assigned as a global hot spare when *command* is ``AssignSpare``.
+ If \ :emphasis:`volume\_id`\ is not specified or empty, this physical drive will be assigned as a global hot spare when \ :emphasis:`command`\ is \ :literal:`AssignSpare`\ .
- When *command* is ``OnlineCapacityExpansion``, then *target* is mutually exclusive with *size*.
+ When \ :emphasis:`command`\ is \ :literal:`OnlineCapacityExpansion`\ , then \ :emphasis:`target`\ is mutually exclusive with \ :emphasis:`size`\ .
- Notes: Global or Dedicated hot spare can be assigned only once for a physical disk, Re-assign cannot be done when *command* is ``AssignSpare``.
+ Notes: Global or Dedicated hot spare can be assigned only once for a physical disk, Re-assign cannot be done when \ :emphasis:`command`\ is \ :literal:`AssignSpare`\ .
volume_id (optional, list, None)
Fully Qualified Device Descriptor (FQDD) of the volume.
- Applicable if *command* is ``AssignSpare``, ``BlinkTarget``, ``UnBlinkTarget`` or ``LockVirtualDisk``.
+ Applicable if \ :emphasis:`command`\ is \ :literal:`AssignSpare`\ , \ :literal:`BlinkTarget`\ , \ :literal:`UnBlinkTarget`\ or \ :literal:`LockVirtualDisk`\ .
- *volume_id* or *target* is required when the *command* is ``BlinkTarget`` or ``UnBlinkTarget``, if both are specified *target* is considered.
+ \ :emphasis:`volume\_id`\ or \ :emphasis:`target`\ is required when the \ :emphasis:`command`\ is \ :literal:`BlinkTarget`\ or \ :literal:`UnBlinkTarget`\ , if both are specified \ :emphasis:`target`\ is considered.
To know the number of volumes to which a hot spare can be assigned, refer iDRAC Redfish API documentation.
@@ -86,15 +86,15 @@ Parameters
controller_id (optional, str, None)
Fully Qualified Device Descriptor (FQDD) of the storage controller. For example-'RAID.Slot.1-1'.
- This option is mandatory when *command* is ``ResetConfig``, ``SetControllerKey``, ``RemoveControllerKey``, ``ReKey``, or ``EnableControllerEncryption``.
+ This option is mandatory when \ :emphasis:`command`\ is \ :literal:`ResetConfig`\ , \ :literal:`SetControllerKey`\ , \ :literal:`RemoveControllerKey`\ , \ :literal:`ReKey`\ , or \ :literal:`EnableControllerEncryption`\ .
- This option is mandatory for *attributes*.
+ This option is mandatory for \ :emphasis:`attributes`\ .
key (optional, str, None)
A new security key passphrase that the encryption-capable controller uses to create the encryption key. The controller uses the encryption key to lock or unlock access to the Self-Encrypting Drive (SED). Only one encryption key can be created for each controller.
- This is mandatory when *command* is ``SetControllerKey``, ``ReKey``, or ``EnableControllerEncryption`` and when *mode* is ``LKM``.
+ This is mandatory when \ :emphasis:`command`\ is \ :literal:`SetControllerKey`\ , \ :literal:`ReKey`\ , or \ :literal:`EnableControllerEncryption`\ and when \ :emphasis:`mode`\ is \ :literal:`LKM`\ .
The length of the key can be a maximum of 32 characters in length, where the expanded form of the special character is counted as a single character.
@@ -104,35 +104,35 @@ Parameters
key_id (optional, str, None)
This is a user supplied text label associated with the passphrase.
- This is mandatory when *command* is ``SetControllerKey``, ``ReKey``, or ``EnableControllerEncryption`` and when *mode* is ``LKM``.
+ This is mandatory when \ :emphasis:`command`\ is \ :literal:`SetControllerKey`\ , \ :literal:`ReKey`\ , or \ :literal:`EnableControllerEncryption`\ and when \ :emphasis:`mode`\ is \ :literal:`LKM`\ .
- The length of *key_id* can be a maximum of 32 characters in length and should not have any spaces.
+ The length of \ :emphasis:`key\_id`\ can be a maximum of 32 characters in length and should not have any spaces.
old_key (optional, str, None)
Security key passphrase used by the encryption-capable controller.
- This option is mandatory when *command* is ``ReKey`` and *mode* is ``LKM``.
+ This option is mandatory when \ :emphasis:`command`\ is \ :literal:`ReKey`\ and \ :emphasis:`mode`\ is \ :literal:`LKM`\ .
mode (optional, str, LKM)
Encryption mode of the encryption capable controller.
- This option is applicable only when *command* is ``ReKey`` or ``EnableControllerEncryption``.
+ This option is applicable only when \ :emphasis:`command`\ is \ :literal:`ReKey`\ or \ :literal:`EnableControllerEncryption`\ .
- ``SEKM`` requires secure enterprise key manager license on the iDRAC.
+ \ :literal:`SEKM`\ requires secure enterprise key manager license on the iDRAC.
- ``LKM`` to choose mode as local key mode.
+ \ :literal:`LKM`\ to choose mode as local key mode.
size (optional, int, None)
Capacity of the virtual disk to be expanded in MB.
- Check mode and Idempotency is not supported for *size*.
+ Check mode and Idempotency is not supported for \ :emphasis:`size`\ .
Minimum Online Capacity Expansion size must be greater than 100 MB of the current size.
- When *command* is ``OnlineCapacityExpansion``, then *size* is mutually exclusive with *target*.
+ When \ :emphasis:`command`\ is \ :literal:`OnlineCapacityExpansion`\ , then \ :emphasis:`size`\ is mutually exclusive with \ :emphasis:`target`\ .
attributes (optional, dict, None)
@@ -140,41 +140,41 @@ Parameters
This feature is only supported for iDRAC9 with firmware version 6.00.00.00 and above
- *controller_id* is required for this operation.
+ \ :emphasis:`controller\_id`\ is required for this operation.
- *apply_time* and *maintenance_window* is applicable for *attributes*.
+ \ :emphasis:`apply\_time`\ and \ :emphasis:`maintenance\_window`\ is applicable for \ :emphasis:`attributes`\ .
- *attributes* is mutually exclusive with *command*.
+ \ :emphasis:`attributes`\ is mutually exclusive with \ :emphasis:`command`\ .
- Use https://*idrac_ip*/redfish/v1/Schemas/DellOemStorageController.json to view the attributes.
+ Use \ https://I(idrac_ip\ /redfish/v1/Schemas/DellOemStorageController.json) to view the attributes.
apply_time (optional, str, Immediate)
- Apply time of the *attributes*.
+ Apply time of the \ :emphasis:`attributes`\ .
- This is applicable only to *attributes*.
+ This is applicable only to \ :emphasis:`attributes`\ .
- ``Immediate`` Allows the user to immediately reboot the host and apply the changes. *job_wait* is applicable.
+ \ :literal:`Immediate`\ Allows the user to immediately reboot the host and apply the changes. \ :emphasis:`job\_wait`\ is applicable.
- ``OnReset`` Allows the user to apply the changes on the next reboot of the host server.
+ \ :literal:`OnReset`\ Allows the user to apply the changes on the next reboot of the host server.
- ``AtMaintenanceWindowStart`` Allows the user to apply at the start of a maintenance window as specified in *maintenance_window*.
+ \ :literal:`AtMaintenanceWindowStart`\ Allows the user to apply at the start of a maintenance window as specified in \ :emphasis:`maintenance\_window`\ .
- ``InMaintenanceWindowOnReset`` Allows to apply after a manual reset but within the maintenance window as specified in *maintenance_window*.
+ \ :literal:`InMaintenanceWindowOnReset`\ Allows to apply after a manual reset but within the maintenance window as specified in \ :emphasis:`maintenance\_window`\ .
maintenance_window (optional, dict, None)
Option to schedule the maintenance window.
- This is required when *apply_time* is ``AtMaintenanceWindowStart`` or ``InMaintenanceWindowOnReset``.
+ This is required when \ :emphasis:`apply\_time`\ is \ :literal:`AtMaintenanceWindowStart`\ or \ :literal:`InMaintenanceWindowOnReset`\ .
start_time (True, str, None)
The start time for the maintenance window to be scheduled.
- The format is YYYY-MM-DDThh:mm:ss<offset>
+ The format is YYYY-MM-DDThh:mm:ss\<offset\>
- <offset> is the time offset from UTC that the current timezone set in iDRAC in the format: +05:30 for IST.
+ \<offset\> is the time offset from UTC of the current timezone set in iDRAC, in the format +05:30 for IST.
duration (optional, int, 900)
@@ -185,33 +185,49 @@ Parameters
job_wait (optional, bool, False)
Provides the option if the module has to wait for the job to be completed.
- This is applicable for *attributes* when *apply_time* is ``Immediate``.
+ This is applicable for \ :emphasis:`attributes`\ when \ :emphasis:`apply\_time`\ is \ :literal:`Immediate`\ .
job_wait_timeout (optional, int, 120)
The maximum wait time of job completion in seconds before the job tracking is stopped.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
baseuri (True, str, None)
- IP address of the target out-of-band controller. For example- <ipaddress>:<port>.
+ IP address of the target out-of-band controller. For example- \<ipaddress\>:\<port\>.
- username (True, str, None)
+ username (False, str, None)
Username of the target out-of-band controller.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ password (False, str, None)
Password of the target out-of-band controller.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -231,8 +247,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- This module is supported on iDRAC9.
- - This module always reports as changes found when *command* is ``ReKey``, ``BlinkTarget``, and ``UnBlinkTarget``.
- - This module supports ``check_mode``.
+ - This module always reports as changes found when \ :emphasis:`command`\ is \ :literal:`ReKey`\ , \ :literal:`BlinkTarget`\ , and \ :literal:`UnBlinkTarget`\ .
+ - This module supports \ :literal:`check\_mode`\ .
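A minimal sketch of the environment-variable fallback described above for this module's credentials. The module name, baseuri format, command, target, and ca_path come from the documentation; the host, drive FQDD, and exported values are illustrative assumptions.

    # Assumed shell setup, matching the examples above:
    #   export IDRAC_USERNAME=username
    #   export IDRAC_PASSWORD=password
    - name: Blink a target drive without passing credentials in the task
      dellemc.openmanage.idrac_redfish_storage_controller:
        baseuri: "192.168.0.1:443"   # <ipaddress>:<port>
        ca_path: "/path/to/ca_cert.pem"
        command: BlinkTarget
        target: "Disk.Bay.0:Enclosure.Internal.0-1:RAID.Slot.1-1"   # illustrative FQDD
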
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst
index 85ec3ca3c..57aed7dd1 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_reset.rst
@@ -62,7 +62,7 @@ Parameters
job_wait_timeout (optional, int, 600)
Time in seconds to wait for job completion.
- This is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
+ This is applicable when \ :emphasis:`wait\_for\_idrac`\ is \ :literal:`true`\ .
force_reset (optional, bool, False)
@@ -75,22 +75,30 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
- If the username is not provided, then the environment variable \ :literal:`IDRAC\_USERNAME`\ is used.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
Example: export IDRAC\_USERNAME=username
- idrac_password (True, str, None)
+ idrac_password (False, str, None)
iDRAC user password.
- If the password is not provided, then the environment variable \ :literal:`IDRAC\_PASSWORD`\ is used.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
Example: export IDRAC\_PASSWORD=password
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
+
idrac_port (optional, int, 443)
iDRAC port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_server_config_profile.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_server_config_profile.rst
index 4de82dd84..97e744a56 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_server_config_profile.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_server_config_profile.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.14
+- python \>= 3.9.14
@@ -28,11 +28,11 @@ Parameters
----------
command (optional, str, export)
- If ``import``, the module performs SCP import operation.
+ If \ :literal:`import`\ , the module performs SCP import operation.
- If ``export``, the module performs SCP export operation.
+ If \ :literal:`export`\ , the module performs SCP export operation.
- If ``preview``, the module performs SCP preview operation.
+ If \ :literal:`preview`\ , the module performs SCP preview operation.
job_wait (True, bool, None)
@@ -44,11 +44,11 @@ Parameters
CIFS, NFS, HTTP, and HTTPS network share types are supported.
- *share_name* is mutually exclusive with *import_buffer*.
+ \ :emphasis:`share\_name`\ is mutually exclusive with \ :emphasis:`import\_buffer`\ .
share_user (optional, str, None)
- Network share user in the format 'user@domain' or 'domain\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
+ Network share user in the format 'user@domain' or 'domain\\\\user' if user is part of a domain else 'user'. This option is mandatory for CIFS Network Share.
share_password (optional, str, None)
@@ -58,167 +58,183 @@ Parameters
scp_file (optional, str, None)
Name of the server configuration profile (SCP) file.
- This option is mandatory if *command* is ``import``.
+ This option is mandatory if \ :emphasis:`command`\ is \ :literal:`import`\ .
- The default format <idrac_ip>_YYmmdd_HHMMSS_scp is used if this option is not specified for ``import``.
+ The default format \<idrac\_ip\>\_YYmmdd\_HHMMSS\_scp is used if this option is not specified for \ :literal:`import`\ .
- *export_format* is used if the valid extension file is not provided for ``import``.
+ \ :emphasis:`export\_format`\ is used if the valid extension file is not provided for \ :literal:`import`\ .
scp_components (optional, list, ALL)
- If ``ALL``, this option exports or imports all components configurations from the SCP file.
+ If \ :literal:`ALL`\ , this option exports or imports all components configurations from the SCP file.
- If ``IDRAC``, this option exports or imports iDRAC configuration from the SCP file.
+ If \ :literal:`IDRAC`\ , this option exports or imports iDRAC configuration from the SCP file.
- If ``BIOS``, this option exports or imports BIOS configuration from the SCP file.
+ If \ :literal:`BIOS`\ , this option exports or imports BIOS configuration from the SCP file.
- If ``NIC``, this option exports or imports NIC configuration from the SCP file.
+ If \ :literal:`NIC`\ , this option exports or imports NIC configuration from the SCP file.
- If ``RAID``, this option exports or imports RAID configuration from the SCP file.
+ If \ :literal:`RAID`\ , this option exports or imports RAID configuration from the SCP file.
- If ``FC``, this option exports or imports FiberChannel configurations from the SCP file.
+ If \ :literal:`FC`\ , this option exports or imports FiberChannel configurations from the SCP file.
- If ``InfiniBand``, this option exports or imports InfiniBand configuration from the SCP file.
+ If \ :literal:`InfiniBand`\ , this option exports or imports InfiniBand configuration from the SCP file.
- If ``SupportAssist``, this option exports or imports SupportAssist configuration from the SCP file.
+ If \ :literal:`SupportAssist`\ , this option exports or imports SupportAssist configuration from the SCP file.
- If ``EventFilters``, this option exports or imports EventFilters configuration from the SCP file.
+ If \ :literal:`EventFilters`\ , this option exports or imports EventFilters configuration from the SCP file.
- If ``System``, this option exports or imports System configuration from the SCP file.
+ If \ :literal:`System`\ , this option exports or imports System configuration from the SCP file.
- If ``LifecycleController``, this option exports or imports SupportAssist configuration from the SCP file.
+ If \ :literal:`LifecycleController`\ , this option exports or imports LifecycleController configuration from the SCP file.
- If ``AHCI``, this option exports or imports EventFilters configuration from the SCP file.
+ If \ :literal:`AHCI`\ , this option exports or imports AHCI configuration from the SCP file.
- If ``PCIeSSD``, this option exports or imports PCIeSSD configuration from the SCP file.
+ If \ :literal:`PCIeSSD`\ , this option exports or imports PCIeSSD configuration from the SCP file.
- When *command* is ``export`` or ``import`` *target* with multiple components is supported only on iDRAC9 with firmware 6.10.00.00 and above.
+ When \ :emphasis:`command`\ is \ :literal:`export`\ or \ :literal:`import`\ , \ :emphasis:`target`\ with multiple components is supported only on iDRAC9 with firmware 6.10.00.00 and above.
shutdown_type (optional, str, Graceful)
- This option is applicable for ``import`` command.
+ This option is applicable for \ :literal:`import`\ command.
- If ``Graceful``, the job gracefully shuts down the operating system and turns off the server.
+ If \ :literal:`Graceful`\ , the job gracefully shuts down the operating system and turns off the server.
- If ``Forced``, it forcefully shuts down the server.
+ If \ :literal:`Forced`\ , it forcefully shuts down the server.
- If ``NoReboot``, the job that applies the SCP will pause until you manually reboot the server.
+ If \ :literal:`NoReboot`\ , the job that applies the SCP will pause until you manually reboot the server.
end_host_power_state (optional, str, On)
- This option is applicable for ``import`` command.
+ This option is applicable for \ :literal:`import`\ command.
- If ``On``, End host power state is on.
+ If \ :literal:`On`\ , End host power state is on.
- If ``Off``, End host power state is off.
+ If \ :literal:`Off`\ , End host power state is off.
export_format (optional, str, XML)
- Specify the output file format. This option is applicable for ``export`` command.
+ Specify the output file format. This option is applicable for \ :literal:`export`\ command.
export_use (optional, str, Default)
Specify the type of Server Configuration Profile (SCP) to be exported.
- This option is applicable when *command* is ``export``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`export`\ .
- ``Default`` Creates a non-destructive snapshot of the configuration.
+ \ :literal:`Default`\ Creates a non-destructive snapshot of the configuration.
- ``Replace`` Replaces a server with another or restores the servers settings to a known baseline.
+ \ :literal:`Replace`\ Replaces a server with another or restores the server's settings to a known baseline.
- ``Clone`` Clones settings from one server to another server with the identical hardware setup. All settings except I/O identity are updated (e.g. will reset RAID). The settings in this export will be destructive when uploaded to another system.
+ \ :literal:`Clone`\ Clones settings from one server to another server with the identical hardware setup. All settings except I/O identity are updated (e.g. will reset RAID). The settings in this export will be destructive when uploaded to another system.
ignore_certificate_warning (optional, str, ignore)
- If ``ignore``, it ignores the certificate warnings.
+ If \ :literal:`ignore`\ , it ignores the certificate warnings.
- If ``showerror``, it shows the certificate warnings.
+ If \ :literal:`showerror`\ , it shows the certificate warnings.
- *ignore_certificate_warning* is considered only when *share_name* is of type HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`ignore\_certificate\_warning`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTPS and is supported only on iDRAC9.
include_in_export (optional, str, default)
- This option is applicable when *command* is ``export``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`export`\ .
- If ``default``, it exports the default Server Configuration Profile.
+ If \ :literal:`default`\ , it exports the default Server Configuration Profile.
- If ``readonly``, it exports the SCP with readonly attributes.
+ If \ :literal:`readonly`\ , it exports the SCP with readonly attributes.
- If ``passwordhashvalues``, it exports the SCP with password hash values.
+ If \ :literal:`passwordhashvalues`\ , it exports the SCP with password hash values.
- If ``customtelemetry``, exports the SCP with custom telemetry attributes supported only in the iDRAC9.
+ If \ :literal:`customtelemetry`\ , exports the SCP with custom telemetry attributes supported only in the iDRAC9.
import_buffer (optional, str, None)
Used to import the buffer input of xml or json into the iDRAC.
- This option is applicable when *command* is ``import`` and ``preview``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`import`\ or \ :literal:`preview`\ .
- *import_buffer* is mutually exclusive with *share_name*.
+ \ :emphasis:`import\_buffer`\ is mutually exclusive with \ :emphasis:`share\_name`\ .
proxy_support (optional, bool, False)
Proxy to be enabled or disabled.
- *proxy_support* is considered only when *share_name* is of type HTTP or HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`proxy\_support`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTP or HTTPS and is supported only on iDRAC9.
proxy_type (optional, str, http)
- ``http`` to select HTTP type proxy.
+ \ :literal:`http`\ to select HTTP type proxy.
- ``socks4`` to select SOCKS4 type proxy.
+ \ :literal:`socks4`\ to select SOCKS4 type proxy.
- *proxy_type* is considered only when *share_name* is of type HTTP or HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`proxy\_type`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTP or HTTPS and is supported only on iDRAC9.
proxy_server (optional, str, None)
- *proxy_server* is required when *share_name* is of type HTTPS or HTTP and *proxy_support* is ``true``.
+ \ :emphasis:`proxy\_server`\ is required when \ :emphasis:`share\_name`\ is of type HTTPS or HTTP and \ :emphasis:`proxy\_support`\ is \ :literal:`true`\ .
- *proxy_server* is considered only when *share_name* is of type HTTP or HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`proxy\_server`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTP or HTTPS and is supported only on iDRAC9.
proxy_port (optional, str, 80)
Proxy port to authenticate.
- *proxy_port* is required when *share_name* is of type HTTPS or HTTP and *proxy_support* is ``true``.
+ \ :emphasis:`proxy\_port`\ is required when \ :emphasis:`share\_name`\ is of type HTTPS or HTTP and \ :emphasis:`proxy\_support`\ is \ :literal:`true`\ .
- *proxy_port* is considered only when *share_name* is of type HTTP or HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`proxy\_port`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTP or HTTPS and is supported only on iDRAC9.
proxy_username (optional, str, None)
Proxy username to authenticate.
- *proxy_username* is considered only when *share_name* is of type HTTP or HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`proxy\_username`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTP or HTTPS and is supported only on iDRAC9.
proxy_password (optional, str, None)
Proxy password to authenticate.
- *proxy_password* is considered only when *share_name* is of type HTTP or HTTPS and is supported only on iDRAC9.
+ \ :emphasis:`proxy\_password`\ is considered only when \ :emphasis:`share\_name`\ is of type HTTP or HTTPS and is supported only on iDRAC9.
idrac_ip (True, str, None)
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -236,11 +252,11 @@ Notes
-----
.. note::
- - This module requires 'Administrator' privilege for *idrac_user*.
+ - This module requires 'Administrator' privilege for \ :emphasis:`idrac\_user`\ .
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
- To import Server Configuration Profile (SCP) on the iDRAC8-based servers, the servers must have iDRAC Enterprise license or later.
- - For ``import`` operation, ``check_mode`` is supported only when *target* is ``ALL``.
+ - For \ :literal:`import`\ operation, \ :literal:`check\_mode`\ is supported only when \ :emphasis:`target`\ is \ :literal:`ALL`\ .
- This module supports IPv4 and IPv6 addresses.
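A minimal sketch of an SCP export built from the options documented above (command, scp_components, share_name, export_format, export_use, job_wait). The iDRAC address, credentials, and NFS share path are illustrative assumptions; credentials may also come from IDRAC_USERNAME and IDRAC_PASSWORD as noted above.

    - name: Export IDRAC and BIOS settings to an NFS share
      dellemc.openmanage.idrac_server_config_profile:
        idrac_ip: "192.168.0.1"
        idrac_user: "username"
        idrac_password: "password"
        ca_path: "/path/to/ca_cert.pem"
        command: export
        scp_components:
          - IDRAC
          - BIOS
        share_name: "192.168.0.2:/nfs_share"
        export_format: XML
        export_use: Default
        job_wait: true
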
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst
index d8f980043..30b612fb3 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_session.rst
@@ -32,13 +32,13 @@ Parameters
username (optional, str, None)
- Username of the iDRAC.
+ Username of the iDRAC. If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
\ :emphasis:`username`\ is required when \ :emphasis:`state`\ is \ :literal:`present`\ .
password (optional, str, None)
- Password of the iDRAC.
+ Password of the iDRAC. If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
\ :emphasis:`password`\ is required when \ :emphasis:`state`\ is \ :literal:`present`\ .
@@ -71,10 +71,10 @@ Parameters
Module will always report changes found to be applied when \ :emphasis:`state`\ is \ :literal:`present`\ .
- auth_token (optional, str, None)
+ x_auth_token (optional, str, None)
Authentication token.
- \ :emphasis:`auth\_token`\ is required when \ :emphasis:`state`\ is \ :literal:`absent`\ .
+ \ :emphasis:`x\_auth\_token`\ is required when \ :emphasis:`state`\ is \ :literal:`absent`\ .
session_id (optional, int, None)
@@ -110,14 +110,47 @@ Examples
hostname: 198.162.0.1
username: username
password: password
+ ca_path: "/path/to/ca_cert.pem"
state: present
- name: Delete a session
dellemc.openmanage.idrac_session:
hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
state: absent
- auth_token: aed4aa802b748d2f3b31deec00a6b28a
- session_is: 2
+ x_auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 2
+
+ - name: Create a session and execute other modules
+ block:
+ - name: Create a session
+ dellemc.openmanage.idrac_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ register: authData
+
+ - name: Call idrac_firmware_info module
+ dellemc.openmanage.idrac_firmware_info:
+ idrac_ip: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+
+ - name: Call idrac_user_info module
+ dellemc.openmanage.idrac_user_info:
+ idrac_ip: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+ always:
+ - name: Destroy a session
+ dellemc.openmanage.idrac_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ x_auth_token: "{{ authData.x_auth_token }}"
+ session_id: "{{ authData.session_data.Id }}"
@@ -154,4 +187,5 @@ Authors
~~~~~~~
- Rajshekar P(@rajshekarp87)
+- Kritika Bhateja (@Kritika-Bhateja-03)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst
index 5cfefc036..99fb14257 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_storage_volume.rst
@@ -125,26 +125,40 @@ Parameters
This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
+ time_to_wait (optional, int, 300)
+ The maximum wait time before shutdown in seconds for the Server Configuration Profile (SCP) import operation.
+
+ This option is applicable when \ :emphasis:`state`\ is \ :literal:`create`\ or \ :literal:`delete`\ .
+
+
idrac_ip (True, str, None)
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
- If the username is not provided, then the environment variable \ :literal:`IDRAC\_USERNAME`\ is used.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
Example: export IDRAC\_USERNAME=username
- idrac_password (True, str, None)
+ idrac_password (False, str, None)
iDRAC user password.
- If the password is not provided, then the environment variable \ :literal:`IDRAC\_PASSWORD`\ is used.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
Example: export IDRAC\_PASSWORD=password
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
+
idrac_port (optional, int, 443)
iDRAC port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_syslog.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_syslog.rst
index 24a3ac09e..a67f38f76 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_syslog.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_syslog.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -39,21 +39,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -69,7 +77,7 @@ Parameters
share_user (optional, str, None)
- Network share user name. Use the format 'user@domain' or 'domain\\user' if user is part of a domain. This option is mandatory for CIFS share.
+ Network share user name. Use the format 'user@domain' or 'domain\\\\user' if user is part of a domain. This option is mandatory for CIFS share.
share_password (optional, str, None)
@@ -87,10 +95,10 @@ Notes
-----
.. note::
- - This module requires 'Administrator' privilege for *idrac_user*.
+ - This module requires 'Administrator' privilege for \ :emphasis:`idrac\_user`\ .
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 addresses for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_system_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_system_info.rst
index 4c2d4fa8e..246431131 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_system_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_system_info.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -35,21 +35,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -68,8 +76,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 addresses for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_timezone_ntp.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_timezone_ntp.rst
index 10d7447f4..9872cbdf2 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_timezone_ntp.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_timezone_ntp.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- omsdk >= 1.2.488
-- python >= 3.9.6
+- omsdk \>= 1.2.488
+- python \>= 3.9.6
@@ -55,7 +55,7 @@ Parameters
share_user (optional, str, None)
- (deprecated)Network share user name. Use the format 'user@domain' or 'domain\user' if user is part of a domain. This option is mandatory for CIFS share.
+ (deprecated) Network share user name. Use the format 'user@domain' or 'domain\\user' if user is part of a domain. This option is mandatory for CIFS share.
This option is deprecated and will be removed in a later version.
@@ -79,21 +79,29 @@ Parameters
idrac_user (True, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
+
+ Example: export IDRAC\_USERNAME=username
+
idrac_password (True, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -111,10 +119,10 @@ Notes
-----
.. note::
- - This module requires 'Administrator' privilege for *idrac_user*.
+ - This module requires 'Administrator' privilege for \ :emphasis:`idrac\_user`\ .
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports both IPv4 and IPv6 address for *idrac_ip*.
- - This module supports ``check_mode``.
+ - This module supports both IPv4 and IPv6 addresses for \ :emphasis:`idrac\_ip`\ .
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_user.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_user.rst
index 9d92e0d0e..19778f88a 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_user.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_user.rst
@@ -26,7 +26,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -34,37 +34,37 @@ Parameters
----------
state (optional, str, present)
- Select ``present`` to create or modify a user account.
+ Select \ :literal:`present`\ to create or modify a user account.
- Select ``absent`` to remove a user account.
+ Select \ :literal:`absent`\ to remove a user account.
user_name (True, str, None)
- Provide the *user_name* of the account to be created, deleted or modified.
+ Provide the \ :emphasis:`user\_name`\ of the account to be created, deleted or modified.
user_password (optional, str, None)
Provide the password for the user account. The password can be changed when the user account is modified.
- To ensure security, the *user_password* must be at least eight characters long and must contain lowercase and upper-case characters, numbers, and special characters.
+ To ensure security, the \ :emphasis:`user\_password`\ must be at least eight characters long and must contain lowercase and upper-case characters, numbers, and special characters.
new_user_name (optional, str, None)
- Provide the *user_name* for the account to be modified.
+ Provide the \ :emphasis:`user\_name`\ for the account to be modified.
privilege (optional, str, None)
Following are the role-based privileges.
- A user with ``Administrator`` privilege can log in to iDRAC, and then configure iDRAC, configure users, clear logs, control and configure system, access virtual console, access virtual media, test alerts, and execute debug commands.
+ A user with \ :literal:`Administrator`\ privilege can log in to iDRAC, and then configure iDRAC, configure users, clear logs, control and configure system, access virtual console, access virtual media, test alerts, and execute debug commands.
- A user with ``Operator`` privilege can log in to iDRAC, and then configure iDRAC, control and configure system, access virtual console, access virtual media, and execute debug commands.
+ A user with \ :literal:`Operator`\ privilege can log in to iDRAC, and then configure iDRAC, control and configure system, access virtual console, access virtual media, and execute debug commands.
- A user with ``ReadOnly`` privilege can only log in to iDRAC.
+ A user with \ :literal:`ReadOnly`\ privilege can only log in to iDRAC.
- A user with ``None``, no privileges assigned.
+ A user with \ :literal:`None`\ , no privileges assigned.
- Will be ignored, if custom_privilege parameter is provided.
+ Will be ignored if the custom\_privilege parameter is provided.
custom_privilege (optional, int, None)
@@ -96,45 +96,61 @@ Parameters
authentication_protocol (optional, str, None)
This option allows to configure one of the following authentication protocol types to authenticate the iDRAC user.
- Secure Hash Algorithm ``SHA``.
+ Secure Hash Algorithm \ :literal:`SHA`\ .
- Message Digest 5 ``MD5``.
+ Message Digest 5 \ :literal:`MD5`\ .
- An authentication protocol is not configured if ``None`` is selected.
+ An authentication protocol is not configured if \ :literal:`None`\ is selected.
privacy_protocol (optional, str, None)
This option allows to configure one of the following privacy encryption protocols for the iDRAC user.
- Data Encryption Standard ``DES``.
+ Data Encryption Standard \ :literal:`DES`\ .
- Advanced Encryption Standard ``AES``.
+ Advanced Encryption Standard \ :literal:`AES`\ .
- A privacy protocol is not configured if ``None`` is selected.
+ A privacy protocol is not configured if \ :literal:`None`\ is selected.
idrac_ip (True, str, None)
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -153,7 +169,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
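A minimal sketch of creating a local account with the options documented above (state, user_name, user_password, privilege). The iDRAC address, administrator credentials, and account values are illustrative assumptions.

    - name: Create an iDRAC user with Operator privilege
      dellemc.openmanage.idrac_user:
        idrac_ip: "192.168.0.1"
        idrac_user: "admin_user"
        idrac_password: "admin_password"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        user_name: "operator_user"
        user_password: "Oper@tor123"   # must satisfy the complexity rules noted above
        privilege: Operator
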
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_user_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_user_info.rst
index 85e84ff65..3389f9c7f 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_user_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_user_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,37 +30,53 @@ Parameters
user_id (optional, int, None)
Sequential user ID numbers from 1 to 16 are supported.
- *user_id* is mutually exclusive with *username*
+ \ :emphasis:`user\_id`\ is mutually exclusive with \ :emphasis:`username`\
username (optional, str, None)
Username of the account that is created in iDRAC local users.
- *username* is mutually exclusive with *user_id*
+ \ :emphasis:`username`\ is mutually exclusive with \ :emphasis:`user\_id`\
idrac_ip (True, str, None)
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -79,7 +95,7 @@ Notes
.. note::
- Run this module on a system that has direct access to Dell iDRAC.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/idrac_virtual_media.rst b/ansible_collections/dellemc/openmanage/docs/modules/idrac_virtual_media.rst
index 479ba05bb..a47695bd9 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/idrac_virtual_media.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/idrac_virtual_media.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -32,9 +32,9 @@ Parameters
insert (True, bool, None)
- ``true`` connects the remote image file.
+ \ :literal:`true`\ connects the remote image file.
- ``false`` ejects the remote image file if connected.
+ \ :literal:`false`\ ejects the remote image file if connected.
image (optional, path, None)
@@ -42,9 +42,9 @@ Parameters
The file name with .img extension is redirected as a virtual floppy and a file name with .iso extension is redirected as a virtual CDROM.
- This option is required when *insert* is ``true``.
+ This option is required when \ :emphasis:`insert`\ is \ :literal:`true`\ .
- The following are the examples of the share location: CIFS share: //192.168.0.1/file_path/image_name.iso, NFS share: 192.168.0.2:/file_path/image_name.img, HTTP share: http://192.168.0.3/file_path/image_name.iso, HTTPS share: https://192.168.0.4/file_path/image_name.img
+ The following are the examples of the share location: CIFS share: //192.168.0.1/file\_path/image\_name.iso, NFS share: 192.168.0.2:/file\_path/image\_name.img, HTTP share: http://192.168.0.3/file\_path/image\_name.iso, HTTPS share: https://192.168.0.4/file\_path/image\_name.img
CIFS share is not supported by iDRAC8.
@@ -52,7 +52,7 @@ Parameters
index (optional, int, None)
- Index of the Remote File Share. For example, to specify the Remote File Share 1, the value of *index* should be 1. If *index* is not specified, the order of *virtual_media* list will be considered.
+ Index of the Remote File Share. For example, to specify the Remote File Share 1, the value of \ :emphasis:`index`\ should be 1. If \ :emphasis:`index`\ is not specified, the order of \ :emphasis:`virtual\_media`\ list will be considered.
domain (optional, str, None)
@@ -66,16 +66,16 @@ Parameters
password (optional, str, None)
Network share password. This option is applicable for CIFS and HTTPS share.
- This module always reports as the changes found when *password* is provided.
+ This module always reports as the changes found when \ :emphasis:`password`\ is provided.
media_type (optional, str, None)
- Type of the image file. This is applicable when *insert* is ``true``.
+ Type of the image file. This is applicable when \ :emphasis:`insert`\ is \ :literal:`true`\ .
force (optional, bool, False)
- ``true`` ejects the image file if already connected and inserts the file provided in *image*. This is applicable when *insert* is ``true``.
+ \ :literal:`true`\ ejects the image file if already connected and inserts the file provided in \ :emphasis:`image`\ . This is applicable when \ :emphasis:`insert`\ is \ :literal:`true`\ .
resource_id (optional, str, None)
@@ -86,24 +86,40 @@ Parameters
iDRAC IP Address.
- idrac_user (True, str, None)
+ idrac_user (False, str, None)
iDRAC username.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- idrac_password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ idrac_password (False, str, None)
iDRAC user password.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
idrac_port (optional, int, 443)
iDRAC port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -122,7 +138,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell iDRAC.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
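A minimal sketch of connecting a remote ISO with the options documented above (insert, image, media_type, force). It assumes, as the virtual_media reference above suggests, that these options are supplied as entries of a virtual_media list; the host, credentials, and share URL are illustrative.

    - name: Insert a virtual CD image from an HTTPS share
      dellemc.openmanage.idrac_virtual_media:
        idrac_ip: "192.168.0.1"
        idrac_user: "username"
        idrac_password: "password"
        ca_path: "/path/to/ca_cert.pem"
        virtual_media:
          - insert: true
            image: "https://192.168.0.4/file_path/image_name.iso"
            media_type: CD
            force: false
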
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_active_directory.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_active_directory.rst
index 19092497d..31a655394 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_active_directory.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_active_directory.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,9 +30,9 @@ Parameters
domain_server (optional, list, None)
Enter the domain name or FQDN or IP address of the domain controller.
- If *domain_controller_lookup* is ``DNS``, enter the domain name to query DNS for the domain controllers.
+ If \ :emphasis:`domain\_controller\_lookup`\ is \ :literal:`DNS`\ , enter the domain name to query DNS for the domain controllers.
- If *domain_controller_lookup* is ``MANUAL``, enter the FQDN or the IP address of the domain controller. The maximum number of Active Directory servers that can be added is three.
+ If \ :emphasis:`domain\_controller\_lookup`\ is \ :literal:`MANUAL`\ , enter the FQDN or the IP address of the domain controller. The maximum number of Active Directory servers that can be added is three.
domain_controller_lookup (optional, str, DNS)
@@ -46,11 +46,11 @@ Parameters
For the Domain Controller Access, enter 636 as the port number.
- ``NOTE``, Only LDAPS ports are supported.
+ \ :literal:`NOTE`\ , Only LDAPS ports are supported.
group_domain (optional, str, None)
- Provide the group domain in the format ``example.com`` or ``ou=org, dc=example, dc=com``.
+ Provide the group domain in the format \ :literal:`example.com`\ or \ :literal:`ou=org, dc=example, dc=com`\ .
id (optional, int, None)
@@ -58,7 +58,7 @@ Parameters
This is applicable for modification and deletion.
- This is mutually exclusive with *name*.
+ This is mutually exclusive with \ :emphasis:`name`\ .
name (optional, str, None)
@@ -66,7 +66,7 @@ Parameters
This is applicable for creation and deletion.
- This is mutually exclusive with *name*.
+ This is mutually exclusive with \ :emphasis:`id`\ .
network_timeout (optional, int, 120)
@@ -82,9 +82,9 @@ Parameters
state (optional, str, present)
- ``present`` allows to create or modify an Active Directory service.
+ \ :literal:`present`\ allows you to create or modify an Active Directory service.
- ``absent`` allows to delete a Active Directory service.
+ \ :literal:`absent`\ allows you to delete an Active Directory service.
test_connection (optional, bool, False)
@@ -94,25 +94,25 @@ Parameters
If the test fails, the module fails with an error.
- If ``true``, *domain_username* and *domain_password* has to be provided.
+ If \ :literal:`true`\ , \ :emphasis:`domain\_username`\ and \ :emphasis:`domain\_password`\ have to be provided.
domain_password (optional, str, None)
Provide the domain password.
- This is applicable when *test_connection* is ``true``.
+ This is applicable when \ :emphasis:`test\_connection`\ is \ :literal:`true`\ .
domain_username (optional, str, None)
- Provide the domain username either in the UPN (username@domain) or NetBIOS (domain\\username) format.
+ Provide the domain username either in the UPN (username@domain) or NetBIOS (domain\\\\username) format.
- This is applicable when *test_connection* is ``true``.
+ This is applicable when \ :emphasis:`test\_connection`\ is \ :literal:`true`\ .
validate_certificate (optional, bool, False)
Enables validation of SSL certificate of the domain controller.
- The module will always report change when this is ``true``.
+ The module will always report change when this is \ :literal:`true`\ .
certificate_file (optional, path, None)
@@ -120,31 +120,47 @@ Parameters
The certificate should be a Root CA Certificate encoded in Base64 format.
- This is applicable when *validate_certificate* is ``true``.
+ This is applicable when \ :emphasis:`validate\_certificate`\ is \ :literal:`true`\ .
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -162,9 +178,9 @@ Notes
-----
.. note::
- - The module will always report change when *validate_certificate* is ``true``.
+ - The module will always report change when \ :emphasis:`validate\_certificate`\ is \ :literal:`true`\ .
- Run this module from a system that has direct access to OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
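A minimal sketch of adding an Active Directory service with a connection test, using the options documented above (name, domain_server, domain_controller_lookup, group_domain, test_connection, domain_username, domain_password). The OpenManage Enterprise host, credentials, and domain values are illustrative assumptions; credentials may also come from OME_USERNAME and OME_PASSWORD as noted above.

    - name: Add an Active Directory service and test the connection
      dellemc.openmanage.ome_active_directory:
        hostname: "192.168.0.1"
        username: "username"
        password: "password"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        name: "corp_directory"
        domain_server:
          - "example.com"
        domain_controller_lookup: DNS
        group_domain: "example.com"
        test_connection: true
        domain_username: "user@example.com"
        domain_password: "domain_password"
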
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies.rst
index 0c14c7d9d..39c8c8791 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies.rst
@@ -266,24 +266,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If \ :literal:`False`\ , the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure \ :literal:`False`\ only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`False`\ by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_actions_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_actions_info.rst
index a8138a0e6..635d7a50e 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_actions_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_actions_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -31,24 +31,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -68,7 +84,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- This module supports both IPv4 and IPv6 addresses.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_category_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_category_info.rst
index 20edbe90b..d8098a363 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_category_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_category_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -31,24 +31,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -68,7 +84,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- This module supports IPv4 and IPv6 addresses.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
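Read together, the relaxed username/password requirements above mean a task can omit the credential options entirely when OME_USERNAME and OME_PASSWORD are exported in the environment the module runs in. A minimal playbook sketch, with a hypothetical appliance address and validate_certs relaxed only for a lab appliance with a self-signed certificate:

    - name: List alert policy categories using exported OME_USERNAME/OME_PASSWORD
      dellemc.openmanage.ome_alert_policies_category_info:
        hostname: "192.168.0.1"     # hypothetical appliance address
        validate_certs: false       # lab-only; keep true on production appliances
      register: categories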
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_info.rst
index 8dbe503d8..c038bb06f 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_info.rst
@@ -24,7 +24,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -39,24 +39,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -76,7 +92,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- This module supports both IPv4 and IPv6 addresses.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_message_id_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_message_id_info.rst
index 1be11ecbb..768c4b1b4 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_message_id_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_alert_policies_message_id_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -31,24 +31,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -67,7 +83,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
- This module supports IPv4 and IPv6 addresses.
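The newly documented x_auth_token option is an alternative to a username/password pair: a session token can be passed directly or picked up from OME_X_AUTH_TOKEN. A hedged sketch; the ome_token variable is hypothetical and would hold a token obtained out of band:

    - name: List alert message IDs with an X-Auth token
      dellemc.openmanage.ome_alert_policies_message_id_info:
        hostname: "192.168.0.1"           # hypothetical appliance address
        x_auth_token: "{{ ome_token }}"   # hypothetical variable holding a pre-issued token
        validate_certs: false             # lab-only
      register: message_ids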
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_smtp.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_smtp.rst
index ce2b05be0..08e92b320 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_smtp.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_smtp.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -42,9 +42,9 @@ Parameters
enable_authentication (True, bool, None)
Enable or disable authentication to access the SMTP server.
- The *credentials* are mandatory if *enable_authentication* is ``true``.
+ The \ :emphasis:`credentials`\ are mandatory if \ :emphasis:`enable\_authentication`\ is \ :literal:`true`\ .
- The module will always report change when this is ``true``.
+ The module will always report change when this is \ :literal:`true`\ .
credentials (optional, dict, None)
@@ -64,24 +64,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -99,9 +115,9 @@ Notes
-----
.. note::
- - The module will always report change when *enable_authentication* is ``true``.
+ - The module will always report change when \ :emphasis:`enable\_authentication`\ is \ :literal:`true`\ .
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- - This module support ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
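For context on how the SMTP options above fit together, here is an illustrative task. The destination_address option and the credentials sub-keys (username, password) are assumed from the module's usual interface rather than quoted from this diff, and every value is a placeholder; note the documented behaviour that the task always reports a change while enable_authentication is true:

    - name: Configure SMTP alert forwarding (illustrative values only)
      dellemc.openmanage.ome_application_alerts_smtp:
        hostname: "192.168.0.1"                    # hypothetical appliance address
        destination_address: "smtp.example.com"    # assumed option name
        enable_authentication: true                # makes credentials mandatory, per the docs
        credentials:                               # sub-keys assumed
          username: "smtp_user"
          password: "{{ smtp_password }}"
        validate_certs: false                      # lab-only
      check_mode: true                             # preview first; check_mode is supported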
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_syslog.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_syslog.rst
index 7fddd6e13..cbaa2606b 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_syslog.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_alerts_syslog.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -42,7 +42,7 @@ Parameters
destination_address (optional, str, None)
The IP address, FQDN or hostname of the syslog server.
- This is required if *enabled* is ``true``.
+ This is required if \ :emphasis:`enabled`\ is \ :literal:`true`\ .
port_number (optional, int, None)
@@ -54,24 +54,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -90,7 +106,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_certificate.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_certificate.rst
index d3c8a2a0d..8a917ef37 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_certificate.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_certificate.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -28,35 +28,35 @@ Parameters
----------
command (optional, str, generate_csr)
- ``generate_csr`` allows the generation of a CSR and ``upload`` uploads the certificate.
+ \ :literal:`generate\_csr`\ allows the generation of a CSR and \ :literal:`upload`\ uploads the certificate.
distinguished_name (optional, str, None)
- Name of the certificate issuer. This option is applicable for ``generate_csr``.
+ Name of the certificate issuer. This option is applicable for \ :literal:`generate\_csr`\ .
department_name (optional, str, None)
- Name of the department that issued the certificate. This option is applicable for ``generate_csr``.
+ Name of the department that issued the certificate. This option is applicable for \ :literal:`generate\_csr`\ .
business_name (optional, str, None)
- Name of the business that issued the certificate. This option is applicable for ``generate_csr``.
+ Name of the business that issued the certificate. This option is applicable for \ :literal:`generate\_csr`\ .
locality (optional, str, None)
- Local address of the issuer of the certificate. This option is applicable for ``generate_csr``.
+ Local address of the issuer of the certificate. This option is applicable for \ :literal:`generate\_csr`\ .
country_state (optional, str, None)
- State in which the issuer resides. This option is applicable for ``generate_csr``.
+ State in which the issuer resides. This option is applicable for \ :literal:`generate\_csr`\ .
country (optional, str, None)
- Country in which the issuer resides. This option is applicable for ``generate_csr``.
+ Country in which the issuer resides. This option is applicable for \ :literal:`generate\_csr`\ .
email (optional, str, None)
- Email associated with the issuer. This option is applicable for ``generate_csr``.
+ Email associated with the issuer. This option is applicable for \ :literal:`generate\_csr`\ .
subject_alternative_names (optional, str, None)
@@ -66,31 +66,47 @@ Parameters
upload_file (optional, str, None)
- Local path of the certificate file to be uploaded. This option is applicable for ``upload``. Once the certificate is uploaded, OpenManage Enterprise cannot be accessed for a few seconds.
+ Local path of the certificate file to be uploaded. This option is applicable for \ :literal:`upload`\ . Once the certificate is uploaded, OpenManage Enterprise cannot be accessed for a few seconds.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -109,7 +125,7 @@ Notes
.. note::
- If a certificate is uploaded, which is identical to an already existing certificate, it is accepted by the module.
- - This module does not support ``check_mode``.
+ - This module does not support \ :literal:`check\_mode`\ .
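The generate_csr path described above takes the issuer fields as flat options. A sketch with placeholder issuer details; since this module does not support check_mode, running it issues a real CSR request:

    - name: Generate a certificate signing request on the appliance
      dellemc.openmanage.ome_application_certificate:
        hostname: "192.168.0.1"           # hypothetical appliance address
        command: generate_csr
        distinguished_name: "ome.example.com"
        department_name: "IT"
        business_name: "Example Corp"
        locality: "Austin"
        country_state: "Texas"
        country: "US"
        email: "admin@example.com"
        validate_certs: false             # lab-only
      register: csr_result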
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst
index 45292acd0..f91984942 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_console_preferences.rst
@@ -153,13 +153,29 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_address.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_address.rst
index c5931f2c6..6305c4caf 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_address.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_address.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -40,7 +40,7 @@ Parameters
ipv4_configuration (optional, dict, None)
IPv4 network configuration.
- *Warning* Ensure that you have an alternate interface to access OpenManage Enterprise as these options can change the current IPv4 address for *hostname*.
+ \ :emphasis:`Warning`\ Ensure that you have an alternate interface to access OpenManage Enterprise as these options can change the current IPv4 address for \ :emphasis:`hostname`\ .
enable (True, bool, None)
@@ -50,50 +50,50 @@ Parameters
enable_dhcp (optional, bool, None)
Enable or disable the automatic request to get an IPv4 address from the IPv4 Dynamic Host Configuration Protocol (DHCP) server
- If *enable_dhcp* option is true, OpenManage Enterprise retrieves the IP configuration—IPv4 address, subnet mask, and gateway from a DHCP server on the existing network.
+ If the \ :emphasis:`enable\_dhcp`\ option is true, OpenManage Enterprise retrieves the IP configuration (IPv4 address, subnet mask, and gateway) from a DHCP server on the existing network.
static_ip_address (optional, str, None)
Static IPv4 address
- This option is applicable when *enable_dhcp* is false.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is false.
static_subnet_mask (optional, str, None)
Static IPv4 subnet mask address
- This option is applicable when *enable_dhcp* is false.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is false.
static_gateway (optional, str, None)
Static IPv4 gateway address
- This option is applicable when *enable_dhcp* is false.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is false.
use_dhcp_for_dns_server_names (optional, bool, None)
This option allows to automatically request and obtain a DNS server IPv4 address from the DHCP server.
- This option is applicable when *enable_dhcp* is true.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is true.
static_preferred_dns_server (optional, str, None)
Static IPv4 DNS preferred server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
static_alternate_dns_server (optional, str, None)
Static IPv4 DNS alternate server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
ipv6_configuration (optional, dict, None)
IPv6 network configuration.
- *Warning* Ensure that you have an alternate interface to access OpenManage Enterprise as these options can change the current IPv6 address for *hostname*.
+ \ :emphasis:`Warning`\ Ensure that you have an alternate interface to access OpenManage Enterprise as these options can change the current IPv6 address for \ :emphasis:`hostname`\ .
enable (True, bool, None)
@@ -103,43 +103,43 @@ Parameters
enable_auto_configuration (optional, bool, None)
Enable or disable the automatic request to get an IPv6 address from the IPv6 DHCP server or router advertisements(RA)
- If *enable_auto_configuration* is true, OME retrieves IP configuration-IPv6 address, prefix, and gateway, from a DHCPv6 server on the existing network
+ If \ :emphasis:`enable\_auto\_configuration`\ is true, OME retrieves the IP configuration (IPv6 address, prefix, and gateway) from a DHCPv6 server on the existing network.
static_ip_address (optional, str, None)
Static IPv6 address
- This option is applicable when *enable_auto_configuration* is false.
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is false.
static_prefix_length (optional, int, None)
Static IPv6 prefix length
- This option is applicable when *enable_auto_configuration* is false.
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is false.
static_gateway (optional, str, None)
Static IPv6 gateway address
- This option is applicable when *enable_auto_configuration* is false.
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is false.
use_dhcp_for_dns_server_names (optional, bool, None)
This option allows to automatically request and obtain a DNS server IPv6 address from the DHCP server.
- This option is applicable when *enable_auto_configuration* is true
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is true
static_preferred_dns_server (optional, str, None)
Static IPv6 DNS preferred server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
static_alternate_dns_server (optional, str, None)
Static IPv6 DNS alternate server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
@@ -152,15 +152,15 @@ Parameters
enable_vlan (True, bool, None)
Enable or disable vLAN for management.
- The vLAN configuration cannot be updated if the *register_with_dns* field under *dns_configuration* is true.
+ The vLAN configuration cannot be updated if the \ :emphasis:`register\_with\_dns`\ field under \ :emphasis:`dns\_configuration`\ is true.
- *WARNING* Ensure that the network cable is plugged to the correct port after the vLAN configuration changes have been made. If not, the configuration change may not be effective.
+ \ :emphasis:`WARNING`\ Ensure that the network cable is plugged to the correct port after the vLAN configuration changes have been made. If not, the configuration change may not be effective.
vlan_id (optional, int, None)
vLAN ID.
- This option is applicable when *enable_vlan* is true.
+ This option is applicable when \ :emphasis:`enable\_vlan`\ is true.
@@ -169,25 +169,25 @@ Parameters
register_with_dns (optional, bool, None)
- Register/Unregister *dns_name* on the DNS Server.
+ Register/Unregister \ :emphasis:`dns\_name`\ on the DNS Server.
This option cannot be updated if vLAN configuration changes.
use_dhcp_for_dns_domain_name (optional, bool, None)
- Get the *dns_domain_name* using a DHCP server.
+ Get the \ :emphasis:`dns\_domain\_name`\ using a DHCP server.
dns_name (optional, str, None)
- DNS name for *hostname*
+ DNS name for \ :emphasis:`hostname`\
- This is applicable when *register_with_dns* is true.
+ This is applicable when \ :emphasis:`register\_with\_dns`\ is true.
dns_domain_name (optional, str, None)
Static DNS domain name
- This is applicable when *use_dhcp_for_dns_domain_name* is false.
+ This is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_domain\_name`\ is false.
@@ -201,24 +201,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -238,7 +254,7 @@ Notes
.. note::
- The configuration changes can only be applied to one interface at a time.
- The system management consoles might be unreachable for some time after the configuration changes are applied.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -370,7 +386,7 @@ network_configuration (on success, dict, {'Delay': 0, 'DnsConfiguration': {'DnsD
job_info (on success, dict, {'Builtin': False, 'CreatedBy': 'system', 'Editable': True, 'EndTime': None, 'Id': 14902, 'JobDescription': 'Generic OME runtime task', 'JobName': 'OMERealtime_Task', 'JobStatus': {'Id': 2080, 'Name': 'New'}, 'JobType': {'Id': 207, 'Internal': True, 'Name': 'OMERealtime_Task'}, 'LastRun': None, 'LastRunStatus': {'Id': 2080, 'Name': 'New'}, 'NextRun': None, 'Params': [{'JobId': 14902, 'Key': 'Nmcli_Update', 'Value': '{"interfaceName":"eth0","profileName":"eth0","enableNIC":true, "ipv4Configuration":{"enable":true,"enableDHCP":true,"staticIPAddress":"", "staticSubnetMask":"","staticGateway":"","useDHCPForDNSServerNames":true, "staticPreferredDNSServer":"","staticAlternateDNSServer":""}, "ipv6Configuration":{"enable":false,"enableAutoConfiguration":true,"staticIPAddress":"", "staticPrefixLength":0,"staticGateway":"","useDHCPForDNSServerNames":false, "staticPreferredDNSServer":"","staticAlternateDNSServer":""}, "managementVLAN":{"enableVLAN":false,"id":0},"dnsConfiguration":{"registerWithDNS":false, "dnsName":"","useDHCPForDNSDomainName":false,"dnsDomainName":"","fqdndomainName":"", "ipv4CurrentPreferredDNSServer":"","ipv4CurrentAlternateDNSServer":"", "ipv6CurrentPreferredDNSServer":"","ipv6CurrentAlternateDNSServer":""}, "currentSettings":{"ipv4Address":[],"ipv4Gateway":"","ipv4Dns":[],"ipv4Domain":"", "ipv6Address":[],"ipv6LinkLocalAddress":"","ipv6Gateway":"","ipv6Dns":[], "ipv6Domain":""},"delay":0,"primaryInterface":true,"modifiedConfigs":{}}'}], 'Schedule': 'startnow', 'StartTime': None, 'State': 'Enabled', 'Targets': [], 'UpdatedBy': None, 'Visible': True})
- Details of the job to update in case OME version is >= 3.3.
+ Details of the job to update in case OME version is \>= 3.3.
error_info (on HTTP error, dict, {'error': {'@Message.ExtendedInfo': [{'Message': 'Unable to update the address configuration because a dependent field is missing for Use DHCP for DNS Domain Name, Enable DHCP for ipv4 or Enable Autoconfig for ipv6 settings for valid configuration .', 'MessageArgs': ['Use DHCP for DNS Domain Name, Enable DHCP for ipv4 or Enable Autoconfig for ipv6 settings for valid configuration'], 'MessageId': 'CAPP1304', 'RelatedProperties': [], 'Resolution': 'Make sure that all dependent fields contain valid content and retry the operation.', 'Severity': 'Critical'}], 'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.'}})
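Given the warning above that these options can change the address used to reach the appliance, a static IPv4 sketch is shown with placeholder addressing only; run it from a path that will survive the change:

    - name: Assign a static IPv4 address to the appliance (placeholder addressing)
      dellemc.openmanage.ome_application_network_address:
        hostname: "192.168.0.1"                 # hypothetical current address
        ipv4_configuration:
          enable: true
          enable_dhcp: false
          static_ip_address: "192.168.0.10"
          static_subnet_mask: "255.255.255.0"
          static_gateway: "192.168.0.1"
          use_dhcp_for_dns_server_names: false
          static_preferred_dns_server: "192.168.0.2"
        validate_certs: false                   # lab-only
      register: network_result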
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_proxy.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_proxy.rst
index ed48d90f6..8bd375b97 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_proxy.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_proxy.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,63 +30,79 @@ Parameters
enable_proxy (True, bool, None)
Enables or disables the HTTP proxy configuration.
- If *enable proxy* is false, then the HTTP proxy configuration is set to its default value.
+ If \ :emphasis:`enable\_proxy`\ is false, then the HTTP proxy configuration is set to its default value.
ip_address (optional, str, None)
Proxy server address.
- This option is mandatory when *enable_proxy* is true.
+ This option is mandatory when \ :emphasis:`enable\_proxy`\ is true.
proxy_port (optional, int, None)
Proxy server's port number.
- This option is mandatory when *enable_proxy* is true.
+ This option is mandatory when \ :emphasis:`enable\_proxy`\ is true.
enable_authentication (optional, bool, None)
Enable or disable proxy authentication.
- If *enable_authentication* is true, *proxy_username* and *proxy_password* must be provided.
+ If \ :emphasis:`enable\_authentication`\ is true, \ :emphasis:`proxy\_username`\ and \ :emphasis:`proxy\_password`\ must be provided.
- If *enable_authentication* is false, the proxy username and password are set to its default values.
+ If \ :emphasis:`enable\_authentication`\ is false, the proxy username and password are set to their default values.
proxy_username (optional, str, None)
Proxy server username.
- This option is mandatory when *enable_authentication* is true.
+ This option is mandatory when \ :emphasis:`enable\_authentication`\ is true.
proxy_password (optional, str, None)
Proxy server password.
- This option is mandatory when *enable_authentication* is true.
+ This option is mandatory when \ :emphasis:`enable\_authentication`\ is true.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -105,7 +121,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module does not support ``check_mode``.
+ - This module does not support \ :literal:`check\_mode`\ .
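As the option descriptions above state, ip_address and proxy_port become mandatory once enable_proxy is true, and proxy_username/proxy_password once enable_authentication is true. A sketch with placeholder proxy details:

    - name: Configure an authenticated HTTP proxy (placeholder values)
      dellemc.openmanage.ome_application_network_proxy:
        hostname: "192.168.0.1"             # hypothetical appliance address
        enable_proxy: true
        ip_address: "proxy.example.com"
        proxy_port: 3128
        enable_authentication: true
        proxy_username: "proxy_user"
        proxy_password: "{{ proxy_password }}"
        validate_certs: false               # lab-only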
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_settings.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_settings.rst
index 1cc02239c..c98dc08e9 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_settings.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_settings.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -38,15 +38,15 @@ Parameters
universal_timeout (optional, float, None)
Duration of inactivity in minutes after which all sessions end.
- This is applicable when *enable_universal_timeout* is ``true``.
+ This is applicable when \ :emphasis:`enable\_universal\_timeout`\ is \ :literal:`true`\ .
- This is mutually exclusive with *api_timeout*, *gui_timeout*, *ssh_timeout* and *serial_timeout*.
+ This is mutually exclusive with \ :emphasis:`api\_timeout`\ , \ :emphasis:`gui\_timeout`\ , \ :emphasis:`ssh\_timeout`\ and \ :emphasis:`serial\_timeout`\ .
api_timeout (optional, float, None)
Duration of inactivity in minutes after which the API session ends.
- This is mutually exclusive with *universal_timeout*.
+ This is mutually exclusive with \ :emphasis:`universal\_timeout`\ .
api_sessions (optional, int, None)
@@ -56,7 +56,7 @@ Parameters
gui_timeout (optional, float, None)
Duration of inactivity in minutes after which the web interface of Graphical User Interface (GUI) session ends.
- This is mutually exclusive with *universal_timeout*.
+ This is mutually exclusive with \ :emphasis:`universal\_timeout`\ .
gui_sessions (optional, int, None)
@@ -68,7 +68,7 @@ Parameters
This is applicable only for OpenManage Enterprise Modular.
- This is mutually exclusive with *universal_timeout*.
+ This is mutually exclusive with \ :emphasis:`universal\_timeout`\ .
ssh_sessions (optional, int, None)
@@ -82,7 +82,7 @@ Parameters
This is applicable only for OpenManage Enterprise Modular.
- This is mutually exclusive with *universal_timeout*.
+ This is mutually exclusive with \ :emphasis:`universal\_timeout`\ .
serial_sessions (optional, int, None)
@@ -96,24 +96,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -133,7 +149,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- To configure other network settings such as network address, web server, and so on, refer to the respective OpenManage Enterprise application network setting modules.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
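Because the per-interface timeouts above are mutually exclusive with universal_timeout, a single universal value is the simplest sketch. The session_inactivity_timeout parent key is assumed from this module's usual structure rather than quoted from the excerpt, and the values are illustrative:

    - name: Apply a 30-minute universal session inactivity timeout
      dellemc.openmanage.ome_application_network_settings:
        hostname: "192.168.0.1"               # hypothetical appliance address
        session_inactivity_timeout:           # assumed parent option
          enable_universal_timeout: true
          universal_timeout: 30               # minutes; excludes api/gui/ssh/serial timeouts
        validate_certs: false                 # lab-only
      check_mode: true                        # check_mode is supported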
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_time.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_time.rst
index e0f939646..b37675ffc 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_time.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_time.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,13 +30,13 @@ Parameters
enable_ntp (True, bool, None)
Enables or disables Network Time Protocol(NTP).
- If *enable_ntp* is false, then the NTP addresses reset to their default values.
+ If \ :emphasis:`enable\_ntp`\ is false, then the NTP addresses reset to their default values.
system_time (optional, str, None)
Time in the current system.
- This option is only applicable when *enable_ntp* is false.
+ This option is only applicable when \ :emphasis:`enable\_ntp`\ is false.
This option must be provided in the following format: 'yyyy-mm-dd hh:mm:ss'.
@@ -50,43 +50,59 @@ Parameters
primary_ntp_address (optional, str, None)
The primary NTP address.
- This option is applicable when *enable_ntp* is true.
+ This option is applicable when \ :emphasis:`enable\_ntp`\ is true.
secondary_ntp_address1 (optional, str, None)
The first secondary NTP address.
- This option is applicable when *enable_ntp* is true.
+ This option is applicable when \ :emphasis:`enable\_ntp`\ is true.
secondary_ntp_address2 (optional, str, None)
The second secondary NTP address.
- This option is applicable when *enable_ntp* is true.
+ This option is applicable when \ :emphasis:`enable\_ntp`\ is true.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -105,7 +121,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
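A sketch of enabling NTP with the options listed above; the server addresses and appliance address are placeholders:

    - name: Enable NTP on the appliance (placeholder servers)
      dellemc.openmanage.ome_application_network_time:
        hostname: "192.168.0.1"                   # hypothetical appliance address
        enable_ntp: true
        primary_ntp_address: "0.pool.ntp.org"     # placeholder
        secondary_ntp_address1: "1.pool.ntp.org"  # placeholder
        validate_certs: false                     # lab-only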
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_webserver.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_webserver.rst
index 64adb1bf5..0cf55561d 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_webserver.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_network_webserver.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,7 +30,7 @@ Parameters
webserver_port (optional, int, None)
Port number used by OpenManage Enterprise to establish a secure server connection.
- *WARNING* A change in port number results in a loss of connectivity in the current session for more than a minute.
+ \ :emphasis:`WARNING`\ A change in port number results in a loss of connectivity in the current session for more than a minute.
webserver_timeout (optional, int, None)
@@ -43,24 +43,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -79,7 +95,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
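Since the warning above notes that changing webserver_port drops the current session, a sketch that only adjusts the inactivity timeout is the safer illustration; values are placeholders:

    - name: Increase the web server inactivity timeout
      dellemc.openmanage.ome_application_network_webserver:
        hostname: "192.168.0.1"     # hypothetical appliance address
        webserver_timeout: 30       # inactivity timeout; assumed to be in minutes
        validate_certs: false       # lab-only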
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_security_settings.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_security_settings.rst
index f7ca82d94..8bdc2865f 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_application_security_settings.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_application_security_settings.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,9 +30,9 @@ Parameters
restrict_allowed_ip_range (optional, dict, None)
Restrict to allow inbound connections only from the specified IP address range.
- This is mutually exclusive with *fips_mode_enable*.
+ This is mutually exclusive with \ :emphasis:`fips\_mode\_enable`\ .
- ``NOTE`` When *restrict_allowed_ip_range* is configured on the appliance, any inbound connection to the appliance, such as alert reception, firmware update, and network identities are blocked from the devices that are outside the specified IP address range. However, any outbound connection from the appliance will work on all devices.
+ \ :literal:`NOTE`\ When \ :emphasis:`restrict\_allowed\_ip\_range`\ is configured on the appliance, any inbound connections to the appliance, such as alert reception, firmware updates, and network identities, are blocked from devices that are outside the specified IP address range. However, any outbound connection from the appliance will work on all devices.
enable_ip_range (True, bool, None)
@@ -47,7 +47,7 @@ Parameters
login_lockout_policy (optional, dict, None)
Locks the application after multiple unsuccessful login attempts.
- This is mutually exclusive with *fips_mode_enable*.
+ This is mutually exclusive with \ :emphasis:`fips\_mode\_enable`\ .
by_user_name (optional, bool, None)
@@ -76,9 +76,9 @@ Parameters
job_wait_timeout (optional, int, 120)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
fips_mode_enable (optional, bool, None)
@@ -86,37 +86,53 @@ Parameters
This is applicable only for OpenManage Enterprise Modular.
- This is mutually exclusive with *restrict_allowed_ip_range* and *login_lockout_policy*.
+ This is mutually exclusive with \ :emphasis:`restrict\_allowed\_ip\_range`\ and \ :emphasis:`login\_lockout\_policy`\ .
- ``WARNING`` Enabling or Disabling this option resets your chassis to default settings. This may cause change in IP settings and loss of network connectivity.
+ \ :literal:`WARNING`\ Enabling or Disabling this option resets your chassis to default settings. This may cause change in IP settings and loss of network connectivity.
- ``WARNING`` The FIPS mode cannot be enabled on a lead chassis in a multi-chassis management configuration. To toggle enable FIPS on a lead chassis, delete the chassis group, enable FIPS and recreate the group.
+ \ :literal:`WARNING`\ The FIPS mode cannot be enabled on a lead chassis in a multi-chassis management configuration. To enable FIPS on a lead chassis, delete the chassis group, enable FIPS, and recreate the group.
- ``WARNING`` For a Standalone or member chassis, enabling the FIPS mode deletes any fabrics created. This may cause loss of network connectivity and data paths to the compute sleds.
+ \ :literal:`WARNING`\ For a Standalone or member chassis, enabling the FIPS mode deletes any fabrics created. This may cause loss of network connectivity and data paths to the compute sleds.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -135,7 +151,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
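A sketch of the IP-range restriction described above, previewed with check_mode first because of the NOTE about blocking inbound connections. The ip_range sub-option name is assumed rather than quoted from this excerpt, and the CIDR value is a placeholder:

    - name: Restrict inbound connections to a management subnet (preview only)
      dellemc.openmanage.ome_application_security_settings:
        hostname: "192.168.0.1"             # hypothetical appliance address
        restrict_allowed_ip_range:
          enable_ip_range: true
          ip_range: "192.168.0.0/24"        # assumed sub-option name; placeholder range
        validate_certs: false               # lab-only
      check_mode: true                      # check_mode is supported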
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_chassis_slots.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_chassis_slots.rst
index bc05c9616..58f492d9a 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_chassis_slots.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_chassis_slots.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,19 +30,19 @@ Parameters
device_options (optional, list, None)
The ID or service tag of the sled in the slot and the new name for the slot.
- *device_options* is mutually exclusive with *slot_options*.
+ \ :emphasis:`device\_options`\ is mutually exclusive with \ :emphasis:`slot\_options`\ .
device_id (optional, int, None)
Device ID of the sled in the slot.
- This is mutually exclusive with *device_service_tag*.
+ This is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
Service tag of the sled in the slot.
- This is mutually exclusive with *device_id*.
+ This is mutually exclusive with \ :emphasis:`device\_id`\ .
slot_name (True, str, None)
@@ -53,7 +53,7 @@ Parameters
slot_options (optional, list, None)
The service tag of the chassis, slot number of the slot to be renamed, and the new name for the slot.
- *slot_options* is mutually exclusive with *device_options*.
+ \ :emphasis:`slot\_options`\ is mutually exclusive with \ :emphasis:`device\_options`\ .
chassis_service_tag (True, str, None)
@@ -78,24 +78,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -115,7 +131,7 @@ Notes
.. note::
- This module initiates the refresh inventory task. It may take a minute for new names to be reflected. If the task exceeds 300 seconds to refresh, the task times out.
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -195,17 +211,17 @@ msg (always, str, Successfully renamed the slot(s).)
slot_info (if at least one slot renamed, list, [{'ChassisId': 10053, 'ChassisServiceTag': 'ABCD123', 'DeviceName': '', 'DeviceType': 1000, 'JobId': 15746, 'SlotId': '10072', 'SlotName': 'slot_op2', 'SlotNumber': '6', 'SlotType': 2000}, {'ChassisId': 10053, 'ChassisName': 'MX-ABCD123', 'ChassisServiceTag': 'ABCD123', 'DeviceType': '3000', 'JobId': 15747, 'SlotId': '10070', 'SlotName': 'slot_op2', 'SlotNumber': '4', 'SlotType': '2000'}, {'ChassisId': '10053', 'ChassisName': 'MX-PQRS123', 'ChassisServiceTag': 'PQRS123', 'DeviceId': '10054', 'DeviceServiceTag': 'XYZ5678', 'DeviceType': '1000', 'JobId': 15761, 'SlotId': '10067', 'SlotName': 'a1', 'SlotNumber': '1', 'SlotType': '2000'}])
Information of the slots that are renamed successfully.
- The ``DeviceServiceTag`` and ``DeviceId`` options are available only if *device_options* is used.
+ The \ :literal:`DeviceServiceTag`\ and \ :literal:`DeviceId`\ options are available only if \ :emphasis:`device\_options`\ is used.
- ``NOTE`` Only the slots which were renamed are listed.
+ \ :literal:`NOTE`\ Only the slots which were renamed are listed.
rename_failed_slots (if at least one slot renaming fails, list, [{'ChassisId': '12345', 'ChassisName': 'MX-ABCD123', 'ChassisServiceTag': 'ABCD123', 'DeviceType': '4000', 'JobId': 1234, 'JobStatus': 'Aborted', 'SlotId': '10061', 'SlotName': 'c2', 'SlotNumber': '1', 'SlotType': '4000'}, {'ChassisId': '10053', 'ChassisName': 'MX-PQRS123', 'ChassisServiceTag': 'PQRS123', 'DeviceType': '1000', 'JobId': 0, 'JobStatus': 'HTTP Error 400: Bad Request', 'SlotId': '10069', 'SlotName': 'b2', 'SlotNumber': '3', 'SlotType': '2000'}])
Information of the valid slots that are not renamed.
- ``JobStatus`` is shown if rename job fails.
+ \ :literal:`JobStatus`\ is shown if rename job fails.
- ``NOTE`` Only slots which were not renamed are listed.
+ \ :literal:`NOTE`\ Only slots which were not renamed are listed.
error_info (on HTTP error, dict, {'error': {'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.', '@Message.ExtendedInfo': [{'MessageId': 'CGEN1014', 'RelatedProperties': [], 'Message': 'Unable to complete the operation because an invalid value is entered for the property Invalid json type: STRING for Edm.Int64 property: Id .', 'MessageArgs': ['Invalid json type: STRING for Edm.Int64 property: Id'], 'Severity': 'Critical', 'Resolution': "Enter a valid value for the property and retry the operation. For more information about valid values, see the OpenManage Enterprise-Modular User's Guide available on the support site."}]}})
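The device_options list described above identifies a sled by ID or service tag and supplies the new slot name. A sketch with a placeholder service tag; per the note above, a refresh-inventory task runs afterwards and new names may take a minute to appear:

    - name: Rename the slot holding a given sled (placeholder service tag)
      dellemc.openmanage.ome_chassis_slots:
        hostname: "192.168.0.1"            # hypothetical OME-Modular address
        device_options:
          - device_service_tag: "ABCD123"
            slot_name: "web-node-01"
        validate_certs: false              # lab-only
      register: slot_result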
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_baseline.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_baseline.rst
index 4b1ba6c0f..13327ccfa 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_baseline.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_baseline.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,31 +28,31 @@ Parameters
----------
command (optional, str, create)
- ``create`` creates a configuration baseline from an existing compliance template.``create`` supports ``check_mode`` or idempotency checking for only *names*.
+ \ :literal:`create`\ creates a configuration baseline from an existing compliance template. \ :literal:`create`\ supports \ :literal:`check\_mode`\ or idempotency checking only for \ :emphasis:`names`\ .
- ``modify`` modifies an existing baseline.Only *names*, *description*, *device_ids*, *device_service_tags*, and *device_group_names* can be modified
+ \ :literal:`modify`\ modifies an existing baseline. Only \ :emphasis:`names`\ , \ :emphasis:`description`\ , \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_service\_tags`\ , and \ :emphasis:`device\_group\_names`\ can be modified.
- *WARNING* When a baseline is modified, the provided *device_ids*, *device_group_names*, and *device_service_tags* replaces the devices previously present in the baseline.
+ \ :emphasis:`WARNING`\ When a baseline is modified, the provided \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_group\_names`\ , and \ :emphasis:`device\_service\_tags`\ replaces the devices previously present in the baseline.
- ``delete`` deletes the list of configuration compliance baselines based on the baseline name. Invalid baseline names are ignored.
+ \ :literal:`delete`\ deletes the list of configuration compliance baselines based on the baseline name. Invalid baseline names are ignored.
- ``remediate`` remediates devices that are non-compliant with the baseline by changing the attributes of devices to match with the associated baseline attributes.
+ \ :literal:`remediate`\ remediates devices that are non-compliant with the baseline by changing the attributes of devices to match with the associated baseline attributes.
- ``remediate`` is performed on all the non-compliant devices if either *device_ids*, or *device_service_tags* is not provided.
+ \ :literal:`remediate`\ is performed on all the non-compliant devices if neither \ :emphasis:`device\_ids`\ nor \ :emphasis:`device\_service\_tags`\ is provided.
names (True, list, None)
Name(s) of the configuration compliance baseline.
- This option is applicable when *command* is ``create``, ``modify``, or ``delete``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , \ :literal:`modify`\ , or \ :literal:`delete`\ .
- Provide the list of configuration compliance baselines names that are supported when *command* is ``delete``.
+ Provide the list of configuration compliance baseline names that are supported when \ :emphasis:`command`\ is \ :literal:`delete`\ .
new_name (optional, str, None)
New name of the compliance baseline to be modified.
- This option is applicable when *command* is ``modify``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`modify`\ .
template_name (optional, str, None)
@@ -60,73 +60,89 @@ Parameters
Name of the deployment template to be used for creating a compliance baseline.
- This option is applicable when *command* is ``create`` and is mutually exclusive with *template_id*.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ and is mutually exclusive with \ :emphasis:`template\_id`\ .
template_id (optional, int, None)
ID of the deployment template to be used for creating a compliance baseline.
- This option is applicable when *command* is ``create`` and is mutually exclusive with *template_name*.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ and is mutually exclusive with \ :emphasis:`template\_name`\ .
device_ids (optional, list, None)
IDs of the target devices.
- This option is applicable when *command* is ``create``, ``modify``, or ``remediate``, and is mutually exclusive with *device_service_tag* and *device_group_names*.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , \ :literal:`modify`\ , or \ :literal:`remediate`\ , and is mutually exclusive with \ :emphasis:`device\_service\_tag`\ and \ :emphasis:`device\_group\_names`\ .
device_service_tags (optional, list, None)
Service tag of the target device.
- This option is applicable when *command* is ``create``, ``modify``, or ``remediate`` and is mutually exclusive with *device_ids* and *device_group_names*.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , \ :literal:`modify`\ , or \ :literal:`remediate`\ and is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_group\_names`\ .
device_group_names (optional, list, None)
Name of the target device group.
- This option is applicable when *command* is ``create``, or ``modify`` and is mutually exclusive with *device_ids* and *device_service_tag*.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , or \ :literal:`modify`\ and is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_service\_tag`\ .
description (optional, str, None)
Description of the compliance baseline.
- This option is applicable when *command* is ``create``, or ``modify``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , or \ :literal:`modify`\ .
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This option is applicable when *command* is ``create``, ``modify``, or ``remediate``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , \ :literal:`modify`\ , or \ :literal:`remediate`\ .
job_wait_timeout (optional, int, 10800)
- The maximum wait time of *job_wait* in seconds.The job will only be tracked for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job will only be tracked for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
hostname (True, str, None)
OpenManage Enterprise IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -144,7 +160,7 @@ Notes
-----
.. note::
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
- Ensure that the devices have the required licenses to perform the baseline compliance operations.
@@ -273,7 +289,7 @@ compliance_status (when I(command) is C(create) or C(modify), dict, {'Id': 13, '
job_id (when I(command) is C(remediate), int, 14123)
- Task ID created when *command* is ``remediate``.
+ Task ID created when \ :emphasis:`command`\ is \ :literal:`remediate`\ .
error_info (on HTTP error, dict, {'error': {'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.', '@Message.ExtendedInfo': [{'MessageId': 'GEN1234', 'RelatedProperties': [], 'Message': 'Unable to process the request because an error occurred.', 'MessageArgs': [], 'Severity': 'Critical', 'Resolution': 'Retry the operation. If the issue persists, contact your system administrator.'}]}})
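For orientation, the following is a minimal, hypothetical sketch of a create task for this module, built only from the parameters documented above; the hostname, credentials, template, baseline name, and device IDs are placeholders and do not come from this changeset::

    - name: Create a configuration compliance baseline (illustrative values only)
      dellemc.openmanage.ome_configuration_compliance_baseline:
        hostname: "192.168.0.10"           # placeholder appliance address
        username: "admin"                  # or omit and export OME_USERNAME
        password: "password"               # or omit and export OME_PASSWORD
        ca_path: "/path/to/ca_cert.pem"    # placeholder CA bundle
        command: create
        names:
          - "baseline1"                    # placeholder baseline name
        template_name: "template1"         # placeholder compliance template
        device_ids:
          - 1111                           # placeholder target device IDs
          - 2222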
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_info.rst
index a04e3404d..77f85faf9 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_configuration_compliance_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -34,37 +34,53 @@ Parameters
device_id (False, int, None)
- The ID of the target device which is associated with the *baseline*.
+ The ID of the target device which is associated with the \ :emphasis:`baseline`\ .
device_service_tag (False, str, None)
- The device service tag of the target device associated with the *baseline*.
+ The device service tag of the target device associated with the \ :emphasis:`baseline`\ .
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
hostname (True, str, None)
OpenManage Enterprise IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -83,7 +99,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
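A minimal usage sketch for this info module follows; the baseline parameter name is inferred from the \ :emphasis:`baseline`\ references above, and the hostname, credentials, and device ID are placeholders::

    - name: Retrieve the compliance report for a device in a baseline (illustrative values only)
      dellemc.openmanage.ome_configuration_compliance_info:
        hostname: "192.168.0.10"           # placeholder appliance address
        username: "admin"                  # or omit and export OME_USERNAME
        password: "password"               # or omit and export OME_PASSWORD
        ca_path: "/path/to/ca_cert.pem"
        baseline: "baseline1"              # assumed parameter name, referenced as I(baseline) above
        device_id: 1111                    # mutually exclusive with device_service_tag
      register: compliance_report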
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_group.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_group.rst
index c111d5f2f..42f6de3a5 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_group.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_group.rst
@@ -20,8 +20,8 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
-- netaddr >= 0.7.19
+- python \>= 3.9.6
+- netaddr \>= 0.7.19
@@ -29,39 +29,39 @@ Parameters
----------
state (optional, str, present)
- ``present`` allows to add the device(s) to a static device group.
+ \ :literal:`present`\ allows adding the device(s) to a static device group.
- ``absent`` allows to remove the device(s) from a static device group.
+ \ :literal:`absent`\ allows removing the device(s) from a static device group.
name (optional, str, None)
Name of the static group.
- *name* is mutually exclusive with *group_id*.
+ \ :emphasis:`name`\ is mutually exclusive with \ :emphasis:`group\_id`\ .
group_id (optional, int, None)
ID of the static device group.
- *group_id* is mutually exclusive with *name*.
+ \ :emphasis:`group\_id`\ is mutually exclusive with \ :emphasis:`name`\ .
device_ids (optional, list, None)
List of ID(s) of the device(s) to be added or removed from the device group.
- *device_ids* is mutually exclusive with *device_service_tags* and *ip_addresses*.
+ \ :emphasis:`device\_ids`\ is mutually exclusive with \ :emphasis:`device\_service\_tags`\ and \ :emphasis:`ip\_addresses`\ .
device_service_tags (optional, list, None)
List of service tag(s) of the device(s) to be added or removed from the device group.
- *device_service_tags* is mutually exclusive with *device_ids* and *ip_addresses*.
+ \ :emphasis:`device\_service\_tags`\ is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`ip\_addresses`\ .
ip_addresses (optional, list, None)
List of IPs of the device(s) to be added or removed from the device group.
- *ip_addresses* is mutually exclusive with *device_ids* and *device_service_tags*.
+ \ :emphasis:`ip\_addresses`\ is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_service\_tags`\ .
Supported IP address range formats:
@@ -77,35 +77,51 @@ Parameters
- fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff
- ``NOTE`` Hostname is not supported.
+ \ :literal:`NOTE`\ Hostname is not supported.
- ``NOTE`` *ip_addresses* requires python's netaddr packages to work on IP Addresses.
+ \ :literal:`NOTE`\ \ :emphasis:`ip\_addresses`\ requires Python's netaddr package to work on IP addresses.
- ``NOTE`` This module reports success even if one of the IP addresses provided in the *ip_addresses* list is available in OpenManage Enterprise.The module reports failure only if none of the IP addresses provided in the list are available in OpenManage Enterprise.
+ \ :literal:`NOTE`\ This module reports success even if one of the IP addresses provided in the \ :emphasis:`ip\_addresses`\ list is available in OpenManage Enterprise. The module reports failure only if none of the IP addresses provided in the list are available in OpenManage Enterprise.
hostname (True, str, None)
OpenManage Enterprise IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -124,7 +140,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
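For orientation, a minimal sketch of adding devices to a static group by IP address, using the range formats listed above; the hostname, credentials, group name, and addresses are placeholders::

    - name: Add devices to a static group by IP address (illustrative values only)
      dellemc.openmanage.ome_device_group:
        hostname: "192.168.0.10"           # placeholder appliance address
        username: "admin"                  # or omit and export OME_USERNAME
        password: "password"               # or omit and export OME_PASSWORD
        ca_path: "/path/to/ca_cert.pem"
        state: present
        name: "Storage Services"           # placeholder group name
        ip_addresses:
          - 192.35.0.1                                             # single address
          - 192.36.0.0-192.36.0.255                                # IPv4 range
          - "fe80::ffff:ffff:ffff:1111-fe80::ffff:ffff:ffff:ffff"  # IPv6 range from the formats above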
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_info.rst
index 747bc2adc..8b4731b58 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,31 +28,31 @@ Parameters
----------
fact_subset (optional, str, basic_inventory)
- ``basic_inventory`` returns the list of the devices.
+ \ :literal:`basic\_inventory`\ returns the list of the devices.
- ``detailed_inventory`` returns the inventory details of specified devices.
+ \ :literal:`detailed\_inventory`\ returns the inventory details of specified devices.
- ``subsystem_health`` returns the health status of specified devices.
+ \ :literal:`subsystem\_health`\ returns the health status of specified devices.
system_query_options (optional, dict, None)
- *system_query_options* applicable for the choices of the fact_subset. Either *device_id* or *device_service_tag* is mandatory for ``detailed_inventory`` and ``subsystem_health`` or both can be applicable.
+ \ :emphasis:`system\_query\_options`\ is applicable for the choices of the fact\_subset. Either \ :emphasis:`device\_id`\ or \ :emphasis:`device\_service\_tag`\ is mandatory for \ :literal:`detailed\_inventory`\ and \ :literal:`subsystem\_health`\ , or both can be applicable.
device_id (optional, list, None)
- A list of unique identifier is applicable for ``detailed_inventory`` and ``subsystem_health``.
+ A list of unique identifiers is applicable for \ :literal:`detailed\_inventory`\ and \ :literal:`subsystem\_health`\ .
device_service_tag (optional, list, None)
- A list of service tags are applicable for ``detailed_inventory`` and ``subsystem_health``.
+ A list of service tags is applicable for \ :literal:`detailed\_inventory`\ and \ :literal:`subsystem\_health`\ .
inventory_type (optional, str, None)
- For ``detailed_inventory``, it returns details of the specified inventory type.
+ For \ :literal:`detailed\_inventory`\ , it returns details of the specified inventory type.
filter (optional, str, None)
- For ``basic_inventory``, it filters the collection of devices. *filter* query format should be aligned with OData standards.
+ For \ :literal:`basic\_inventory`\ , it filters the collection of devices. The \ :emphasis:`filter`\ query format should be aligned with OData standards.
@@ -60,24 +60,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -96,7 +112,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
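A minimal sketch of a detailed inventory query with this module, assuming placeholder connection details and service tags::

    - name: Retrieve detailed inventory for selected devices (illustrative values only)
      dellemc.openmanage.ome_device_info:
        hostname: "192.168.0.10"           # placeholder appliance address
        username: "admin"                  # or omit and export OME_USERNAME
        password: "password"               # or omit and export OME_PASSWORD
        ca_path: "/path/to/ca_cert.pem"
        fact_subset: detailed_inventory
        system_query_options:
          device_service_tag:
            - ABC1234                      # placeholder service tags
            - DEF5678
      register: device_inventory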
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst
index 1deafefb0..644eeadbb 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_local_access_configuration.rst
@@ -166,13 +166,29 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_location.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_location.rst
index de96c2c7e..55afe2429 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_location.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_location.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,17 +30,17 @@ Parameters
device_id (optional, int, None)
The ID of the chassis for which the settings need to be updated.
- If the device ID is not specified, this module updates the location settings for the *hostname*.
+ If the device ID is not specified, this module updates the location settings for the \ :emphasis:`hostname`\ .
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
The service tag of the chassis for which the settings need to be updated.
- If the device service tag is not specified, this module updates the location settings for the *hostname*.
+ If the device service tag is not specified, this module updates the location settings for the \ :emphasis:`hostname`\ .
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
data_center (optional, str, None)
@@ -71,24 +71,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -107,7 +123,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_mgmt_network.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_mgmt_network.rst
index 3939b6e38..a378ab21c 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_mgmt_network.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_mgmt_network.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,13 +30,13 @@ Parameters
device_service_tag (optional, str, None)
Service tag of the device.
- This option is mutually exclusive with *device_id*.
+ This option is mutually exclusive with \ :emphasis:`device\_id`\ .
device_id (optional, int, None)
ID of the device.
- This option is mutually exclusive with *device_service_tag*.
+ This option is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
enable_nic (optional, bool, True)
@@ -54,7 +54,7 @@ Parameters
ipv4_configuration (optional, dict, None)
IPv4 network configuration.
- ``WARNING`` Ensure that you have an alternate interface to access OpenManage Enterprise Modular because these options can change the current IPv4 address for *hostname*.
+ \ :literal:`WARNING`\ Ensure that you have an alternate interface to access OpenManage Enterprise Modular because these options can change the current IPv4 address for \ :emphasis:`hostname`\ .
enable_ipv4 (True, bool, None)
@@ -64,52 +64,52 @@ Parameters
enable_dhcp (optional, bool, None)
Enable or disable the automatic request to obtain an IPv4 address from the IPv4 Dynamic Host Configuration Protocol (DHCP) server.
- ``NOTE`` If this option is ``true``, the values provided for *static_ip_address*, *static_subnet_mask*, and *static_gateway* are not applied for these fields. However, the module may report changes.
+ \ :literal:`NOTE`\ If this option is \ :literal:`true`\ , the values provided for \ :emphasis:`static\_ip\_address`\ , \ :emphasis:`static\_subnet\_mask`\ , and \ :emphasis:`static\_gateway`\ are not applied for these fields. However, the module may report changes.
static_ip_address (optional, str, None)
Static IPv4 address
- This option is applicable when *enable_dhcp* is false.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is false.
static_subnet_mask (optional, str, None)
Static IPv4 subnet mask address
- This option is applicable when *enable_dhcp* is false.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is false.
static_gateway (optional, str, None)
Static IPv4 gateway address
- This option is applicable when *enable_dhcp* is false.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is false.
use_dhcp_to_obtain_dns_server_address (optional, bool, None)
This option allows you to automatically request and obtain an IPv4 address for the DNS server from the DHCP server.
- This option is applicable when *enable_dhcp* is true.
+ This option is applicable when \ :emphasis:`enable\_dhcp`\ is true.
- ``NOTE`` If this option is ``true``, the values provided for *static_preferred_dns_server* and *static_alternate_dns_server* are not applied for these fields. However, the module may report changes.
+ \ :literal:`NOTE`\ If this option is \ :literal:`true`\ , the values provided for \ :emphasis:`static\_preferred\_dns\_server`\ and \ :emphasis:`static\_alternate\_dns\_server`\ are not applied for these fields. However, the module may report changes.
static_preferred_dns_server (optional, str, None)
Static IPv4 DNS preferred server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
static_alternate_dns_server (optional, str, None)
Static IPv4 DNS alternate server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
ipv6_configuration (optional, dict, None)
IPv6 network configuration.
- ``WARNING`` Ensure that you have an alternate interface to access OpenManage Enterprise Modular because these options can change the current IPv6 address for *hostname*.
+ \ :literal:`WARNING`\ Ensure that you have an alternate interface to access OpenManage Enterprise Modular because these options can change the current IPv6 address for \ :emphasis:`hostname`\ .
enable_ipv6 (True, bool, None)
@@ -119,47 +119,47 @@ Parameters
enable_auto_configuration (optional, bool, None)
Enable or disable the automatic request to obtain an IPv6 address from the IPv6 DHCP server or router advertisements(RA)
- If *enable_auto_configuration* is ``true``, OpenManage Enterprise Modular retrieves IP configuration (IPv6 address, prefix, and gateway address) from a DHCPv6 server on the existing network.
+ If \ :emphasis:`enable\_auto\_configuration`\ is \ :literal:`true`\ , OpenManage Enterprise Modular retrieves IP configuration (IPv6 address, prefix, and gateway address) from a DHCPv6 server on the existing network.
- ``NOTE`` If this option is ``true``, the values provided for *static_ip_address*, *static_prefix_length*, and *static_gateway* are not applied for these fields. However, the module may report changes.
+ \ :literal:`NOTE`\ If this option is \ :literal:`true`\ , the values provided for \ :emphasis:`static\_ip\_address`\ , \ :emphasis:`static\_prefix\_length`\ , and \ :emphasis:`static\_gateway`\ are not applied for these fields. However, the module may report changes.
static_ip_address (optional, str, None)
Static IPv6 address
- This option is applicable when *enable_auto_configuration* is false.
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is false.
static_prefix_length (optional, int, None)
Static IPv6 prefix length
- This option is applicable when *enable_auto_configuration* is false.
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is false.
static_gateway (optional, str, None)
Static IPv6 gateway address
- This option is applicable when *enable_auto_configuration* is false.
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is false.
use_dhcpv6_to_obtain_dns_server_address (optional, bool, None)
This option allows you to automatically request and obtain an IPv6 address for the DNS server from the DHCP server.
- This option is applicable when *enable_auto_configuration* is true
+ This option is applicable when \ :emphasis:`enable\_auto\_configuration`\ is true
- ``NOTE`` If this option is ``true``, the values provided for *static_preferred_dns_server* and *static_alternate_dns_server* are not applied for these fields. However, the module may report changes.
+ \ :literal:`NOTE`\ If this option is \ :literal:`true`\ , the values provided for \ :emphasis:`static\_preferred\_dns\_server`\ and \ :emphasis:`static\_alternate\_dns\_server`\ are not applied for these fields. However, the module may report changes.
static_preferred_dns_server (optional, str, None)
Static IPv6 DNS preferred server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
static_alternate_dns_server (optional, str, None)
Static IPv6 DNS alternate server
- This option is applicable when *use_dhcp_for_dns_server_names* is false.
+ This option is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_server\_names`\ is false.
@@ -170,9 +170,9 @@ Parameters
enable_vlan (True, bool, None)
Enable or disable VLAN for management.
- The VLAN configuration cannot be updated if the *register_with_dns* field under *dns_configuration* is true.
+ The VLAN configuration cannot be updated if the \ :emphasis:`register\_with\_dns`\ field under \ :emphasis:`dns\_configuration`\ is true.
- ``WARNING`` Ensure that the network cable is connected to the correct port after the VLAN configuration is changed. If not, the VLAN configuration changes may not be applied.
+ \ :literal:`WARNING`\ Ensure that the network cable is connected to the correct port after the VLAN configuration is changed. If not, the VLAN configuration changes may not be applied.
vlan_id (optional, int, None)
@@ -180,7 +180,7 @@ Parameters
The valid VLAN IDs are: 1 to 4000, and 4021 to 4094.
- This option is applicable when *enable_vlan* is true.
+ This option is applicable when \ :emphasis:`enable\_vlan`\ is true.
@@ -189,33 +189,33 @@ Parameters
register_with_dns (optional, bool, None)
- Register/Unregister *dns_name* on the DNS Server.
+ Register/Unregister \ :emphasis:`dns\_name`\ on the DNS Server.
- ``WARNING`` This option cannot be updated if VLAN configuration changes.
+ \ :literal:`WARNING`\ This option cannot be updated if VLAN configuration changes.
use_dhcp_for_dns_domain_name (optional, bool, None)
- Get the *dns_domain_name* using a DHCP server.
+ Get the \ :emphasis:`dns\_domain\_name`\ using a DHCP server.
dns_name (optional, str, None)
- DNS name for *hostname*
+ DNS name for \ :emphasis:`hostname`\
- This is applicable when *register_with_dns* is true.
+ This is applicable when \ :emphasis:`register\_with\_dns`\ is true.
dns_domain_name (optional, str, None)
Static DNS domain name
- This is applicable when *use_dhcp_for_dns_domain_name* is false.
+ This is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_domain\_name`\ is false.
auto_negotiation (optional, bool, None)
Enables or disables the auto negotiation of the network speed.
- ``NOTE``: Setting *auto_negotiation* to false and choosing a network port speed may result in the chassis loosing link to the top of rack network switch, or to the neighboring chassis in case of MCM mode. It is recommended that the *auto_negotiation* is set to ``true`` for most use cases.
+ \ :literal:`NOTE`\ : Setting \ :emphasis:`auto\_negotiation`\ to false and choosing a network port speed may result in the chassis losing link to the top-of-rack network switch, or to the neighboring chassis in case of MCM mode. It is recommended that \ :emphasis:`auto\_negotiation`\ is set to \ :literal:`true`\ for most use cases.
- This is applicable when *use_dhcp_for_dns_domain_name* is false.
+ This is applicable when \ :emphasis:`use\_dhcp\_for\_dns\_domain\_name`\ is false.
This is applicable only for Chassis.
@@ -223,11 +223,11 @@ Parameters
network_speed (optional, str, None)
The speed of the network port.
- This is applicable when *auto_negotiation* is false.
+ This is applicable when \ :emphasis:`auto\_negotiation`\ is false.
- ``10_MB`` to select network speed of 10 MB.
+ \ :literal:`10\_MB`\ to select network speed of 10 MB.
- ``100_MB`` to select network speed of 100 MB.
+ \ :literal:`100\_MB`\ to select network speed of 100 MB.
This is applicable only for Chassis.
@@ -256,24 +256,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -292,7 +308,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
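A minimal sketch of setting a static IPv4 address with this module, built only from the parameters documented above; all addresses and identifiers are placeholders, and an alternate interface should be available as the WARNING above advises::

    - name: Configure a static IPv4 address for a chassis (illustrative values only)
      dellemc.openmanage.ome_device_mgmt_network:
        hostname: "192.168.0.10"           # placeholder OME-Modular address
        username: "admin"                  # or omit and export OME_USERNAME
        password: "password"               # or omit and export OME_PASSWORD
        ca_path: "/path/to/ca_cert.pem"
        device_service_tag: ABC1234        # placeholder target chassis
        ipv4_configuration:
          enable_ipv4: true
          enable_dhcp: false
          static_ip_address: 192.168.0.20
          static_subnet_mask: 255.255.255.0
          static_gateway: 192.168.0.1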
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_network_services.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_network_services.rst
index f4f84f340..94b518843 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_network_services.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_network_services.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,17 +30,17 @@ Parameters
device_id (optional, int, None)
The ID of the chassis for which the settings need to be updated.
- If the device ID is not specified, this module updates the network services settings for the *hostname*.
+ If the device ID is not specified, this module updates the network services settings for the \ :emphasis:`hostname`\ .
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
The service tag of the chassis for which the setting needs to be updated.
- If the device service tag is not specified, this module updates the network services settings for the *hostname*.
+ If the device service tag is not specified, this module updates the network services settings for the \ :emphasis:`hostname`\ .
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
snmp_settings (optional, dict, None)
@@ -58,7 +58,7 @@ Parameters
community_name (optional, str, None)
The SNMP community string.
- Required when *enabled* is ``true``.
+ Required when \ :emphasis:`enabled`\ is \ :literal:`true`\ .
@@ -100,24 +100,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -136,7 +152,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_power_settings.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_power_settings.rst
index 11ff3c2c4..542792937 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_power_settings.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_power_settings.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,17 +30,17 @@ Parameters
device_id (optional, int, None)
The ID of the chassis for which the settings need to be updated.
- If the device ID is not specified, this module updates the power settings for the *hostname*.
+ If the device ID is not specified, this module updates the power settings for the \ :emphasis:`hostname`\ .
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
The service tag of the chassis for which the setting needs to be updated.
- If the device service tag is not specified, this module updates the power settings for the *hostname*.
+ If the device service tag is not specified, this module updates the power settings for the \ :emphasis:`hostname`\ .
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
power_configuration (optional, dict, None)
@@ -54,7 +54,7 @@ Parameters
power_cap (optional, int, None)
The maximum power consumption limit of the device. Specify the consumption limit in Watts.
- This is required if *enable_power_cap* is set to true.
+ This is required if \ :emphasis:`enable\_power\_cap`\ is set to true.
@@ -65,11 +65,11 @@ Parameters
redundancy_policy (optional, str, NO_REDUNDANCY)
The choices to configure the redundancy policy.
- ``NO_REDUNDANCY`` no redundancy policy is used.
+ \ :literal:`NO\_REDUNDANCY`\ no redundancy policy is used.
- ``GRID_REDUNDANCY`` to distributes power by dividing the PSUs into two grids.
+ \ :literal:`GRID\_REDUNDANCY`\ to distributes power by dividing the PSUs into two grids.
- ``PSU_REDUNDANCY`` to distribute power between all the PSUs.
+ \ :literal:`PSU\_REDUNDANCY`\ to distribute power between all the PSUs.
@@ -84,9 +84,9 @@ Parameters
primary_grid (optional, str, GRID_1)
The choices for PSU grid.
- ``GRID_1`` Hot Spare on Grid 1.
+ \ :literal:`GRID\_1`\ Hot Spare on Grid 1.
- ``GRID_2`` Hot Spare on Grid 2.
+ \ :literal:`GRID\_2`\ Hot Spare on Grid 2.
@@ -94,24 +94,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -130,7 +146,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst
index 6d1e1adf3..399a98956 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_device_quick_deploy.rst
@@ -149,22 +149,30 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
- If the username is not provided, then the environment variable \ :literal:`OME\_USERNAME`\ is used.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
Example: export OME\_USERNAME=username
- password (True, str, None)
+ password (False, str, None)
OpenManage Enterprise Modular password.
- If the password is not provided, then the environment variable \ :literal:`OME\_PASSWORD`\ is used.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
Example: export OME\_PASSWORD=password
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst
index b1d56a045..ad4715d33 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_devices.rst
@@ -85,13 +85,29 @@ Parameters
OpenManage Enterprise IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise HTTPS port.
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_diagnostics.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_diagnostics.rst
index 3469c4f4a..05315e43c 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_diagnostics.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_diagnostics.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,43 +30,43 @@ Parameters
device_ids (optional, list, None)
List of target device IDs.
- This is applicable for ``support_assist_collection`` and ``supportassist_collection`` logs.
+ This is applicable for \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ logs.
- This option is mutually exclusive with *device_service_tags* and *device_group_name*.
+ This option is mutually exclusive with \ :emphasis:`device\_service\_tags`\ and \ :emphasis:`device\_group\_name`\ .
device_service_tags (optional, list, None)
List of target identifiers.
- This is applicable for ``support_assist_collection`` and ``supportassist_collection`` logs.
+ This is applicable for \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ logs.
- This option is mutually exclusive with *device_ids* and *device_group_name*.
+ This option is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_group\_name`\ .
device_group_name (optional, str, None)
- Name of the device group to export ``support_assist_collection`` or ``supportassist_collection`` logs of all devices within the group.
+ Name of the device group to export \ :literal:`support\_assist\_collection`\ or \ :literal:`supportassist\_collection`\ logs of all devices within the group.
- This is applicable for ``support_assist_collection`` and ``supportassist_collection`` logs.
+ This is applicable for \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ logs.
This option is not applicable for OpenManage Enterprise Modular.
- This option is mutually exclusive with *device_ids* and *device_service_tags*.
+ This option is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_service\_tags`\ .
log_type (optional, str, support_assist_collection)
- ``application`` is applicable for OpenManage Enterprise Modular to export the application log bundle.
+ \ :literal:`application`\ is applicable for OpenManage Enterprise Modular to export the application log bundle.
- ``support_assist_collection`` and ``supportassist_collection`` is applicable for one or more devices to export SupportAssist logs.
+ \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ are applicable for one or more devices to export SupportAssist logs.
- ``support_assist_collection`` and ``supportassist_collection`` supports both OpenManage Enterprise and OpenManage Enterprise Modular.
+ \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ support both OpenManage Enterprise and OpenManage Enterprise Modular.
- ``support_assist_collection`` and ``supportassist_collection`` does not support export of ``OS_LOGS`` from OpenManage Enterprise. If tried to export, the tasks will complete with errors, and the module fails.
+ \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ do not support export of \ :literal:`OS\_LOGS`\ from OpenManage Enterprise. If export is attempted, the tasks complete with errors, and the module fails.
mask_sensitive_info (optional, bool, False)
Select this option to mask the personal identification information such as IPAddress, DNS, alert destination, email, gateway, inet6, MacAddress, netmask etc.
- This option is applicable for ``application`` of *log_type*.
+ This option is applicable for \ :literal:`application`\ of \ :emphasis:`log\_type`\ .
log_selectors (optional, list, None)
@@ -74,13 +74,13 @@ Parameters
If the log types are not specified, only the hardware logs are exported.
- ``OS_LOGS`` to collect OS Logs.
+ \ :literal:`OS\_LOGS`\ to collect OS Logs.
- ``RAID_LOGS`` to collect RAID controller logs.
+ \ :literal:`RAID\_LOGS`\ to collect RAID controller logs.
- ``DEBUG_LOGS`` to collect Debug logs.
+ \ :literal:`DEBUG\_LOGS`\ to collect Debug logs.
- This option is applicable only for ``support_assist_collection`` and ``supportassist_collection`` of *log_type*.
+ This option is applicable only for \ :literal:`support\_assist\_collection`\ and \ :literal:`supportassist\_collection`\ of \ :emphasis:`log\_type`\ .
share_address (True, str, None)
@@ -90,7 +90,7 @@ Parameters
share_name (True, str, None)
Network share path.
+ The filename is auto-generated and should not be provided as part of \ :emphasis:`share\_name`\ .
+ Filename is auto generated and should not be provided as part of \ :emphasis:`share\_name`\ .
share_type (True, str, None)
@@ -100,67 +100,83 @@ Parameters
share_user (optional, str, None)
Network share username.
- This option is applicable for ``CIFS`` of *share_type*.
+ This option is applicable for \ :literal:`CIFS`\ of \ :emphasis:`share\_type`\ .
share_password (optional, str, None)
Network share password
- This option is applicable for ``CIFS`` of *share_type*.
+ This option is applicable for \ :literal:`CIFS`\ of \ :emphasis:`share\_type`\ .
share_domain (optional, str, None)
Network share domain name.
- This option is applicable for ``CIFS`` if *share_type*.
+ This option is applicable for \ :literal:`CIFS`\ if \ :emphasis:`share\_type`\ .
job_wait (optional, bool, True)
Whether to wait for the Job completion or not.
- The maximum wait time is *job_wait_timeout*.
+ The maximum wait time is \ :emphasis:`job\_wait\_timeout`\ .
job_wait_timeout (optional, int, 60)
- The maximum wait time of *job_wait* in minutes.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in minutes.
- This option is applicable *job_wait* is true.
+ This option is applicable when \ :emphasis:`job\_wait`\ is true.
test_connection (optional, bool, False)
Test the availability of the network share location.
- *job_wait* and *job_wait_timeout* options are not applicable for *test_connection*.
+ \ :emphasis:`job\_wait`\ and \ :emphasis:`job\_wait\_timeout`\ options are not applicable for \ :emphasis:`test\_connection`\ .
lead_chassis_only (optional, bool, False)
Extract the logs from Lead chassis only.
- *lead_chassis_only* is only applicable when *log_type* is ``application`` on OpenManage Enterprise Modular.
+ \ :emphasis:`lead\_chassis\_only`\ is only applicable when \ :emphasis:`log\_type`\ is \ :literal:`application`\ on OpenManage Enterprise Modular.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -180,7 +196,7 @@ Notes
.. note::
- Run this module from a system that has direct access to OpenManage Enterprise.
- This module performs the test connection and device validations. It does not create a job for copying the logs in check mode and always reports as changes found.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
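A minimal sketch of exporting SupportAssist logs with this module; the credentials are omitted on the assumption that OME\_USERNAME and OME\_PASSWORD are exported as described above, the share details are placeholders, and NFS is assumed to be an accepted \ :emphasis:`share\_type`\ value::

    # Assumes OME_USERNAME and OME_PASSWORD are exported, so username/password are omitted.
    - name: Export SupportAssist logs to a network share (illustrative values only)
      dellemc.openmanage.ome_diagnostics:
        hostname: "192.168.0.10"           # placeholder appliance address
        ca_path: "/path/to/ca_cert.pem"
        log_type: support_assist_collection
        device_service_tags:
          - ABC1234                        # placeholder target device
        share_type: NFS                    # assumed valid choice; CIFS would need share_user/share_password
        share_address: "192.168.0.100"     # placeholder share host
        share_name: "/nfs/logexport"       # placeholder share path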
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_discovery.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_discovery.rst
index e290629b1..b02860a94 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_discovery.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_discovery.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,27 +28,27 @@ Parameters
----------
state (optional, str, present)
- ``present`` creates a discovery job or modifies an existing discovery job.
+ \ :literal:`present`\ creates a discovery job or modifies an existing discovery job.
- *discovery_job_name* is mandatory for the creation of a new discovery job.
+ \ :emphasis:`discovery\_job\_name`\ is mandatory for the creation of a new discovery job.
- If multiple discoveries of the same *discovery_job_name* exist, then the new discovery job will not be created.
+ If multiple discoveries of the same \ :emphasis:`discovery\_job\_name`\ exist, then the new discovery job will not be created.
- ``absent`` deletes an existing discovery job(s) with the specified *discovery_job_name*.
+ \ :literal:`absent`\ deletes an existing discovery job(s) with the specified \ :emphasis:`discovery\_job\_name`\ .
discovery_job_name (optional, str, None)
Name of the discovery configuration job.
- It is mutually exclusive with *discovery_id*.
+ It is mutually exclusive with \ :emphasis:`discovery\_id`\ .
discovery_id (optional, int, None)
ID of the discovery configuration group.
- This value is DiscoveryConfigGroupId in the return values under discovery_status.
+ This value is DiscoveryConfigGroupId in the return values under discovery\_status.
- It is mutually exclusive with *discovery_job_name*.
+ It is mutually exclusive with \ :emphasis:`discovery\_job\_name`\ .
new_name (optional, str, None)
@@ -58,7 +58,7 @@ Parameters
schedule (optional, str, RunNow)
Provides the option to schedule the discovery job.
- If ``RunLater`` is selected, then *cron* must be specified.
+ If \ :literal:`RunLater`\ is selected, then \ :emphasis:`cron`\ must be specified.
cron (optional, str, None)
@@ -82,33 +82,33 @@ Parameters
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This option is applicable when *state* is ``present``.
+ This option is applicable when \ :emphasis:`state`\ is \ :literal:`present`\ .
job_wait_timeout (optional, int, 10800)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
ignore_partial_failure (optional, bool, False)
Provides the option to ignore partial failures. Partial failures occur when there is a combination of both discovered and undiscovered IPs.
- If ``false``, then the partial failure is not ignored, and the module will error out.
+ If \ :literal:`false`\ , then the partial failure is not ignored, and the module will error out.
- If ``true``, then the partial failure is ignored.
+ If \ :literal:`true`\ , then the partial failure is ignored.
- This option is only applicable if *job_wait* is ``true``.
+ This option is only applicable if \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
discovery_config_targets (optional, list, None)
Provide the list of discovery targets.
- Each discovery target is a set of *network_address_detail*, *device_types*, and one or more protocol credentials.
+ Each discovery target is a set of \ :emphasis:`network\_address\_detail`\ , \ :emphasis:`device\_types`\ , and one or more protocol credentials.
- This is mandatory when *state* is ``present``.
+ This is mandatory when \ :emphasis:`state`\ is \ :literal:`present`\ .
- ``WARNING`` Modification of this field is not supported, this field is overwritten every time. Ensure to provide all the required details for this field.
+ \ :literal:`WARNING`\ Modification of this field is not supported, this field is overwritten every time. Ensure to provide all the required details for this field.
network_address_detail (True, list, None)
@@ -134,15 +134,15 @@ Parameters
Sample Invalid IP Range Formats
- 192.35.0.*
+ 192.35.0.\*
192.36.0.0-255
192.35.0.0/255.255.255.0
- ``NOTE`` The range size for the number of IP addresses is limited to 16,385 (0x4001).
+ \ :literal:`NOTE`\ The range size for the number of IP addresses is limited to 16,385 (0x4001).
- ``NOTE`` Both IPv6 and IPv6 CIDR formats are supported.
+ \ :literal:`NOTE`\ Both IPv4 and IPv6 CIDR formats are supported.
device_types (True, list, None)
@@ -154,13 +154,13 @@ Parameters
Supported protocols for each device type are:
- SERVER - *wsman*, *redfish*, *snmp*, *ipmi*, *ssh*, and *vmware*.
+ SERVER - \ :emphasis:`wsman`\ , \ :emphasis:`redfish`\ , \ :emphasis:`snmp`\ , \ :emphasis:`ipmi`\ , \ :emphasis:`ssh`\ , and \ :emphasis:`vmware`\ .
- CHASSIS - *wsman*, and *redfish*.
+ CHASSIS - \ :emphasis:`wsman`\ , and \ :emphasis:`redfish`\ .
- NETWORK SWITCH - *snmp*.
+ NETWORK SWITCH - \ :emphasis:`snmp`\ .
- STORAGE - *storage*, and *snmp*.
+ STORAGE - \ :emphasis:`storage`\ , and \ :emphasis:`snmp`\ .
wsman (optional, dict, None)
@@ -411,24 +411,40 @@ Parameters
OpenManage Enterprise IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -447,8 +463,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module does not support ``check_mode``.
- - If *state* is ``present``, then Idempotency is not supported.
+ - This module does not support \ :literal:`check\_mode`\ .
+ - If \ :emphasis:`state`\ is \ :literal:`present`\ , then idempotency is not supported.
@@ -636,7 +652,7 @@ msg (always, str, Successfully deleted 1 discovery job(s).)
discovery_status (when I(state) is C(present), dict, {'Completed': ['192.168.24.17', '192.168.24.20', '192.168.24.22'], 'Failed': ['192.168.24.15', '192.168.24.16', '192.168.24.18', '192.168.24.19', '192.168.24.21', 'host123'], 'DiscoveredDevicesByType': [{'Count': 3, 'DeviceType': 'SERVER'}], 'DiscoveryConfigDiscoveredDeviceCount': 3, 'DiscoveryConfigEmailRecipient': 'myemail@dell.com', 'DiscoveryConfigExpectedDeviceCount': 9, 'DiscoveryConfigGroupId': 125, 'JobDescription': 'D1', 'JobEnabled': True, 'JobEndTime': '2021-01-01 06:27:29.99', 'JobId': 12666, 'JobName': 'D1', 'JobNextRun': None, 'JobProgress': '100', 'JobSchedule': 'startnow', 'JobStartTime': '2021-01-01 06:24:10.071', 'JobStatusId': 2090, 'LastUpdateTime': '2021-01-01 06:27:30.001', 'UpdatedBy': 'admin'})
Details of the discovery job created or modified.
- If *job_wait* is true, Completed and Failed IPs are also listed.
+ If \ :emphasis:`job\_wait`\ is true, Completed and Failed IPs are also listed.
discovery_ids (when discoveries with duplicate name exist for I(state) is C(present), list, [1234, 5678])
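Taken together, the ome_discovery options above map onto a short playbook task. A minimal sketch, with illustrative addresses and names, and assuming the wsman credential block accepts username and password sub-options (its sub-options are not expanded in this hunk)::

    - name: Discover servers over WSMAN
      dellemc.openmanage.ome_discovery:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        discovery_job_name: "Discovery_server_1"
        discovery_config_targets:
          - network_address_detail:
              - "192.96.24.1"
            device_types:
              - SERVER
            wsman:
              username: "idrac_user"      # sub-option names assumed; not shown in this hunk
              password: "idrac_password"
        job_wait: true
        job_wait_timeout: 10800

With job_wait enabled, the discovery_status return value lists the Completed and Failed IPs described above.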
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_domain_user_groups.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_domain_user_groups.rst
index a9d971a78..5beafe144 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_domain_user_groups.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_domain_user_groups.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -28,9 +28,9 @@ Parameters
----------
state (optional, str, present)
- ``present`` imports or modifies the Active Directory/LDAP user group.
+ \ :literal:`present`\ imports or modifies the Active Directory/LDAP user group.
- ``absent`` deletes an existing Active Directory/LDAP user group.
+ \ :literal:`absent`\ deletes an existing Active Directory/LDAP user group.
group_name (True, str, None)
@@ -38,7 +38,7 @@ Parameters
Examples for user group name: Administrator or Account Operators or Access Control Assistance Operator.
- *group_name* value is case insensitive.
+ \ :emphasis:`group\_name`\ value is case insensitive.
role (optional, str, None)
@@ -48,13 +48,13 @@ Parameters
OpenManage Enterprise Roles: ADMINISTRATOR, DEVICE MANAGER, VIEWER.
- *role* value is case insensitive.
+ \ :emphasis:`role`\ value is case insensitive.
directory_name (optional, str, None)
The directory name set while adding the Active Directory/LDAP.
- *directory_name* is mutually exclusive with *directory_id*.
+ \ :emphasis:`directory\_name`\ is mutually exclusive with \ :emphasis:`directory\_id`\ .
directory_type (optional, str, AD)
@@ -64,13 +64,13 @@ Parameters
directory_id (optional, int, None)
The ID of the Active Directory/LDAP.
- *directory_id* is mutually exclusive with *directory_name*.
+ \ :emphasis:`directory\_id`\ is mutually exclusive with \ :emphasis:`directory\_name`\ .
domain_username (optional, str, None)
Active Directory/LDAP domain username.
- Example: username@domain or domain\username.
+ Example: username@domain or domain\\username.
domain_password (optional, str, None)
@@ -81,24 +81,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -116,7 +132,7 @@ Notes
-----
.. note::
- - This module supports ``check_mode`` and idempotency.
+ - This module supports \ :literal:`check\_mode`\ and idempotency.
- Run this module from a system that has direct access to OpenManage Enterprise or OpenManage Enterprise Modular.
@@ -194,7 +210,7 @@ msg (always, str, Successfully imported the Active Directory/LDAP user group.)
domain_user_status (When I(state) is C(present)., dict, {'Description': None, 'DirectoryServiceId': 16097, 'Enabled': True, 'Id': '16617', 'IsBuiltin': False, 'IsVisible': True, 'Locked': False, 'Name': 'Account Operators', 'ObjectGuid': 'a491859c-031e-42a3-ae5e-0ab148ecf1d6', 'ObjectSid': None, 'Oem': None, 'Password': None, 'PlainTextPassword': None, 'RoleId': '16', 'UserName': 'Account Operators', 'UserTypeId': 2})
- Details of the domain user operation, when *state* is ``present``.
+ Details of the domain user operation, when \ :emphasis:`state`\ is \ :literal:`present`\ .
error_info (on HTTP error, dict, {'error': {'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.', '@Message.ExtendedInfo': [{'MessageId': 'GEN1234', 'RelatedProperties': [], 'Message': 'Unable to process the request because an error occurred.', 'MessageArgs': [], 'Severity': 'Critical', 'Resolution': 'Retry the operation. If the issue persists, contact your system administrator.'}]}})
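A minimal sketch of importing a directory group with the parameters documented above; the group, directory, and domain credential values are illustrative::

    - name: Import an Active Directory user group with the administrator role
      dellemc.openmanage.ome_domain_user_groups:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        group_name: "Account Operators"
        role: ADMINISTRATOR
        directory_type: AD
        directory_name: "my-directory-service"
        domain_username: "username@domain"
        domain_password: "domain_password"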
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware.rst
index f9870d4f9..2168ebe93 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,35 +30,35 @@ Parameters
device_service_tag (optional, list, None)
List of service tags of the targeted devices.
- Either *device_id* or *device_service_tag* can be used individually or together.
+ Either \ :emphasis:`device\_id`\ or \ :emphasis:`device\_service\_tag`\ can be used individually or together.
- This option is mutually exclusive with *device_group_names* and *devices*.
+ This option is mutually exclusive with \ :emphasis:`device\_group\_names`\ and \ :emphasis:`devices`\ .
device_id (optional, list, None)
List of IDs of the targeted devices.
- Either *device_id* or *device_service_tag* can be used individually or together.
+ Either \ :emphasis:`device\_id`\ or \ :emphasis:`device\_service\_tag`\ can be used individually or together.
- This option is mutually exclusive with *device_group_names* and *devices*.
+ This option is mutually exclusive with \ :emphasis:`device\_group\_names`\ and \ :emphasis:`devices`\ .
device_group_names (optional, list, None)
Enter the name of the device group that contains the devices on which firmware needs to be updated.
- This option is mutually exclusive with *device_id* and *device_service_tag*.
+ This option is mutually exclusive with \ :emphasis:`device\_id`\ and \ :emphasis:`device\_service\_tag`\ .
dup_file (optional, path, None)
The path of the Dell Update Package (DUP) file that contains the firmware or drivers required to update the target system device or individual device components.
- This is mutually exclusive with *baseline_name*, *components*, and *devices*.
+ This is mutually exclusive with \ :emphasis:`baseline\_name`\ , \ :emphasis:`components`\ , and \ :emphasis:`devices`\ .
baseline_name (optional, str, None)
Enter the baseline name to update the firmware of all devices or a list of devices that are not compliant.
- This option is mutually exclusive with *dup_file* and *device_group_names*.
+ This option is mutually exclusive with \ :emphasis:`dup\_file`\ and \ :emphasis:`device\_group\_names`\ .
components (optional, list, [])
@@ -68,25 +68,25 @@ Parameters
This option is case sensitive.
- This is applicable to *device_service_tag*, *device_id*, and *baseline_name*.
+ This is applicable to \ :emphasis:`device\_service\_tag`\ , \ :emphasis:`device\_id`\ , and \ :emphasis:`baseline\_name`\ .
devices (optional, list, None)
This option allows you to select components on each device for firmware update.
- This option is mutually exclusive with *dup_file*, *device_group_names*, *device_id*, and *device_service_tag*.
+ This option is mutually exclusive with \ :emphasis:`dup\_file`\ , \ :emphasis:`device\_group\_names`\ , \ :emphasis:`device\_id`\ , and \ :emphasis:`device\_service\_tag`\ .
id (optional, int, None)
The id of the target device to be updated.
- This option is mutually exclusive with *service_tag*.
+ This option is mutually exclusive with \ :emphasis:`service\_tag`\ .
service_tag (optional, str, None)
The service tag of the target device to be updated.
- This option is mutually exclusive with *id*.
+ This option is mutually exclusive with \ :emphasis:`id`\ .
components (optional, list, [])
@@ -97,45 +97,61 @@ Parameters
schedule (optional, str, RebootNow)
Select the schedule for the firmware update.
- if ``StageForNextReboot`` is chosen, the firmware will be staged and updated during the next reboot of the target device.
+ If \ :literal:`StageForNextReboot`\ is chosen, the firmware will be staged and updated during the next reboot of the target device.
- if ``RebootNow`` will apply the firmware updates immediately.
+ If \ :literal:`RebootNow`\ is chosen, the firmware updates are applied immediately.
reboot_type (optional, str, GracefulRebootForce)
This option provides the choices to reboot the server immediately after the firmware update.
- This is applicable when *schedule* is ``RebootNow``.
+ This is applicable when \ :emphasis:`schedule`\ is \ :literal:`RebootNow`\ .
- ``GracefulRebootForce`` performs a graceful reboot with forced shutdown.
+ \ :literal:`GracefulRebootForce`\ performs a graceful reboot with forced shutdown.
- ``GracefulReboot`` performs a graceful reboot without forced shutdown.
+ \ :literal:`GracefulReboot`\ performs a graceful reboot without forced shutdown.
- ``PowerCycle`` performs a power cycle for a hard reset on the device.
+ \ :literal:`PowerCycle`\ performs a power cycle for a hard reset on the device.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -154,7 +170,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
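A minimal sketch of a baseline-driven update using the schedule and reboot_type choices above; the hostname and baseline name are illustrative::

    - name: Update non-compliant devices against a baseline with a graceful reboot
      dellemc.openmanage.ome_firmware:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        baseline_name: "baseline_devices"
        schedule: RebootNow
        reboot_type: GracefulReboot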
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline.rst
index 971551b21..4552b2fc4 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,21 +28,21 @@ Parameters
----------
state (optional, str, present)
- ``present`` creates or modifies a baseline.
+ \ :literal:`present`\ creates or modifies a baseline.
- ``absent`` deletes an existing baseline.
+ \ :literal:`absent`\ deletes an existing baseline.
baseline_name (optional, str, None)
Name of the baseline.
- This option is mutually exclusive with *baseline_id*.
+ This option is mutually exclusive with \ :emphasis:`baseline\_id`\ .
baseline_id (optional, int, None)
ID of the existing baseline.
- This option is mutually exclusive with *baseline_name*.
+ This option is mutually exclusive with \ :emphasis:`baseline\_name`\ .
new_baseline_name (optional, str, None)
@@ -60,43 +60,43 @@ Parameters
downgrade_enabled (optional, bool, None)
Indicates whether firmware downgrade is allowed for the devices in the baseline.
- This value will be set to ``true`` by default, if not provided during baseline creation.
+ This value will be set to \ :literal:`true`\ by default, if not provided during baseline creation.
is_64_bit (optional, bool, None)
Indicates if the repository contains 64-bit DUPs.
- This value will be set to ``true`` by default, if not provided during baseline creation.
+ This value will be set to \ :literal:`true`\ by default, if not provided during baseline creation.
device_ids (optional, list, None)
List of device IDs.
- This option is mutually exclusive with *device_service_tags* and *device_group_names*.
+ This option is mutually exclusive with \ :emphasis:`device\_service\_tags`\ and \ :emphasis:`device\_group\_names`\ .
device_service_tags (optional, list, None)
List of device service tags.
- This option is mutually exclusive with *device_ids* and *device_group_names*.
+ This option is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_group\_names`\ .
device_group_names (optional, list, None)
List of group names.
- This option is mutually exclusive with *device_ids* and *device_service_tags*.
+ This option is mutually exclusive with \ :emphasis:`device\_ids`\ and \ :emphasis:`device\_service\_tags`\ .
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This option is applicable when *state* is ``present``.
+ This option is applicable when \ :emphasis:`state`\ is \ :literal:`present`\ .
job_wait_timeout (optional, int, 600)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
filter_no_reboot_required (optional, bool, None)
@@ -107,24 +107,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -143,8 +159,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- - *device_group_names* option is not applicable for OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - \ :emphasis:`device\_group\_names`\ option is not applicable for OpenManage Enterprise Modular.
+ - This module supports \ :literal:`check\_mode`\ .
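A minimal sketch that assigns devices to an existing baseline through device_service_tags; the catalog-related options needed when creating a baseline for the first time are not part of this hunk and are omitted here. Values are illustrative::

    - name: Assign devices to an existing firmware baseline
      dellemc.openmanage.ome_firmware_baseline:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        baseline_name: "baseline_devices"
        device_service_tags:
          - SVCTAG1
          - SVCTAG2
        job_wait: true
        job_wait_timeout: 600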
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_compliance_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_compliance_info.rst
index a1c5ca246..dcbcd3a58 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_compliance_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_compliance_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -32,15 +32,15 @@ Parameters
This option is mandatory for generating baseline based device compliance report.
- *baseline_name* is mutually exclusive with *device_ids*, *device_service_tags* and *device_group_names*.
+ \ :emphasis:`baseline\_name`\ is mutually exclusive with \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_service\_tags`\ and \ :emphasis:`device\_group\_names`\ .
device_ids (optional, list, None)
A list of unique identifier for device based compliance report.
- Either *device_ids*, *device_service_tags* or *device_group_names* is required to generate device based compliance report.
+ Either \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_service\_tags`\ or \ :emphasis:`device\_group\_names`\ is required to generate device based compliance report.
- *device_ids* is mutually exclusive with *device_service_tags*, *device_group_names* and *baseline_name*.
+ \ :emphasis:`device\_ids`\ is mutually exclusive with \ :emphasis:`device\_service\_tags`\ , \ :emphasis:`device\_group\_names`\ and \ :emphasis:`baseline\_name`\ .
Devices without reports are ignored.
@@ -48,9 +48,9 @@ Parameters
device_service_tags (optional, list, None)
A list of service tags for device based compliance report.
- Either *device_ids*, *device_service_tags* or *device_group_names* is required to generate device based compliance report.
+ Either \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_service\_tags`\ or \ :emphasis:`device\_group\_names`\ is required to generate device based compliance report.
- *device_service_tags* is mutually exclusive with *device_ids*, *device_group_names* and *baseline_name*.
+ \ :emphasis:`device\_service\_tags`\ is mutually exclusive with \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_group\_names`\ and \ :emphasis:`baseline\_name`\ .
Devices without reports are ignored.
@@ -58,9 +58,9 @@ Parameters
device_group_names (optional, list, None)
A list of group names for device based compliance report.
- Either *device_ids*, *device_service_tags* or *device_group_names* is required to generate device based compliance report.
+ Either \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_service\_tags`\ or \ :emphasis:`device\_group\_names`\ is required to generate device based compliance report.
- *device_group_names* is mutually exclusive with *device_ids*, *device_service_tags* and *baseline_name*.
+ \ :emphasis:`device\_group\_names`\ is mutually exclusive with \ :emphasis:`device\_ids`\ , \ :emphasis:`device\_service\_tags`\ and \ :emphasis:`baseline\_name`\ .
Devices without reports are ignored.
@@ -69,24 +69,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -105,7 +121,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
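A minimal sketch generating the baseline-based compliance report described above; the baseline name is illustrative and the result is registered for later use::

    - name: Retrieve the compliance report for a baseline
      dellemc.openmanage.ome_firmware_baseline_compliance_info:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        baseline_name: "baseline_devices"
      register: compliance_report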
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_info.rst
index 49ba4cd8f..ee60b1488 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_baseline_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,31 +28,47 @@ Parameters
----------
baseline_name (optional, str, None)
- Name of the baseline.If *baseline_name* is not provided, all the available firmware baselines are returned.
+ Name of the baseline. If \ :emphasis:`baseline\_name`\ is not provided, all the available firmware baselines are returned.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -71,7 +87,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
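Because baseline_name is optional, omitting it returns every available baseline. A minimal sketch, with an illustrative hostname::

    - name: Retrieve details of all firmware baselines
      dellemc.openmanage.ome_firmware_baseline_info:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
      register: baseline_details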
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_catalog.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_catalog.rst
index 2813e66a6..b5a4d5073 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_catalog.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_firmware_catalog.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,17 +28,17 @@ Parameters
----------
state (optional, str, present)
- ``present`` creates or modifies a catalog.
+ \ :literal:`present`\ creates or modifies a catalog.
- ``absent`` deletes an existing catalog.
+ \ :literal:`absent`\ deletes an existing catalog.
catalog_name (optional, list, None)
Name of the firmware catalog to be created.
- This option is mutually exclusive with *catalog_id*.
+ This option is mutually exclusive with \ :emphasis:`catalog\_id`\ .
- Provide the list of firmware catalog names that are supported when *state* is ``absent``.
+ Provide the list of firmware catalog names that are supported when \ :emphasis:`state`\ is \ :literal:`absent`\ .
new_catalog_name (optional, str, None)
@@ -48,9 +48,9 @@ Parameters
catalog_id (optional, list, None)
ID of the catalog.
- This option is mutually exclusive with *catalog_name*.
+ This option is mutually exclusive with \ :emphasis:`catalog\_name`\ .
- Provide the list of firmware catalog IDs that are supported when *state* is ``absent``.
+ Provide the list of firmware catalog IDs that are supported when \ :emphasis:`state`\ is \ :literal:`absent`\ .
catalog_description (optional, str, None)
@@ -60,87 +60,103 @@ Parameters
source (optional, str, None)
The IP address of the system where the firmware catalog is stored on the local network.
- By default, this option is set to downloads.dell.com when *repository_type* is ``DELL_ONLINE``.
+ By default, this option is set to downloads.dell.com when \ :emphasis:`repository\_type`\ is \ :literal:`DELL\_ONLINE`\ .
source_path (optional, str, None)
Specify the complete path of the catalog file location without the file name.
- This is option ignored when *repository_type* is ``DELL_ONLINE``.
+ This option is ignored when \ :emphasis:`repository\_type`\ is \ :literal:`DELL\_ONLINE`\ .
file_name (optional, str, None)
- Catalog file name associated with the *source_path*.
+ Catalog file name associated with the \ :emphasis:`source\_path`\ .
- This option is ignored when *repository_type* is ``DELL_ONLINE``.
+ This option is ignored when \ :emphasis:`repository\_type`\ is \ :literal:`DELL\_ONLINE`\ .
repository_type (optional, str, None)
- Type of repository. The supported types are NFS, CIFS, HTTP, HTTPS,and DELL_ONLINE.
+ Type of repository. The supported types are NFS, CIFS, HTTP, HTTPS, and DELL\_ONLINE.
repository_username (optional, str, None)
User name of the repository where the catalog is stored.
- This option is mandatory when *repository_type* is CIFS.
+ This option is mandatory when \ :emphasis:`repository\_type`\ is CIFS.
- This option is ignored when *repository_type* is ``DELL_ONLINE``.
+ This option is ignored when \ :emphasis:`repository\_type`\ is \ :literal:`DELL\_ONLINE`\ .
repository_password (optional, str, None)
Password to access the repository.
- This option is mandatory when *repository_type* is CIFS.
+ This option is mandatory when \ :emphasis:`repository\_type`\ is CIFS.
- This option is ignored when *repository_type* is ``DELL_ONLINE``.
+ This option is ignored when \ :emphasis:`repository\_type`\ is \ :literal:`DELL\_ONLINE`\ .
- ``NOTE`` The module always reports the changed status, when this is provided.
+ \ :literal:`NOTE`\ The module always reports the changed status when this is provided.
repository_domain (optional, str, None)
Domain name of the repository.
- This option is ignored when *repository_type* is ``DELL_ONLINE``.
+ This option is ignored when \ :emphasis:`repository\_type`\ is \ :literal:`DELL\_ONLINE`\ .
check_certificate (optional, bool, False)
- The certificate warnings are ignored when *repository_type* is HTTPS. If ``true``. If not, certificate warnings are not ignored.
+ If \ :literal:`true`\ , the certificate warnings are ignored when \ :emphasis:`repository\_type`\ is HTTPS. If not, certificate warnings are not ignored.
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This option is applicable when *state* is ``present``.
+ This option is applicable when \ :emphasis:`state`\ is \ :literal:`present`\ .
job_wait_timeout (optional, int, 600)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -158,10 +174,10 @@ Notes
-----
.. note::
- - If *repository_password* is provided, then the module always reports the changed status.
+ - If \ :emphasis:`repository\_password`\ is provided, then the module always reports the changed status.
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- This module supports IPv4 and IPv6 addresses.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -294,7 +310,7 @@ msg (always, str, Successfully triggered the job to create a catalog with Task I
Overall status of the firmware catalog operation.
-catalog_status (When I(state) is C(present), dict, {'AssociatedBaselines': [], 'BaseLocation': None, 'BundlesCount': 0, 'Filename': 'catalog.gz', 'Id': 0, 'LastUpdated': None, 'ManifestIdentifier': None, 'ManifestVersion': None, 'NextUpdate': None, 'PredecessorIdentifier': None, 'ReleaseDate': None, 'ReleaseIdentifier': None, 'Repository': {'CheckCertificate': True, 'Description': 'HTTPS Desc', 'DomainName': None, 'Id': None, 'Name': 'catalog4', 'Password': None, 'RepositoryType': 'HTTPS', 'Source': 'company.com', 'Username': None}, 'Schedule': None, 'SourcePath': 'catalog', 'Status': None, 'TaskId': 10094})
+catalog_status (When I(state) is C(present), dict, {'AssociatedBaselines': [], 'BaseLocation': None, 'BundlesCount': 0, 'Filename': 'catalog.gz', 'Id': 12, 'LastUpdated': None, 'ManifestIdentifier': None, 'ManifestVersion': None, 'NextUpdate': None, 'PredecessorIdentifier': None, 'ReleaseDate': None, 'ReleaseIdentifier': None, 'Repository': {'CheckCertificate': True, 'Description': 'HTTPS Desc', 'DomainName': None, 'Id': None, 'Name': 'catalog4', 'Password': None, 'RepositoryType': 'HTTPS', 'Source': 'company.com', 'Username': None}, 'Schedule': None, 'SourcePath': 'catalog', 'Status': None, 'TaskId': 10094})
Details of the catalog operation.
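A minimal sketch creating a catalog from an HTTPS repository with the options documented above; the repository location, file name, and catalog name are illustrative, and since catalog_name is a list, a single name is passed as a one-element list::

    - name: Create a firmware catalog from an HTTPS repository
      dellemc.openmanage.ome_firmware_catalog:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        catalog_name:
          - catalog_https
        repository_type: HTTPS
        source: "downloads.company.com"
        source_path: "catalog"
        file_name: "catalog.gz"
        check_certificate: true
        job_wait: true
        job_wait_timeout: 600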
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_groups.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_groups.rst
index 4322d460c..5671d3b8c 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_groups.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_groups.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,83 +28,99 @@ Parameters
----------
state (optional, str, present)
- ``present`` allows to create or modify a device group.
+ \ :literal:`present`\ allows you to create or modify a device group.
- ``absent`` allows to delete a device group.
+ \ :literal:`absent`\ allows you to delete a device group.
name (optional, list, None)
Name of the device group to be created, modified, or deleted.
- If *state* is absent, multiple names can be provided.
+ If \ :emphasis:`state`\ is absent, multiple names can be provided.
This option is case insensitive.
- This option is mutually exclusive with *group_id*.
+ This option is mutually exclusive with \ :emphasis:`group\_id`\ .
group_id (optional, list, None)
ID of the device group to be created, modified, or deleted.
- If *state* is absent, multiple IDs can be provided.
+ If \ :emphasis:`state`\ is absent, multiple IDs can be provided.
- This option is mutually exclusive with *name*.
+ This option is mutually exclusive with \ :emphasis:`name`\ .
new_name (optional, str, None)
New name for the existing device group.
- This is applicable only when *state* is ``present``.
+ This is applicable only when \ :emphasis:`state`\ is \ :literal:`present`\ .
description (optional, str, None)
Description for the device group.
- This is applicable only when *state* is ``present``.
+ This is applicable only when \ :emphasis:`state`\ is \ :literal:`present`\ .
parent_group_name (optional, str, Static Groups)
Name of the parent device group under which the device group is to be created or modified.
- This is applicable only when *state* is ``present``.
+ This is applicable only when \ :emphasis:`state`\ is \ :literal:`present`\ .
- ``NOTE`` If device group with such a name does not exist, device group with *parent_group_name* is created.
+ \ :literal:`NOTE`\ If a device group with such a name does not exist, a device group with \ :emphasis:`parent\_group\_name`\ is created.
This option is case insensitive.
- This option is mutually exclusive with *parent_group_id*.
+ This option is mutually exclusive with \ :emphasis:`parent\_group\_id`\ .
parent_group_id (optional, int, None)
ID of the parent device group under which the device group is to be created or modified.
- This is applicable only when *state* is ``present``.
+ This is applicable only when \ :emphasis:`state`\ is \ :literal:`present`\ .
- This option is mutually exclusive with *parent_group_name*.
+ This option is mutually exclusive with \ :emphasis:`parent\_group\_name`\ .
hostname (True, str, None)
OpenManage Enterprise IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -123,10 +139,10 @@ Notes
.. note::
- This module manages only static device groups on Dell OpenManage Enterprise.
- - If a device group with the name *parent_group_name* does not exist, a new device group with the same name is created.
+ - If a device group with the name \ :emphasis:`parent\_group\_name`\ does not exist, a new device group with the same name is created.
- Make sure the entered parent group is not the descendant of the provided group.
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
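A minimal sketch creating a static group under the default parent group; the group name and description are illustrative::

    - name: Create a static device group under Static Groups
      dellemc.openmanage.ome_groups:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        name: "lab servers"
        description: "Static group for lab servers"
        parent_group_name: "Static Groups"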
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_identity_pool.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_identity_pool.rst
index 4df20d14f..a2a966962 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_identity_pool.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_identity_pool.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,15 +28,15 @@ Parameters
----------
state (optional, str, present)
- ``present`` modifies an existing identity pool. If the provided I (pool_name) does not exist, it creates an identity pool. - ``absent`` deletes an existing identity pool.
+ \ :literal:`present`\ modifies an existing identity pool. If the provided \ :emphasis:`pool\_name`\ does not exist, it creates an identity pool. \ :literal:`absent`\ deletes an existing identity pool.
pool_name (True, str, None)
- This option is mandatory for *state* when creating, modifying and deleting an identity pool.
+ This option is mandatory for \ :emphasis:`state`\ when creating, modifying and deleting an identity pool.
new_pool_name (optional, str, None)
- After creating an identity pool, *pool_name* can be changed to *new_pool_name*.
+ After creating an identity pool, \ :emphasis:`pool\_name`\ can be changed to \ :emphasis:`new\_pool\_name`\ .
This option is ignored when creating an identity pool.
@@ -48,7 +48,7 @@ Parameters
ethernet_settings (optional, dict, None)
Applicable for creating and modifying an identity pool using Ethernet settings.
- *starting_mac_address* and *identity_count* are required to create an identity pool.
+ \ :emphasis:`starting\_mac\_address`\ and \ :emphasis:`identity\_count`\ are required to create an identity pool.
starting_mac_address (optional, str, None)
@@ -63,7 +63,7 @@ Parameters
fcoe_settings (optional, dict, None)
Applicable for creating and modifying an identity pool using FCoE settings.
- *starting_mac_address* and *identity_count* are required to create an identity pool.
+ \ :emphasis:`starting\_mac\_address`\ and \ :emphasis:`identity\_count`\ are required to create an identity pool.
starting_mac_address (optional, str, None)
@@ -78,7 +78,7 @@ Parameters
iscsi_settings (optional, dict, None)
Applicable for creating and modifying an identity pool using ISCSI settings.
- *starting_mac_address*, *identity_count*, *iqn_prefix*, *ip_range* and *subnet_mask* are required to create an identity pool.
+ \ :emphasis:`starting\_mac\_address`\ , \ :emphasis:`identity\_count`\ , \ :emphasis:`iqn\_prefix`\ , \ :emphasis:`ip\_range`\ and \ :emphasis:`subnet\_mask`\ are required to create an identity pool.
starting_mac_address (optional, str, None)
@@ -107,7 +107,7 @@ Parameters
subnet_mask (optional, str, None)
- Subnet mask for *ip_range*.
+ Subnet mask for \ :emphasis:`ip\_range`\ .
gateway (optional, str, None)
@@ -131,15 +131,15 @@ Parameters
The value 0x2001 is prepended to the starting address for the generation of a WWPN, and 0x2000 for a WWNN.
- *starting_address* and *identity_count* are required to create an identity pool.
+ \ :emphasis:`starting\_address`\ and \ :emphasis:`identity\_count`\ are required to create an identity pool.
starting_address (optional, str, None)
- Starting MAC Address of FC setting.*starting_address* is required to option to create FC settings.
+ Starting MAC address of the FC setting. \ :emphasis:`starting\_address`\ is required to create FC settings.
identity_count (optional, int, None)
- Number of MAC addresses.*identity_count* is required to option to create FC settings.
+ Number of MAC addresses. \ :emphasis:`identity\_count`\ is required to create FC settings.
@@ -147,24 +147,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -183,7 +199,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -291,7 +307,7 @@ msg (always, str, Successfully created an identity pool.)
pool_status (success, dict, {'Id': 29, 'IsSuccessful': True, 'Issues': []})
- Details of the user operation, when *state* is ``present``.
+ Details of the user operation, when \ :emphasis:`state`\ is \ :literal:`present`\ .
error_info (on HTTP error, dict, {'error': {'@Message.ExtendedInfo': [{'Message': 'Unable to process the request because an error occurred: Ethernet-MAC Range overlap found (in this Identity Pool or in a different one) .', 'MessageArgs': ['Ethernet-MAC Range overlap found (in this Identity Pool or in a different one)"'], 'MessageId': 'CGEN6001', 'RelatedProperties': [], 'Resolution': 'Retry the operation. If the issue persists, contact your system administrator.', 'Severity': 'Critical'}], 'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.'}})
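A minimal sketch creating an identity pool from the Ethernet settings above; the pool name, MAC address, and identity count are illustrative::

    - name: Create an identity pool with Ethernet settings
      dellemc.openmanage.ome_identity_pool:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        pool_name: "pool1"
        ethernet_settings:
          starting_mac_address: "50:50:50:50:50:00"
          identity_count: 60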
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_job_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_job_info.rst
index 5c998cf44..902d399ce 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_job_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_job_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -51,35 +51,51 @@ Parameters
fetch_execution_history (optional, bool, False)
Fetches the execution history of the job.
- *fetch_execution_history* is only applicable when valid *job_id* is given.
+ \ :emphasis:`fetch\_execution\_history`\ is only applicable when valid \ :emphasis:`job\_id`\ is given.
- When ``true``, fetches all the execution history details.
+ When \ :literal:`true`\ , fetches all the execution history details.
- When ``false``, fetches only the job info and last execution details.
+ When \ :literal:`false`\ , fetches only the job info and last execution details.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -98,7 +114,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
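A minimal sketch fetching a single job with its full execution history, relying on the OME_USERNAME and OME_PASSWORD fallback described above; the job ID is illustrative, and job_id itself is referenced but not expanded in this hunk::

    - name: Fetch details and execution history for a job
      dellemc.openmanage.ome_job_info:
        hostname: "192.168.0.100"
        ca_path: "/path/to/ca_cert.pem"
        # username and password omitted; OME_USERNAME and OME_PASSWORD must be
        # exported in the environment where the module runs
        job_id: 12345
        fetch_execution_history: true
      register: job_details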
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_network_port_breakout.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_network_port_breakout.rst
index 839554682..0adee41b9 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_network_port_breakout.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_network_port_breakout.rst
@@ -22,7 +22,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -43,24 +43,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -79,7 +95,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan.rst
index 0ab868cb9..d6b4810ad 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan.rst
@@ -24,7 +24,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -32,19 +32,19 @@ Parameters
----------
state (optional, str, present)
- ``present`` creates a new VLAN or modifies an existing VLAN.
+ \ :literal:`present`\ creates a new VLAN or modifies an existing VLAN.
- ``absent`` deletes an existing VLAN.
+ \ :literal:`absent`\ deletes an existing VLAN.
- *WARNING* Deleting a VLAN can impact the network infrastructure.
+ \ :emphasis:`WARNING`\ Deleting a VLAN can impact the network infrastructure.
name (True, str, None)
- Provide the *name* of the VLAN to be created, deleted or modified.
+ Provide the \ :emphasis:`name`\ of the VLAN to be created, deleted or modified.
new_name (optional, str, None)
- Provide the *name* of the VLAN to be modified.
+ Provide the \ :emphasis:`name`\ of the VLAN to be modified.
description (optional, str, None)
@@ -58,37 +58,53 @@ Parameters
vlan_maximum (optional, int, None)
The maximum VLAN value of the range.
- A single value VLAN is created if the vlan_maximum and vlan_minmum values are the same.
+ A single value VLAN is created if the vlan\_maximum and vlan\_minimum values are the same.
type (optional, str, None)
Types of supported VLAN networks.
- For the description of each network type, use API https://*hostname*/api/NetworkConfigurationService/NetworkTypes.
+ For the description of each network type, use API https://\ :emphasis:`hostname`\ /api/NetworkConfigurationService/NetworkTypes.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -107,7 +123,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
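
A minimal sketch of creating a VLAN range with the options documented above; the network type string is a hypothetical value:

.. code-block:: yaml+jinja

    - name: Create a VLAN range
      dellemc.openmanage.ome_network_vlan:
        hostname: "192.168.0.1"
        username: "username"
        password: "password"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        name: "vlan_range_10_20"
        description: "VLANs for application traffic"
        vlan_minimum: 10
        vlan_maximum: 20
        type: "General Purpose (Bronze)"   # hypothetical network type
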
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan_info.rst
index af790fa30..89d53d9d4 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_network_vlan_info.rst
@@ -12,7 +12,7 @@ ome_network_vlan_info -- Retrieves the information about networks VLAN(s) presen
Synopsis
--------
-This module allows to retrieve the following. - A list of all the network VLANs with their detailed information. - Information about a specific network VLAN using VLAN *id* or VLAN *name*.
+This module allows you to retrieve the following. - A list of all the network VLANs with their detailed information. - Information about a specific network VLAN using VLAN \ :emphasis:`id`\ or VLAN \ :emphasis:`name`\ .
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,37 +30,53 @@ Parameters
id (optional, int, None)
A unique identifier of the network VLAN available in the device.
- *id* and *name* are mutually exclusive.
+ \ :emphasis:`id`\ and \ :emphasis:`name`\ are mutually exclusive.
name (optional, str, None)
A unique name of the network VLAN available in the device.
- *name* and *id* are mutually exclusive.
+ \ :emphasis:`name`\ and \ :emphasis:`id`\ are mutually exclusive.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -79,7 +95,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_powerstate.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_powerstate.rst
index a9046a98e..c1d8f316b 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_powerstate.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_powerstate.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -34,37 +34,53 @@ Parameters
device_service_tag (optional, str, None)
Targeted device service tag.
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
device_id (optional, int, None)
Targeted device id.
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -83,7 +99,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_profile.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_profile.rst
index 8faf8b8fe..418acddf4 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_profile.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_profile.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,17 +28,17 @@ Parameters
----------
command (optional, str, create)
- ``create`` creates new profiles.
+ \ :literal:`create`\ creates new profiles.
- ``modify`` modifies an existing profile. Only *name*, *description*, *boot_to_network_iso*, and *attributes* can be modified.
+ \ :literal:`modify`\ modifies an existing profile. Only \ :emphasis:`name`\ , \ :emphasis:`description`\ , \ :emphasis:`boot\_to\_network\_iso`\ , and \ :emphasis:`attributes`\ can be modified.
- ``delete`` deletes an existing profile.
+ \ :literal:`delete`\ deletes an existing profile.
- ``assign`` Deploys an existing profile on a target device and returns a task ID.
+ \ :literal:`assign`\ deploys an existing profile on a target device and returns a task ID.
- ``unassign`` unassigns a profile from a specified target and returns a task ID.
+ \ :literal:`unassign`\ unassigns a profile from a specified target and returns a task ID.
- ``migrate`` migrates an existing profile and returns a task ID.
+ \ :literal:`migrate`\ migrates an existing profile and returns a task ID.
name_prefix (optional, str, Profile)
@@ -46,7 +46,7 @@ Parameters
This is applicable only for a create operation.
- This option is mutually exclusive with *name*.
+ This option is mutually exclusive with \ :emphasis:`name`\ .
name (optional, str, None)
@@ -54,21 +54,21 @@ Parameters
This is applicable for modify, delete, assign, unassign, and migrate operations.
- This option is mutually exclusive with *name_prefix* and *number_of_profiles*.
+ This option is mutually exclusive with \ :emphasis:`name\_prefix`\ and \ :emphasis:`number\_of\_profiles`\ .
new_name (optional, str, None)
New name of the profile.
- Applicable when *command* is ``modify``.
+ Applicable when \ :emphasis:`command`\ is \ :literal:`modify`\ .
number_of_profiles (optional, int, 1)
Provide the number of profiles to be created.
- This is applicable when *name_prefix* is used with ``create``.
+ This is applicable when \ :emphasis:`name\_prefix`\ is used with \ :literal:`create`\ .
- This option is mutually exclusive with *name*.
+ This option is mutually exclusive with \ :emphasis:`name`\ .
 OpenManage Enterprise can create a maximum of 100 profiles.
@@ -76,25 +76,25 @@ Parameters
template_name (optional, str, None)
Name of the template for creating the profile(s).
- This is applicable when *command* is ``create``.
+ This is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ .
- This option is mutually exclusive with *template_id*.
+ This option is mutually exclusive with \ :emphasis:`template\_id`\ .
template_id (optional, int, None)
ID of the template.
- This is applicable when *command* is ``create``.
+ This is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ .
- This option is mutually exclusive with *template_name*.
+ This option is mutually exclusive with \ :emphasis:`template\_name`\ .
device_id (optional, int, None)
ID of the target device.
- This is applicable when *command* is ``assign`` and ``migrate``.
+ This is applicable when \ :emphasis:`command`\ is \ :literal:`assign`\ and \ :literal:`migrate`\ .
- This option is mutually exclusive with *device_service_tag*.
+ This option is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, str, None)
@@ -102,11 +102,11 @@ Parameters
This is typically 7 to 8 characters in length.
- Applicable when *command* is ``assign``, and ``migrate``.
+ Applicable when \ :emphasis:`command`\ is \ :literal:`assign`\ , and \ :literal:`migrate`\ .
- This option is mutually exclusive with *device_id*.
+ This option is mutually exclusive with \ :emphasis:`device\_id`\ .
- If the device does not exist when *command* is ``assign`` then the profile is auto-deployed.
+ If the device does not exist when \ :emphasis:`command`\ is \ :literal:`assign`\ then the profile is auto-deployed.
description (optional, str, None)
@@ -116,7 +116,7 @@ Parameters
boot_to_network_iso (optional, dict, None)
Details of the Share iso.
- Applicable when *command* is ``create``, ``assign``, and ``modify``.
+ Applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , \ :literal:`assign`\ , and \ :literal:`modify`\ .
boot_to_network (True, bool, None)
@@ -132,15 +132,15 @@ Parameters
share_user (optional, str, None)
- User name when *share_type* is ``CIFS``.
+ User name when \ :emphasis:`share\_type`\ is \ :literal:`CIFS`\ .
share_password (optional, str, None)
- User password when *share_type* is ``CIFS``.
+ User password when \ :emphasis:`share\_type`\ is \ :literal:`CIFS`\ .
workgroup (optional, str, None)
- User workgroup when *share_type* is ``CIFS``.
+ User workgroup when \ :emphasis:`share\_type`\ is \ :literal:`CIFS`\ .
iso_path (optional, str, None)
@@ -155,45 +155,45 @@ Parameters
filters (optional, dict, None)
Filters the profiles based on selected criteria.
- This is applicable when *command* is ``delete`` or ``unassign``.
+ This is applicable when \ :emphasis:`command`\ is \ :literal:`delete`\ or \ :literal:`unassign`\ .
- This supports suboption *ProfileIds* which takes a list of profile IDs.
+ This supports suboption \ :emphasis:`ProfileIds`\ which takes a list of profile IDs.
- This also supports OData filter expressions with the suboption *Filters*.
+ This also supports OData filter expressions with the suboption \ :emphasis:`Filters`\ .
See OpenManage Enterprise REST API guide for the filtering options available.
- *WARNING* When this option is used in case of ``unassign``, task ID is not returned for any of the profiles affected.
+ \ :emphasis:`WARNING`\ When this option is used in case of \ :literal:`unassign`\ , task ID is not returned for any of the profiles affected.
force (optional, bool, False)
Provides the option to force the migration of a profile even if the source device cannot be contacted.
- This option is applicable when *command* is ``migrate``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`migrate`\ .
attributes (optional, dict, None)
- Attributes for ``modify`` and ``assign``.
+ Attributes for \ :literal:`modify`\ and \ :literal:`assign`\ .
Attributes (optional, list, None)
- List of attributes to be modified, when *command* is ``modify``.
+ List of attributes to be modified, when \ :emphasis:`command`\ is \ :literal:`modify`\ .
- List of attributes to be overridden when *command* is ``assign``.
+ List of attributes to be overridden when \ :emphasis:`command`\ is \ :literal:`assign`\ .
- Use the *Id* If the attribute Id is available. If not, use the comma separated I (DisplayName). For more details about using the *DisplayName*, see the example provided.
+ Use the \ :emphasis:`Id`\ if the attribute Id is available. If not, use the comma separated \ :emphasis:`DisplayName`\ . For more details about using the \ :emphasis:`DisplayName`\ , see the example provided.
Options (optional, dict, None)
Provides the different shut down options.
- This is applicable when *command* is ``assign``.
+ This is applicable when \ :emphasis:`command`\ is \ :literal:`assign`\ .
Schedule (optional, dict, None)
Schedule for profile deployment.
- This is applicable when *command* is ``assign``.
+ This is applicable when \ :emphasis:`command`\ is \ :literal:`assign`\ .
@@ -201,24 +201,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -237,8 +253,8 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
- - ``assign`` operation on a already assigned profile will not redeploy.
+ - This module supports \ :literal:`check\_mode`\ .
+ - \ :literal:`assign`\ operation on an already assigned profile will not redeploy.
@@ -444,9 +460,9 @@ profile_ids (when I(command) is C(create), list, [1234, 5678])
job_id (when I(command) is C(assign), C(migrate) or C(unassign), int, 14123)
- Task ID created when *command* is ``assign``, ``migrate`` or ``unassign``.
+ Task ID created when \ :emphasis:`command`\ is \ :literal:`assign`\ , \ :literal:`migrate`\ or \ :literal:`unassign`\ .
- ``assign`` and ``unassign`` operations do not trigger a task if a profile is auto-deployed.
+ \ :literal:`assign`\ and \ :literal:`unassign`\ operations do not trigger a task if a profile is auto-deployed.
error_info (on HTTP error, dict, {'error': {'code': 'Base.1.0.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information.', '@Message.ExtendedInfo': [{'MessageId': 'GEN1234', 'RelatedProperties': [], 'Message': 'Unable to process the request because an error occurred.', 'MessageArgs': [], 'Severity': 'Critical', 'Resolution': 'Retry the operation. If the issue persists, contact your system administrator.'}]}})
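
Based on the parameters and return values documented above, a minimal sketch of an assign operation that captures the returned task ID; the profile name and device service tag are hypothetical, and job_id is present unless the profile is auto-deployed:

.. code-block:: yaml+jinja

    - name: Assign an existing profile to a target device
      dellemc.openmanage.ome_profile:
        hostname: "192.168.0.1"
        username: "username"
        password: "password"
        ca_path: "/path/to/ca_cert.pem"
        command: assign
        name: "Profile 00001"
        device_service_tag: "ABCD123"
      register: assign_result

    - name: Show the task ID created for the assign operation
      ansible.builtin.debug:
        msg: "Job ID is {{ assign_result.job_id }}"
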
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_profile_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_profile_info.rst
index 697a8c05f..d501e43ba 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_profile_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_profile_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,25 +30,25 @@ Parameters
profile_id (optional, int, None)
Id of the profile.
- This is mutually exclusive with *profile_name*, *system_query_options*, *template_id*, and *template_name*.
+ This is mutually exclusive with \ :emphasis:`profile\_name`\ , \ :emphasis:`system\_query\_options`\ , \ :emphasis:`template\_id`\ , and \ :emphasis:`template\_name`\ .
profile_name (optional, str, None)
Name of the profile.
- This is mutually exclusive with *template_id*, *profile_id*, *system_query_options*, and *template_name*.
+ This is mutually exclusive with \ :emphasis:`template\_id`\ , \ :emphasis:`profile\_id`\ , \ :emphasis:`system\_query\_options`\ , and \ :emphasis:`template\_name`\ .
template_id (optional, int, None)
Provide the ID of the template to retrieve the list of profile(s) linked to it.
- This is mutually exclusive with *profile_name*, *profile_id*, *system_query_options*, and *template_name*.
+ This is mutually exclusive with \ :emphasis:`profile\_name`\ , \ :emphasis:`profile\_id`\ , \ :emphasis:`system\_query\_options`\ , and \ :emphasis:`template\_name`\ .
template_name (optional, str, None)
Provide the name of the template to retrieve the list of profile(s) linked to it.
- This is mutually exclusive with *profile_name*, *profile_id*, *template_id*, and *system_query_options*.
+ This is mutually exclusive with \ :emphasis:`profile\_name`\ , \ :emphasis:`profile\_id`\ , \ :emphasis:`template\_id`\ , and \ :emphasis:`system\_query\_options`\ .
system_query_options (optional, dict, None)
@@ -56,33 +56,49 @@ Parameters
The profile list can be fetched and sorted based on ProfileName, TemplateName, TargetTypeId, TargetName, ChassisName, ProfileState, LastRunStatus, or ProfileModified.
- This is mutually exclusive with *profile_name*, *profile_id*, *template_id*, and *template_name*.
+ This is mutually exclusive with \ :emphasis:`profile\_name`\ , \ :emphasis:`profile\_id`\ , \ :emphasis:`template\_id`\ , and \ :emphasis:`template\_name`\ .
- ``Note`` If *profile_name*, *profile_id*, *template_id*, or *template_name* option is not provided, the module retrieves all the profiles.
+ \ :literal:`Note`\ If \ :emphasis:`profile\_name`\ , \ :emphasis:`profile\_id`\ , \ :emphasis:`template\_id`\ , or \ :emphasis:`template\_name`\ option is not provided, the module retrieves all the profiles.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -101,7 +117,7 @@ Notes
.. note::
- Run this module on a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profile_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profile_info.rst
index 95cd95bd5..9f6c1db33 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profile_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profile_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,37 +30,53 @@ Parameters
device_id (optional, list, None)
The ID of the device.
- *device_id* is mutually exclusive with *device_service_tag*.
+ \ :emphasis:`device\_id`\ is mutually exclusive with \ :emphasis:`device\_service\_tag`\ .
device_service_tag (optional, list, None)
The service tag of the device.
- *device_service_tag* is mutually exclusive with *device_id*.
+ \ :emphasis:`device\_service\_tag`\ is mutually exclusive with \ :emphasis:`device\_id`\ .
hostname (True, str, None)
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -79,7 +95,7 @@ Notes
.. note::
- Run this module from a system that has direct access to OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profiles.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profiles.rst
index f70a50125..ff0031683 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profiles.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_server_interface_profiles.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,23 +30,23 @@ Parameters
device_id (optional, list, None)
Device id of the Server under chassis fabric.
- *device_id* and *device_service_tag* is mutually exclusive.
+ \ :emphasis:`device\_id`\ and \ :emphasis:`device\_service\_tag`\ are mutually exclusive.
device_service_tag (optional, list, None)
Service tag of the Server under chassis fabric.
- *device_service_tag* and *device_id* is mutually exclusive.
+ \ :emphasis:`device\_service\_tag`\ and \ :emphasis:`device\_id`\ are mutually exclusive.
nic_teaming (optional, str, None)
NIC teaming options.
- ``NoTeaming`` the NICs are not bonded and provide no load balancing or redundancy.
+ \ :literal:`NoTeaming`\ the NICs are not bonded and provide no load balancing or redundancy.
- ``LACP`` use LACP for NIC teaming.
+ \ :literal:`LACP`\ use LACP for NIC teaming.
- ``Other`` use other technology for NIC teaming.
+ \ :literal:`Other`\ use other technology for NIC teaming.
nic_configuration (optional, list, None)
@@ -56,23 +56,23 @@ Parameters
nic_identifier (True, str, None)
ID of the NIC or port number.
- ``Note`` This will not be validated.
+ \ :literal:`Note`\ This will not be validated.
team (optional, bool, None)
Group two or more ports. The ports must be connected to the same pair of Ethernet switches.
- *team* is applicable only if *nic_teaming* is ``LACP``.
+ \ :emphasis:`team`\ is applicable only if \ :emphasis:`nic\_teaming`\ is \ :literal:`LACP`\ .
untagged_network (optional, int, None)
The maximum or minimum VLAN id of the network to be untagged.
- The *untagged_network* can be retrieved using the :ref:`dellemc.openmanage.ome_network_vlan_info <dellemc.openmanage.ome_network_vlan_info_module>`
+ The \ :emphasis:`untagged\_network`\ can be retrieved using the \ :ref:`dellemc.openmanage.ome\_network\_vlan\_info <ansible_collections.dellemc.openmanage.ome_network_vlan_info_module>`\
- If *untagged_network* needs to be unset this needs to be sent as ``0``
+ If \ :emphasis:`untagged\_network`\ needs to be unset this needs to be sent as \ :literal:`0`\
- ``Note`` The network cannot be added as a untagged network if it is already assigned to a tagged network.
+ \ :literal:`Note`\ The network cannot be added as an untagged network if it is already assigned to a tagged network.
tagged_networks (optional, dict, None)
@@ -84,15 +84,15 @@ Parameters
state (optional, str, present)
Indicates if a list of networks needs to be added or deleted.
- ``present`` to add the network to the tagged list
+ \ :literal:`present`\ to add the network to the tagged list
- ``absent`` to delete the Network from the tagged list
+ \ :literal:`absent`\ to delete the Network from the tagged list
names (True, list, None)
 List of network names to be marked as tagged networks
- The *names* can be retrieved using the :ref:`dellemc.openmanage.ome_network_vlan_info <dellemc.openmanage.ome_network_vlan_info_module>`
+ The \ :emphasis:`names`\ can be retrieved using the \ :ref:`dellemc.openmanage.ome\_network\_vlan\_info <ansible_collections.dellemc.openmanage.ome_network_vlan_info_module>`\
@@ -102,33 +102,49 @@ Parameters
job_wait_timeout (optional, int, 120)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
hostname (True, str, None)
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -146,7 +162,7 @@ Notes
-----
.. note::
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
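
A minimal sketch that combines the nic_teaming, nic_configuration, and tagged_networks options documented above; the service tag, NIC identifier, and VLAN name are hypothetical:

.. code-block:: yaml+jinja

    - name: Configure a server interface profile with tagged and untagged VLANs
      dellemc.openmanage.ome_server_interface_profiles:
        hostname: "192.168.0.1"
        username: "username"
        password: "password"
        ca_path: "/path/to/ca_cert.pem"
        device_service_tag:
          - "ABCD123"
        nic_teaming: LACP
        nic_configuration:
          - nic_identifier: "NIC.Mezzanine.1A-1-1"
            team: true
            untagged_network: 2
            tagged_networks:
              state: present
              names:
                - "vlan_1"
        job_wait: true
        job_wait_timeout: 120
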
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_session.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_session.rst
new file mode 100644
index 000000000..d18023729
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_session.rst
@@ -0,0 +1,190 @@
+.. _ome_session_module:
+
+
+ome_session -- Manage OpenManage Enterprise and OpenManage Enterprise Modular sessions
+======================================================================================
+
+.. contents::
+ :local:
+ :depth: 1
+
+
+Synopsis
+--------
+
+This module allows you to create and delete sessions on OpenManage Enterprise and OpenManage Enterprise Modular.
+
+
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- python \>= 3.9.6
+
+
+
+Parameters
+----------
+
+ hostname (optional, str, None)
+ IP address or hostname of the OpenManage Enterprise.
+
+
+ username (optional, str, None)
+ Username of the OpenManage Enterprise. If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
+
+ \ :emphasis:`username`\ is required when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+ password (optional, str, None)
+ Password of the OpenManage Enterprise. If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ \ :emphasis:`password`\ is required when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+ port (optional, int, 443)
+ Port of the OpenManage Enterprise.
+
+
+ validate_certs (optional, bool, True)
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
+
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
+
+
+ ca_path (optional, path, None)
+ The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+
+
+ timeout (optional, int, 30)
+ The HTTPS socket level timeout in seconds.
+
+
+ state (optional, str, present)
+ The state of the session in OpenManage Enterprise.
+
+ \ :literal:`present`\ creates a session.
+
+ \ :literal:`absent`\ deletes a session.
+
+ This module will always report changes found to be applied when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+ x_auth_token (optional, str, None)
+ Authentication token.
+
+ \ :emphasis:`x\_auth\_token`\ is required when \ :emphasis:`state`\ is \ :literal:`absent`\ .
+
+
+ session_id (optional, str, None)
+ Session ID of the OpenManage Enterprise.
+
+ \ :emphasis:`session\_id`\ is required when \ :emphasis:`state`\ is \ :literal:`absent`\ .
+
+
+
+
+
+Notes
+-----
+
+.. note::
+ - Run this module from a system that has direct access to Dell OpenManage Enterprise.
+ - This module supports IPv4 and IPv6 addresses.
+ - This module supports \ :literal:`check\_mode`\ .
+ - This module will always report changes found to be applied when \ :emphasis:`state`\ is \ :literal:`present`\ .
+
+
+
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ ---
+ - name: Create a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+
+ - name: Delete a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ x_auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 4b48e9ab-809e-4087-b7c4-201a16e0143d
+
+ - name: Create a session and execute other modules
+ block:
+ - name: Create a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ register: authData
+
+ - name: Call ome_user_info module
+ dellemc.openmanage.ome_user_info:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+
+ - name: Call ome_network_vlan_info module
+ dellemc.openmanage.ome_network_vlan_info:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+ always:
+ - name: Destroy a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ x_auth_token: "{{ authData.x_auth_token }}"
+ session_id: "{{ authData.session_data.Id }}"
+
+
+
+Return Values
+-------------
+
+msg (always, str, The session has been created successfully.)
+ Status of the session operation.
+
+
+session_data (For session creation operation, dict, {'Id': 'd5c28d8e-1084-4055-9c01-e1051cfee2dd', 'Description': 'admin', 'Name': 'API', 'UserName': 'admin', 'UserId': 10078, 'Password': None, 'Roles': ['BACKUP_ADMINISTRATOR'], 'IpAddress': '100.198.162.0', 'StartTimeStamp': '2023-07-03 07:22:43.683', 'LastAccessedTimeStamp': '2023-07-03 07:22:43.683', 'DirectoryGroup': []})
+ The session details.
+
+
+x_auth_token (For session creation operation, str, d15f17f01cd627c30173b1582642497d)
+ Authentication token.
+
+
+error_info (On HTTP error, dict, {'error': {'@Message.ExtendedInfo': [{'Message': 'Unable to complete the operation because an invalid username and/or password is entered, and therefore authentication failed.', 'MessageArgs': [], 'MessageArgs@odata.count': 0, 'MessageId': 'IDRAC.2.7.SYS415', 'RelatedProperties': [], 'RelatedProperties@odata.count': 0, 'Resolution': 'Enter valid user name and password and retry the operation.', 'Severity': 'Warning'}], 'code': 'Base.1.12.GeneralError', 'message': 'A general error has occurred. See ExtendedInfo for more information'}})
+ Details of the HTTP Error.
+
+
+
+
+
+Status
+------
+
+
+
+
+
+Authors
+~~~~~~~
+
+- Kritika Bhateja (@Kritika-Bhateja-03)
+
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric.rst
index 69e22ab98..412ead3d3 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,19 +28,19 @@ Parameters
----------
state (optional, str, present)
- ``present`` creates a new fabric or modifies an existing fabric.
+ \ :literal:`present`\ creates a new fabric or modifies an existing fabric.
- ``absent`` deletes an existing fabric.
+ \ :literal:`absent`\ deletes an existing fabric.
 Notes: The create, modify, or delete fabric operation takes around 15-20 minutes to complete. It is recommended not to start another operation until the current operation is completed.
name (True, str, None)
- Provide the *name* of the fabric to be created, deleted or modified.
+ Provide the \ :emphasis:`name`\ of the fabric to be created, deleted or modified.
new_name (optional, str, None)
- Provide the *name* of the fabric to be modified.
+ Provide the \ :emphasis:`name`\ of the fabric to be modified.
description (optional, str, None)
@@ -48,25 +48,25 @@ Parameters
fabric_design (optional, str, None)
- Specify the fabric topology.See the use API https://www.dell.com/support/manuals/en-in/poweredge-mx7000/omem_1_20_10_ug/smartfabric-network-topologies to know why its topology.
+ Specify the fabric topology. See \ https://www.dell.com/support/manuals/en-in/poweredge-mx7000/omem_1_20_10_ug/smartfabric-network-topologies\ to know about the supported topologies.
- *fabric_design* is mandatory for fabric creation.
+ \ :emphasis:`fabric\_design`\ is mandatory for fabric creation.
primary_switch_service_tag (optional, str, None)
Service tag of the first switch.
- *primary_switch_service_tag* is mandatory for fabric creation.
+ \ :emphasis:`primary\_switch\_service\_tag`\ is mandatory for fabric creation.
- *primary_switch_service_tag* must belong to the model selected in *fabric_design*.
+ \ :emphasis:`primary\_switch\_service\_tag`\ must belong to the model selected in \ :emphasis:`fabric\_design`\ .
secondary_switch_service_tag (optional, str, None)
Service tag of the second switch.
- *secondary_switch_service_tag* is mandatory for fabric creation.
+ \ :emphasis:`secondary\_switch\_service\_tag`\ is mandatory for fabric creation.
- *secondary_switch_service_tag* must belong to the model selected in *fabric_design*.
+ \ :emphasis:`secondary\_switch\_service\_tag`\ must belong to the model selected in \ :emphasis:`fabric\_design`\ .
override_LLDP_configuration (optional, str, None)
@@ -79,24 +79,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -115,7 +131,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
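
A minimal sketch of creating a fabric with the options documented above; the fabric design string and the two switch service tags are hypothetical values:

.. code-block:: yaml+jinja

    - name: Create a smart fabric
      dellemc.openmanage.ome_smart_fabric:
        hostname: "192.168.0.1"
        username: "username"
        password: "password"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        name: "fabric1"
        description: "Fabric spanning two chassis"
        fabric_design: "2xMX9116n_Fabric_Switching_Engines_in_different_chassis"  # hypothetical design name
        primary_switch_service_tag: "ABC1234"
        secondary_switch_service_tag: "XYZ5678"
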
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_info.rst
index ccad0973a..bde6942b0 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,37 +30,53 @@ Parameters
fabric_id (optional, str, None)
Unique Id of the fabric.
- *fabric_id* is mutually exclusive with *fabric_name*.
+ \ :emphasis:`fabric\_id`\ is mutually exclusive with \ :emphasis:`fabric\_name`\ .
fabric_name (optional, str, None)
Name of the fabric.
- *fabric_name* is mutually exclusive with *fabric_id*.
+ \ :emphasis:`fabric\_name`\ is mutually exclusive with \ :emphasis:`fabric\_id`\ .
hostname (True, str, None)
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -79,7 +95,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink.rst
index fa691dd07..4815ea356 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,23 +28,23 @@ Parameters
----------
state (optional, str, present)
- ``present`` - Creates a new uplink with the provided *name*. - Modifies an existing uplink with the provided *name*.
+ \ :literal:`present`\ - Creates a new uplink with the provided \ :emphasis:`name`\ . - Modifies an existing uplink with the provided \ :emphasis:`name`\ .
- ``absent`` – Deletes the uplink with the provided *name*.
+ \ :literal:`absent`\ – Deletes the uplink with the provided \ :emphasis:`name`\ .
- *WARNING* Delete operation can impact the network infrastructure.
+ \ :emphasis:`WARNING`\ Delete operation can impact the network infrastructure.
fabric_name (True, str, None)
- Provide the *fabric_name* of the fabric for which the uplink is to be configured.
+ Provide the \ :emphasis:`fabric\_name`\ of the fabric for which the uplink is to be configured.
name (True, str, None)
- Provide the *name* of the uplink to be created, modified or deleted.
+ Provide the \ :emphasis:`name`\ of the uplink to be created, modified or deleted.
new_name (optional, str, None)
- Provide the new *new_name* for the uplink.
+ Provide the new \ :emphasis:`new\_name`\ for the uplink.
description (optional, str, None)
@@ -54,13 +54,13 @@ Parameters
uplink_type (optional, str, None)
Specify the uplink type.
- *NOTE* The uplink type cannot be changed for an existing uplink.
+ \ :emphasis:`NOTE`\ The uplink type cannot be changed for an existing uplink.
ufd_enable (optional, str, None)
Add or Remove the uplink to the Uplink Failure Detection (UFD) group. The UFD group identifies the loss of connectivity to the upstream switch and notifies the servers that are connected to the switch. During an uplink failure, the switch disables the corresponding downstream server ports. The downstream servers can then select alternate connectivity routes, if available.
- *WARNING* The firmware version of the I/O Module running the Fabric Manager must support this configuration feature. If not, uplink creation will be successful with an appropriate error message in response.
+ \ :emphasis:`WARNING`\ The firmware version of the I/O Module running the Fabric Manager must support this configuration feature. If not, uplink creation will be successful with an appropriate error message in response.
primary_switch_service_tag (optional, str, None)
@@ -70,7 +70,7 @@ Parameters
primary_switch_ports (optional, list, None)
The IOM slots to be connected to the primary switch.
- *primary_switch_service_tag* is mandatory for this option.
+ \ :emphasis:`primary\_switch\_service\_tag`\ is mandatory for this option.
secondary_switch_service_tag (optional, str, None)
@@ -80,11 +80,11 @@ Parameters
secondary_switch_ports (optional, list, None)
The IOM slots to be connected to the secondary switch.
- *secondary_switch_service_tag* is mandatory for this option.
+ \ :emphasis:`secondary\_switch\_service\_tag`\ is mandatory for this option.
tagged_networks (optional, list, None)
- VLANs to be associated with the uplink *name*.
+ VLANs to be associated with the uplink \ :emphasis:`name`\ .
untagged_network (optional, str, None)
@@ -95,24 +95,40 @@ Parameters
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -131,7 +147,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
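
Drawing only on the parameters documented above, a minimal sketch of creating an uplink; the uplink type, UFD setting, switch service tags, port names, and VLAN names are hypothetical values:

.. code-block:: yaml+jinja

    - name: Create an uplink on an existing fabric
      dellemc.openmanage.ome_smart_fabric_uplink:
        hostname: "192.168.0.1"
        username: "username"
        password: "password"
        ca_path: "/path/to/ca_cert.pem"
        state: present
        fabric_name: "fabric1"
        name: "uplink1"
        description: "Uplink to the upstream switches"
        uplink_type: "Ethernet"            # hypothetical choice
        ufd_enable: "Enabled"              # hypothetical choice
        primary_switch_service_tag: "ABC1234"
        primary_switch_ports:
          - "ethernet1/1/41"
        secondary_switch_service_tag: "XYZ5678"
        secondary_switch_ports:
          - "ethernet1/1/41"
        tagged_networks:
          - "vlan_10"
          - "vlan_20"
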
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink_info.rst
index 96a111bcb..a38f88c17 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_smart_fabric_uplink_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,53 +30,69 @@ Parameters
fabric_id (optional, str, None)
Unique id of the fabric.
- *fabric_id* is mutually exclusive with *fabric_name*.
+ \ :emphasis:`fabric\_id`\ is mutually exclusive with \ :emphasis:`fabric\_name`\ .
fabric_name (optional, str, None)
Unique name of the fabric.
- *fabric_name* is mutually exclusive with *fabric_id*.
+ \ :emphasis:`fabric\_name`\ is mutually exclusive with \ :emphasis:`fabric\_id`\ .
uplink_id (optional, str, None)
Unique id of the uplink.
- *uplink_id* is mutually exclusive with *uplink_name*.
+ \ :emphasis:`uplink\_id`\ is mutually exclusive with \ :emphasis:`uplink\_name`\ .
- *fabric_id* or *fabric_name* is required along with *uplink_id*.
+ \ :emphasis:`fabric\_id`\ or \ :emphasis:`fabric\_name`\ is required along with \ :emphasis:`uplink\_id`\ .
uplink_name (optional, str, None)
Unique name of the uplink.
- *uplink_name* is mutually exclusive with *uplink_id*.
+ \ :emphasis:`uplink\_name`\ is mutually exclusive with \ :emphasis:`uplink\_id`\ .
- *fabric_id* or *fabric_name* is required along with *uplink_name*.
+ \ :emphasis:`fabric\_id`\ or \ :emphasis:`fabric\_name`\ is required along with \ :emphasis:`uplink\_name`\ .
hostname (True, str, None)
OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -95,7 +111,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_template.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_template.rst
index 409d2f7e8..a03fa7a0b 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_template.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_template.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,83 +28,83 @@ Parameters
----------
command (optional, str, create)
- ``create`` creates a new template.
+ \ :literal:`create`\ creates a new template.
- ``modify`` modifies an existing template.
+ \ :literal:`modify`\ modifies an existing template.
- ``deploy`` creates a template-deployment job.
+ \ :literal:`deploy`\ creates a template-deployment job.
- ``delete`` deletes an existing template.
+ \ :literal:`delete`\ deletes an existing template.
- ``export`` exports an existing template.
+ \ :literal:`export`\ exports an existing template.
- ``import`` creates a template from a specified configuration text in SCP XML format.
+ \ :literal:`import`\ creates a template from a specified configuration text in SCP XML format.
- ``clone`` creates a clone of a existing template.
+ \ :literal:`clone`\ creates a clone of an existing template.
template_id (optional, int, None)
ID of the existing template.
- This option is applicable when *command* is ``modify``, ``deploy``, ``delete``, ``clone`` and ``export``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`modify`\ , \ :literal:`deploy`\ , \ :literal:`delete`\ , \ :literal:`clone`\ and \ :literal:`export`\ .
- This option is mutually exclusive with *template_name*.
+ This option is mutually exclusive with \ :emphasis:`template\_name`\ .
template_name (optional, str, None)
Name of the existing template.
- This option is applicable when *command* is ``modify``, ``deploy``, ``delete``, ``clone`` and ``export``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`modify`\ , \ :literal:`deploy`\ , \ :literal:`delete`\ , \ :literal:`clone`\ and \ :literal:`export`\ .
- This option is mutually exclusive with *template_id*.
+ This option is mutually exclusive with \ :emphasis:`template\_id`\ .
device_id (optional, list, [])
- Specify the list of targeted device ID(s) when *command* is ``deploy``. When I (command) is ``create``, specify the ID of a single device.
+  Specify the list of targeted device ID(s) when \ :emphasis:`command`\  is \ :literal:`deploy`\ . When \ :emphasis:`command`\  is \ :literal:`create`\ , specify the ID of a single device.
- Either *device_id* or *device_service_tag* is mandatory or both can be applicable.
+ Either \ :emphasis:`device\_id`\ or \ :emphasis:`device\_service\_tag`\ is mandatory or both can be applicable.
device_service_tag (optional, list, [])
- Specify the list of targeted device service tags when I (command) is ``deploy``. When *command* is ``create``, specify the service tag of a single device.
+  Specify the list of targeted device service tags when \ :emphasis:`command`\  is \ :literal:`deploy`\ . When \ :emphasis:`command`\  is \ :literal:`create`\ , specify the service tag of a single device.
- Either *device_id* or *device_service_tag* is mandatory or both can be applicable.
+ Either \ :emphasis:`device\_id`\ or \ :emphasis:`device\_service\_tag`\ is mandatory or both can be applicable.
device_group_names (optional, list, [])
- Specify the list of groups when I (command) is ``deploy``.
+  Specify the list of groups when \ :emphasis:`command`\  is \ :literal:`deploy`\ .
- Provide at least one of the mandatory options *device_id*, *device_service_tag*, or *device_group_names*.
+ Provide at least one of the mandatory options \ :emphasis:`device\_id`\ , \ :emphasis:`device\_service\_tag`\ , or \ :emphasis:`device\_group\_names`\ .
template_view_type (optional, str, Deployment)
Select the type of view of the OME template.
- This is applicable when *command* is ``create``,``clone`` and ``import``.
+  This is applicable when \ :emphasis:`command`\  is \ :literal:`create`\ , \ :literal:`clone`\ , and \ :literal:`import`\ .
attributes (optional, dict, None)
- Payload data for the template operations. All the variables in this option are added as payload for ``create``, ``modify``, ``deploy``, ``import``, and ``clone`` operations. It takes the following attributes.
+ Payload data for the template operations. All the variables in this option are added as payload for \ :literal:`create`\ , \ :literal:`modify`\ , \ :literal:`deploy`\ , \ :literal:`import`\ , and \ :literal:`clone`\ operations. It takes the following attributes.
- Attributes: List of dictionaries of attributes (if any) to be modified in the deployment template. This is applicable when *command* is ``deploy`` and ``modify``. Use the *Id* If the attribute Id is available. If not, use the comma separated I (DisplayName). For more details about using the *DisplayName*, see the example provided.
+  Attributes: List of dictionaries of attributes (if any) to be modified in the deployment template. This is applicable when \ :emphasis:`command`\  is \ :literal:`deploy`\  and \ :literal:`modify`\ . Use the \ :emphasis:`Id`\  if the attribute Id is available. If not, use the comma-separated \ :emphasis:`DisplayName`\ . For more details about using the \ :emphasis:`DisplayName`\ , see the example provided.
- Name: Name of the template. This is mandatory when *command* is ``create``, ``import``, ``clone``, and optional when *command* is ``modify``.
+ Name: Name of the template. This is mandatory when \ :emphasis:`command`\ is \ :literal:`create`\ , \ :literal:`import`\ , \ :literal:`clone`\ , and optional when \ :emphasis:`command`\ is \ :literal:`modify`\ .
- Description: Description for the template. This is applicable when *command* is ``create`` or ``modify``.
+ Description: Description for the template. This is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ or \ :literal:`modify`\ .
- Fqdds: This allows to create a template using components from a specified reference server. One or more, of the following values must be specified in a comma-separated string: iDRAC, System, BIOS, NIC, LifeCycleController, RAID, and EventFilters. If none of the values are specified, the default value 'All' is selected. This is applicable when I (command) is ``create``.
+  Fqdds: This allows creating a template using components from a specified reference server. One or more of the following values must be specified in a comma-separated string: iDRAC, System, BIOS, NIC, LifeCycleController, RAID, and EventFilters. If none of the values are specified, the default value 'All' is selected. This is applicable when \ :emphasis:`command`\  is \ :literal:`create`\ .
- Options: Options to control device shutdown or end power state post template deployment. This is applicable for ``deploy`` operation.
+ Options: Options to control device shutdown or end power state post template deployment. This is applicable for \ :literal:`deploy`\ operation.
- Schedule: Provides options to schedule the deployment task immediately, or at a specified time. This is applicable when *command* is ``deploy``.
+ Schedule: Provides options to schedule the deployment task immediately, or at a specified time. This is applicable when \ :emphasis:`command`\ is \ :literal:`deploy`\ .
- NetworkBootIsoModel: Payload to specify the ISO deployment details. This is applicable when *command* is ``deploy``.
+ NetworkBootIsoModel: Payload to specify the ISO deployment details. This is applicable when \ :emphasis:`command`\ is \ :literal:`deploy`\ .
- Content: The XML content of template. This is applicable when *command* is ``import``.
+ Content: The XML content of template. This is applicable when \ :emphasis:`command`\ is \ :literal:`import`\ .
- Type: Template type ID, indicating the type of device for which configuration is supported, such as chassis and servers. This is applicable when *command* is ``import``.
+ Type: Template type ID, indicating the type of device for which configuration is supported, such as chassis and servers. This is applicable when \ :emphasis:`command`\ is \ :literal:`import`\ .
- TypeId: Template type ID, indicating the type of device for which configuration is supported, such as chassis and servers. This is applicable when *command* is ``create``.
+ TypeId: Template type ID, indicating the type of device for which configuration is supported, such as chassis and servers. This is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ .
Refer OpenManage Enterprise API Reference Guide for more details.
@@ -112,37 +112,53 @@ Parameters
job_wait (optional, bool, True)
Provides the option to wait for job completion.
- This option is applicable when *command* is ``create``, or ``deploy``.
+ This option is applicable when \ :emphasis:`command`\ is \ :literal:`create`\ , or \ :literal:`deploy`\ .
job_wait_timeout (optional, int, 1200)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -161,7 +177,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -523,11 +539,11 @@ msg (always, str, Successfully created a template with ID 23)
return_id (success, when I(command) is C(create), C(modify), C(import), C(clone) and C(deploy), int, 12)
- ID of the template for ``create``, ``modify``, ``import`` and ``clone`` or task created in case of ``deploy``.
+ ID of the template for \ :literal:`create`\ , \ :literal:`modify`\ , \ :literal:`import`\ and \ :literal:`clone`\ or task created in case of \ :literal:`deploy`\ .
TemplateId (success, when I(command) is C(export), int, 13)
- ID of the template for ``export``.
+ ID of the template for \ :literal:`export`\ .
Content (success, when I(command) is C(export), str, <SystemConfiguration Model="PowerEdge R940" ServiceTag="DEFG123" TimeStamp="Tue Sep 24 09:20:57.872551 2019">
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_identity_pool.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_identity_pool.rst
index 64a63b477..ce31bea37 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_identity_pool.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_identity_pool.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -39,24 +39,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -75,7 +91,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_info.rst
index d1da26467..e0aebd802 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -44,24 +44,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -80,7 +96,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
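[Editor's note: a minimal playbook sketch of the environment-variable fallback described above; the inventory group, hostname, and CA path are placeholders, and OME_USERNAME and OME_PASSWORD are assumed to be exported in the controller's environment.]

    ---
    - hosts: ome
      gather_facts: false
      tasks:
        # username and password are intentionally omitted; the module
        # falls back to the OME_USERNAME and OME_PASSWORD environment variables.
        - name: Retrieve template details using environment credentials
          dellemc.openmanage.ome_template_info:
            hostname: 192.168.0.1
            ca_path: "/path/to/ca_cert.pem"
          delegate_to: localhost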
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan.rst
index 89d908556..2277c137a 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -30,13 +30,13 @@ Parameters
template_name (optional, str, None)
Name of the template.
- It is mutually exclusive with *template_id*.
+ It is mutually exclusive with \ :emphasis:`template\_id`\ .
template_id (optional, int, None)
Id of the template.
- It is mutually exclusive with *template_name*.
+ It is mutually exclusive with \ :emphasis:`template\_name`\ .
nic_identifier (True, str, None)
@@ -62,17 +62,17 @@ Parameters
Enter 0 to clear the untagged VLAN from the port.
- This option is mutually exclusive with *untagged_network_name*
+ This option is mutually exclusive with \ :emphasis:`untagged\_network\_name`\
- To get the VLAN network ID use the API https://*hostname*/api/NetworkConfigurationService/Networks
+  To get the VLAN network ID, use the API https://\ :emphasis:`hostname`\ /api/NetworkConfigurationService/Networks.
untagged_network_name (optional, str, None)
name of the vlan for untagging
- provide 0 for clearing the untagging for this *port*
+  Provide 0 to clear the untagging for this \ :emphasis:`port`\ .
- This parameter is mutually exclusive with *untagged_network_id*
+ This parameter is mutually exclusive with \ :emphasis:`untagged\_network\_id`\
@@ -89,9 +89,9 @@ Parameters
Enter [] to remove the tagged VLAN from a port.
- List of *tagged_network_ids* is combined with list of *tagged_network_names* when adding tagged VLANs to a port.
+ List of \ :emphasis:`tagged\_network\_ids`\ is combined with list of \ :emphasis:`tagged\_network\_names`\ when adding tagged VLANs to a port.
- To get the VLAN network ID use the API https://*hostname*/api/NetworkConfigurationService/Networks
+  To get the VLAN network ID, use the API https://\ :emphasis:`hostname`\ /api/NetworkConfigurationService/Networks.
tagged_network_names (optional, list, None)
@@ -99,7 +99,7 @@ Parameters
Enter [] to remove the tagged VLAN from a port.
- List of *tagged_network_names* is combined with list of *tagged_network_ids* when adding tagged VLANs to a port.
+ List of \ :emphasis:`tagged\_network\_names`\ is combined with list of \ :emphasis:`tagged\_network\_ids`\ when adding tagged VLANs to a port.
@@ -107,24 +107,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -143,7 +159,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan_info.rst
index 6d5c9fad8..25c309dda 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_template_network_vlan_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -30,39 +30,55 @@ Parameters
template_id (optional, int, None)
Id of the template.
- This is mutually exclusive with *template_name*.
+ This is mutually exclusive with \ :emphasis:`template\_name`\ .
template_name (optional, str, None)
Name of the template.
- This is mutually exclusive with *template_id*.
+ This is mutually exclusive with \ :emphasis:`template\_id`\ .
- ``Note`` If *template_id* or *template_name* option is not provided, the module retrieves network VLAN info of all templates.
+ \ :literal:`Note`\ If \ :emphasis:`template\_id`\ or \ :emphasis:`template\_name`\ option is not provided, the module retrieves network VLAN info of all templates.
hostname (True, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -81,7 +97,7 @@ Notes
.. note::
- Run this module on a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_user.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_user.rst
index 908640f5e..d58653d1c 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_user.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_user.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,27 +28,27 @@ Parameters
----------
state (optional, str, present)
- ``present`` creates a user in case the *UserName* provided inside *attributes* does not exist.
+ \ :literal:`present`\ creates a user in case the \ :emphasis:`UserName`\ provided inside \ :emphasis:`attributes`\ does not exist.
- ``present`` modifies a user in case the *UserName* provided inside *attributes* exists.
+ \ :literal:`present`\ modifies a user in case the \ :emphasis:`UserName`\ provided inside \ :emphasis:`attributes`\ exists.
- ``absent`` deletes an existing user.
+ \ :literal:`absent`\ deletes an existing user.
user_id (optional, int, None)
Unique ID of the user to be deleted.
- Either *user_id* or *name* is mandatory for ``absent`` operation.
+ Either \ :emphasis:`user\_id`\ or \ :emphasis:`name`\ is mandatory for \ :literal:`absent`\ operation.
name (optional, str, None)
Unique Name of the user to be deleted.
- Either *user_id* or *name* is mandatory for ``absent`` operation.
+ Either \ :emphasis:`user\_id`\ or \ :emphasis:`name`\ is mandatory for \ :literal:`absent`\ operation.
attributes (optional, dict, {})
- Payload data for the user operations. It can take the following attributes for ``present``.
+ Payload data for the user operations. It can take the following attributes for \ :literal:`present`\ .
UserTypeId, DirectoryServiceId, Description, Name, Password, UserName, RoleId, Locked, Enabled.
@@ -61,24 +61,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -97,7 +113,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module does not support ``check_mode``.
+ - This module does not support \ :literal:`check\_mode`\ .
@@ -179,7 +195,7 @@ msg (always, str, Successfully created a User)
user_status (When I(state) is C(present)., dict, {'Description': 'Test user creation', 'DirectoryServiceId': 0, 'Enabled': True, 'Id': '61546', 'IsBuiltin': False, 'Locked': False, 'Name': 'test', 'Password': None, 'PlainTextPassword': None, 'RoleId': '10', 'UserName': 'test', 'UserTypeId': 1})
- Details of the user operation, when *state* is ``present``.
+ Details of the user operation, when \ :emphasis:`state`\ is \ :literal:`present`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/ome_user_info.rst b/ansible_collections/dellemc/openmanage/docs/modules/ome_user_info.rst
index ba0043f28..c8f409231 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/ome_user_info.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/ome_user_info.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -44,24 +44,40 @@ Parameters
OpenManage Enterprise or OpenManage Enterprise Modular IP address or hostname.
- username (True, str, None)
+ username (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular username.
+ If the username is not provided, then the environment variable \ :envvar:`OME\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export OME\_USERNAME=username
+
+
+ password (False, str, None)
OpenManage Enterprise or OpenManage Enterprise Modular password.
+ If the password is not provided, then the environment variable \ :envvar:`OME\_PASSWORD`\ is used.
+
+ Example: export OME\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`OME\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export OME\_X\_AUTH\_TOKEN=x\_auth\_token
+
port (optional, int, 443)
OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -80,7 +96,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
@@ -142,5 +158,5 @@ Status
Authors
~~~~~~~
-- Jagadeesh N V(@jagadeeshnv)
+- Jagadeesh N V (@jagadeeshnv)
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/redfish_event_subscription.rst b/ansible_collections/dellemc/openmanage/docs/modules/redfish_event_subscription.rst
index 0ea5da0d6..d828b35da 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/redfish_event_subscription.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/redfish_event_subscription.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -36,43 +36,59 @@ Parameters
event_type (optional, str, Alert)
Specifies the event type to be subscribed.
- ``Alert`` used to subscribe for alert.
+ \ :literal:`Alert`\ used to subscribe for alert.
- ``MetricReport`` used to subscribe for the metrics report.
+ \ :literal:`MetricReport`\ used to subscribe for the metrics report.
event_format_type (optional, str, Event)
Specifies the format type of the event to be subscribed.
- ``Event`` used to subscribe for Event format type.
+ \ :literal:`Event`\ used to subscribe for Event format type.
- ``MetricReport`` used to subscribe for the metrics report format type.
+ \ :literal:`MetricReport`\ used to subscribe for the metrics report format type.
state (optional, str, present)
- ``present`` adds new event subscription.
+ \ :literal:`present`\ adds new event subscription.
- ``absent`` deletes event subscription with the specified *destination*.
+ \ :literal:`absent`\ deletes event subscription with the specified \ :emphasis:`destination`\ .
baseuri (True, str, None)
- IP address of the target out-of-band controller. For example- <ipaddress>:<port>.
+ IP address of the target out-of-band controller. For example- \<ipaddress\>:\<port\>.
- username (True, str, None)
+ username (False, str, None)
Username of the target out-of-band controller.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ password (False, str, None)
Password of the target out-of-band controller.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -90,11 +106,11 @@ Notes
-----
.. note::
- - *event_type* needs to be ``MetricReport`` and *event_format_type* needs to be ``MetricReport`` for metrics subscription.
- - *event_type* needs to be ``Alert`` and *event_format_type* needs to be ``Event`` for event subscription.
+ - \ :emphasis:`event\_type`\ needs to be \ :literal:`MetricReport`\ and \ :emphasis:`event\_format\_type`\ needs to be \ :literal:`MetricReport`\ for metrics subscription.
+ - \ :emphasis:`event\_type`\ needs to be \ :literal:`Alert`\ and \ :emphasis:`event\_format\_type`\ needs to be \ :literal:`Event`\ for event subscription.
- Modifying a subscription is not supported.
- Context is always set to RedfishEvent.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware.rst b/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware.rst
index 3a3471c1f..ad66141c4 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware.rst
@@ -22,7 +22,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
- urllib3
@@ -33,7 +33,7 @@ Parameters
image_uri (True, str, None)
Firmware Image location URI or local path.
- For example- http://<web_address>/components.exe or /home/firmware_repo/component.exe.
+  For example- http://\<web\_address\>/components.exe or /home/firmware\_repo/component.exe.
transfer_protocol (optional, str, HTTP)
@@ -45,31 +45,47 @@ Parameters
job_wait_timeout (optional, int, 3600)
- The maximum wait time of *job_wait* in seconds. The job is tracked only for this duration.
+ The maximum wait time of \ :emphasis:`job\_wait`\ in seconds. The job is tracked only for this duration.
- This option is applicable when *job_wait* is ``true``.
+ This option is applicable when \ :emphasis:`job\_wait`\ is \ :literal:`true`\ .
- Note: If a firmware update needs a reboot, the job will get scheduled and waits for no of seconds specfied in *job_wait_time*. to reduce the wait time either give *job_wait_time* minimum or make *job_wait*as false and retrigger.
+  Note: If a firmware update needs a reboot, the job is scheduled and waits for the number of seconds specified in \ :emphasis:`job\_wait\_timeout`\ . To reduce the wait time, either set \ :emphasis:`job\_wait\_timeout`\  to a minimum value or set \ :emphasis:`job\_wait`\  to false and retrigger.
baseuri (True, str, None)
- IP address of the target out-of-band controller. For example- <ipaddress>:<port>.
+ IP address of the target out-of-band controller. For example- \<ipaddress\>:\<port\>.
- username (True, str, None)
+ username (False, str, None)
Username of the target out-of-band controller.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ password (False, str, None)
Password of the target out-of-band controller.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -90,7 +106,7 @@ Notes
- Run this module from a system that has direct access to Redfish APIs.
- This module supports both IPv4 and IPv6 addresses.
- This module supports only iDRAC9 and above.
- - This module does not support ``check_mode``.
+ - This module does not support \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware_rollback.rst b/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware_rollback.rst
index 466239e40..8fa80d7bd 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware_rollback.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/redfish_firmware_rollback.rst
@@ -22,7 +22,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.9.6
+- python \>= 3.9.6
@@ -36,11 +36,11 @@ Parameters
reboot (optional, bool, True)
Reboot the server to apply the previous version of the firmware.
- ``true`` reboots the server to rollback the firmware to the available version.
+ \ :literal:`true`\ reboots the server to rollback the firmware to the available version.
- ``false`` schedules the rollback of firmware until the next restart.
+ \ :literal:`false`\ schedules the rollback of firmware until the next restart.
- When *reboot* is ``false``, some components update immediately, and the server may reboot. So, the module must wait till the server is accessible.
+ When \ :emphasis:`reboot`\ is \ :literal:`false`\ , some components update immediately, and the server may reboot. So, the module must wait till the server is accessible.
reboot_timeout (optional, int, 900)
@@ -48,23 +48,39 @@ Parameters
baseuri (True, str, None)
- IP address of the target out-of-band controller. For example- <ipaddress>:<port>.
+ IP address of the target out-of-band controller. For example- \<ipaddress\>:\<port\>.
- username (True, str, None)
+ username (False, str, None)
Username of the target out-of-band controller.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ password (False, str, None)
Password of the target out-of-band controller.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -83,9 +99,9 @@ Notes
.. note::
- Run this module from a system that has direct access to Redfish APIs.
- - For components that do not require a reboot, firmware rollback proceeds irrespective of *reboot* is ``true`` or ``false``.
+   - For components that do not require a reboot, firmware rollback proceeds irrespective of whether \ :emphasis:`reboot`\  is \ :literal:`true`\  or \ :literal:`false`\ .
- This module supports IPv4 and IPv6 addresses.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/redfish_powerstate.rst b/ansible_collections/dellemc/openmanage/docs/modules/redfish_powerstate.rst
index fa8ac069e..027efa96b 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/redfish_powerstate.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/redfish_powerstate.rst
@@ -20,7 +20,7 @@ Requirements
------------
The below requirements are needed on the host that executes this module.
-- python >= 3.8.6
+- python \>= 3.9.6
@@ -28,55 +28,71 @@ Parameters
----------
resource_id (False, str, None)
- The unique identifier of the device being managed. For example- https://<*baseuri*>/redfish/v1/Systems/<*resource_id*>.
+  The unique identifier of the device being managed. For example- https://\<\ :emphasis:`baseuri`\ \>/redfish/v1/Systems/\<\ :emphasis:`resource\_id`\ \>.
- This option is mandatory for *base_uri* with multiple devices.
+ This option is mandatory for \ :emphasis:`base\_uri`\ with multiple devices.
- To get the device details, use the API https://<*baseuri*>/redfish/v1/Systems.
+  To get the device details, use the API https://\<\ :emphasis:`baseuri`\ \>/redfish/v1/Systems.
reset_type (True, str, None)
This option resets the device.
- If ``ForceOff``, Turns off the device immediately.
+ If \ :literal:`ForceOff`\ , Turns off the device immediately.
- If ``ForceOn``, Turns on the device immediately.
+ If \ :literal:`ForceOn`\ , Turns on the device immediately.
- If ``ForceRestart``, Turns off the device immediately, and then restarts the device.
+ If \ :literal:`ForceRestart`\ , Turns off the device immediately, and then restarts the device.
- If ``GracefulRestart``, Performs graceful shutdown of the device, and then restarts the device.
+ If \ :literal:`GracefulRestart`\ , Performs graceful shutdown of the device, and then restarts the device.
- If ``GracefulShutdown``, Performs a graceful shutdown of the device, and the turns off the device.
+  If \ :literal:`GracefulShutdown`\ , Performs a graceful shutdown of the device, and then turns off the device.
- If ``Nmi``, Sends a diagnostic interrupt to the device. This is usually a non-maskable interrupt (NMI) on x86 device.
+ If \ :literal:`Nmi`\ , Sends a diagnostic interrupt to the device. This is usually a non-maskable interrupt (NMI) on x86 device.
- If ``On``, Turns on the device.
+ If \ :literal:`On`\ , Turns on the device.
- If ``PowerCycle``, Performs power cycle on the device.
+ If \ :literal:`PowerCycle`\ , Performs power cycle on the device.
- If ``PushPowerButton``, Simulates the pressing of a physical power button on the device.
+ If \ :literal:`PushPowerButton`\ , Simulates the pressing of a physical power button on the device.
When a power control operation is performed, which is not supported on the device, an error message is displayed with the list of operations that can be performed.
baseuri (True, str, None)
- IP address of the target out-of-band controller. For example- <ipaddress>:<port>.
+ IP address of the target out-of-band controller. For example- \<ipaddress\>:\<port\>.
- username (True, str, None)
+ username (False, str, None)
Username of the target out-of-band controller.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ password (False, str, None)
Password of the target out-of-band controller.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
validate_certs (optional, bool, True)
- If ``false``, the SSL certificates will not be validated.
+ If \ :literal:`false`\ , the SSL certificates will not be validated.
- Configure ``false`` only on personally controlled sites where self-signed certificates are used.
+ Configure \ :literal:`false`\ only on personally controlled sites where self-signed certificates are used.
- Prior to collection version ``5.0.0``, the *validate_certs* is ``false`` by default.
+ Prior to collection version \ :literal:`5.0.0`\ , the \ :emphasis:`validate\_certs`\ is \ :literal:`false`\ by default.
ca_path (optional, path, None)
@@ -95,7 +111,7 @@ Notes
.. note::
- Run this module from a system that has direct access to Redfish APIs.
- - This module supports ``check_mode``.
+ - This module supports \ :literal:`check\_mode`\ .
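[Editor's note: a minimal sketch of the same environment-variable fallback for the Redfish modules documented above; the inventory group, baseuri, and CA path are placeholders, and IDRAC_USERNAME and IDRAC_PASSWORD are assumed to be exported in the controller's environment.]

    ---
    - hosts: idrac
      gather_facts: false
      tasks:
        # username and password are intentionally omitted; the module
        # falls back to the IDRAC_USERNAME and IDRAC_PASSWORD environment variables.
        - name: Gracefully restart the device using environment credentials
          dellemc.openmanage.redfish_powerstate:
            baseuri: "192.168.0.1:443"
            reset_type: GracefulRestart
            ca_path: "/path/to/ca_cert.pem"
          delegate_to: localhost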
diff --git a/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst b/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst
index c349b7d99..ade865af9 100644
--- a/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst
+++ b/ansible_collections/dellemc/openmanage/docs/modules/redfish_storage_volume.rst
@@ -193,13 +193,29 @@ Parameters
IP address of the target out-of-band controller. For example- \<ipaddress\>:\<port\>.
- username (True, str, None)
+ username (False, str, None)
Username of the target out-of-band controller.
+ If the username is not provided, then the environment variable \ :envvar:`IDRAC\_USERNAME`\ is used.
- password (True, str, None)
+ Example: export IDRAC\_USERNAME=username
+
+
+ password (False, str, None)
Password of the target out-of-band controller.
+ If the password is not provided, then the environment variable \ :envvar:`IDRAC\_PASSWORD`\ is used.
+
+ Example: export IDRAC\_PASSWORD=password
+
+
+ x_auth_token (False, str, None)
+ Authentication token.
+
+ If the x\_auth\_token is not provided, then the environment variable \ :envvar:`IDRAC\_X\_AUTH\_TOKEN`\ is used.
+
+ Example: export IDRAC\_X\_AUTH\_TOKEN=x\_auth\_token
+
validate_certs (optional, bool, True)
If \ :literal:`false`\ , the SSL certificates will not be validated.
diff --git a/ansible_collections/dellemc/openmanage/playbooks/ome/ome_session.yml b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_session.yml
new file mode 100644
index 000000000..f734b016d
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/playbooks/ome/ome_session.yml
@@ -0,0 +1,52 @@
+---
+- name: Dell OpenManage Ansible OME Session Management.
+ hosts: ome
+ gather_facts: false
+
+ tasks:
+ - name: Create a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ state: present
+ delegate_to: localhost
+
+ - name: Delete a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ state: absent
+ x_auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 4b48e9ab-809e-4087-b7c4-201a16e0143d
+ delegate_to: localhost
+
+ - name: Create a session and execute other modules
+ block:
+ - name: Create a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ register: authdata
+
+ - name: Call ome_user_info module
+ dellemc.openmanage.ome_user_info:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authdata.x_auth_token }}"
+
+ - name: Call ome_network_vlan_info module
+ dellemc.openmanage.ome_network_vlan_info:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authdata.x_auth_token }}"
+ always:
+ - name: Destroy a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+        x_auth_token: "{{ authdata.x_auth_token }}"
+        session_id: "{{ authdata.session_data.Id }}"
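[Editor's note: in the block/always pattern above, the token registered as authdata is reused by the subsequent info modules in place of username and password, and the always section ensures the session is destroyed even if an intermediate task fails.]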
diff --git a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_auth_options.py b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_auth_options.py
index cdad6d1b8..49eebcd2e 100644
--- a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_auth_options.py
+++ b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_auth_options.py
@@ -26,14 +26,14 @@ options:
type: str
description:
- iDRAC username.
- - If the username is not provided, then the environment variable C(IDRAC_USERNAME) is used.
+ - If the username is not provided, then the environment variable E(IDRAC_USERNAME) is used.
- "Example: export IDRAC_USERNAME=username"
idrac_password:
required: true
type: str
description:
- iDRAC user password.
- - If the password is not provided, then the environment variable C(IDRAC_PASSWORD) is used.
+ - If the password is not provided, then the environment variable E(IDRAC_PASSWORD) is used.
- "Example: export IDRAC_PASSWORD=password"
aliases: ['idrac_pwd']
idrac_port:
diff --git a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_x_auth_options.py b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_x_auth_options.py
new file mode 100644
index 000000000..4df58680e
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/idrac_x_auth_options.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.3.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+class ModuleDocFragment(object):
+
+ DOCUMENTATION = r'''
+options:
+ idrac_ip:
+ required: true
+ type: str
+ description: iDRAC IP Address.
+ idrac_user:
+ required: false
+ type: str
+ description:
+ - iDRAC username.
+ - If the username is not provided, then the environment variable E(IDRAC_USERNAME) is used.
+ - "Example: export IDRAC_USERNAME=username"
+ idrac_password:
+ required: false
+ type: str
+ description:
+ - iDRAC user password.
+ - If the password is not provided, then the environment variable E(IDRAC_PASSWORD) is used.
+ - "Example: export IDRAC_PASSWORD=password"
+ aliases: ['idrac_pwd']
+ x_auth_token:
+ description:
+ - Authentication token.
+ - If the x_auth_token is not provided, then the environment variable E(IDRAC_X_AUTH_TOKEN) is used.
+ - "Example: export IDRAC_X_AUTH_TOKEN=x_auth_token"
+ type: str
+ required: false
+ version_added: 9.3.0
+ idrac_port:
+ type: int
+ description: iDRAC port.
+ default: 443
+ validate_certs:
+ description:
+ - If C(false), the SSL certificates will not be validated.
+ - Configure C(false) only on personally controlled sites where self-signed certificates are used.
+ - Prior to collection version C(5.0.0), the I(validate_certs) is C(false) by default.
+ type: bool
+ default: true
+ version_added: 5.0.0
+ ca_path:
+ description:
+ - The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+ type: path
+ version_added: 5.0.0
+ timeout:
+ description: The socket level timeout in seconds.
+ type: int
+ default: 30
+ version_added: 5.0.0
+'''
diff --git a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/ome_auth_options.py b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/ome_auth_options.py
index 7627a6621..28e68f7cb 100644
--- a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/ome_auth_options.py
+++ b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/ome_auth_options.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -24,17 +24,25 @@ options:
username:
description:
- OpenManage Enterprise or OpenManage Enterprise Modular username.
- - If the username is not provided, then the environment variable C(OME_USERNAME) is used.
+ - If the username is not provided, then the environment variable E(OME_USERNAME) is used.
- "Example: export OME_USERNAME=username"
type: str
- required: true
+ required: false
password:
description:
- OpenManage Enterprise or OpenManage Enterprise Modular password.
- - If the password is not provided, then the environment variable C(OME_PASSWORD) is used.
+ - If the password is not provided, then the environment variable E(OME_PASSWORD) is used.
- "Example: export OME_PASSWORD=password"
type: str
- required: true
+ required: false
+ x_auth_token:
+ description:
+ - Authentication token.
+ - If the x_auth_token is not provided, then the environment variable E(OME_X_AUTH_TOKEN) is used.
+ - "Example: export OME_X_AUTH_TOKEN=x_auth_token"
+ type: str
+ required: false
+ version_added: 9.3.0
port:
description: OpenManage Enterprise or OpenManage Enterprise Modular HTTPS port.
type: int
diff --git a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/omem_auth_options.py b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/omem_auth_options.py
index e611e02df..8a7b4f8d9 100644
--- a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/omem_auth_options.py
+++ b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/omem_auth_options.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -24,17 +24,25 @@ options:
username:
description:
- OpenManage Enterprise Modular username.
- - If the username is not provided, then the environment variable C(OME_USERNAME) is used.
+ - If the username is not provided, then the environment variable E(OME_USERNAME) is used.
- "Example: export OME_USERNAME=username"
type: str
- required: true
+ required: false
password:
description:
- OpenManage Enterprise Modular password.
- - If the password is not provided, then the environment variable C(OME_PASSWORD) is used.
+ - If the password is not provided, then the environment variable E(OME_PASSWORD) is used.
- "Example: export OME_PASSWORD=password"
type: str
- required: true
+ required: false
+ x_auth_token:
+ description:
+ - Authentication token.
+ - If the x_auth_token is not provided, then the environment variable E(OME_X_AUTH_TOKEN) is used.
+ - "Example: export OME_X_AUTH_TOKEN=x_auth_token"
+ type: str
+ required: false
+ version_added: 9.3.0
port:
description: OpenManage Enterprise Modular HTTPS port.
type: int
diff --git a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/oment_auth_options.py b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/oment_auth_options.py
index 676ded435..9430cb9a6 100644
--- a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/oment_auth_options.py
+++ b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/oment_auth_options.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -24,17 +24,25 @@ options:
username:
description:
- OpenManage Enterprise username.
- - If the username is not provided, then the environment variable C(OME_USERNAME) is used.
+ - If the username is not provided, then the environment variable E(OME_USERNAME) is used.
- "Example: export OME_USERNAME=username"
type: str
- required: true
+ required: false
password:
description:
- OpenManage Enterprise password.
- - If the password is not provided, then the environment variable C(OME_PASSWORD) is used.
+ - If the password is not provided, then the environment variable E(OME_PASSWORD) is used.
- "Example: export OME_PASSWORD=password"
type: str
- required: true
+ required: false
+ x_auth_token:
+ description:
+ - Authentication token.
+ - If the x_auth_token is not provided, then the environment variable E(OME_X_AUTH_TOKEN) is used.
+ - "Example: export OME_X_AUTH_TOKEN=x_auth_token"
+ type: str
+ required: false
+ version_added: 9.3.0
port:
description: OpenManage Enterprise HTTPS port.
type: int
diff --git a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/redfish_auth_options.py b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/redfish_auth_options.py
index 452bc8a2d..7e15aef25 100644
--- a/ansible_collections/dellemc/openmanage/plugins/doc_fragments/redfish_auth_options.py
+++ b/ansible_collections/dellemc/openmanage/plugins/doc_fragments/redfish_auth_options.py
@@ -2,8 +2,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -24,17 +24,25 @@ options:
username:
description:
- Username of the target out-of-band controller.
- - If the username is not provided, then the environment variable C(IDRAC_USERNAME) is used.
+ - If the username is not provided, then the environment variable E(IDRAC_USERNAME) is used.
- "Example: export IDRAC_USERNAME=username"
type: str
- required: true
+ required: false
password:
description:
- Password of the target out-of-band controller.
- - If the password is not provided, then the environment variable C(IDRAC_PASSWORD) is used.
+ - If the password is not provided, then the environment variable E(IDRAC_PASSWORD) is used.
- "Example: export IDRAC_PASSWORD=password"
type: str
- required: true
+ required: false
+ x_auth_token:
+ description:
+ - Authentication token.
+ - If the x_auth_token is not provided, then the environment variable E(IDRAC_X_AUTH_TOKEN) is used.
+ - "Example: export IDRAC_X_AUTH_TOKEN=x_auth_token"
+ type: str
+ required: false
+ version_added: 9.3.0
validate_certs:
description:
- If C(false), the SSL certificates will not be validated.
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py
index cf4581e89..7e55364a1 100644
--- a/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/idrac_redfish.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# Dell OpenManage Ansible Modules
-# Version 8.0.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
@@ -38,6 +38,7 @@ from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.common.parameters import env_fallback
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import config_ipv6
+from ansible.module_utils.basic import AnsibleModule
idrac_auth_params = {
"idrac_ip": {"required": True, "type": 'str'},
@@ -101,6 +102,7 @@ class iDRACRedfishAPI(object):
self.ipaddress = module_params['idrac_ip']
self.username = module_params['idrac_user']
self.password = module_params['idrac_password']
+ self.x_auth_token = module_params.get('x_auth_token')
self.port = module_params['idrac_port']
self.validate_certs = module_params.get("validate_certs", False)
self.ca_path = module_params.get("ca_path")
@@ -180,7 +182,7 @@ class iDRACRedfishAPI(object):
def __enter__(self):
"""Creates sessions by passing it to header"""
- if self.req_session:
+ if self.req_session and not self.x_auth_token:
payload = {'UserName': self.username,
'Password': self.password}
path = SESSION_RESOURCE_COLLECTION["SESSION"]
@@ -191,6 +193,8 @@ class iDRACRedfishAPI(object):
else:
msg = "Could not create the session"
raise ConnectionError(msg)
+ elif self.x_auth_token is not None:
+ self._headers["X-Auth-Token"] = self.x_auth_token
return self
def __exit__(self, exc_type, exc_value, traceback):
@@ -300,7 +304,7 @@ class iDRACRedfishAPI(object):
return response
def import_scp_share(self, shutdown_type=None, host_powerstate=None, job_wait=True,
- target=None, import_buffer=None, share=None):
+ target=None, import_buffer=None, share=None, time_to_wait=300):
"""
This method imports system configuration using share.
:param shutdown_type: graceful
@@ -312,7 +316,7 @@ class iDRACRedfishAPI(object):
:return: json response
"""
payload = {"ShutdownType": shutdown_type, "EndHostPowerState": host_powerstate,
- "ShareParameters": {"Target": target}}
+ "ShareParameters": {"Target": target}, "TimeToWait": time_to_wait}
if import_buffer is not None:
payload["ImportBuffer"] = import_buffer
if share is None:
@@ -384,7 +388,7 @@ class iDRACRedfishAPI(object):
response = self.wait_for_job_complete(task_uri, job_wait=job_wait)
return response
- def import_scp(self, import_buffer=None, target=None, job_wait=False):
+ def import_scp(self, import_buffer=None, target=None, job_wait=False, time_to_wait=300):
"""
This method imports system configuration details to the system.
:param import_buffer: import buffer payload content xml or json format
@@ -392,7 +396,7 @@ class iDRACRedfishAPI(object):
:param job_wait: True or False decide whether to wait till the job completion.
:return: json response
"""
- payload = {"ImportBuffer": import_buffer, "ShareParameters": {"Target": target}}
+ payload = {"ImportBuffer": import_buffer, "ShareParameters": {"Target": target}, "TimeToWait": time_to_wait}
response = self.invoke_request(IMPORT_URI, "POST", data=payload)
if response.status_code == 202 and job_wait:
task_uri = response.headers["Location"]
@@ -433,3 +437,42 @@ class iDRACRedfishAPI(object):
def _get_omam_ca_env(self):
"""Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or returns None"""
return os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE") or os.environ.get("OMAM_CA_BUNDLE")
+
+
+class IdracAnsibleModule(AnsibleModule):
+ def __init__(self, argument_spec, bypass_checks=False, no_log=False,
+ mutually_exclusive=None, required_together=None,
+ required_one_of=None, add_file_common_args=False,
+ supports_check_mode=False, required_if=None, required_by=None):
+ idrac_argument_spec = {
+ "idrac_ip": {"required": True, "type": 'str'},
+ "idrac_user": {"required": False, "type": 'str', "fallback": (env_fallback, ['IDRAC_USERNAME'])},
+ "idrac_password": {"required": False, "type": 'str', "aliases": ['idrac_pwd'], "no_log": True, "fallback": (env_fallback, ['IDRAC_PASSWORD'])},
+ "x_auth_token": {"required": False, "type": 'str', "no_log": True, "fallback": (env_fallback, ['IDRAC_X_AUTH_TOKEN'])},
+ "idrac_port": {"required": False, "default": 443, "type": 'int'},
+ "validate_certs": {"type": "bool", "default": True},
+ "ca_path": {"type": "path"},
+ "timeout": {"type": "int", "default": 30},
+ }
+ argument_spec.update(idrac_argument_spec)
+
+ auth_mutually_exclusive = [("idrac_user", "x_auth_token"), ("idrac_password", "x_auth_token")]
+ auth_required_one_of = [("idrac_user", "x_auth_token")]
+ auth_required_together = [("idrac_user", "idrac_password")]
+
+ if mutually_exclusive is None:
+ mutually_exclusive = []
+ mutually_exclusive.extend(auth_mutually_exclusive)
+ if required_together is None:
+ required_together = []
+ required_together.extend(auth_required_together)
+ if required_one_of is None:
+ required_one_of = []
+ required_one_of.extend(auth_required_one_of)
+ if required_by is None:
+ required_by = {}
+
+ super().__init__(argument_spec, bypass_checks, no_log,
+ mutually_exclusive, required_together,
+ required_one_of, add_file_common_args,
+ supports_check_mode, required_if, required_by)
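
For orientation, a minimal sketch (not part of the patch) of how a module's main() can adopt the IdracAnsibleModule wrapper added above. The option name example_option and the GET URI are illustrative assumptions; the constructor arguments, the merged authentication options, and the context-manager usage mirror the code in this diff.

from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import (
    iDRACRedfishAPI, IdracAnsibleModule)


def main():
    # Module-specific options only; idrac_ip, idrac_user, idrac_password,
    # x_auth_token, idrac_port, validate_certs, ca_path and timeout are merged
    # in by IdracAnsibleModule, together with the mutually-exclusive and
    # required_one_of constraints between credentials and x_auth_token.
    specs = {"example_option": {"type": "str", "required": False}}  # hypothetical option
    module = IdracAnsibleModule(argument_spec=specs, supports_check_mode=True)

    # With req_session=True a Redfish session is created from idrac_user and
    # idrac_password; when x_auth_token is supplied (directly or through the
    # IDRAC_X_AUTH_TOKEN environment variable) no session is created and the
    # token is sent in the X-Auth-Token header instead.
    with iDRACRedfishAPI(module.params, req_session=True) as idrac:
        resp = idrac.invoke_request("/redfish/v1/Managers/iDRAC.Embedded.1", "GET")  # URI for illustration
        module.exit_json(changed=False, data=resp.json_data)


if __name__ == "__main__":
    main()
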
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py
index cd0bb6be0..4aeba6201 100644
--- a/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/ome.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
@@ -38,6 +38,7 @@ from ansible.module_utils.common.parameters import env_fallback
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import config_ipv6
+from ansible.module_utils.basic import AnsibleModule
ome_auth_params = {
"hostname": {"required": True, "type": "str"},
@@ -96,6 +97,7 @@ class RestOME(object):
self.hostname = str(self.module_params["hostname"]).strip('][')
self.username = self.module_params["username"]
self.password = self.module_params["password"]
+ self.x_auth_token = self.module_params.get("x_auth_token")
self.port = self.module_params["port"]
self.validate_certs = self.module_params.get("validate_certs", True)
self.ca_path = self.module_params.get("ca_path")
@@ -191,7 +193,7 @@ class RestOME(object):
def __enter__(self):
"""Creates sessions by passing it to header"""
- if self.req_session:
+ if self.req_session and not self.x_auth_token:
payload = {'UserName': self.username,
'Password': self.password,
'SessionType': 'API', }
@@ -203,6 +205,8 @@ class RestOME(object):
else:
msg = "Could not create the session"
raise ConnectionError(msg)
+ elif self.x_auth_token is not None:
+ self._headers["X-Auth-Token"] = self.x_auth_token
return self
def __exit__(self, exc_type, exc_value, traceback):
@@ -401,3 +405,42 @@ class RestOME(object):
def _get_omam_ca_env(self):
"""Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or returns None"""
return os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE") or os.environ.get("OMAM_CA_BUNDLE")
+
+
+class OmeAnsibleModule(AnsibleModule):
+ def __init__(self, argument_spec, bypass_checks=False, no_log=False,
+ mutually_exclusive=None, required_together=None,
+ required_one_of=None, add_file_common_args=False,
+ supports_check_mode=False, required_if=None, required_by=None):
+ ome_argument_spec = {
+ "hostname": {"required": True, "type": "str"},
+ "username": {"required": False, "type": "str", "fallback": (env_fallback, ['OME_USERNAME'])},
+ "password": {"required": False, "type": "str", "no_log": True, "fallback": (env_fallback, ['OME_PASSWORD'])},
+ "x_auth_token": {"required": False, "type": "str", "no_log": True, "fallback": (env_fallback, ['OME_X_AUTH_TOKEN'])},
+ "port": {"type": "int", "default": 443},
+ "validate_certs": {"type": "bool", "default": True},
+ "ca_path": {"type": "path"},
+ "timeout": {"type": "int", "default": 30},
+ }
+ argument_spec.update(ome_argument_spec)
+
+ auth_mutually_exclusive = [("username", "x_auth_token"), ("password", "x_auth_token")]
+ auth_required_one_of = [("username", "x_auth_token")]
+ auth_required_together = [("username", "password")]
+
+ if mutually_exclusive is None:
+ mutually_exclusive = []
+ mutually_exclusive.extend(auth_mutually_exclusive)
+ if required_together is None:
+ required_together = []
+ required_together.extend(auth_required_together)
+ if required_one_of is None:
+ required_one_of = []
+ required_one_of.extend(auth_required_one_of)
+ if required_by is None:
+ required_by = {}
+
+ super().__init__(argument_spec, bypass_checks, no_log,
+ mutually_exclusive, required_together,
+ required_one_of, add_file_common_args,
+ supports_check_mode, required_if, required_by)
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py
index 8a26eaf60..7e1615ab4 100644
--- a/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/redfish.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
@@ -36,6 +36,7 @@ from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.common.parameters import env_fallback
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import config_ipv6
+from ansible.module_utils.basic import AnsibleModule
redfish_auth_params = {
"baseuri": {"required": True, "type": "str"},
@@ -96,6 +97,7 @@ class Redfish(object):
self.hostname = self.module_params["baseuri"]
self.username = self.module_params["username"]
self.password = self.module_params["password"]
+ self.x_auth_token = self.module_params.get("x_auth_token")
self.validate_certs = self.module_params.get("validate_certs", True)
self.ca_path = self.module_params.get("ca_path")
self.timeout = self.module_params.get("timeout", 30)
@@ -191,7 +193,7 @@ class Redfish(object):
def __enter__(self):
"""Creates sessions by passing it to header"""
- if self.req_session:
+ if self.req_session and not self.x_auth_token:
payload = {'UserName': self.username,
'Password': self.password}
path = SESSION_RESOURCE_COLLECTION["SESSION"]
@@ -202,6 +204,8 @@ class Redfish(object):
else:
msg = "Could not create the session"
raise ConnectionError(msg)
+ elif self.x_auth_token is not None:
+ self._headers["X-Auth-Token"] = self.x_auth_token
return self
def __exit__(self, exc_type, exc_value, traceback):
@@ -222,3 +226,41 @@ class Redfish(object):
def _get_omam_ca_env(self):
"""Check if the value is set in REQUESTS_CA_BUNDLE or CURL_CA_BUNDLE or OMAM_CA_BUNDLE or returns None"""
return os.environ.get("REQUESTS_CA_BUNDLE") or os.environ.get("CURL_CA_BUNDLE") or os.environ.get("OMAM_CA_BUNDLE")
+
+
+class RedfishAnsibleModule(AnsibleModule):
+ def __init__(self, argument_spec, bypass_checks=False, no_log=False,
+ mutually_exclusive=None, required_together=None,
+ required_one_of=None, add_file_common_args=False,
+ supports_check_mode=False, required_if=None, required_by=None):
+ redfish_argument_spec = {
+ "baseuri": {"required": True, "type": "str"},
+ "username": {"required": False, "type": "str", "fallback": (env_fallback, ['IDRAC_USERNAME'])},
+ "password": {"required": False, "type": "str", "no_log": True, "fallback": (env_fallback, ['IDRAC_PASSWORD'])},
+ "x_auth_token": {"required": False, "type": "str", "no_log": True, "fallback": (env_fallback, ['IDRAC_X_AUTH_TOKEN'])},
+ "validate_certs": {"type": "bool", "default": True},
+ "ca_path": {"type": "path"},
+ "timeout": {"type": "int", "default": 30},
+ }
+ argument_spec.update(redfish_argument_spec)
+
+ auth_mutually_exclusive = [("username", "x_auth_token"), ("password", "x_auth_token")]
+ auth_required_one_of = [("username", "x_auth_token")]
+ auth_required_together = [("username", "password")]
+
+ if mutually_exclusive is None:
+ mutually_exclusive = []
+ mutually_exclusive.extend(auth_mutually_exclusive)
+ if required_together is None:
+ required_together = []
+ required_together.extend(auth_required_together)
+ if required_one_of is None:
+ required_one_of = []
+ required_one_of.extend(auth_required_one_of)
+ if required_by is None:
+ required_by = {}
+
+ super().__init__(argument_spec, bypass_checks, no_log,
+ mutually_exclusive, required_together,
+ required_one_of, add_file_common_args,
+ supports_check_mode, required_if, required_by)
diff --git a/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py b/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py
index 4bead057a..54728a771 100644
--- a/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py
+++ b/ansible_collections/dellemc/openmanage/plugins/module_utils/session_utils.py
@@ -34,6 +34,8 @@ import os
from ansible.module_utils.urls import open_url
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import config_ipv6
+from ansible.module_utils.urls import open_url
+from abc import ABC, abstractmethod
HEADER_TYPE = "application/json"
@@ -177,7 +179,7 @@ class SessionAPI():
self._headers = {
'Content-Type': HEADER_TYPE,
'Accept': HEADER_TYPE,
- 'X-Auth-Token': module_params.get("auth_token")
+ 'X-Auth-Token': module_params.get("x_auth_token")
}
def _get_url(self, uri):
@@ -220,7 +222,7 @@ class SessionAPI():
url += f"?{urlencode(query_param)}"
return url
- def _url_common_args_spec(self, method, api_timeout, headers=None):
+ def _url_common_args_spec(self, method, api_timeout, headers=None, url_kwargs=None):
"""
Generates the common arguments for a URL request.
@@ -242,25 +244,27 @@ class SessionAPI():
- follow_redirects (str): The policy for following redirects.
"""
- req_header = self._headers
- if headers:
- req_header.update(headers)
if api_timeout is None:
api_timeout = self.timeout
if self.ca_path is None:
self.ca_path = self._get_omam_ca_env()
- url_kwargs = {
+ req_header = self._headers
+ if headers:
+ req_header.update(headers)
+ url_params = {
"method": method,
"validate_certs": self.validate_certs,
"ca_path": self.ca_path,
"use_proxy": self.use_proxy,
"headers": req_header,
"timeout": api_timeout,
- "follow_redirects": 'all',
+ "follow_redirects": 'all'
}
- return url_kwargs
+ if url_kwargs:
+ url_params.update(url_kwargs)
+ return url_params
- def _args_session(self, method, api_timeout, headers=None):
+ def _args_session(self, method, api_timeout, headers=None, url_kwargs=None):
"""
Returns a dictionary containing the arguments needed to establish a session.
@@ -275,11 +279,11 @@ class SessionAPI():
req_header = self._headers
if headers:
req_header.update(headers)
- url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers)
+ url_kwargs = self._url_common_args_spec(method, api_timeout, headers=headers, url_kwargs=url_kwargs)
return url_kwargs
def invoke_request(self, uri, method, data=None, query_param=None, headers=None,
- api_timeout=None, dump=True):
+ api_timeout=None, dump=True, url_kwargs=None):
"""
Invokes a request to the specified URI using the given method and optional parameters.
@@ -300,7 +304,7 @@ class SessionAPI():
:return: The response data from the request.
:rtype: OpenURLResponse
"""
- url_kwargs = self._args_session(method, api_timeout, headers=headers)
+ url_kwargs = self._args_session(method, api_timeout, headers=headers, url_kwargs=url_kwargs)
if data and dump:
data = json.dumps(data)
url = self._build_url(uri, query_param=query_param)
@@ -320,3 +324,50 @@ class SessionAPI():
return (os.environ.get("REQUESTS_CA_BUNDLE") or
os.environ.get("CURL_CA_BUNDLE") or
os.environ.get("OMAM_CA_BUNDLE"))
+
+
+class Session(ABC):
+ """
+ Parent class for all session operations.
+ """
+ def __init__(self, module):
+ """
+        Initializes the session helper from the given module parameters and
+        creates the underlying SessionAPI instance.
+
+        Args:
+            module (object): The Ansible module object.
+
+ Returns:
+ None
+ """
+ self.instance = SessionAPI(module.params)
+ self.module = module
+
+ @abstractmethod
+ def create_session(self):
+ """
+ Abstract method to create a session.
+ Must be implemented by subclasses.
+
+ Args:
+ None
+
+ Returns:
+ None
+ """
+ pass
+
+ @abstractmethod
+ def delete_session(self):
+ """
+ Abstract method to delete a session.
+ Must be implemented by subclasses.
+
+ Args:
+ None
+
+ Returns:
+ None
+ """
+ pass
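
A hypothetical concrete subclass of the new Session base class, shown only to illustrate the intended shape of create_session() and delete_session(); the Redfish session URI and payload keys are assumptions, while the SessionAPI plumbing (self.instance, invoke_request) comes from the code above.

class ExampleSession(Session):
    SESSION_URI = "/redfish/v1/SessionService/Sessions"  # assumed endpoint, for illustration only

    def create_session(self):
        # Exchange username/password for a session; the service returns the
        # token in the X-Auth-Token response header.
        payload = {"UserName": self.module.params.get("username"),
                   "Password": self.module.params.get("password")}
        return self.instance.invoke_request(self.SESSION_URI, "POST", data=payload)

    def delete_session(self):
        # Remove the session identified by session_id using the stored token.
        session_id = self.module.params.get("session_id")
        return self.instance.invoke_request(f"{self.SESSION_URI}/{session_id}", "DELETE")
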
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_attributes.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_attributes.py
index 1b61b3ebe..2dd0d4bf9 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_attributes.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_attributes.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -22,7 +22,7 @@ version_added: "6.0.0"
description:
- This module allows to configure the iDRAC attributes.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
idrac_attributes:
type: dict
@@ -62,7 +62,7 @@ options:
type: str
description: Redfish ID of the resource.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- Husniya Abdul Hameed (@husniya-hameed)
- Felix Stephen (@felixs88)
@@ -262,9 +262,8 @@ import json
import re
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import get_manager_res_id
-from ansible.module_utils.basic import AnsibleModule
SUCCESS_MSG = "Successfully updated the attributes."
@@ -496,8 +495,8 @@ def main():
"lifecycle_controller_attributes": {"required": False, "type": 'dict'},
"resource_id": {"required": False, "type": 'str'}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
required_one_of=[('idrac_attributes', 'system_attributes', 'lifecycle_controller_attributes')],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_boot.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_boot.py
index 1e28ef30d..a02e7498e 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_boot.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_boot.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.0.0
-# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -21,7 +21,7 @@ version_added: "6.1.0"
description:
- This module allows to configure the boot order settings.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
boot_options:
type: list
@@ -121,7 +121,7 @@ options:
type: str
description: Redfish ID of the resource.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
notes:
@@ -264,11 +264,11 @@ import json
import time
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (strip_substr_dict, idrac_system_reset,
get_system_res_id,
wait_for_idrac_job_completion)
-from ansible.module_utils.basic import AnsibleModule
+
SYSTEM_URI = "/redfish/v1/Systems"
BOOT_OPTIONS_URI = "/redfish/v1/Systems/{0}/BootOptions?$expand=*($levels=1)"
@@ -286,7 +286,7 @@ INVALID_BOOT_OPT = "{0} boot order reference provided."
SUCCESS_MSG = "Successfully updated the boot settings."
FAILED_MSG = "Failed to update the boot settings."
UNSUPPORTED_MSG = "The system does not support the BootOptions feature."
-JOB_WAIT_MSG = "The boot settings job is triggered successfully."
+JOB_WAIT_MSG = "The boot settings operation is triggered/submitted successfully."
AUTH_ERROR_MSG = "Unable to communicate with iDRAC {0}. This may be due to one of the following: " \
"Incorrect username or password, unreachable iDRAC IP or a failure in TLS/SSL handshake."
@@ -512,8 +512,8 @@ def main():
"job_wait_timeout": {"required": False, "type": "int", "default": 900},
"resource_id": {"required": False, "type": "str"}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
required_one_of=[["boot_options", "boot_order", "boot_source_override_mode",
"boot_source_override_enabled", "boot_source_override_target",
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_certificates.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_certificates.py
index a429c639b..274bcf898 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_certificates.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_certificates.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.6.0
-# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -22,7 +22,7 @@ version_added: "5.5.0"
description:
- This module allows to generate certificate signing request, import, and export certificates on iDRAC.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
command:
description:
@@ -113,7 +113,7 @@ options:
type: int
default: 300
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V(@jagadeeshnv)"
- "Rajshekar P(@rajshekarp87)"
@@ -248,8 +248,7 @@ import json
import base64
import os
from datetime import datetime
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import reset_idrac
@@ -383,7 +382,11 @@ def _build_generate_csr_payload(module, cert_type):
for key, value in csr_transform.items():
if cert_params.get(key) is not None:
- payload[value] = cert_params.get(key)
+ if value == 'AlternativeNames':
+ forming_string = ",".join(cert_params.get(key))
+ payload[value] = [forming_string]
+ else:
+ payload[value] = cert_params.get(key)
if rfish_cert_coll.get(cert_type):
payload["CertificateCollection"] = rfish_cert_coll.get(cert_type)
@@ -564,8 +567,8 @@ def main():
"reset": {"type": 'bool', "default": True},
"wait": {"type": 'int', "default": 300}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
required_if=[
['command', 'generate_csr', ('cert_params', 'certificate_path',)],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py
index 3df5a68fd..e19c0107b 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_diagnostics.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.0.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -22,7 +22,7 @@ version_added: "9.0.0"
description:
- This module allows you to run and export diagnostics on iDRAC.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
run:
description:
@@ -344,8 +344,7 @@ error_info:
import json
import os
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
@@ -768,8 +767,8 @@ class DiagnosticsType:
def main():
specs = get_argument_spec()
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
required_one_of=[["run", "export"]],
required_if=[
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_license.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_license.py
index 565c61cd4..1bf0e2b3c 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_license.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_license.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.7.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -22,7 +22,7 @@ version_added: "8.7.0"
description:
- This module allows to import, export and delete licenses on iDRAC.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
license_id:
description:
@@ -390,8 +390,7 @@ import json
import os
import base64
from urllib.error import HTTPError, URLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.compat.version import LooseVersion
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
@@ -634,15 +633,15 @@ class ExportLicense(License):
self.module.exit_json(msg=INSUFFICIENT_DIRECTORY_PERMISSION_MSG.format(path=path), failed=True)
license_name = self.module.params.get('share_parameters').get('file_name')
if license_name:
- license_file_name = f"{license_name}_iDRAC_license.txt"
+ license_file_name = f"{license_name}"
else:
- license_file_name = f"{self.module.params['license_id']}_iDRAC_license.txt"
+ license_file_name = f"{self.module.params['license_id']}_iDRAC_license.xml"
license_status = self.idrac.invoke_request(export_license_url, "POST", data=payload)
license_data = license_status.json_data
- license_file = license_data.get("LicenseFile")
+ license_file = base64.b64decode(license_data.get("LicenseFile")).decode('utf-8')
file_name = os.path.join(path, license_file_name)
with open(file_name, "w") as fp:
- fp.writelines(license_file)
+ fp.write(license_file)
return license_status
def __export_license_http(self, export_license_url):
@@ -737,7 +736,7 @@ class ExportLicense(License):
"""
license_name = self.module.params.get('share_parameters').get('file_name')
if license_name:
- license_file_name = f"{license_name}_iDRAC_license.xml"
+ license_file_name = f"{license_name}"
else:
license_file_name = f"{self.module.params['license_id']}_iDRAC_license.xml"
payload["FileName"] = license_file_name
@@ -999,8 +998,8 @@ def main():
None
"""
specs = get_argument_spec()
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
mutually_exclusive=[("import", "export", "delete")],
required_if=[
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_network_attributes.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_network_attributes.py
index 0103570be..dbaa9b4f2 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_network_attributes.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_network_attributes.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.4.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -22,7 +22,7 @@ version_added: "8.4.0"
description:
- This module allows you to configure the port and partition network attributes on the network interface cards.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
network_adapter_id:
type: str
@@ -40,17 +40,17 @@ options:
type: dict
description:
- "Dictionary of network attributes and value. To view the list of attributes and its structure, see the below API
- U(https://I(idrac_ip)/redfish/v1/Systems/System.Embedded.1/NetworkAdapters/<network_id>/NetworkDeviceFunctions/
- <network_port_id>/Settings) and U(https://<idrac_ip>/redfish/v1/Schemas/NetworkDeviceFunction.v1_8_0.json)."
+ U(https://I(idrac_ip)/redfish/v1/Systems/System.Embedded.1/NetworkAdapters/<network_adapter_id>/NetworkDeviceFunctions/
+ <network_device_function_id>/Settings) and U(https://<idrac_ip>/redfish/v1/Schemas/NetworkDeviceFunction.v1_8_0.json)."
- I(network_attributes) is mutually exclusive with I(oem_network_attributes).
oem_network_attributes:
type: dict
description:
- "The attributes must be part of the Integrated Dell Remote Access Controller Attribute Registry.
To view the list of attributes in Attribute Registry for iDRAC9 and newer versions. For more information,
- see, U(https://I(idrac_ip)/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/<network_id>/NetworkDeviceFunctions/
- <network_port_id>/Oem/Dell/DellNetworkAttributes/<network_port_id>)
- and U(https://I(idrac_ip)/redfish/v1/Registries/NetworkAttributesRegistry_<network_port_id>/
+ see, U(https://I(idrac_ip)/redfish/v1/Chassis/System.Embedded.1/NetworkAdapters/<network_adapter_id>/NetworkDeviceFunctions/
+ <network_device_function_id>/Oem/Dell/DellNetworkAttributes/<network_device_function_id>)
+ and U(https://I(idrac_ip)/redfish/v1/Registries/NetworkAttributesRegistry_<network_device_function_id>/
NetworkAttributesRegistry_network_port_id.json)."
- For iDRAC8 based servers, derive the network attribute name from Server Configuration Profile.
- I(oem_network_attributes) is mutually exclusive with I(network_attributes).
@@ -131,8 +131,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: "NIC.Integrated.1"
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: "NIC.Integrated.1"
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
oem_network_attributes:
BannerMessageTimeout: "4"
@@ -142,8 +142,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
oem_network_attributes:
BannerMessageTimeout: "4"
apply_time: OnReset
@@ -153,8 +153,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
oem_network_attributes:
BannerMessageTimeout: "4"
apply_time: AtMaintenanceWindowStart
@@ -167,8 +167,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
clear_pending: true
@@ -177,8 +177,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
clear_pending: true
oem_network_attributes:
@@ -189,8 +189,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: "Immediate"
oem_network_attributes:
LnkSpeed: "10MbpsHalf"
@@ -204,8 +204,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
apply_time: OnReset
network_attributes:
Ethernet:
@@ -217,8 +217,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
network_attributes:
Ethernet:
VLAN:
@@ -230,8 +230,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
network_attributes:
iSCSIBoot:
InitiatorIPAddress: 1.0.0.1
@@ -245,8 +245,8 @@ EXAMPLES = """
idrac_ip: "192.168.0.1"
idrac_user: "user_name"
idrac_password: "user_password"
- network_id: NIC.Integrated.1
- network_port_id: "NIC.Integrated.1-1-1"
+ network_adapter_id: NIC.Integrated.1
+ network_device_function_id: "NIC.Integrated.1-1-1"
network_attributes:
Ethernet:
VLAN:
@@ -319,12 +319,9 @@ error_info:
import json
import time
from urllib.error import HTTPError, URLError
-
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import (
- idrac_auth_params, iDRACRedfishAPI)
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
delete_job, get_current_time, get_dynamic_uri, get_idrac_firmware_version,
get_scheduled_job_resp, remove_key, validate_and_get_first_resource_id_uri,
@@ -713,13 +710,12 @@ def main():
"job_wait": {"type": "bool", "default": True},
"job_wait_timeout": {"type": "int", "default": 1200}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(argument_spec=specs,
- mutually_exclusive=[
- ('network_attributes', 'oem_network_attributes')],
- required_if=[["apply_time", "AtMaintenanceWindowStart", ("maintenance_window",)],
- ["apply_time", "InMaintenanceWindowOnReset", ("maintenance_window",)]],
- supports_check_mode=True)
+
+ module = IdracAnsibleModule(argument_spec=specs,
+ mutually_exclusive=[('network_attributes', 'oem_network_attributes')],
+ required_if=[["apply_time", "AtMaintenanceWindowStart", ("maintenance_window",)],
+ ["apply_time", "InMaintenanceWindowOnReset", ("maintenance_window",)]],
+ supports_check_mode=True)
with iDRACRedfishAPI(module.params, req_session=True) as idrac:
if module_attribute := module.params.get('network_attributes'):
network_attr_obj = NetworkAttributes(idrac, module)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_redfish_storage_controller.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_redfish_storage_controller.py
index 40cc5768d..1695f2b6f 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_redfish_storage_controller.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_redfish_storage_controller.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -519,9 +519,8 @@ error_info:
import json
-from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, RedfishAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import wait_for_job_completion, strip_substr_dict
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -1001,8 +1000,7 @@ def main():
"job_wait_timeout": {"required": False, "type": "int", "default": 120},
"size": {"required": False, "type": "int"}
}
- specs.update(redfish_auth_params)
- module = AnsibleModule(
+ module = RedfishAnsibleModule(
argument_spec=specs,
mutually_exclusive=[('attributes', 'command'), ("target", "size")],
required_one_of=[('attributes', 'command')],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py
index 940f86dc2..48aff3dcf 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_reset.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.2.0
+# Version 9.3.0
# Copyright (C) 2018-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -20,7 +20,7 @@ version_added: "2.1.0"
description:
- This module resets the iDRAC to factory default settings.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
reset_to_default:
type: str
@@ -55,7 +55,7 @@ options:
job_wait_timeout:
description:
- Time in seconds to wait for job completion.
- - This is applicable when I(job_wait) is C(true).
+ - This is applicable when I(wait_for_idrac) is C(true).
type: int
default: 600
version_added: 9.2.0
@@ -184,8 +184,7 @@ import os
import json
import time
from urllib.error import HTTPError, URLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
@@ -532,8 +531,8 @@ def main():
"job_wait_timeout": {"type": 'int', "default": 600},
"force_reset": {"type": "bool", "default": False}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
mutually_exclusive=[("custom_defaults_file", "custom_defaults_buffer")],
supports_check_mode=True)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py
index f89272ca3..216a79da4 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_server_config_profile.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.4.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -22,7 +22,7 @@ description:
- Export the Server Configuration Profile (SCP) from the iDRAC or import from a
network share (CIFS, NFS, HTTP, HTTPS) or a local path.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
command:
description:
@@ -527,8 +527,7 @@ import os
import json
from datetime import datetime
from os.path import exists
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import idrac_redfish_job_tracking, \
strip_substr_dict
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -919,8 +918,8 @@ class PreviewCommand():
def main():
specs = get_argument_spec()
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
required_if=[
["command", "export", ["share_name"]],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py
index 3303b4ade..ac4e9b85c 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_session.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.2.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -28,12 +28,14 @@ options:
type: str
username:
description:
- - Username of the iDRAC.
+ - Username of the iDRAC. If the username is not provided, then
+ the environment variable E(IDRAC_USERNAME) is used.
- I(username) is required when I(state) is C(present).
type: str
password:
description:
- - Password of the iDRAC.
+ - Password of the iDRAC. If the password is not provided, then
+ the environment variable E(IDRAC_PASSWORD) is used.
- I(password) is required when I(state) is C(present).
type: str
port:
@@ -65,20 +67,22 @@ options:
choices: [present, absent]
type: str
default: present
- auth_token:
+ x_auth_token:
description:
- Authentication token.
- - I(auth_token) is required when I(state) is C(absent).
+ - I(x_auth_token) is required when I(state) is C(absent).
      type: str
+    aliases: ['auth_token']
session_id:
description:
- Session ID of the iDRAC.
- I(session_id) is required when I(state) is C(absent).
type: int
requirements:
- "python >= 3.9.6"
author:
- "Rajshekar P(@rajshekarp87)"
+ - "Kritika Bhateja (@Kritika-Bhateja-03)"
notes:
- Run this module from a system that has direct access to Dell iDRAC.
- This module supports IPv4 and IPv6 addresses.
@@ -93,14 +97,47 @@ EXAMPLES = r"""
hostname: 198.162.0.1
username: username
password: password
+ ca_path: "/path/to/ca_cert.pem"
state: present
- name: Delete a session
dellemc.openmanage.idrac_session:
hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
state: absent
- auth_token: aed4aa802b748d2f3b31deec00a6b28a
- session_is: 2
+ x_auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 2
+
+- name: Create a session and execute other modules
+ block:
+ - name: Create a session
+ dellemc.openmanage.idrac_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ register: authData
+
+ - name: Call idrac_firmware_info module
+ dellemc.openmanage.idrac_firmware_info:
+ idrac_ip: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+
+ - name: Call idrac_user_info module
+ dellemc.openmanage.idrac_user_info:
+ idrac_ip: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+ always:
+ - name: Destroy a session
+ dellemc.openmanage.idrac_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ x_auth_token: "{{ authData.x_auth_token }}"
+ session_id: "{{ authData.session_data.Id }}"
"""
RETURN = r'''
@@ -361,7 +398,7 @@ def main():
argument_spec=specs,
required_if=[
["state", "present", ("username", "password",)],
- ["state", "absent", ("auth_token", "session_id",)]
+ ["state", "absent", ("x_auth_token", "session_id",)]
],
supports_check_mode=True
)
@@ -400,7 +437,7 @@ def get_argument_spec():
- "timeout": An integer representing the timeout value. The default value is 30.
- "state": A string representing the state. The default value is "present". The choices are
["present", "absent"].
- - "auth_token": A string representing the authentication token. It is marked as not to be
+ - "x_auth_token": A string representing the authentication token. It is marked as not to be
logged.
- "session_id": An integer representing the session ID.
@@ -416,7 +453,7 @@ def get_argument_spec():
"ca_path": {"type": "path", "default": None},
"timeout": {"type": "int", "default": 30},
"state": {"type": 'str', "default": "present", "choices": ["present", "absent"]},
- "auth_token": {"type": "str", "no_log": True},
+ "x_auth_token": {"type": "str", "no_log": True, "aliases": ['auth_token']},
"session_id": {"type": "int"}
}
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py
index 13c1ec065..eec8ef043 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_storage_volume.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.1.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -21,7 +21,7 @@ version_added: "9.1.0"
description:
- This module is responsible for configuring the RAID attributes.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
state:
type: str
@@ -128,6 +128,12 @@ options:
- This option is applicable when I(job_wait) is C(true).
type: int
default: 900
+ time_to_wait:
+ description:
+ - The maximum wait time before shutdown in seconds for the Server Configuration Profile (SCP) import operation.
+ - This option is applicable when I(state) is C(create) or C(delete).
+ type: int
+ default: 300
requirements:
- "python >= 3.9.6"
@@ -281,8 +287,7 @@ import re
import operator
from urllib.error import HTTPError, URLError
from copy import deepcopy
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import (
get_dynamic_uri, validate_and_get_first_resource_id_uri, xml_data_conversion, idrac_redfish_job_tracking, remove_key, get_idrac_firmware_version)
@@ -303,6 +308,7 @@ ID_AND_LOCATION_BOTH_NOT_DEFINED = "Either id or location should be specified."
DRIVES_NOT_DEFINED = "Drives must be defined for volume creation."
NOT_ENOUGH_DRIVES = "Number of sufficient disks not found in Controller '{controller_id}'!"
WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
+TIME_TO_WAIT_MSG = "Time to wait value is invalid. Minimum value is 300 and Maximum is 3600 seconds."
JOB_TRIGERRED = "Successfully triggered the {0} storage volume operation."
VOLUME_NAME_REQUIRED_FOR_DELETE = "Virtual disk name is a required parameter for remove virtual disk operations."
VOLUME_NOT_FOUND = "Unable to find the virtual disk."
@@ -551,6 +557,11 @@ class StorageValidation(StorageBase):
self.idrac_data = StorageData(idrac, module).all_storage_data()
self.controller_id = module.params.get("controller_id")
+ def validate_time_to_wait(self):
+ to_wait = self.module_ext_params.get("time_to_wait")
+ if to_wait < 300 or to_wait > 3600:
+ self.module.exit_json(msg=TIME_TO_WAIT_MSG, failed=True)
+
def validate_controller_exists(self):
if not self.controller_id:
self.module.exit_json(msg=CONTROLLER_NOT_DEFINED, failed=True)
@@ -720,6 +731,7 @@ class StorageCreate(StorageValidation):
def validate(self):
# Validate upper layer input
+ self.validate_time_to_wait()
self.validate_controller_exists()
self.validate_job_wait_negative_values()
# Validate std raid validation for inner layer
@@ -741,7 +753,7 @@ class StorageCreate(StorageValidation):
parent_payload = """<SystemConfiguration>{0}</SystemConfiguration>"""
payload = self.constuct_payload(name_id_mapping)
parent_payload = parent_payload.format(payload)
- resp = self.idrac.import_scp(import_buffer=parent_payload, target="RAID", job_wait=False)
+ resp = self.idrac.import_scp(import_buffer=parent_payload, target="RAID", job_wait=False, time_to_wait=self.module.params.get('time_to_wait'))
job_dict = self.wait_for_job_completion(resp)
return job_dict
@@ -768,6 +780,7 @@ class StorageDelete(StorageValidation):
def validate(self):
# Validate upper layer input
+ self.validate_time_to_wait()
self.validate_job_wait_negative_values()
# Validate for volume and volume_name
@@ -803,7 +816,7 @@ class StorageDelete(StorageValidation):
self.validate_volume_exists_in_server(set(volume_name_input_list))
cntrl_id_vd_id_mapping = self.get_vd_id_based_on_controller_id_vd_name(set(volume_name_input_list))
payload = self.construct_payload_for_delete(cntrl_id_vd_id_mapping)
- resp = self.idrac.import_scp(import_buffer=payload, target="RAID", job_wait=False)
+ resp = self.idrac.import_scp(import_buffer=payload, target="RAID", job_wait=False, time_to_wait=self.module.params.get('time_to_wait'))
job_dict = self.wait_for_job_completion(resp)
return job_dict
@@ -891,10 +904,11 @@ def main():
"raid_reset_config": {"choices": ['true', 'false'], "default": 'false'},
"raid_init_operation": {"choices": ['None', 'Fast']},
"job_wait": {"type": "bool", "default": True},
- "job_wait_timeout": {"type": "int", "default": 900}
+ "job_wait_timeout": {"type": "int", "default": 900},
+ "time_to_wait": {"type": "int", "default": 300}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
supports_check_mode=True)
try:
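
For reference, a minimal sketch (not part of the patch) of calling the extended import_scp() helper directly; the connection parameters and the SCP buffer are placeholders, while the time_to_wait keyword and its 300-3600 second bounds come from the changes above.

from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI

idrac_params = {"idrac_ip": "192.168.0.1", "idrac_user": "user_name",
                "idrac_password": "user_password", "idrac_port": 443,
                "validate_certs": True, "ca_path": "/path/to/ca_cert.pem",
                "timeout": 30}

with iDRACRedfishAPI(idrac_params, req_session=True) as idrac:
    # TimeToWait is forwarded in the SCP import payload; idrac_storage_volume
    # rejects values outside 300-3600 seconds before making this call.
    resp = idrac.import_scp(import_buffer="<SystemConfiguration>...</SystemConfiguration>",
                            target="RAID", job_wait=False, time_to_wait=600)
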
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user.py
index bcd16b872..4cd67b7b5 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2018-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2018-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -24,7 +24,7 @@ description:
- Edit a user account.
- Enable or Disable a user account.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
state:
type: str
@@ -101,7 +101,7 @@ options:
- A privacy protocol is not configured if C(None) is selected.
choices: [None, DES, AES]
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Felix Stephen (@felixs88)"
notes:
- Run this module from a system that has direct access to Dell iDRAC.
@@ -209,9 +209,7 @@ import time
from ssl import SSLError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
-from ansible.module_utils.basic import AnsibleModule
-
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
ACCOUNT_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Accounts/"
ATTRIBUTE_URI = "/redfish/v1/Managers/iDRAC.Embedded.1/Attributes/"
@@ -412,8 +410,7 @@ def main():
"authentication_protocol": {"required": False, "choices": ['SHA', 'MD5', 'None']},
"privacy_protocol": {"required": False, "choices": ['AES', 'DES', 'None']},
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+ module = IdracAnsibleModule(
argument_spec=specs,
supports_check_mode=True)
try:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user_info.py
index 6d06a60be..895e0d338 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_user_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -22,7 +22,7 @@ description:
- "This module retrieves the list and basic details of all users or details of a specific user on
iDRAC"
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
user_id:
description:
@@ -35,7 +35,7 @@ options:
- Username of the account that is created in iDRAC local users.
- I(username) is mutually exclusive with I(user_id)
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Husniya Hameed(@husniya_hameed)"
notes:
- Run this module on a system that has direct access to Dell iDRAC.
@@ -116,8 +116,7 @@ import json
from ssl import SSLError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict
@@ -193,8 +192,8 @@ def main():
"user_id": {"type": 'int'},
"username": {"type": 'str'}
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(
+
+ module = IdracAnsibleModule(
argument_spec=specs,
mutually_exclusive=[
('user_id', 'username')
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_virtual_media.py b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_virtual_media.py
index 4c5fb10db..aef7c2a70 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/idrac_virtual_media.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/idrac_virtual_media.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.3.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -21,7 +21,7 @@ version_added: "6.3.0"
description:
- This module allows to configure Remote File Share settings.
extends_documentation_fragment:
- - dellemc.openmanage.idrac_auth_options
+ - dellemc.openmanage.idrac_x_auth_options
options:
virtual_media:
required: true
@@ -78,7 +78,7 @@ options:
type: str
description: Resource id of the iDRAC, if not specified manager collection id will be used.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
notes:
@@ -217,8 +217,8 @@ import copy
import time
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, idrac_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.idrac_redfish import iDRACRedfishAPI, IdracAnsibleModule
+
MANAGER_BASE = "/redfish/v1/Managers/iDRAC.Embedded.1/VirtualMedia"
SYSTEM_BASE = "/redfish/v1/Systems/System.Embedded.1/VirtualMedia"
@@ -441,8 +441,8 @@ def main():
"force": {"required": False, "type": "bool", "default": False},
"resource_id": {"required": False, "type": 'str'},
}
- specs.update(idrac_auth_params)
- module = AnsibleModule(argument_spec=specs, supports_check_mode=True)
+
+ module = IdracAnsibleModule(argument_spec=specs, supports_check_mode=True)
try:
with iDRACRedfishAPI(module.params, req_session=True) as idrac:
vr_media = module.params["virtual_media"]
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_active_directory.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_active_directory.py
index 6f420bec7..b23f2d0ef 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_active_directory.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_active_directory.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -117,7 +117,7 @@ options:
- The certificate should be a Root CA Certificate encoded in Base64 format.
- This is applicable when I(validate_certificate) is C(true).
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- The module will always report change when I(validate_certificate) is C(true).
- Run this module from a system that has direct access to OpenManage Enterprise.
@@ -241,10 +241,9 @@ error_info:
import json
import os
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.common.dict_transformations import recursive_diff
AD_URI = "AccountService/ExternalAccountProvider/ADAccountProvider"
@@ -418,8 +417,8 @@ def main():
"domain_username": {"type": 'str'},
"domain_password": {"type": 'str', "no_log": True}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[('name', 'id')],
required_if=[
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies.py
index 9e8a17fd2..b279d9739 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.3.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -478,9 +478,8 @@ error_info:
import csv
import os
import json
-from ansible.module_utils.basic import AnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import get_all_data_with_pagination, strip_substr_dict
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.common.dict_transformations import recursive_diff
@@ -1083,11 +1082,11 @@ def main():
}
}
}
- specs.update(ome_auth_params)
+
present_args = ['enable', 'new_name', 'description', 'device_service_tag', 'device_group',
'specific_undiscovered_devices', 'any_undiscovered_devices', 'all_devices',
'category', 'message_ids', 'message_file', 'date_and_time', 'severity', 'actions']
- module = AnsibleModule(
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['state', 'present', present_args, True]],
mutually_exclusive=[('device_service_tag', 'device_group', 'any_undiscovered_devices', 'specific_undiscovered_devices', 'all_devices',),
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_actions_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_actions_info.py
index 0d1f0c726..6bb50acf1 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_actions_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_actions_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -253,8 +253,7 @@ msg:
'''
import json
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import remove_key
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -267,8 +266,8 @@ EMPTY_ALERT_POLICY_ACTION_MSG = "No alert policies action information were found
def main():
""" function to retrieve the information on actions of alert policies """
- specs = ome_auth_params
- module = AnsibleModule(
+ specs = {}
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True)
try:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_category_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_category_info.py
index 6d3151fe9..0af0bf338 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_category_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_category_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -278,10 +278,9 @@ error_info:
'''
import json
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import remove_key
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import get_all_data_with_pagination
@@ -296,8 +295,8 @@ def get_formatted_categories(rest_obj):
def main():
- specs = ome_auth_params
- module = AnsibleModule(
+ specs = {}
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True)
try:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_info.py
index d9a97c070..51ed53b9d 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -129,8 +129,7 @@ error_info:
'''
import json
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import remove_key
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -182,13 +181,13 @@ class OMEAlertPolicyInfo:
self.module.exit_json(msg=str(err), failed=True)
-def get_module_parameters() -> AnsibleModule:
+def get_module_parameters() -> OmeAnsibleModule:
specs = {
"policy_name": {"type": 'str'}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(argument_spec=specs,
- supports_check_mode=True)
+
+ module = OmeAnsibleModule(argument_spec=specs,
+ supports_check_mode=True)
return module
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_message_id_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_message_id_info.py
index 577eac7d0..3bde29a33 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_message_id_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_alert_policies_message_id_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -105,8 +105,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import remove_key, get_all_data_with_pagination
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -117,8 +116,8 @@ EMPTY_MSG = "No alert policies message id information were found."
def main():
- specs = ome_auth_params
- module = AnsibleModule(
+ specs = {}
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_smtp.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_smtp.py
index 58572bae0..855c7b022 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_smtp.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_smtp.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -57,7 +57,7 @@ options:
type: str
required: true
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- The module will always report change when I(enable_authentication) is C(true).
- Run this module from a system that has direct access to Dell OpenManage Enterprise
@@ -139,10 +139,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.common.dict_transformations import recursive_diff
SUCCESS_MSG = "Successfully updated the SMTP settings."
@@ -235,8 +234,8 @@ def main():
"options": credentials_options,
},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['enable_authentication', True, ['credentials']], ],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_syslog.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_syslog.py
index a72093752..ecb20b336 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_syslog.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_alerts_syslog.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -44,7 +44,7 @@ options:
description: The UDP port number of the syslog server.
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- Jagadeesh N V(@jagadeeshnv)
notes:
@@ -152,10 +152,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.common.dict_transformations import recursive_diff
from ansible.module_utils.common.dict_transformations import snake_dict_to_camel_dict
@@ -232,9 +231,8 @@ def main():
"required_if": [("enabled", True, ("destination_address",))]
}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_certificate.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_certificate.py
index 60f170f76..b8283537c 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_certificate.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_certificate.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.3.0
-# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -159,8 +159,7 @@ error_info:
import json
import os
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -195,6 +194,21 @@ def get_san(subject_alternative_names):
return subject_alternative_names.replace(" ", "")
+def format_csr_string(csr_string):
+ # Remove the header and footer
+ csr_string = csr_string.replace("-----BEGIN CERTIFICATE REQUEST-----", "")
+ csr_string = csr_string.replace("-----END CERTIFICATE REQUEST-----", "")
+ csr_string = csr_string.replace("\n", "")
+
+ # Format the remaining string with proper line breaks
+ formatted_csr = '\n'.join([csr_string[i:i + 64] for i in range(0, len(csr_string), 64)])
+
+ # Add the header and footer back
+ formatted_csr = "-----BEGIN CERTIFICATE REQUEST-----\n" + formatted_csr + "\n-----END CERTIFICATE REQUEST-----"
+
+ return formatted_csr
+
+
def main():
specs = {
"command": {"type": "str", "required": False,
@@ -209,8 +223,8 @@ def main():
"upload_file": {"required": False, "type": "str"},
"subject_alternative_names": {"required": False, "type": "str"}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[["command", "generate_csr", ["distinguished_name", "department_name",
"business_name", "locality", "country_state",
@@ -228,8 +242,11 @@ def main():
resp = rest_obj.invoke_request(method, uri, headers=headers, data=payload, dump=dump)
if resp.success:
if command == "generate_csr":
+ resp_copy = resp.json_data
+ formated_csr = format_csr_string(resp_copy["CertificateData"])
+ resp_copy["CertificateData"] = formated_csr
module.exit_json(msg="Successfully generated certificate signing request.",
- csr_status=resp.json_data)
+ csr_status=resp_copy)
module.exit_json(msg="Successfully uploaded application certificate.", changed=True)
except HTTPError as err:
module.fail_json(msg=str(err), error_info=json.load(err))
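
The `format_csr_string` helper added above changes only the presentation of the returned CSR: it strips the PEM header and footer, drops embedded newlines, and re-wraps the base64 body at 64 characters per line before re-adding the armor. A standalone restatement of that wrapping, with a hypothetical payload:

    # Illustration only; the module's format_csr_string does the equivalent work.
    body = "MIICWjCCAUICAQAwFTETMBEGA1UEAwwKZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3" * 3  # hypothetical single-line base64 text
    wrapped = "\n".join(body[i:i + 64] for i in range(0, len(body), 64))
    csr = ("-----BEGIN CERTIFICATE REQUEST-----\n"
           + wrapped
           + "\n-----END CERTIFICATE REQUEST-----")
    print(csr)  # each body line is now at most 64 characters wide
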
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py
index 760546f02..93c0b9af2 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_console_preferences.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.1.0
+# Version 9.3.0
# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -365,10 +365,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict
SUCCESS_MSG = "Successfully updated the Console Preferences settings."
@@ -631,13 +630,13 @@ def main():
"trap_forwarding_format": {"required": False, "type": "str", "choices": ["Normalized", "Original"]},
"metrics_collection_settings": {"required": False, "type": "int"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(argument_spec=specs,
- required_one_of=[["report_row_limit", "device_health", "discovery_settings",
- "server_initiated_discovery", "mx7000_onboarding_preferences",
- "builtin_appliance_share", "email_sender_settings",
- "trap_forwarding_format", "metrics_collection_settings"]],
- supports_check_mode=True, )
+
+ module = OmeAnsibleModule(argument_spec=specs,
+ required_one_of=[["report_row_limit", "device_health", "discovery_settings",
+ "server_initiated_discovery", "mx7000_onboarding_preferences",
+ "builtin_appliance_share", "email_sender_settings",
+ "trap_forwarding_format", "metrics_collection_settings"]],
+ supports_check_mode=True)
try:
_validate_params(module)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_address.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_address.py
index ab8814a42..bdea896f4 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_address.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_address.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -182,7 +182,7 @@ options:
- This option is not mandatory.
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V(@jagadeeshnv)"
'''
@@ -426,8 +426,7 @@ error_info:
import json
import socket
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -716,8 +715,8 @@ def main():
},
"reboot_delay": {"required": False, "type": "int"}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
["enable_nic", True,
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_proxy.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_proxy.py
index 0ca58de09..76ee7c961 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_proxy.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_proxy.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -56,7 +56,7 @@ options:
- This option is mandatory when I(enable_authentication) is true.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
notes:
@@ -145,8 +145,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -225,8 +224,8 @@ def main():
"proxy_password": {"required": False, "type": "str", "no_log": True},
"enable_authentication": {"required": False, "type": "bool"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['enable_proxy', True, ['ip_address', 'proxy_port']],
['enable_authentication', True, ['proxy_username', 'proxy_password']], ],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_settings.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_settings.py
index 91a0de1d7..d14388e95 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_settings.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_settings.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -81,7 +81,7 @@ options:
- This is applicable only for OpenManage Enterprise Modular.
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise
or OpenManage Enterprise Modular.
@@ -250,10 +250,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
SUCCESS_MSG = "Successfully updated the session timeout settings."
SESSION_INACTIVITY_GET = "SessionService/SessionConfiguration"
@@ -355,9 +354,8 @@ def main():
]
}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_time.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_time.py
index baf533c0a..c206b59a4 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_time.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_time.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -55,7 +55,7 @@ options:
- This option is applicable when I(enable_ntp) is true.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
notes:
@@ -139,8 +139,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -230,8 +229,8 @@ def main():
"secondary_ntp_address1": {"required": False, "type": "str"},
"secondary_ntp_address2": {"required": False, "type": "str"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['enable_ntp', False, ('time_zone', 'system_time',), True],
['enable_ntp', True, ('time_zone', 'primary_ntp_address',
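
The first `required_if` entry visible above ends with a trailing `True`, which relaxes the rule from "all of the listed parameters are required" to "at least one of them is required" when the condition matches. The shape of such an entry on its own, using the names from this module:

    # [parameter, value, requirements, any_of]
    # With the trailing True, enable_ntp=false only demands ONE of time_zone or
    # system_time; without it, every name in the tuple would become required.
    required_if = [
        ["enable_ntp", False, ("time_zone", "system_time"), True],
    ]
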
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_webserver.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_webserver.py
index 9e6cdffd5..f012fa977 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_webserver.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_network_webserver.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -35,7 +35,7 @@ options:
- If a change is made to the session timeout, it will only take effect after the next log in.
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V(@jagadeeshnv)"
notes:
@@ -114,8 +114,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -162,8 +161,8 @@ def main():
"webserver_port": {"required": False, "type": "int"},
"webserver_timeout": {"required": False, "type": "int"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[["webserver_port", "webserver_timeout"]],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_security_settings.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_security_settings.py
index af869fb16..f75554f02 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_security_settings.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_application_security_settings.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -94,7 +94,7 @@ options:
author:
- Jagadeesh N V(@jagadeeshnv)
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- This module supports C(check_mode).
@@ -202,8 +202,7 @@ JOB_POLL_INTERVAL = 3
import json
import time
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -333,9 +332,8 @@ def main():
"job_wait": {"type": 'bool', "default": True},
"job_wait_timeout": {"type": 'int', "default": 120}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[("fips_mode_enable", "login_lockout_policy"),
("fips_mode_enable", "restrict_allowed_ip_range")],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_chassis_slots.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_chassis_slots.py
index adcc53566..65274ec5b 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_chassis_slots.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_chassis_slots.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -72,7 +72,7 @@ options:
description: Provide name for the slot.
required: true
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- "This module initiates the refresh inventory task. It may take a minute for new names to be reflected.
If the task exceeds 300 seconds to refresh, the task times out."
@@ -254,10 +254,9 @@ error_info:
import json
import time
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.common.dict_transformations import recursive_diff
DEVICE_URI = "DeviceService/Devices"
@@ -578,8 +577,8 @@ def main():
},
},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[('slot_options', 'device_options')],
mutually_exclusive=[('slot_options', 'device_options')],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_baseline.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_baseline.py
index fa0f2a90a..31f4f6ed1 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_baseline.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_baseline.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.6.0
-# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -106,7 +106,7 @@ options:
type: int
default: 10800
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
- "Abhishek Sinha(@Abhishek-Dell)"
@@ -291,8 +291,7 @@ error_info:
import json
import time
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.compat.version import LooseVersion
@@ -811,8 +810,8 @@ def main():
"job_wait_timeout": {"required": False, "type": 'int', "default": 10800},
"new_name": {"type": 'str'},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
['command', 'create', ['template_name', 'template_id'], True],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_info.py
index 8132ffe9d..39c202f92 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_configuration_compliance_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.1.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -42,7 +42,7 @@ options:
- I(device_service_tag) is mutually exclusive with I(device_id).
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen A (@felixs88)"
- "Kritika Bhateja (@Kritika-Bhateja)"
@@ -148,8 +148,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -222,8 +221,8 @@ def main():
"device_id": {"required": False, "type": "int"},
"device_service_tag": {"required": False, "type": "str"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[["device_id", "device_service_tag"]],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_group.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_group.py
index f6a085cd9..49bb23d96 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_group.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_group.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 6.1.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -70,7 +70,7 @@ options:
available in OpenManage Enterprise.The module reports failure only if none of the IP addresses provided in the
list are available in OpenManage Enterprise.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
- "netaddr >= 0.7.19"
author:
- "Felix Stephen (@felixs88)"
@@ -277,8 +277,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
@@ -480,16 +479,16 @@ def main():
"device_ids": {"required": False, "type": "list", "elements": 'int'},
"ip_addresses": {"required": False, "type": "list", "elements": 'str'},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
- required_if=(
+ required_if=[
["state", "present", ("device_ids", "device_service_tags", "ip_addresses"), True],
- ),
- mutually_exclusive=(
+ ],
+ mutually_exclusive=[
("name", "group_id"),
("device_ids", "device_service_tags", "ip_addresses"),
- ),
+ ],
required_one_of=[("name", "group_id"),
("device_ids", "device_service_tags", "ip_addresses")],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_info.py
index 62430402c..ee1ca2c46 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -59,7 +59,7 @@ options:
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty (@Sajna-Shetty)"
- "Felix Stephen (@felixs88)"
@@ -195,8 +195,8 @@ device_info:
'''
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import get_all_data_with_pagination
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -376,8 +376,8 @@ def main():
"choices": ['basic_inventory', 'detailed_inventory', 'subsystem_health']},
"system_query_options": system_query_options,
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['fact_subset', 'detailed_inventory', ['system_query_options']],
['fact_subset', 'subsystem_health', ['system_query_options']]],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py
index c6245a6de..7cec5d985 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_local_access_configuration.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.1.0
+# Version 9.3.0
# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -261,10 +261,9 @@ import json
import socket
import copy
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
DOMAIN_URI = "ManagementDomainService/Domains"
DEVICE_URI = "DeviceService/Devices"
@@ -450,8 +449,8 @@ def main():
"required": False, "type": "dict", "options": lcd_options,
},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[('device_id', 'device_service_tag')],
required_one_of=[["enable_kvm_access", "enable_chassis_direct_access",
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_location.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_location.py
index 9c73b7c46..a03ba5116 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_location.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_location.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -55,7 +55,7 @@ options:
type: str
description: The physical location of the chassis.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
notes:
@@ -153,10 +153,9 @@ error_info:
import json
import socket
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
LOCATION_API = "DeviceService/Devices({0})/Settings('Location')"
DEVICE_URI = "DeviceService/Devices"
@@ -278,8 +277,8 @@ def main():
"rack_slot": {"required": False, "type": "int"},
"location": {"required": False, "type": "str"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[('device_id', 'device_service_tag')],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_mgmt_network.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_mgmt_network.py
index 0d4b0a483..a7d5477a5 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_mgmt_network.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_mgmt_network.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -229,7 +229,7 @@ options:
- Enter the IP address of the second alternate DNS server.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
- This module supports C(check_mode).
@@ -370,10 +370,9 @@ import json
import socket
import copy
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.common.dict_transformations import recursive_diff
DEVICE_URI = "DeviceService/Devices"
@@ -748,8 +747,8 @@ def main():
"required_one_of": [("preferred_dns_server", "alternate_dns_server1", "alternate_dns_server2")]
}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[('device_id', 'device_service_tag')],
mutually_exclusive=[('device_id', 'device_service_tag')],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_network_services.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_network_services.py
index 6d1518b34..d91d6e136 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_network_services.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_network_services.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -79,7 +79,7 @@ options:
required: true
description: Enables or disables the remote RACADM settings.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
notes:
@@ -192,10 +192,9 @@ error_info:
import json
import socket
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
DOMAIN_URI = "ManagementDomainService/Domains"
DEVICE_URI = "DeviceService/Devices"
@@ -369,8 +368,8 @@ def main():
"ssh_settings": {"type": "dict", "required": False, "options": ssh_options},
"remote_racadm_settings": {"type": "dict", "required": False, "options": racadm_options},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[('device_id', 'device_service_tag')],
required_one_of=[["snmp_settings", "ssh_settings", "remote_racadm_settings"]],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_power_settings.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_power_settings.py
index f0587791e..9bab4de02 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_power_settings.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_power_settings.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -77,7 +77,7 @@ options:
choices: ['GRID_1', 'GRID_2']
default: GRID_1
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
notes:
@@ -168,10 +168,9 @@ import json
import socket
import copy
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
POWER_API = "DeviceService/Devices({0})/Settings('Power')"
DEVICE_URI = "DeviceService/Devices"
DOMAIN_URI = "ManagementDomainService/Domains"
@@ -314,8 +313,8 @@ def main():
"hot_spare_configuration": {"type": "dict", "required": False, "options": hot_spare_options,
"required_if": [["enable_hot_spare", True, ("primary_grid",)]]},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[('device_id', 'device_service_tag')],
required_one_of=[["power_configuration", "redundancy_configuration", "hot_spare_configuration"]],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py
index 7d45bf7e9..d343576be 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_device_quick_deploy.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.1.0
+# Version 9.3.0
# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -349,10 +349,9 @@ import copy
import json
import socket
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
DOMAIN_URI = "ManagementDomainService/Domains"
DEVICE_URI = "DeviceService/Devices"
@@ -663,10 +662,10 @@ def main():
"job_wait": {"type": "bool", "default": True},
"job_wait_timeout": {"type": "int", "default": 120},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(argument_spec=specs,
- mutually_exclusive=[('device_id', 'device_service_tag')],
- supports_check_mode=True,)
+
+ module = OmeAnsibleModule(argument_spec=specs,
+ mutually_exclusive=[('device_id', 'device_service_tag')],
+ supports_check_mode=True,)
if module.params["quick_deploy_options"] is None:
module.exit_json(msg="missing required arguments: quick_deploy_options", failed=True)
fields = [("ipv4_subnet_mask", "IPV4"), ("ipv4_gateway", "IPV4"), ("ipv6_gateway", "IPV6")]
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py
index 2dc7b625d..0b8181932 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_devices.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.1.0
+# Version 9.3.0
# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -230,10 +230,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict, job_tracking
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import CHANGES_MSG, NO_CHANGES_MSG
@@ -410,8 +409,8 @@ def main():
"job_description": {"type": "str"},
# "job_params": {"type": "dict"}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[],
mutually_exclusive=[
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_diagnostics.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_diagnostics.py
index b16604c3c..64650b9ff 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_diagnostics.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_diagnostics.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -128,7 +128,7 @@ options:
- I(lead_chassis_only) is only applicable when I(log_type) is C(application) on OpenManage Enterprise Modular.
default: false
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
- "Sachin Apagundi(@sachin-apa)"
@@ -267,8 +267,7 @@ error_info:
import json
import re
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
LOG_SELECTOR = {"OS_LOGS": 1, "RAID_LOGS": 2, "DEBUG_LOGS": 3}
@@ -431,8 +430,8 @@ def main():
"test_connection": {"required": False, "type": "bool", "default": False},
"lead_chassis_only": {"required": False, "type": "bool", "default": False},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
['log_type', 'application', ['mask_sensitive_info']],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_discovery.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_discovery.py
index f50d8f25e..f0747d5d1 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_discovery.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_discovery.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -366,7 +366,7 @@ options:
description: KgKey for the IPMI protocol.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V (@jagadeeshnv)"
- "Sajna Shetty (@Sajna-Shetty)"
@@ -644,8 +644,7 @@ error_info:
import json
import time
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -1056,8 +1055,8 @@ def main():
"email_recipient": {"type": 'str'},
"ignore_partial_failure": {"type": 'bool', "default": False}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
['state', 'present', ('discovery_config_targets',)],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_domain_user_groups.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_domain_user_groups.py
index bd15dccc6..52fee50df 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_domain_user_groups.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_domain_user_groups.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.2.0
-# Copyright (C) 2021-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -194,8 +194,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
ROLE_URI = "AccountService/Roles"
@@ -346,8 +345,8 @@ def main():
"domain_username": {"required": False, "type": 'str'},
"domain_password": {"required": False, "type": 'str', "no_log": True},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[['directory_name', 'directory_id'], ],
supports_check_mode=True)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware.py
index 5e83a07d5..ee15e45b7 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.3.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -351,8 +351,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -650,8 +649,8 @@ def main():
"required_one_of": [('id', 'service_tag')]
},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[["dup_file", "baseline_name"]],
mutually_exclusive=[
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline.py
index 6c2c6a1c5..3b13d272d 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -109,7 +109,7 @@ options:
version_added: 8.1.0
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise or OpenManage Enterprise Modular.
- I(device_group_names) option is not applicable for OpenManage Enterprise Modular.
@@ -315,8 +315,7 @@ GROUP_ID = 6000
import json
import time
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.common.dict_transformations import recursive_diff
@@ -568,8 +567,8 @@ def main():
"job_wait_timeout": {"type": 'int', "default": 600},
"filter_no_reboot_required": {"type": 'bool'}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[
('device_ids', 'device_service_tags', 'device_group_names'),
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_compliance_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_compliance_info.py
index af48fc151..8866cc993 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_compliance_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_compliance_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -61,7 +61,7 @@ options:
type: list
elements: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Sajna Shetty(@Sajna-Shetty)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -225,8 +225,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -391,8 +390,8 @@ def main():
"device_ids": {"required": False, "type": "list", "elements": 'int'},
"device_group_names": {"required": False, "type": "list", "elements": 'str'},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[['baseline_name', 'device_service_tags', 'device_ids', 'device_group_names']],
required_one_of=[['device_ids', 'device_service_tags', 'device_group_names', 'baseline_name']],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_info.py
index 261d67030..ddc6531fb 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_baseline_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -28,7 +28,7 @@ options:
all the available firmware baselines are returned.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Sajna Shetty(@Sajna-Shetty)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -101,8 +101,7 @@ baseline_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
@@ -124,8 +123,8 @@ def main():
specs = {
"baseline_name": {"type": 'str', "required": False},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_catalog.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_catalog.py
index b437db3ae..b477fd9ac 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_catalog.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_firmware_catalog.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -114,7 +114,7 @@ options:
default: 600
version_added: 3.4.0
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
- "Jagadeesh N V(@jagadeeshnv)"
@@ -354,8 +354,7 @@ import json
import time
import os
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import remove_key
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -621,8 +620,8 @@ def main():
"job_wait": {"type": 'bool', "default": True},
"job_wait_timeout": {"type": 'int', "default": 600}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
['state', 'present',
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_groups.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_groups.py
index 3daf178cf..94d978aaa 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_groups.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_groups.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -71,7 +71,7 @@ options:
- This is applicable only when I(state) is C(present).
- This option is mutually exclusive with I(parent_group_name).
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
notes:
- This module manages only static device groups on Dell OpenManage Enterprise.
- If a device group with the name I(parent_group_name) does not exist, a new device group with the same name is created.
@@ -194,10 +194,9 @@ error_info:
import json
import time
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
GROUP_URI = "GroupService/Groups"
OP_URI = "GroupService/Actions/GroupService.{op}Group"
@@ -406,8 +405,8 @@ def main():
"parent_group_name": {"type": "str", "default": STATIC_ROOT},
"parent_group_id": {"type": "int"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
["state", "present", ("new_name", "description", "parent_group_name", "parent_group_id"), True],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_identity_pool.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_identity_pool.py
index 9a627b234..04ffa88eb 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_identity_pool.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_identity_pool.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -124,7 +124,7 @@ options:
description: Number of MAC addresses.I(identity_count) is required to option to create FC settings.
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
- "Deepak Joshi(@Dell-Deepak-Joshi))"
@@ -263,8 +263,7 @@ import json
import codecs
import binascii
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -575,8 +574,8 @@ def main():
"options": iscsi_specific_settings},
"fc_settings": {"required": False, "type": "dict", "options": fc_settings},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_job_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_job_info.py
index 8a875c756..ab166dacf 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_job_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_job_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.1.0
-# Copyright (C) 2020-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -46,7 +46,7 @@ options:
type: bool
default: false
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V (@jagadeeshnv)"
- "Abhishek Sinha (@Abhishek-Dell)"
@@ -292,8 +292,7 @@ job_info:
'''
import json
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict, remove_key
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -364,8 +363,8 @@ def main():
}},
"fetch_execution_history": {"type": 'bool', "default": False},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_port_breakout.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_port_breakout.py
index 044601d0d..65b7c869f 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_port_breakout.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_port_breakout.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -35,7 +35,7 @@ options:
- To revoke the default breakout configuration, enter 'HardwareDefault'.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Felix Stephen (@felixs88)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise Modular.
@@ -126,8 +126,7 @@ error_info:
import json
import re
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -257,8 +256,8 @@ def main():
"target_port": {"required": True, "type": 'str'},
"breakout_type": {"required": True, "type": 'str'},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan.py
index 393f44a71..762acefac 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -63,7 +63,7 @@ options:
'Storage - iSCSI', 'Storage - FCoE', 'Storage - Data Replication',
'VM Migration', 'VMWare FT Logging']
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V(@jagadeeshnv)"
notes:
@@ -183,8 +183,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -318,8 +317,8 @@ def main():
'Storage - iSCSI', 'Storage - FCoE', 'Storage - Data Replication', 'VM Migration',
'VMWare FT Logging']}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['state', 'present', ('new_name', 'description', 'vlan_minimum', 'vlan_maximum', 'type',), True]],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan_info.py
index ea9861a3b..fb315947a 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_network_vlan_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -37,7 +37,7 @@ options:
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Deepak Joshi(@deepakjoshishri)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -151,8 +151,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -216,8 +215,8 @@ def main():
"id": {"required": False, "type": 'int'},
"name": {"required": False, "type": 'str'}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[["id", "name"]],
supports_check_mode=True)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_powerstate.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_powerstate.py
index 0122848c4..b5056b133 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_powerstate.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_powerstate.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -38,7 +38,7 @@ options:
- I(device_id) is mutually exclusive with I(device_service_tag).
type: int
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Felix Stephen (@felixs88)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -157,8 +157,7 @@ job_status:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -251,8 +250,8 @@ def main():
"device_service_tag": {"required": False, "type": "str"},
"device_id": {"required": False, "type": "int"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[["device_service_tag", "device_id"]],
mutually_exclusive=[["device_service_tag", "device_id"]],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile.py
index eed9a45fd..adc189dc6 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -162,7 +162,7 @@ options:
- This is applicable when I(command) is C(assign).
type: dict
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Jagadeesh N V (@jagadeeshnv)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -397,8 +397,7 @@ error_info:
import json
import time
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.common.dict_transformations import recursive_diff
@@ -827,8 +826,8 @@ def main():
"attributes": {"type": 'dict', "options": assign_spec},
"force": {"default": False, "type": 'bool'}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
['command', 'create', ['template_name', 'template_id'], True],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile_info.py
index 4c5f07dd2..a3dd49e86 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_profile_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -249,10 +249,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict
@@ -347,11 +346,10 @@ def main():
"template_name": {"type": 'str'},
"system_query_options": {"type": 'dict'}
}
- argument_spec.update(ome_auth_params)
- module = AnsibleModule(argument_spec=argument_spec,
- mutually_exclusive=[('profile_id', 'profile_name', 'template_name', 'template_id',
- 'system_query_options')],
- supports_check_mode=True)
+ module = OmeAnsibleModule(argument_spec=argument_spec,
+ mutually_exclusive=[('profile_id', 'profile_name', 'template_name', 'template_id',
+ 'system_query_options')],
+ supports_check_mode=True)
try:
with RestOME(module.params, req_session=True) as rest_obj:
query = {}
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profile_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profile_info.py
index 12286bff3..adf3ee810 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profile_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profile_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -35,7 +35,7 @@ options:
- I(device_service_tag) is mutually exclusive with I(device_id).
elements: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Felix Stephen (@felixs88)"
notes:
@@ -154,10 +154,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
DOMAIN_URI = "ManagementDomainService/Domains"
PROFILE_URI = "NetworkService/ServerProfiles"
@@ -237,11 +236,10 @@ def main():
"device_id": {"required": False, "type": "list", "elements": "int"},
"device_service_tag": {"required": False, "type": "list", "elements": "str"},
}
- argument_spec.update(ome_auth_params)
- module = AnsibleModule(argument_spec=argument_spec,
- mutually_exclusive=[('device_id', 'device_service_tag')],
- required_one_of=[["device_id", "device_service_tag"]],
- supports_check_mode=True, )
+ module = OmeAnsibleModule(argument_spec=argument_spec,
+ mutually_exclusive=[('device_id', 'device_service_tag')],
+ required_one_of=[["device_id", "device_service_tag"]],
+ supports_check_mode=True)
if not any([module.params.get("device_id"), module.params.get("device_service_tag")]):
module.fail_json(msg=CONFIG_FAIL_MSG)
try:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profiles.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profiles.py
index 60b436a29..554a44656 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profiles.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_server_interface_profiles.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -99,7 +99,7 @@ options:
type: int
default: 120
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Jagadeesh N V (@jagadeeshnv)"
notes:
- This module supports C(check_mode).
@@ -193,10 +193,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import \
get_rest_items, strip_substr_dict, job_tracking, apply_diff_key
@@ -397,8 +396,8 @@ def main():
}},
"job_wait": {"type": 'bool', "default": True},
"job_wait_timeout": {"type": 'int', "default": 120}}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[
('device_id', 'device_service_tag',)],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_session.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_session.py
new file mode 100644
index 000000000..8560b7220
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_session.py
@@ -0,0 +1,408 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.3.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = r"""
+---
+module: ome_session
+short_description: Manage OpenManage Enterprise and OpenManage Enterprise Modular sessions
+version_added: "9.3.0"
+description:
+ - This module allows you to create and delete sessions on OpenManage Enterprise and OpenManage Enterprise Modular.
+options:
+ hostname:
+ description:
+ - IP address or hostname of the OpenManage Enterprise.
+ type: str
+ username:
+ description:
+ - Username of the OpenManage Enterprise. If the username is not provided, then
+ the environment variable E(OME_USERNAME) is used.
+ - I(username) is required when I(state) is C(present).
+ type: str
+ password:
+ description:
+ - Password of the OpenManage Enterprise. If the password is not provided, then
+ the environment variable E(OME_PASSWORD) is used.
+ - I(password) is required when I(state) is C(present).
+ type: str
+ port:
+ description:
+ - Port of the OpenManage Enterprise.
+ type: int
+ default: 443
+ validate_certs:
+ description:
+ - If C(false), the SSL certificates will not be validated.
+ - Configure C(false) only on personally controlled sites where self-signed certificates are used.
+ type: bool
+ default: true
+ ca_path:
+ description:
+ - The Privacy Enhanced Mail (PEM) file that contains a CA certificate to be used for the validation.
+ type: path
+ timeout:
+ description:
+ - The HTTPS socket level timeout in seconds.
+ type: int
+ default: 30
+ state:
+ description:
+ - The state of the session in OpenManage Enterprise.
+ - C(present) creates a session.
+ - C(absent) deletes a session.
+ - Module will always report changes found to be applied when I(state) is C(present).
+ choices: [present, absent]
+ type: str
+ default: present
+ x_auth_token:
+ description:
+ - Authentication token.
+ - I(x_auth_token) is required when I(state) is C(absent).
+ type: str
+ aliases: ['auth_token']
+ session_id:
+ description:
+ - Session ID of the OpenManage Enterprise.
+ - I(session_id) is required when I(state) is C(absent).
+ type: str
+requirements:
+ - "python >= 3.9.6"
+author:
+ - "Kritika Bhateja (@Kritika-Bhateja-03)"
+notes:
+ - Run this module from a system that has direct access to Dell OpenManage Enterprise.
+ - This module supports IPv4 and IPv6 addresses.
+ - This module supports C(check_mode).
+ - This module will always report changes found to be applied when I(state) is C(present).
+"""
+
+EXAMPLES = r"""
+---
+- name: Create a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+
+- name: Delete a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ x_auth_token: aed4aa802b748d2f3b31deec00a6b28a
+ session_id: 4b48e9ab-809e-4087-b7c4-201a16e0143d
+
+- name: Create a session and execute other modules
+ block:
+ - name: Create a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ username: username
+ password: password
+ ca_path: "/path/to/ca_cert.pem"
+ state: present
+ register: authData
+
+ - name: Call ome_user_info module
+ dellemc.openmanage.ome_user_info:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+
+ - name: Call ome_network_vlan_info module
+ dellemc.openmanage.ome_network_vlan_info:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ x_auth_token: "{{ authData.x_auth_token }}"
+ always:
+ - name: Destroy a session
+ dellemc.openmanage.ome_session:
+ hostname: 198.162.0.1
+ ca_path: "/path/to/ca_cert.pem"
+ state: absent
+ x_auth_token: "{{ authData.x_auth_token }}"
+ session_id: "{{ authData.session_data.Id }}"
+"""
+
+RETURN = r'''
+---
+msg:
+ description: Status of the session operation.
+ returned: always
+ type: str
+ sample: "The session has been created successfully."
+session_data:
+ description: The session details.
+ returned: For session creation operation
+ type: dict
+ sample: {
+ "Id": "d5c28d8e-1084-4055-9c01-e1051cfee2dd",
+ "Description": "admin",
+ "Name": "API",
+ "UserName": "admin",
+ "UserId": 10078,
+ "Password": null,
+ "Roles": [
+ "BACKUP_ADMINISTRATOR"
+ ],
+ "IpAddress": "100.198.162.0",
+ "StartTimeStamp": "2023-07-03 07:22:43.683",
+ "LastAccessedTimeStamp": "2023-07-03 07:22:43.683",
+ "DirectoryGroup": []
+ }
+x_auth_token:
+ description: Authentication token.
+ returned: For session creation operation
+ type: str
+ sample: "d15f17f01cd627c30173b1582642497d"
+error_info:
+ description: Details of the HTTP Error.
+ returned: On HTTP error
+ type: dict
+ sample: {
+ "error": {
+ "@Message.ExtendedInfo": [
+ {
+ "Message": "Unable to complete the operation because an invalid username and/or password is entered, and therefore authentication failed.",
+ "MessageArgs": [],
+ "MessageArgs@odata.count": 0,
+ "MessageId": "IDRAC.2.7.SYS415",
+ "RelatedProperties": [],
+ "RelatedProperties@odata.count": 0,
+ "Resolution": "Enter valid user name and password and retry the operation.",
+ "Severity": "Warning"
+ }
+ ],
+ "code": "Base.1.12.GeneralError",
+ "message": "A general error has occurred. See ExtendedInfo for more information"
+ }
+ }
+'''
+
+
+import json
+from urllib.error import HTTPError, URLError
+from ansible_collections.dellemc.openmanage.plugins.module_utils.session_utils import Session
+from ansible.module_utils.basic import AnsibleModule
+from ansible.module_utils.urls import ConnectionError, SSLValidationError
+from ansible.module_utils.common.parameters import env_fallback
+from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import remove_key
+
+SESSION_URL = "/api/SessionService/Sessions"
+ODATA = "@odata.id"
+ODATA_REGEX = "(.*?)@odata"
+
+CREATE_SUCCESS_MSG = "The session has been created successfully."
+DELETE_SUCCESS_MSG = "The session has been deleted successfully."
+FAILURE_MSG = "Unable to '{operation}' a session."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+NO_CHANGES_FOUND_MSG = "No changes found to be applied."
+
+
+class OMESession(Session):
+ def __init__(self, module):
+ super().__init__(module)
+        self.url_kwargs = {"force_basic_auth": True,
+ "url_username": self.module.params.get("username"),
+ "url_password": self.module.params.get("password")}
+
+ def get_session_status(self, session_url, session_id):
+ """
+        Checks whether a session with the given ID exists on the appliance.
+
+        Args:
+            session_url (str): The URL of the session service.
+            session_id (str): The ID of the session.
+
+        Returns:
+            bool: True if the session ID is present in the sessions returned by the session
+            service, False otherwise.
+ """
+ session_status_response = self.instance.invoke_request(SESSION_URL, "GET")
+ sessions_data = session_status_response.json_data
+        session_ids = [each_session["@odata.id"].split("'")[1] for each_session in sessions_data["value"]]
+ session_status = session_id in session_ids
+ return session_status
+
+ def create_session(self):
+ """
+ Executes the session creation process.
+
+ This function creates a session by sending a POST request to the session URL with the
+        provided username and password. If the request is successful (status code 201), it
+        retrieves the session details, removes any OData keys from the response, and extracts
+        the X-Auth-Token from the response headers. It then exits the module with a success
+        message and returns the session data and X-Auth-Token as output variables.
+
+ If the request fails (status code other than 201), it exits the module with a failure
+ message, indicating that the session creation failed.
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+ payload = {"UserName": self.module.params.get("username"),
+ "Password": self.module.params.get("password")}
+ if self.module.check_mode:
+ self.module.exit_json(msg=CHANGES_FOUND_MSG, changed=True)
+ session_response = self.instance.invoke_request(SESSION_URL, "POST", data=payload, url_kwargs=self.url_kwrags)
+ status = session_response.status_code
+ if status == 201:
+ session_details = session_response.json_data
+ session_data = remove_key(session_details, regex_pattern=ODATA_REGEX)
+ x_auth_token = session_response.headers.get('X-Auth-Token')
+ self.module.exit_json(msg=CREATE_SUCCESS_MSG,
+ changed=True,
+ session_data=session_data,
+ x_auth_token=x_auth_token)
+ else:
+ self.module.exit_json(msg=FAILURE_MSG.format(operation="create"), failed=True)
+
+ def delete_session(self):
+ """
+ Executes the deletion of a session.
+        This function retrieves the session ID from the module parameters. It then invokes a
+ DELETE request to the session URL with the session ID appended. The response from
+ the request is stored in the `session_response` variable.
+
+        If the response status code is 204, indicating a successful deletion, the function exits
+ the module with a success message and sets the `changed` parameter to True. Otherwise, it
+ exits the module with a failure message and sets the `failed` parameter to True.
+
+ Parameters:
+ None
+
+ Returns:
+ None
+ """
+ session_id = self.module.params.get("session_id")
+ session_status = self.get_session_status(SESSION_URL, session_id)
+ if self.module.check_mode:
+ if session_status:
+ self.module.exit_json(msg=CHANGES_FOUND_MSG, changed=True)
+ else:
+ self.module.exit_json(msg=NO_CHANGES_FOUND_MSG)
+ else:
+ if session_status:
+ try:
+ delete_session_url = SESSION_URL + "('" + session_id + "')"
+ session_response = self.instance.invoke_request(delete_session_url, "DELETE")
+ status = session_response.status_code
+ if status == 204:
+ self.module.exit_json(msg=DELETE_SUCCESS_MSG, changed=True)
+ except HTTPError as err:
+ filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
+ self.module.exit_json(msg=FAILURE_MSG.format(operation="delete"),
+ error_info=filter_err,
+ failed=True)
+ else:
+ self.module.exit_json(msg=NO_CHANGES_FOUND_MSG)
+
+
+def main():
+ """
+    Main function that initializes the Ansible module with the argument spec and required_if
+    conditions.
+    It then creates an OMESession object with the module parameters and performs a session
+    operation based on the state parameter: if the state is "present", it calls create_session();
+    if the state is "absent", it calls delete_session().
+
+ Raises:
+ HTTPError: If an HTTP error occurs, the error message and filtered error information are
+ returned in the module's exit_json.
+ URLError: If a URL error occurs, the error message is returned in the module's exit_json.
+ SSLValidationError, ConnectionError, TypeError, ValueError, OSError: If any other error
+ occurs, the error message is returned in the module's exit_json.
+
+ Returns:
+ None
+ """
+ specs = get_argument_spec()
+ module = AnsibleModule(
+ argument_spec=specs,
+ required_if=[
+ ["state", "present", ("username", "password",)],
+ ["state", "absent", ("x_auth_token", "session_id",)]
+ ],
+ supports_check_mode=True
+ )
+ try:
+ ome = OMESession(module)
+ session_operation = module.params.get("state")
+ if session_operation == "present":
+ ome.create_session()
+ else:
+ ome.delete_session()
+ except HTTPError as err:
+        try:
+            filter_err = remove_key(json.load(err), regex_pattern=ODATA_REGEX)
+        except ValueError:
+            filter_err = {}
+ module.exit_json(msg=str(err), error_info=filter_err, failed=True)
+ except URLError as err:
+ module.exit_json(msg=str(err), unreachable=True)
+ except (SSLValidationError, ConnectionError, TypeError, ValueError, OSError) as err:
+ module.exit_json(msg=str(err), failed=True)
+
+
+def get_argument_spec():
+ """
+ Returns a dictionary representing the argument specification for a module.
+
+ The dictionary contains the following keys and their corresponding values:
+ - "hostname": A string representing the hostname.
+ - "username": A string representing the username. It has a fallback option to retrieve the
+ value from the environment variable 'IDRAC_USERNAME'.
+ - "password": A string representing the password. It is marked as not to be logged and has a
+ fallback option to retrieve the value from the environment variable 'IDRAC_PASSWORD'.
+ - "port": An integer representing the port number. The default value is 443.
+ - "validate_certs": A boolean representing whether to validate certificates. The default value
+ is True.
+ - "ca_path": A path representing the certificate authority path. The default value is None.
+ - "timeout": An integer representing the timeout value. The default value is 30.
+ - "state": A string representing the state. The default value is "present". The choices are
+ ["present", "absent"].
+ - "x_auth_token": A string representing the authentication token. It is marked as not to be
+ logged.
+ - "session_id": A string representing the session ID.
+
+ Returns:
+ A dictionary representing the argument specification.
+ """
+ return {
+ "hostname": {"type": "str"},
+ "username": {"type": "str", "fallback": (env_fallback, ['OME_USERNAME'])},
+ "password": {"type": "str", "no_log": True, "fallback": (env_fallback, ['OME_PASSWORD'])},
+ "port": {"type": "int", "default": 443},
+ "validate_certs": {"type": "bool", "default": True},
+ "ca_path": {"type": "path", "default": None},
+ "timeout": {"type": "int", "default": 30},
+ "state": {"type": 'str', "default": "present", "choices": ["present", "absent"]},
+ "x_auth_token": {"type": "str", "no_log": True, "aliases": ['auth_token']},
+ "session_id": {"type": "str"}
+ }
+
+
+if __name__ == '__main__':
+ main()
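The new ome_session module above wraps a plain REST handshake against /api/SessionService/Sessions: a POST with UserName and Password returns 201 with the session record in the body and an X-Auth-Token response header, and a DELETE on Sessions('<id>') with that token tears the session down. The sketch below illustrates the same flow outside the module, using ansible.module_utils.urls.open_url; the helper names are hypothetical and this is not code from the collection.

# Standalone sketch of the session handshake performed by ome_session (illustrative only).
import json

from ansible.module_utils.urls import open_url


def create_ome_session(hostname, username, password, port=443, validate_certs=True, ca_path=None, timeout=30):
    """Create an OME session and return (session_details, x_auth_token)."""
    url = "https://{0}:{1}/api/SessionService/Sessions".format(hostname, port)
    payload = json.dumps({"UserName": username, "Password": password})
    response = open_url(url, data=payload, method="POST",
                        headers={"Content-Type": "application/json"},
                        validate_certs=validate_certs, ca_path=ca_path, timeout=timeout)
    # A 201 response carries the session record in the body and the token in the headers.
    return json.loads(response.read()), response.headers.get("X-Auth-Token")


def delete_ome_session(hostname, session_id, x_auth_token, port=443, validate_certs=True, ca_path=None):
    """Delete an OME session using the token that was issued for it."""
    url = "https://{0}:{1}/api/SessionService/Sessions('{2}')".format(hostname, port, session_id)
    open_url(url, method="DELETE", headers={"X-Auth-Token": x_auth_token},
             validate_certs=validate_certs, ca_path=ca_path)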
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric.py
index 2e790fc08..96e947182 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -75,7 +75,7 @@ options:
to represent the entire fabric. Enable this feature only when connecting to such a solution."
choices: ['Enabled', 'Disabled']
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
notes:
@@ -179,8 +179,7 @@ error_info:
import json
import socket
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ssl import SSLError
@@ -711,8 +710,8 @@ def main():
"secondary_switch_service_tag": {"required": False, "type": "str"},
"override_LLDP_configuration": {"required": False, "type": "str", "choices": ['Enabled', 'Disabled']},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['state', 'present', ('new_name', 'description', 'fabric_design', 'primary_switch_service_tag',
'secondary_switch_service_tag', 'override_LLDP_configuration',), True]],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_info.py
index 9ce352d5e..4433afbdf 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.1.0
-# Copyright (C) 2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -570,8 +570,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict
@@ -655,8 +654,8 @@ def main():
"fabric_id": {"type": 'str', "required": False},
"fabric_name": {"type": 'str', "required": False}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[
('fabric_id', 'fabric_name')
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink.py
index 0ac1f2557..311a2596b 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -89,7 +89,7 @@ options:
description: Specify the name of the VLAN to be added as untagged to the uplink.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V(@jagadeeshnv)"
notes:
@@ -270,11 +270,10 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.common.dict_transformations import recursive_diff
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import get_item_and_list
FABRIC_URI = "NetworkService/Fabrics"
@@ -503,8 +502,8 @@ def main():
"tagged_networks": {"type": "list", "elements": "str"},
"untagged_network": {"type": "str"}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[['state', 'present',
('new_name', 'description', 'uplink_type', 'ufd_enable',
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink_info.py
index d6bb0683e..09daad9d3 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_smart_fabric_uplink_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.2.0
-# Copyright (C) 2022-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2022-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -186,10 +186,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import strip_substr_dict
ALL_UPLINKS_URI = "NetworkService/Fabrics('{0}')/Uplinks?$expand=Networks,Ports"
@@ -294,8 +293,8 @@ def main():
"uplink_id": {"type": "str"},
"uplink_name": {"type": "str"}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[('fabric_id', 'fabric_name'), ('uplink_id', 'uplink_name')],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template.py
index 6bf77ad02..29f851089 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.5.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -133,7 +133,7 @@ options:
- This option is applicable when I(job_wait) is C(true).
default: 1200
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V (@jagadeeshnv)"
- "Husniya Hameed (@husniya_hameed)"
@@ -546,8 +546,7 @@ error_info:
import json
import time
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import apply_diff_key, job_tracking
@@ -1016,8 +1015,8 @@ def main():
"job_wait": {"required": False, "type": "bool", "default": True},
"job_wait_timeout": {"required": False, "type": "int", "default": 1200}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_if=[
['command', 'create', ['attributes']],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_identity_pool.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_identity_pool.py
index 88a09ae95..2f21bda8d 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_identity_pool.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_identity_pool.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -34,7 +34,7 @@ options:
- This option is not applicable when detaching an identity pool from a template.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Felix Stephen (@felixs88)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -91,8 +91,7 @@ error_info:
'''
import json
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ssl import SSLError
@@ -157,8 +156,8 @@ def main():
"template_name": {"required": True, "type": "str"},
"identity_pool_name": {"required": False, "type": "str"},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
supports_check_mode=True
)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_info.py
index 9e91a5fb3..75ddc5ff5 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -34,7 +34,7 @@ options:
description: Filter records by the supported values.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Sajna Shetty(@Sajna-Shetty)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -105,8 +105,7 @@ template_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -131,8 +130,8 @@ def main():
"options": {"filter": {"type": 'str', "required": False}}
},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[['template_id', 'system_query_options']],
supports_check_mode=True
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan.py
index c9d0bd97d..b4dc386bc 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -91,7 +91,7 @@ options:
type: list
elements: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Jagadeesh N V(@jagadeeshnv)"
notes:
@@ -192,8 +192,7 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -421,8 +420,8 @@ def main():
"tagged_networks": {"required": False, "type": "list", "elements": "dict", "options": port_tagged_spec},
"propagate_vlan": {"type": "bool", "default": True}
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
required_one_of=[("template_id", "template_name"),
("untagged_networks", "tagged_networks")],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan_info.py
index b91a6a946..c2b4d2a32 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_template_network_vlan_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -133,10 +133,9 @@ error_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
NETWORK_HIERARCHY_VIEW = 4 # For Network hierarchy View in a Template
TEMPLATE_ATTRIBUTE_VIEW = "TemplateService/Templates({0})/Views({1})/AttributeViewDetails"
@@ -212,10 +211,9 @@ def main():
"template_id": {"type": 'int'},
"template_name": {"type": 'str'}
}
- argument_spec.update(ome_auth_params)
- module = AnsibleModule(argument_spec=argument_spec,
- mutually_exclusive=[('template_id', 'template_name')],
- supports_check_mode=True)
+ module = OmeAnsibleModule(argument_spec=argument_spec,
+ mutually_exclusive=[('template_id', 'template_name')],
+ supports_check_mode=True)
try:
templates = []
with RestOME(module.params, req_session=True) as rest_obj:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_user.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_user.py
index 27092a036..38f47dc25 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_user.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_user.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -53,7 +53,7 @@ options:
- >-
Refer to the OpenManage Enterprise API Reference Guide for more details.
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Sajna Shetty(@Sajna-Shetty)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -153,8 +153,7 @@ user_status:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -238,8 +237,8 @@ def main():
"name": {"required": False, "type": 'str'},
"attributes": {"required": False, "type": 'dict', "default": {}},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[['user_id', 'name'], ],
required_if=[['state', 'present', ['attributes']], ],
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/ome_user_info.py b/ansible_collections/dellemc/openmanage/plugins/modules/ome_user_info.py
index 488444694..7c9dd6fae 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/ome_user_info.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/ome_user_info.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2019-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -34,7 +34,7 @@ options:
description: Filter records for the supported values.
type: str
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author: "Jagadeesh N V (@jagadeeshnv)"
notes:
- Run this module from a system that has direct access to Dell OpenManage Enterprise.
@@ -98,8 +98,7 @@ user_info:
import json
from ssl import SSLError
-from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, ome_auth_params
+from ansible_collections.dellemc.openmanage.plugins.module_utils.ome import RestOME, OmeAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -124,8 +123,8 @@ def main():
"filter": {"type": 'str', "required": False},
}},
}
- specs.update(ome_auth_params)
- module = AnsibleModule(
+
+ module = OmeAnsibleModule(
argument_spec=specs,
mutually_exclusive=[
('account_id', 'system_query_options')
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_event_subscription.py b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_event_subscription.py
index c974aaccc..0eec0b55b 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_event_subscription.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_event_subscription.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2021-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2021-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+
# see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt
@@ -55,7 +55,7 @@ options:
default: present
choices: ["present", "absent"]
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Trevor Squillario (@TrevorSquillario)"
- "Sachin Apagundi (@sachin-apa)"
@@ -197,8 +197,7 @@ error_info:
import json
import os
from ssl import SSLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, RedfishAnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -291,9 +290,8 @@ def main():
"choices": ['Event', 'MetricReport']},
"state": {"type": "str", "default": "present", "choices": ['present', 'absent']},
}
- specs.update(redfish_auth_params)
- module = AnsibleModule(
+ module = RedfishAnsibleModule(
argument_spec=specs,
supports_check_mode=True)
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware.py b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware.py
index 98f64f780..a135198fd 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2019-2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -50,7 +50,7 @@ options:
I(job_wait_time) minimum or make I(job_wait) as false and retrigger."
default: 3600
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
- "urllib3"
author:
- "Felix Stephen (@felixs88)"
@@ -144,8 +144,8 @@ import json
import os
import time
from ssl import SSLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
-from ansible.module_utils.basic import AnsibleModule, missing_required_lib
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, RedfishAnsibleModule
+from ansible.module_utils.basic import missing_required_lib
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -271,8 +271,8 @@ def main():
"job_wait": {"required": False, "type": 'bool', "default": True},
"job_wait_timeout": {"required": False, "type": "int", "default": 3600}
}
- specs.update(redfish_auth_params)
- module = AnsibleModule(
+
+ module = RedfishAnsibleModule(
argument_spec=specs,
supports_check_mode=False)
if not HAS_LIB:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware_rollback.py b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware_rollback.py
index ef93d669f..1779b89b0 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware_rollback.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_firmware_rollback.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.2.0
-# Copyright (C) 2023 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2023-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -144,11 +144,10 @@ import json
import re
import time
from ssl import SSLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params, \
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, RedfishAnsibleModule, \
SESSION_RESOURCE_COLLECTION
from ansible_collections.dellemc.openmanage.plugins.module_utils.utils import wait_for_redfish_reboot_job, \
wait_for_redfish_job_complete, strip_substr_dict, MANAGER_JOB_ID_URI, RESET_UNTRACK, MANAGERS_URI, RESET_SUCCESS
-from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
@@ -324,8 +323,8 @@ def main():
"reboot": {"type": "bool", "default": True},
"reboot_timeout": {"type": "int", "default": 900},
}
- specs.update(redfish_auth_params)
- module = AnsibleModule(argument_spec=specs, supports_check_mode=True)
+
+ module = RedfishAnsibleModule(argument_spec=specs, supports_check_mode=True)
if module.params["reboot_timeout"] <= 0:
module.fail_json(msg=NEGATIVE_TIMEOUT_MESSAGE)
try:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_powerstate.py b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_powerstate.py
index 085bbc018..a361f7b34 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_powerstate.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_powerstate.py
@@ -3,8 +3,8 @@
#
# Dell OpenManage Ansible Modules
-# Version 7.0.0
-# Copyright (C) 2020-2022 Dell Inc. or its subsidiaries. All Rights Reserved.
+# Version 9.3.0
+# Copyright (C) 2020-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
@@ -51,7 +51,7 @@ options:
choices: ["ForceOff", "ForceOn", "ForceRestart", "GracefulRestart", "GracefulShutdown",
"Nmi", "On", "PowerCycle", "PushPowerButton"]
requirements:
- - "python >= 3.8.6"
+ - "python >= 3.9.6"
author:
- "Sajna Shetty(@Sajna-Shetty)"
notes:
@@ -118,8 +118,7 @@ error_info:
import json
import re
from ssl import SSLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, RedfishAnsibleModule
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError
@@ -241,9 +240,7 @@ def main():
"choices": ['ForceOff', 'ForceOn', 'ForceRestart', 'GracefulRestart',
'GracefulShutdown', 'Nmi', 'On', 'PowerCycle', 'PushPowerButton']},
}
- specs.update(redfish_auth_params)
-
- module = AnsibleModule(
+ module = RedfishAnsibleModule(
argument_spec=specs,
supports_check_mode=True)
try:
diff --git a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py
index 3de200a8f..25360bece 100644
--- a/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/plugins/modules/redfish_storage_volume.py
@@ -3,7 +3,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.1.0
+# Version 9.3.0
# Copyright (C) 2019-2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -374,8 +374,7 @@ error_info:
import json
import copy
from ssl import SSLError
-from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, redfish_auth_params
-from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.dellemc.openmanage.plugins.module_utils.redfish import Redfish, RedfishAnsibleModule
from ansible.module_utils.compat.version import LooseVersion
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
@@ -945,10 +944,7 @@ def main():
"job_wait": {"required": False, "type": "bool", "default": False},
"job_wait_timeout": {"required": False, "type": "int", "default": 1200}
}
-
- specs.update(redfish_auth_params)
-
- module = AnsibleModule(
+ module = RedfishAnsibleModule(
argument_spec=specs,
mutually_exclusive=[['state', 'command'], ['volume_type', 'raid_type']],
required_one_of=[['state', 'command']],
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_attributes/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_attributes/molecule/default/converge.yml
index f1ae0f548..d7d393ec1 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_attributes/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_attributes/molecule/default/converge.yml
@@ -13,16 +13,14 @@
validate_certs: false
idrac_attributes:
SSH.1.Timeout: 1800
- ignore_errors: true
ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with wrong hostname
ansible.builtin.assert:
- that:
- - "'Incorrect username or password,
- unreachable iDRAC IP' in '{{ idrac_attributes_out.msg }}' or
- 'Name or service not known' in '{{ idrac_attributes_out.msg }}'"
+ that: >-
+ '"<urlopen error [Errno -2] Name or service not known>" in
+ idrac_attributes_out.msg'
- name: Perform update with wrong username
ansible.builtin.import_role:
@@ -35,7 +33,6 @@
idrac_attributes:
SSH.1.Timeout: 1800
ignore_errors: true
- ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with wrong username
@@ -54,15 +51,12 @@
idrac_attributes:
SSH.1.Timeout: 1800
ignore_errors: true
- ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with wrong password
ansible.builtin.assert:
- that: |-
- ('"HTTP Error 401" in idrac_attributes_out.msg')
- or
- ('"urlopen error timed out" in idrac_attributes_out.msg')
+ that:
+ - '"HTTP Error 401" in idrac_attributes_out.msg'
- name: Perform update with invalid https_port
ansible.builtin.import_role:
@@ -75,16 +69,14 @@
https_port: 9999999
idrac_attributes:
SSH.1.Timeout: 1800
- ignore_errors: true
ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with invalid https_port
ansible.builtin.assert:
- that:
- - ('"Connection refused" in idrac_attributes_out.msg')
- or
- ('"urlopen error timed out" in idrac_attributes_out.msg')
+ that: >-
+ '"<urlopen error [Errno 111] Connection refused>" in
+ idrac_attributes_out.msg'
- name: Perform update with invalid validate_certs
ansible.builtin.import_role:
@@ -118,14 +110,15 @@
validate_certs: true
idrac_attributes:
SSH.1.Timeout: 1800
- ignore_errors: true
ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with wrong ca_path
ansible.builtin.assert:
- that:
- - '"certificate verify failed" in idrac_attributes_out.msg'
+ that: >-
+ '"<urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify
+ failed: self signed certificate (_ssl.c:1129)>" in
+ idrac_attributes_out.msg'
- name: Perform update with wrong manager_id
ansible.builtin.import_role:
@@ -140,13 +133,12 @@
SSH.1.Timeout: 1800
manager_id: idrac.random.5
ignore_errors: true
- ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with wrong manager_id
ansible.builtin.assert:
that:
- - '"HTTP Error 404" in idrac_attributes_out.msg'
+ - '"HTTP Error 404: Not Found" in idrac_attributes_out.msg'
- name: Perform update with read only attributes
ansible.builtin.import_role:
@@ -161,7 +153,6 @@
SSH.1.MaxSessions: 5
manager_id: "{{ null | default(omit) }}"
ignore_errors: true
- ignore_unreachable: true
register: idrac_attributes_error_msg
- name: Asserting after performing operation with read only attributes
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml b/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml
index a91b20b69..bce98ac96 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_bios/molecule/clear_pending_attributes/prepare.yml
@@ -13,6 +13,10 @@
url: "https://{{ idrac_ip }}:{{ idrac_port }}/redfish/v1/Managers\
/iDRAC.Embedded.1/Jobs?$expand=*($levels=1)"
+ - name: Initialize idrac_bios_jobs_items variable
+ ansible.builtin.set_fact:
+ idrac_bios_jobs_items: []
+
- name: Fetch Bios Jobs Data
loop: "{{ idrac_bios_uri_data.json.Members }}"
when: item.JobType == 'BIOSConfiguration' and item.JobState in ['Scheduled', 'Scheduling']
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml
index a1e11acdd..dbf9d273b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_continuous_reset_type_none/converge.yml
@@ -109,5 +109,7 @@
- name: Asserting TC-115433 in normal mode
ansible.builtin.assert:
- that: idrac_boot_out.msg == "The boot settings job is triggered successfully."
+ that: >-
+ "The boot settings operation is triggered/submitted successfully." in
+ idrac_boot_out.msg
when: not ansible_check_mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml
index 8c8d2d443..3cd71f208 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_disabled_reset_type_none/converge.yml
@@ -92,7 +92,9 @@
- name: Asserting TC-115431 in normal mode
ansible.builtin.assert:
- that: idrac_boot_out.msg == "The boot settings job is triggered successfully."
+ that: >-
+ "The boot settings operation is triggered/submitted successfully." in
+ idrac_boot_out.msg
when: not ansible_check_mode and idrac_boot_out.changed
- name: Asserting TC-115431 in idempotence mode
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml
index 8fc9af6dc..d9aeee409 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_enabled_as_once_reset_type_none/converge.yml
@@ -91,6 +91,8 @@
- name: Asserting TC-115432 in normal mode or idempotence mode
ansible.builtin.assert:
- that:
- - 'idrac_boot_out.msg == "The boot settings job is triggered successfully." or "No changes found to be applied."'
+ that: >-
+ 'idrac_boot_out.msg == "The boot settings operation is
+ triggered/submitted successfully." or "No changes found to be
+ applied."'
when: not ansible_check_mode and not idrac_boot_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml
index 85b84477f..142a0bebb 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_boot/molecule/boot_source_override_mode_legacy_job_wait_false/converge.yml
@@ -105,6 +105,8 @@
- name: Asserting TC-115429 in normal mode or idempotence mode
ansible.builtin.assert:
- that:
- - 'idrac_boot_out.msg == "The boot settings job is triggered successfully." or "No changes found to be applied."'
+ that: >-
+ 'idrac_boot_out.msg == "The boot settings operation is
+ triggered/submitted successfully." or "No changes found to
+ be applied."'
when: not ansible_check_mode and not idrac_boot_out.changed
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml
index 021cec95f..3b394ff6b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/converge.yml
@@ -165,9 +165,9 @@
- name: Verify task status - Negative - invalid unreachable host
ansible.builtin.assert:
- that:
- - idrac_certificate_out.unreachable
- - '"Unable to communicate with" in idrac_certificate_out.msg'
+ that: >-
+ '"<urlopen error [Errno -2] Name or service not known>" in
+ idrac_certificate_out.msg' and idrac_certificate_out.unreachable
- name: Negative - invalid idrac user
ansible.builtin.import_role:
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/molecule.yml b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/molecule.yml
index e69de29bb..c6b1e43af 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_certificate/molecule/default/molecule.yml
@@ -0,0 +1,12 @@
+---
+scenario:
+ test_sequence:
+ - dependency
+ - cleanup
+ - destroy
+ - syntax
+ - create
+ - prepare
+ - check
+ - converge
+ - destroy
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml
index 9aab189c2..928369a3b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/backplane/converge.yml
@@ -40,12 +40,6 @@
register: backplane_result_url
no_log: true
- - name: Validate whether PCIeSSDBackPlane information exists
- ansible.builtin.assert:
- that:
- - "{{ backplane_result_url.json.Members | length > 0 }}"
- fail_msg: "PCIeSSDBackPlane information does not exist"
-
- name: Set PCIeSSDBackPlane facts
ansible.builtin.set_fact:
api_response: "{{ backplane_result_url.json.Members | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type']) }}"
@@ -54,9 +48,18 @@
ansible.builtin.include_role:
name: "idrac_gather_facts"
+ - name: Assert backplane data
+ ansible.builtin.assert:
+ that:
+ - "((api_response | length == 0) and (backplane | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: api_response | length == 0
+
- name: Check whether output differs
ansible.builtin.set_fact:
result_diff: "{{ backplane | symmetric_difference(api_response) }}"
+ when: api_response | length > 0
- name: Assert the differences in List
ansible.builtin.assert:
@@ -64,3 +67,4 @@
success_msg: "The response from the role matches"
that:
- "{{ (result_diff | length) == 0 }}"
+ when: api_response | length > 0
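The reworked idrac_gather_facts molecule scenarios above and below drop the per-item key-by-key assertions in favour of fetching the component list directly from the Redfish API, stripping the @odata bookkeeping keys with ansible.utils.remove_keys, and asserting that the role output and the API response have an empty symmetric difference, with separate length-zero guards when the API returns no members. Roughly, the symmetric_difference filter yields the items that appear in exactly one of the two lists; a small Python sketch of the check these tasks express, with illustrative names only:

    # Illustrative sketch of the list comparison done via the
    # ansible.utils.symmetric_difference filter in the tasks above.
    def symmetric_difference(a, b):
        """Items present in exactly one of the two lists (no dedup here)."""
        return [x for x in a if x not in b] + [x for x in b if x not in a]

    def responses_match(role_output, api_response):
        # An empty symmetric difference means the role reported the same set
        # of component dicts as the direct Redfish API call.
        return len(symmetric_difference(role_output, api_response)) == 0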
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml
index c4d86a62b..0194b2019 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/passensor/converge.yml
@@ -27,13 +27,52 @@
api_system: "/redfish/v1/Systems/System.Embedded.1"
tasks:
+ - name: Fetching Presence and Status Sensor info
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}{{ api_system }}/Oem/
+ Dell/DellPresenceAndStatusSensors"
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ method: "{{ idrac_gather_facts_uri_method }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ headers: "{{ idrac_gather_facts_uri_headers }}"
+ body_format: "{{ idrac_gather_facts_uri_body_format }}"
+ status_code: "{{ idrac_gather_facts_uri_status_code }}"
+ return_content: "{{ idrac_gather_facts_uri_return_content }}"
+ no_log: true
+ register: passensor_result
+
+ - name: Set presence and status sensor facts
+ ansible.builtin.set_fact:
+ api_response: "{{ passensor_result.json.Members |
+ ansible.utils.remove_keys(target=['@odata.context',
+ '@odata.id', '@odata.type', 'Assembly', 'Links']) }}"
+ when: passensor_result.json.Members is defined
+
- name: Gather Facts for the Presence and Status Sensor
ansible.builtin.include_role:
name: "idrac_gather_facts"
- - name: Call assertion
- ansible.builtin.include_tasks: ../../tests/asserts/passensor_assert.yml
- with_items: "{{ presence_and_status_sensor }}"
- loop_control:
- loop_var: passensor_data
- when: presence_and_status_sensor | length > 0
+ - name: Assert Presence and Status Sensor data
+ ansible.builtin.assert:
+ that:
+ - "((api_response | length == 0) and
+ (presence_and_status_sensor | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: api_response | length == 0
+
+ - name: Check whether output differs
+ ansible.builtin.set_fact:
+ result_diff: "{{ presence_and_status_sensor |
+ symmetric_difference(api_response) }}"
+ when: api_response | length > 0
+
+ - name: Assert the differences in List
+ ansible.builtin.assert:
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ that:
+ - "{{ (result_diff | length) == 0 }}"
+ when: api_response | length > 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml
index ef2414535..430237de0 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/physicaldisk/converge.yml
@@ -43,7 +43,10 @@
- name: Get all storage controller ids.
ansible.builtin.set_fact:
- storage_ids_list: "{{ idrac_gather_facts_controllers_result.json.Members | map('dict2items') | flatten | map(attribute='value') }}"
+ storage_ids_list:
+ "{{ idrac_gather_facts_controllers_result.json.Members |
+ map('dict2items') | flatten | map(attribute='value') }}"
+ when: idrac_gather_facts_controllers_result.json.Members is defined
- name: Get physical disk information using API.
ansible.builtin.uri:
@@ -52,31 +55,42 @@
loop: "{{ storage_ids_list }}"
no_log: true
register: idrac_gather_facts_disk_result
+ when: storage_ids_list is defined
- name: Count the number of drives in drives_result.results
ansible.builtin.set_fact:
- drives_count: "{{ idrac_gather_facts_disk_result.results | map(attribute='json.drives') | list | length }}"
-
- - name: Validate whether physical disk information exists
- ansible.builtin.assert:
- that:
- - "{{ drives_count | int > 0 }}"
- fail_msg: "Physical Disk information does not exist"
+ drives_count: "{{ idrac_gather_facts_disk_result.results |
+ map(attribute='json.drives') | list | length }}"
+ when: idrac_gather_facts_disk_result.results is defined
- name: Set Physical Disk facts
ansible.builtin.set_fact:
api_response:
- "{{ disk_result.results | selectattr('json', 'defined') | map(attribute='json') | selectattr('Drives', 'defined') |
- map(attribute='Drives') | flatten | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type',
- 'Actions', 'Assembly', 'Links', 'DellDriveSMARTAttributes', 'DellNVMeSMARTAttributes', 'Operations@odata.count']) }}"
+ "{{ idrac_gather_facts_disk_result.results |
+ selectattr('json', 'defined') | map(attribute='json') |
+ selectattr('Drives', 'defined') | map(attribute='Drives') |
+ flatten | ansible.utils.remove_keys(target=['@odata.context',
+ '@odata.id', '@odata.type', 'Actions', 'Assembly', 'Links',
+ 'DellDriveSMARTAttributes', 'DellNVMeSMARTAttributes',
+ 'Operations@odata.count']) }}"
+ when: idrac_gather_facts_disk_result.results is defined
- name: Gather Facts for the Physical Disk component
ansible.builtin.include_role:
name: "idrac_gather_facts"
+ - name: Assert Physical Disk data
+ ansible.builtin.assert:
+ that:
+ - "((drives_count | length == 0) and (physical_disk | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: drives_count | length == 0
+
- name: Check whether output differs
ansible.builtin.set_fact:
result_diff: "{{ physical_disk | symmetric_difference(api_response) }}"
+ when: drives_count | length > 0
- name: Assert the differences in List
ansible.builtin.assert:
@@ -84,3 +98,4 @@
success_msg: "The response from the role matches"
that:
- "{{ (result_diff | length) == 0 }}"
+ when: drives_count | length > 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml
index 0f648b875..b0e9a2f0b 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/powersupply/converge.yml
@@ -24,20 +24,54 @@
idrac_gather_facts_uri_return_content: true
diff_data: {}
exclude_keys: []
+ api_chassis: "/redfish/v1/Chassis/System.Embedded.1"
+ power_supply_api: "/PowerSubsystem/PowerSupplies?$expand=*($levels=1)"
tasks:
+ - name: Fetching Power Supply info
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}{{ api_chassis }}{{ power_supply_api }}"
+ validate_certs: "{{ validate_certs }}"
+ ca_path: "{{ ca_path | default(omit) }}"
+ method: "{{ idrac_gather_facts_uri_method }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ headers: "{{ idrac_gather_facts_uri_headers }}"
+ body_format: "{{ idrac_gather_facts_uri_body_format }}"
+ status_code: "{{ idrac_gather_facts_uri_status_code }}"
+ return_content: "{{ idrac_gather_facts_uri_return_content }}"
+ register: powersupply_result
+ no_log: true
+
+ - name: Set powersupply facts
+ ansible.builtin.set_fact:
+ api_response:
+ "{{ powersupply_result.json.Members |
+ ansible.utils.remove_keys(target=['@odata.context', '@odata.id',
+ '@odata.type', 'ActiveInputVoltage@Redfish.Deprecated',
+ 'OperationalStatus@odata.count', 'RedTypeOfSet@odata.count']) }}"
+
- name: Gather Facts for the Power Supply component
ansible.builtin.include_role:
name: "idrac_gather_facts"
- - name: Assert power supply dict for length
+ - name: Assert power supply data
ansible.builtin.assert:
that:
- - "{{ power_supply | length > 0 }}"
+ - "((api_response | length == 0) and (power_supply | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: api_response | length == 0
+
+ - name: Check whether output differs
+ ansible.builtin.set_fact:
+ result_diff: "{{ power_supply | symmetric_difference(api_response) }}"
+ when: api_response | length > 0
- - name: Call assertion
- ansible.builtin.include_tasks: ../../tests/asserts/powersupply_assert.yml
- with_items: "{{ power_supply }}"
- loop_control:
- loop_var: powersupply_data
- when: power_supply | length > 0
+ - name: Assert the differences in List
+ ansible.builtin.assert:
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ that:
+ - "{{ (result_diff | length) == 0 }}"
+ when: api_response | length > 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml
index 418aa2b78..ea49daa8d 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/molecule/systemmetrics/converge.yml
@@ -30,49 +30,11 @@
ansible.builtin.include_role:
name: "idrac_gather_facts"
- - name: Assert power metrics dict for length
- ansible.builtin.assert:
- that:
- - "{{ power_metrics | length > 0 }}"
-
- - name: Assert thermal metrics dict for length
- ansible.builtin.assert:
- that:
- - "{{ thermal_metrics | length > 0 }}"
-
- - name: Assert memory metrics dict for length
- ansible.builtin.assert:
- that:
- - "{{ memory_metrics | length > 0 }}"
-
- - name: Get Thermal Metrics information.
- ansible.builtin.uri:
- url: "https://{{ hostname }}{{ api_chassis }}/ThermalSubsystem/ThermalMetrics"
- validate_certs: "{{ validate_certs }}"
- method: "{{ idrac_gather_facts_uri_method }}"
- user: "{{ username }}"
- password: "{{ password }}"
- headers: "{{ idrac_gather_facts_uri_headers }}"
- body_format: "{{ idrac_gather_facts_uri_body_format }}"
- status_code: "{{ idrac_gather_facts_uri_status_code }}"
- return_content: "{{ idrac_gather_facts_uri_return_content }}"
- register: response_thermal_metrics
- no_log: true
-
- - name: Set Thermal Metrics facts
- ansible.builtin.set_fact:
- api_thermal_metrics: "{{ response_thermal_metrics.json |
- ansible.utils.remove_keys(target=['@odata.context', '@odata.type', '@odata.id', 'DataSourceUri', 'TemperatureReadingsCelsius@odata.count']) }}"
-
- name: Call assertion for thermal metrics
ansible.builtin.include_tasks: ../../tests/asserts/tmetrics_assert.yml
- name: Call assertion for memory metrics
ansible.builtin.include_tasks: ../../tests/asserts/mmetrics_assert.yml
- with_items: "{{ memory_metrics }}"
- loop_control:
- loop_var: memory_data
- when: memory_metrics | length > 0
- name: Get Power Supply information.
ansible.builtin.uri:
@@ -92,10 +54,18 @@
ansible.builtin.set_fact:
psu_ids: "{{ power_result.json.Members | map(attribute='Id') | list }}"
+ - name: Assert Power Supply Metrics metrics
+ ansible.builtin.assert:
+ that:
+ - "((psu_ids | length == 0) and (power_metrics | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: psu_ids| length == 0
+
- name: Call assertion for Power metrics
ansible.builtin.include_tasks: ../../tests/asserts/psmetrics_assert.yml
with_items: "{{ power_metrics }}"
loop_control:
loop_var: power_data
index_var: index
- when: power_metrics | length > 0
+ when: psu_ids | length > 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml
index 1cc040b73..6964f8d12 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/mmetrics_assert.yml
@@ -1,6 +1,6 @@
- name: Get Memory information.
- ansible.builtin.uri:
- url: "https://{{ hostname }}{{ api_system }}/Memory/{{ memory_data.Id }}/MemoryMetrics"
+ ansible.builtin.uri: &api_params
+ url: "https://{{ hostname }}{{ api_system }}/Memory?$expand=*($levels=1)"
validate_certs: "{{ validate_certs }}"
method: "{{ idrac_gather_facts_uri_method }}"
user: "{{ username }}"
@@ -9,30 +9,50 @@
body_format: "{{ idrac_gather_facts_uri_body_format }}"
status_code: "{{ idrac_gather_facts_uri_status_code }}"
return_content: "{{ idrac_gather_facts_uri_return_content }}"
+ register: response_memory
+ no_log: true
+
+- name: Get Memory Metrics ids
+ ansible.builtin.set_fact:
+ memory_metrics_ids: "{{ response_memory.json.Members |
+ selectattr('Metrics', 'defined') | map(attribute='Metrics') | flatten }}"
+ when: response_memory.json.Members is defined
+
+- name: Get Memory Metrics information.
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}{{ item['@odata.id'] }}"
+ <<: *api_params
+ loop: "{{ memory_metrics_ids }}"
register: response_memory_metrics
no_log: true
- name: Set Memory Metrics facts
ansible.builtin.set_fact:
- api_memory_metrics: "{{ response_memory_metrics.json | ansible.utils.remove_keys(target=['@odata.context', '@odata.type', '@odata.id', 'DataSourceUri']) }}"
+ api_memory_metrics: "{{ response_memory_metrics.results |
+ selectattr('json', 'defined') | map(attribute='json') |
+ flatten | ansible.utils.remove_keys(target=['@odata.context',
+ '@odata.type', '@odata.id', 'DataSourceUri']) }}"
+ when: response_memory_metrics.results is defined
-- name: Set the keys diff
- ansible.builtin.set_fact:
- diff_keys: "{{ memory_data.keys() | list | symmetric_difference((api_memory_metrics.keys() | list)) }}"
+- name: Assert memory metrics
+ ansible.builtin.assert:
+ that:
+ - "((api_memory_metrics | length == 0) and
+ (memory_metrics | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: api_memory_metrics| length == 0
-- name: Set a diff of dict
+- name: Check whether output differs
ansible.builtin.set_fact:
- diff_data: "{{ diff_data | combine({item: memory_data[item]}) }}"
- loop: "{{ memory_data.keys() }}"
- when:
- - diff_keys | length == 0
- - memory_data[item] != api_memory_metrics[item]
- - item not in exclude_keys
+ result_diff: "{{ memory_metrics |
+ symmetric_difference(api_memory_metrics) }}"
+ when: api_memory_metrics| length > 0
-- name: Assert the difference in Keys
+- name: Assert the differences in List
ansible.builtin.assert:
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
that:
- - "{{ (diff_keys | length) == 0 }}"
- - "{{ (diff_data | length) == 0 }}"
- fail_msg: "The response from the role does not match | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
- success_msg: "The response from the role matches | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
+ - "{{ (result_diff | length) == 0 }}"
+ when: api_memory_metrics| length > 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/passensor_assert.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/passensor_assert.yml
deleted file mode 100644
index 6931c9204..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/passensor_assert.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-- name: Fetching Presence and Status Sensor info
- ansible.builtin.uri:
- url: "https://{{ hostname }}{{ api_system }}/Oem/Dell/DellPresenceAndStatusSensors/{{ passensor_data.Id }}"
- validate_certs: "{{ validate_certs }}"
- ca_path: "{{ ca_path | default(omit) }}"
- method: "{{ idrac_gather_facts_uri_method }}"
- user: "{{ username }}"
- password: "{{ password }}"
- headers: "{{ idrac_gather_facts_uri_headers }}"
- body_format: "{{ idrac_gather_facts_uri_body_format }}"
- status_code: "{{ idrac_gather_facts_uri_status_code }}"
- return_content: "{{ idrac_gather_facts_uri_return_content }}"
- no_log: true
- register: passensor_result
-
-- name: Set presence and status sensor facts
- ansible.builtin.set_fact:
- api_response: "{{ passensor_result.json | ansible.utils.remove_keys(target=['@odata.context', '@odata.id', '@odata.type', 'Assembly', 'Links']) }}"
-
-- name: Set the keys diff
- ansible.builtin.set_fact:
- diff_keys: "{{ passensor_data.keys() | list | symmetric_difference((api_response.keys() | list)) }}"
-
-- name: Set a diff of dict
- ansible.builtin.set_fact:
- diff_data: "{{ diff_data | combine({item: passensor_data[item]}) }}"
- loop: "{{ passensor_data.keys() }}"
- when:
- - diff_keys | length == 0
- - passensor_data[item] != api_response[item]
- - item not in exclude_keys
-
-- name: Assert the difference in Keys
- ansible.builtin.assert:
- that:
- - "{{ (diff_keys | length) == 0 }}"
- - "{{ (diff_data | length) == 0 }}"
- fail_msg: "The response from the role does not match | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
- success_msg: "The response from the role matches | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/powersupply_assert.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/powersupply_assert.yml
deleted file mode 100644
index ddb77ce04..000000000
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/powersupply_assert.yml
+++ /dev/null
@@ -1,42 +0,0 @@
-- name: Fetching Power Supply info
- ansible.builtin.uri:
- url: "https://{{ hostname }}{{ api_chassis }}/PowerSubsystem/PowerSupplies/{{ powersupply_data.Id }}"
- validate_certs: "{{ validate_certs }}"
- ca_path: "{{ ca_path | default(omit) }}"
- method: "{{ idrac_gather_facts_uri_method }}"
- user: "{{ username }}"
- password: "{{ password }}"
- headers: "{{ idrac_gather_facts_uri_headers }}"
- body_format: "{{ idrac_gather_facts_uri_body_format }}"
- status_code: "{{ idrac_gather_facts_uri_status_code }}"
- return_content: "{{ idrac_gather_facts_uri_return_content }}"
- register: powersupply_result
- no_log: true
-
-- name: Set powersupply facts
- ansible.builtin.set_fact:
- api_response:
- "{{ powersupply_result.json | ansible.utils.remove_keys(target=['@odata.context',
- '@odata.id', '@odata.type', 'ActiveInputVoltage@Redfish.Deprecated', 'OperationalStatus@odata.count',
- 'RedTypeOfSet@odata.count']) }}"
-
-- name: Set the keys diff
- ansible.builtin.set_fact:
- diff_keys: "{{ powersupply_data.keys() | list | symmetric_difference((api_response.keys() | list)) }}"
-
-- name: Set a diff of dict
- ansible.builtin.set_fact:
- diff_data: "{{ diff_data | combine({item: powersupply_data[item]}) }}"
- loop: "{{ powersupply_data.keys() }}"
- when:
- - diff_keys | length == 0
- - powersupply_data[item] != api_response[item]
- - item not in exclude_keys
-
-- name: Assert the difference in Keys
- ansible.builtin.assert:
- that:
- - "{{ (diff_keys | length) == 0 }}"
- - "{{ (diff_data | length) == 0 }}"
- fail_msg: "The response from the role does not match | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
- success_msg: "The response from the role matches | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml
index 25a8229cf..93f1e3960 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_gather_facts/tests/asserts/tmetrics_assert.yml
@@ -1,20 +1,46 @@
-- name: Set the keys diff
+---
+- name: Get Thermal Metrics information.
+ ansible.builtin.uri:
+ url:
+ "https://{{ hostname }}{{ api_chassis }}/ThermalSubsystem/ThermalMetrics"
+ validate_certs: "{{ validate_certs }}"
+ method: "{{ idrac_gather_facts_uri_method }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ headers: "{{ idrac_gather_facts_uri_headers }}"
+ body_format: "{{ idrac_gather_facts_uri_body_format }}"
+ status_code: "{{ idrac_gather_facts_uri_status_code }}"
+ return_content: "{{ idrac_gather_facts_uri_return_content }}"
+ register: response_thermal_metrics
+ no_log: true
+
+- name: Set Thermal Metrics facts
ansible.builtin.set_fact:
- diff_keys: "{{ thermal_metrics.keys() | list | symmetric_difference((api_thermal_metrics.keys() | list)) }}"
+ api_thermal_metrics: "{{ response_thermal_metrics.json |
+ ansible.utils.remove_keys(target=['@odata.context', '@odata.type',
+ '@odata.id', 'DataSourceUri',
+ 'TemperatureReadingsCelsius@odata.count']) }}"
+ when: response_thermal_metrics is defined
+
+- name: Assert thermal metrics
+ ansible.builtin.assert:
+ that:
+ - "((api_thermal_metrics | length == 0) and
+ (thermal_metrics | length == 0))"
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
+ when: api_thermal_metrics| length == 0
-- name: Set a diff of dict
+- name: Check whether output differs
ansible.builtin.set_fact:
- diff_data: "{{ diff_data | combine({item: thermal_metrics[item]}) }}"
- loop: "{{ thermal_metrics.keys() }}"
- when:
- - diff_keys | length == 0
- - thermal_metrics[item] != api_thermal_metrics[item]
- - item not in exclude_keys
+ result_diff: "{{ thermal_metrics |
+ symmetric_difference(api_thermal_metrics) }}"
+ when: api_thermal_metrics | length > 0
-- name: Assert the difference in Keys
+- name: Assert the differences in List
ansible.builtin.assert:
+ fail_msg: "The response from the role does not match"
+ success_msg: "The response from the role matches"
that:
- - "{{ (diff_keys | length) == 0 }}"
- - "{{ (diff_data | length) == 0 }}"
- fail_msg: "The response from the role does not match | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
- success_msg: "The response from the role matches | Diff Keys : {{ diff_keys }} Diff Data : {{ diff_data }}"
+ - "{{ (result_diff | length) == 0 }}"
+ when: api_thermal_metrics | length > 0
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml
index 2c16c6319..24909882f 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/default/converge.yml
@@ -8,7 +8,7 @@
idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
scp_components:
- IDRAC
share_name: "/root/"
@@ -26,7 +26,7 @@
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
job_id: JID_12345678
rescue:
- name: "Verifying invalid job deletion from the job queue"
@@ -44,7 +44,7 @@
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
clear_job_queue: true
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
rescue:
- name: "Verifying job queue clear with an invalid hostname"
@@ -63,7 +63,7 @@
username: "invalidUsername"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
clear_job_queue: true
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
rescue:
- name: "Verifying job queue clear with an invalid username"
@@ -81,7 +81,7 @@
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "invalidPassword"
clear_job_queue: true
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
rescue:
- name: "Verifying job queue clear with an invalid password"
diff --git a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml
index 809dda61f..4c99d6b45 100644
--- a/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/idrac_job_queue/molecule/delete_job/converge.yml
@@ -9,7 +9,7 @@
hostname: "{{ lookup('env', 'IDRAC_IP') }}"
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
no_log: true
- name: Creating job which configures iDRAC attributes
@@ -39,7 +39,7 @@
idrac_ip: "{{ lookup('env', 'IDRAC_IP') }}"
idrac_user: "{{ lookup('env', 'IDRAC_USER') }}"
idrac_password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
scp_components:
- IDRAC
share_name: "/root/"
@@ -53,7 +53,7 @@
- name: Get Job ID.
ansible.builtin.uri:
url: "https://{{ lookup('env', 'IDRAC_IP') }}/redfish/v1/Managers/iDRAC.Embedded.1/Jobs"
- validate_certs: "{{ lookup('env', 'VALIDATE_CERT') }}"
+ validate_certs: false
method: "GET"
user: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
diff --git a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml
index 221fe3bbd..11e682d7b 100644
--- a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml
+++ b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/converge.yml
@@ -14,9 +14,9 @@
validate_certs: false
state: present
volume_type: "NonRedundant"
- name: "VD" # noqa: var-naming[no-reserved]
- controller_id: "{{ lookup('env', 'CONTROLLER_ID') }}"
- drives: "{{ lookup('env', 'PHYSICAL_DISK') }}"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: ""
+ drives: ""
capacity_bytes: 214748364800
optimum_io_size_bytes: 65536
encrypted: false
@@ -27,8 +27,9 @@
- name: Asserting after performing operation.
ansible.builtin.assert:
- that: |-
- redfish_storage_volume_out.msg == "<urlopen error [Errno -2] Name or service not known>"
+ that: >-
+ '"<urlopen error [Errno 111] Connection refused>" in
+ redfish_storage_volume_out.msg'
- name: To check the behaviour of invalid credentials.
ansible.builtin.import_role:
@@ -40,9 +41,9 @@
validate_certs: false
state: present
volume_type: "NonRedundant"
- name: "VD" # noqa: var-naming[no-reserved]
- controller_id: "{{ lookup('env', 'CONTROLLER_ID') }}"
- drives: "{{ lookup('env', 'PHYSICAL_DISK') }}"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: ""
+ drives: ""
capacity_bytes: 214748364800
optimum_io_size_bytes: 65536
encrypted: false
@@ -55,7 +56,7 @@
that: |-
redfish_storage_volume_out.msg == "HTTP Error 401: Unauthorized"
- - name: To check the behaviour of invalid span count.
+ - name: To check the behaviour of invalid state.
ansible.builtin.import_role:
name: redfish_storage_volume
vars:
@@ -63,11 +64,11 @@
username: "{{ lookup('env', 'IDRAC_USER') }}"
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
- state: present
+ state: invalid
volume_type: "SpannedStripesWithParity"
- name: "VD" # noqa: var-naming[no-reserved]
- controller_id: "{{ lookup('env', 'CONTROLLER_ID') }}"
- drives: "{{ lookup('env', 'PHYSICAL_DISK') }}"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: ""
+ drives: ""
capacity_bytes: 214748364800
optimum_io_size_bytes: 65536
encrypted: false
@@ -77,8 +78,9 @@
- name: Asserting after performing operation for invalid span count.
ansible.builtin.assert:
- that: |-
- redfish_storage_volume_out.msg == "HTTP Error 400: Bad Request"
+ that: >-
+ redfish_storage_volume_out.msg == "value of state must be one of:
+ present, absent, got: invalid"
when: not ansible_check_mode
- name: To check the behaviour of invalid certificate path.
@@ -92,9 +94,9 @@
ca_path: "/tmp/invalid_ca_path.pem"
state: present
volume_type: "SpannedStripesWithParity"
- name: "VD" # noqa: var-naming[no-reserved]
- controller_id: "{{ lookup('env', 'CONTROLLER_ID') }}"
- drives: "{{ lookup('env', 'PHYSICAL_DISK') }}"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: ""
+ drives: ""
capacity_bytes: 214748364800
optimum_io_size_bytes: 65536
encrypted: false
@@ -105,10 +107,38 @@
- name: Asserting after performing operation for invalid certificate path.
ansible.builtin.assert:
- that: |-
- "certificate verify failed" in redfish_storage_volume_out.msg
+ that: >-
+ "[Errno 2] No such file or directory" in
+ redfish_storage_volume_out.msg
+
+ - name: To check the behaviour of invalid raid type.
+ ansible.builtin.import_role:
+ name: redfish_storage_volume
+ vars:
+ hostname: "{{ lookup('env', 'IDRAC_IP') }}"
+ username: "{{ lookup('env', 'IDRAC_USER') }}"
+ password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
+ validate_certs: false
+ state: present
+ raid_type: "Invalid"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: ""
+ drives: ""
+ capacity_bytes: 214748364800
+ optimum_io_size_bytes: 65536
+ encrypted: false
+ job_wait: false
+ ignore_errors: true
+ register: redfish_storage_volume_result
- - name: To check the behaviour of invalid volume type.
+ - name: Asserting after performing operation for invalid raid type.
+ ansible.builtin.assert:
+ that: >-
+ "value of raid_type must be one of: RAID0, RAID1, RAID5, RAID6,
+ RAID10, RAID50, RAID60, got: Invalid" in
+ redfish_storage_volume_out.msg
+
+ - name: To check the behaviour of invalid controller_id.
ansible.builtin.import_role:
name: redfish_storage_volume
vars:
@@ -117,19 +147,20 @@
password: "{{ lookup('env', 'IDRAC_PASSWORD') }}"
validate_certs: false
state: present
- volume_type: "InvalidMirrored"
- name: "VD" # noqa: var-naming[no-reserved]
- controller_id: "{{ lookup('env', 'CONTROLLER_ID') }}"
- drives: "{{ lookup('env', 'PHYSICAL_DISK') }}"
+ raid_type: "RAID0"
+ name: "VD" # noqa: var-naming[no-reserved]
+ controller_id: "invalid"
capacity_bytes: 214748364800
optimum_io_size_bytes: 65536
encrypted: false
job_wait: false
ignore_errors: true
+ ignore_unreachable: true
register: redfish_storage_volume_result
- - name: Asserting after performing operation for invalid volume type.
+ - name: Asserting after performing operation for invalid controller_id.
ansible.builtin.assert:
- that: >
- redfish_storage_volume_out.msg == "value of volume_type must be one of: NonRedundant, Mirrored,
- StripedWithParity, SpannedMirrors, SpannedStripesWithParity, got: InvalidMirrored"
+ that: >-
+ "Specified Controller invalid does not exist in the System." in
+ redfish_storage_volume_out.msg
+ when: redfish_storage_volume_out.failed
diff --git a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/molecule.yml b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/molecule.yml
index e69de29bb..6fe7d44bb 100644
--- a/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/molecule.yml
+++ b/ansible_collections/dellemc/openmanage/roles/redfish_storage_volume/molecule/default/molecule.yml
@@ -0,0 +1,10 @@
+---
+scenario:
+ test_sequence:
+ - dependency
+ - destroy
+ - syntax
+ - create
+ - converge
+ - cleanup
+ - destroy
diff --git a/ansible_collections/dellemc/openmanage/tests/README.md b/ansible_collections/dellemc/openmanage/tests/README.md
index a7d90ff01..e9bb0a645 100644
--- a/ansible_collections/dellemc/openmanage/tests/README.md
+++ b/ansible_collections/dellemc/openmanage/tests/README.md
@@ -1,6 +1,6 @@
### Overview
Dell OpenManage Ansible Modules unit test scripts are located under
- [unit](./tests/unit) directory.
+ [unit](./unit) directory.
### Implementing the unit tests
Any contribution must have an associated unit test. This section covers the
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
index d5f43360f..bb746d12e 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_boot.py
@@ -347,7 +347,7 @@ class TestConfigBios(FakeAnsibleModule):
result = self._run_module(idrac_default_args)
assert 'msg' in result
- def test_manin_success(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
+ def test_main_success(self, boot_connection_mock, redfish_response_mock, idrac_default_args, mocker):
idrac_default_args.update({"boot_source_override_mode": "legacy"})
redfish_response_mock.success = True
mocker.patch(MODULE_PATH + 'idrac_boot.get_system_res_id', return_value=("System.Embedded.1", ""))
@@ -403,4 +403,4 @@ class TestConfigBios(FakeAnsibleModule):
job_resp.update({"JobState": "Running"})
# with pytest.raises(Exception) as err:
module_return = self._run_module(idrac_default_args)
- assert module_return["msg"] == "The boot settings job is triggered successfully."
+ assert module_return["msg"] == "The boot settings operation is triggered/submitted successfully."
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
index 5e94faf91..4d45cae73 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_certificates.py
@@ -288,6 +288,17 @@ class TestIdracCertificates(FakeAnsibleModule):
assert "Permission denied" in ex.value.args[0]
os.remove(temp_ssl.name)
+ def test_build_generate_csr_payload(self, idrac_default_args):
+ cert_params_data = {
+ "cert_params": {
+ "subject_alt_name": ['192.198.2.1,192.198.2.2', 'X.X.X.X']
+ }
+ }
+ idrac_default_args.update(cert_params_data)
+ f_module = self.get_module_mock(params=idrac_default_args)
+ payload = self.module._build_generate_csr_payload(f_module, None)
+ assert payload["AlternativeNames"] == ['192.198.2.1,192.198.2.2,X.X.X.X']
+
@pytest.mark.parametrize("params", [{"json_data": {
"Actions": {
EXPORT_SSL_CERTIFICATE: {
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py
index 987ff83d2..7a2cabf1f 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_diagnostics.py
@@ -2,7 +2,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.0.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -1045,10 +1045,8 @@ class TestDiagnosticsType(FakeAnsibleModule):
idrac_mock = mocker.MagicMock()
diagnostics_mock = mocker.MagicMock()
diagnostics_mock.execute.return_value = (None, None, None)
-
mocker.patch(MODULE_PATH + 'get_argument_spec', return_value={})
- mocker.patch(MODULE_PATH + 'idrac_auth_params', {})
- mocker.patch(MODULE_PATH + 'AnsibleModule', return_value=module_mock)
+ mocker.patch(MODULE_PATH + 'IdracAnsibleModule', return_value=module_mock)
mocker.patch(MODULE_PATH + 'iDRACRedfishAPI', return_value=idrac_mock)
mocker.patch(MODULE_PATH + 'DiagnosticsType.diagnostics_operation', return_value=diagnostics_mock)
main()
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py
index a07cc1eb1..021de2fbb 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_license.py
@@ -2,7 +2,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 8.7.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -281,21 +281,21 @@ class TestExportLicense(FakeAnsibleModule):
'license_id': 'test_license_id',
'share_parameters': {
'share_name': str(tmp_path),
- 'file_name': 'test_lic'
+ 'file_name': 'test_lic.xml'
}
}
idr_obj = MagicMock()
- idr_obj.json_data = {"license_id": "1234", "LicenseFile": "test_license_content"}
+ idr_obj.json_data = {"license_id": "1234", "LicenseFile": "dGVzdF9saWNlbnNlX2NvbnRlbnQK"}
mocker.patch(MODULE_PATH + API_INVOKE_MOCKER,
return_value=idr_obj)
idrac_default_args.update(export_params)
f_module = self.get_module_mock(params=idrac_default_args, check_mode=False)
export_license_obj = self.module.ExportLicense(idrac_connection_license_mock, f_module)
result = export_license_obj._ExportLicense__export_license_local(EXPORT_URL_MOCK)
- assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
- assert os.path.exists(f"{tmp_path}/test_lic_iDRAC_license.txt")
- if os.path.exists(f"{tmp_path}/test_lic_iDRAC_license.txt"):
- os.remove(f"{tmp_path}/test_lic_iDRAC_license.txt")
+ assert result.json_data == {'LicenseFile': 'dGVzdF9saWNlbnNlX2NvbnRlbnQK', 'license_id': '1234'}
+ assert os.path.exists(f"{tmp_path}/test_lic.xml")
+ if os.path.exists(f"{tmp_path}/test_lic.xml"):
+ os.remove(f"{tmp_path}/test_lic.xml")
export_params = {
'license_id': 'test_license_id',
@@ -305,10 +305,10 @@ class TestExportLicense(FakeAnsibleModule):
}
idrac_default_args.update(export_params)
result = export_license_obj._ExportLicense__export_license_local(EXPORT_URL_MOCK)
- assert result.json_data == {'LicenseFile': 'test_license_content', 'license_id': '1234'}
- assert os.path.exists(f"{tmp_path}/test_license_id_iDRAC_license.txt")
- if os.path.exists(f"{tmp_path}/test_license_id_iDRAC_license.txt"):
- os.remove(f"{tmp_path}/test_license_id_iDRAC_license.txt")
+ assert result.json_data == {'LicenseFile': 'dGVzdF9saWNlbnNlX2NvbnRlbnQK', 'license_id': '1234'}
+ assert os.path.exists(f"{tmp_path}/test_license_id_iDRAC_license.xml")
+ if os.path.exists(f"{tmp_path}/test_license_id_iDRAC_license.xml"):
+ os.remove(f"{tmp_path}/test_license_id_iDRAC_license.xml")
def test_export_license_http(self, idrac_default_args, idrac_connection_license_mock, mocker):
export_params = {
@@ -736,8 +736,7 @@ class TestLicenseType(FakeAnsibleModule):
# Mock the necessary functions and objects
mocker.patch(MODULE_PATH + 'get_argument_spec', return_value={})
- mocker.patch(MODULE_PATH + 'idrac_auth_params', {})
- mocker.patch(MODULE_PATH + 'AnsibleModule', return_value=module_mock)
+ mocker.patch(MODULE_PATH + 'IdracAnsibleModule', return_value=module_mock)
mocker.patch(MODULE_PATH + 'iDRACRedfishAPI', return_value=idrac_mock)
mocker.patch(MODULE_PATH + 'get_idrac_firmware_version', return_value='3.1')
mocker.patch(MODULE_PATH + 'LicenseType.license_operation', return_value=license_mock)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
index d8c23160e..8ff72383c 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_reset.py
@@ -57,6 +57,7 @@ STATUS_SUCCESS = [200, 202, 204]
ERR_STATUS_CODE = [400, 404]
RESET_KEY = "Oem.#DellManager.ResetToDefaults"
RESTART_KEY = "#Manager.Reset"
+SLEEP_KEY = "time.sleep"
GET_BASE_URI_KEY = "Validation.get_base_uri"
INVOKE_REQ_KEY = "iDRACRedfishAPI.invoke_request"
GET_CUSTOM_DEFAULT_KEY = "CustomDefaultsDownloadURI"
@@ -374,6 +375,7 @@ class TestFactoryReset(FakeAnsibleModule):
return self.action_api_resp
mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
+ mocker.patch(MODULE_PATH + SLEEP_KEY, side_effect=lambda *args, **kwargs: None)
mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, return_value=obj)
mocker.patch(MODULE_PATH + "get_dynamic_uri",
side_effect=mock_get_dynamic_uri_request)
@@ -397,6 +399,7 @@ class TestFactoryReset(FakeAnsibleModule):
return self.action_api_resp
mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
+ mocker.patch(MODULE_PATH + SLEEP_KEY, side_effect=lambda *args, **kwargs: None)
mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj, obj2, obj, URLError('URL error occurred'), obj, URLError('URL error occurred'), obj3, obj])
mocker.patch(MODULE_PATH + "get_dynamic_uri",
@@ -422,6 +425,7 @@ class TestFactoryReset(FakeAnsibleModule):
mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, side_effect=[(allowed_values, True), (allowed_values, True)])
mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj, obj2, obj, obj2])
+ mocker.patch(MODULE_PATH + SLEEP_KEY, side_effect=lambda *args, **kwargs: None)
mocker.patch(MODULE_PATH + 'idrac_redfish_job_tracking', return_value=idrac_redfish_resp)
mocker.patch(MODULE_PATH + "get_dynamic_uri",
side_effect=[self.lc_status_api_links, self.action_api_resp_restart,
@@ -499,6 +503,7 @@ class TestFactoryReset(FakeAnsibleModule):
mocker.patch(MODULE_PATH + "get_idrac_firmware_version", return_value="7.10.05")
mocker.patch(MODULE_PATH + CHECK_IDRAC_VERSION, return_value=True)
mocker.patch(MODULE_PATH + CHECK_LC_STATUS, return_value=None)
+ mocker.patch(MODULE_PATH + SLEEP_KEY, side_effect=lambda *args, **kwargs: None)
mocker.patch(MODULE_PATH + "Validation.validate_path", return_value=None)
mocker.patch(MODULE_PATH + "Validation.validate_file_format", return_value=None)
mocker.patch(MODULE_PATH + "Validation.validate_custom_option", return_value=None)
@@ -534,6 +539,7 @@ class TestFactoryReset(FakeAnsibleModule):
mocker.patch(MODULE_PATH + GET_BASE_URI_KEY, return_value=IDRAC_URI)
mocker.patch(MODULE_PATH + VALIDATE_RESET_OPTION_KEY, return_value=(allowed_values, True))
mocker.patch(MODULE_PATH + INVOKE_REQ_KEY, side_effect=[obj])
+ mocker.patch(MODULE_PATH + SLEEP_KEY, side_effect=lambda *args, **kwargs: None)
mocker.patch(MODULE_PATH + "get_dynamic_uri",
side_effect=mock_get_dynamic_uri_request)
idrac_default_args.update({"reset_to_default": "ResetAllWithRootDefaults"})
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py
index a28aab255..1cd10fc02 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_session.py
@@ -2,7 +2,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.2.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -579,7 +579,7 @@ class TestMain(FakeAnsibleModule):
StringIO(json_str)))
else:
ome_default_args.update({"state": "absent", "session_id": "1234",
- "auth_token": "token123"})
+ "x_auth_token": "token123"})
mocker.patch(MODULE_PATH + "DeleteSession.get_session_url",
side_effect=exc_type('test'))
result = self._run_module(ome_default_args)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py
index 3cdf742d2..ad70fe701 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_idrac_storage_volume.py
@@ -2,7 +2,7 @@
#
# Dell OpenManage Ansible Modules
-# Version 9.0.0
+# Version 9.3.0
# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
@@ -39,6 +39,7 @@ ID_AND_LOCATION_BOTH_NOT_DEFINED = "Either id or location should be specified."
DRIVES_NOT_DEFINED = "Drives must be defined for volume creation."
NOT_ENOUGH_DRIVES = "Number of sufficient disks not found in Controller '{controller_id}'!"
WAIT_TIMEOUT_MSG = "The job is not complete after {0} seconds."
+TIME_TO_WAIT_MSG = "Time to wait value is invalid. Minimum value is 300 and Maximum is 3600 seconds."
JOB_TRIGERRED = "Successfully triggered the {0} storage volume operation."
VOLUME_NAME_REQUIRED_FOR_DELETE = "Virtual disk name is a required parameter for remove virtual disk operations."
VOLUME_NOT_FOUND = "Unable to find the virtual disk."
@@ -756,6 +757,27 @@ class TestStorageValidation(TestStorageBase):
idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
idr_obj.validate_job_wait_negative_values()
+ def test_validate_time_to_wait(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
+ # Scenario - when time_to_wait < 300
+ mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD,
+ return_value=TestStorageData.storage_data)
+ idrac_default_args.update({"time_to_wait": 299})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_time_to_wait()
+ assert exc.value.args[0] == TIME_TO_WAIT_MSG
+
+ # Scenario - when time_to_wait > 3600
+ idrac_default_args.update({"time_to_wait": 3601})
+ f_module = self.get_module_mock(
+ params=idrac_default_args, check_mode=False)
+ idr_obj = self.module.StorageValidation(idrac_connection_storage_volume_mock, f_module)
+ with pytest.raises(Exception) as exc:
+ idr_obj.validate_time_to_wait()
+ assert exc.value.args[0] == TIME_TO_WAIT_MSG
+
@pytest.mark.parametrize("params", [
{"span_depth": -1, "span_length": 2, "capacity": 200, "strip_size": 131072},
{"span_depth": 1, "span_length": -1, "capacity": 200, "strip_size": 131072},
@@ -1058,6 +1080,7 @@ class TestStorageCreate(TestStorageBase):
def test_validate_create(self, idrac_default_args, idrac_connection_storage_volume_mock, mocker):
mocker.patch(MODULE_PATH + ALL_STORAGE_DATA_METHOD, return_value=TestStorageData.storage_data)
+ mocker.patch(MODULE_PATH + 'StorageValidation.validate_time_to_wait', return_value=None)
mocker.patch(MODULE_PATH + 'StorageValidation.validate_controller_exists', return_value=None)
mocker.patch(MODULE_PATH + 'StorageValidation.validate_job_wait_negative_values', return_value=None)
mocker.patch(MODULE_PATH + 'StorageValidation.validate_negative_values_for_volume_params', return_value=None)
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
index 99c49c210..a996a73ea 100644
--- a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_application_certificate.py
@@ -107,7 +107,8 @@ class TestOmeAppCSR(FakeAnsibleModule):
def test_generate_csr(self, mocker, ome_default_args, ome_connection_mock_for_application_certificate,
ome_response_mock):
- csr_json = {"CertificateData": "--BEGIN-REQUEST--"}
+ csr_data = "-----BEGIN CERTIFICATE REQUEST-----MIIFMDCCAxgCAQAwgbAxCzAJBgNVBAYTAlVTMREwDwYDVQQIDAhWaXJnaW5pYTES-----END CERTIFICATE REQUEST-----"
+ csr_json = {"CertificateData": csr_data}
payload = {"DistinguishedName": "hostname.com", "DepartmentName": "Remote Access Group",
"BusinessName": "Dell Inc.", "Locality": "Round Rock", "State": "Texas",
"Country": "US", "Email": EMAIL_ADDRESS, "subject_alternative_names": "XX.XX.XX.XX"}
@@ -121,4 +122,5 @@ class TestOmeAppCSR(FakeAnsibleModule):
ome_response_mock.json_data = csr_json
result = self.execute_module(ome_default_args)
assert result['msg'] == "Successfully generated certificate signing request."
- assert result['csr_status'] == {'CertificateData': '--BEGIN-REQUEST--'}
+ data = '''-----BEGIN CERTIFICATE REQUEST-----\nMIIFMDCCAxgCAQAwgbAxCzAJBgNVBAYTAlVTMREwDwYDVQQIDAhWaXJnaW5pYTES\n-----END CERTIFICATE REQUEST-----'''
+ assert result['csr_status']['CertificateData'] == data
diff --git a/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_session.py b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_session.py
new file mode 100644
index 000000000..6a547dc10
--- /dev/null
+++ b/ansible_collections/dellemc/openmanage/tests/unit/plugins/modules/test_ome_session.py
@@ -0,0 +1,448 @@
+# -*- coding: utf-8 -*-
+
+#
+# Dell OpenManage Ansible Modules
+# Version 9.3.0
+# Copyright (C) 2024 Dell Inc. or its subsidiaries. All Rights Reserved.
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+#
+
+from __future__ import absolute_import, division, print_function
+
+from io import StringIO
+import json
+
+from urllib.error import HTTPError, URLError
+import pytest
+from mock import MagicMock
+from ansible_collections.dellemc.openmanage.plugins.modules import ome_session
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule
+from ansible.module_utils.urls import SSLValidationError
+from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import AnsibleFailJSonException
+from ansible.module_utils._text import to_text
+
+MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.ome_session.'
+MODULE_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.utils.'
+SESSION_UTILS_PATH = 'ansible_collections.dellemc.openmanage.plugins.module_utils.session_utils.'
+
+REDFISH = "/redfish/v1"
+SESSIONS = "Sessions"
+ODATA = "@odata.id"
+ODATA_REGEX = "(.*?)@odata"
+
+SESSION_URL = "/api/SessionService/Sessions"
+GET_SESSION_URL = "Session.get_session_url"
+
+CREATE_SUCCESS_MSG = "The session has been created successfully."
+DELETE_SUCCESS_MSG = "The session has been deleted successfully."
+FAILURE_MSG = "Unable to '{operation}' a session."
+CHANGES_FOUND_MSG = "Changes found to be applied."
+NO_CHANGES_FOUND_MSG = "No changes found to be applied."
+HTTPS_PATH = "https://testhost.com"
+HTTP_ERROR = "http error message"
+APPLICATION_JSON = "application/json"
+
+
+class TestOMESession(FakeAnsibleModule):
+ """
+ Main class for testing the ome_session module.
+ """
+ module = ome_session
+
+ @pytest.fixture
+ def ome_session_mock(self):
+ """
+ Creates a mock object for the `ome_session` fixture.
+
+ This function uses the `MagicMock` class from the `unittest.mock` module to create a mock
+ object. The mock object is then returned by the function.
+
+ Returns:
+ MagicMock: A mock object representing the `ome_session`.
+ """
+ ome_obj = MagicMock()
+ return ome_obj
+
+ @pytest.fixture
+ def ome_connection_session_mock(self, mocker, ome_session_mock):
+ """
+ Returns a mock object for the `SessionAPI` class from the `MODULE_PATH` module.
+ The mock object is initialized with the `ome_session_mock` as the return value.
+ The `__enter__` method of the mock object is also mocked to return `ome_session_mock`.
+
+ :param mocker: The pytest fixture for mocking objects.
+ :type mocker: pytest_mock.plugin.MockerFixture
+ :param ome_session_mock: The mock object for the `ome_session_mock`.
+ :type ome_session_mock: Any
+ :return: The mock object for the `SessionAPI` class.
+ :rtype: MagicMock
+ """
+ ome_conn_mock = mocker.patch(SESSION_UTILS_PATH + 'SessionAPI', return_value=ome_session_mock)
+ ome_conn_mock.return_value.__enter__.return_value = ome_session_mock
+ return ome_conn_mock
+
+ def test_create_session_failure(self, ome_connection_session_mock, mocker):
+ """
+ Test the failure scenario of creating a session.
+
+ Args:
+ ome_connection_session_mock (MagicMock): A mock object for the
+ ome_connection_session.
+ mocker (MockerFixture): A fixture for mocking objects.
+
+ Returns:
+ None
+
+ This test function creates a session object using the `ome_connection_session_mock` and
+ `f_module` objects.
+ It sets the `f_module.check_mode` to False and `f_module.params` to a dictionary containing
+ the username and password.
+ It mocks the `ome_connection_session_mock.invoke_request` method to return a response
+        with a status code of 400.
+        It calls the `session_obj.create_session()` method to create the session.
+ It asserts that the `f_module.exit_json` method is called once with the message "Unable to
+ 'create' a session." and `failed` set to True.
+ """
+ f_module = MagicMock()
+ session_obj = self.module.OMESession(f_module)
+ f_module.check_mode = False
+ f_module.params = {
+ "username": "admin",
+ "password": "password"
+ }
+ response_mock = MagicMock()
+ response_mock.status_code = 400
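+        # A non-201 response should make create_session report a failure via exit_json.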
+ mocker.patch.object(ome_connection_session_mock.return_value, 'invoke_request',
+ return_value=response_mock)
+ session_obj.create_session()
+ f_module.exit_json.assert_called_once_with(
+ msg="Unable to 'create' a session.",
+ failed=True
+ )
+
+ def test_create_session_check_mode(self, ome_connection_session_mock):
+ """
+ Test the create session functionality in check mode.
+
+ Args:
+ ome_connection_session_mock (MagicMock): A mock object for the ome connection
+ session.
+
+ Returns:
+ None
+
+ This function tests the create session functionality in check mode. It creates an instance
+        of the `OMESession` class with a mock `f_module` object, replaces the `exit_json` method
+        of the `f_module` object with a mock, and then calls `create_session` while check mode
+        is enabled.
+
+ Finally, it calls the `execute` method of the `session_obj` to execute the create session
+ functionality in check mode.
+
+ Note:
+ This function assumes that the necessary imports and setup for the test are already
+ done.
+ """
+ f_module = MagicMock()
+ session_obj = self.module.OMESession(f_module)
+ f_module = self.get_module_mock(
+ params={"session_id": "abcd", "hostname": "X.X.X.X"}, check_mode=True)
+ f_module.exit_json = MagicMock()
+
+ session_obj.create_session()
+
+ def test_create_session_success(self, ome_connection_session_mock):
+ """
+ Test the successful creation of a session.
+
+ Args:
+ ome_connection_session_mock (MagicMock): A mock object representing the ome
+ connection session.
+
+ This test case verifies the successful creation of a session by mocking the necessary
+        objects and invoking the `create_session()` method of the `OMESession` class. It sets the
+ parameters for the `f_module` object, initializes the `session_obj` with the mocked
+ `ome_connection_session_mock` and `f_module`, and mocks the necessary methods and
+ attributes of the `ome` object. It then asserts that the `exit_json` method of the
+ `f_module` object is called with the expected arguments.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"username": "admin", "password": "password"}, check_mode=False)
+ session_obj = self.module.OMESession(f_module)
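+        # Simulate a successful creation: HTTP 201 with a session ID payload and an X-Auth-Token header.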
+ session_obj.instance.invoke_request.return_value.status_code = 201
+ session_obj.instance.invoke_request.return_value.json_data = {"SessionID": "123456"}
+ session_obj.instance.invoke_request.return_value.headers.get.return_value = "token123"
+ f_module.exit_json = MagicMock()
+
+ session_obj.create_session()
+ f_module.exit_json.assert_called_once_with(
+ msg=CREATE_SUCCESS_MSG,
+ changed=True,
+ session_data={"SessionID": "123456"},
+ x_auth_token="token123"
+ )
+
+ def test_delete_session_success_check_mode_changes(self, ome_connection_session_mock):
+ """
+ Test the `delete_session_success_check_mode_changes` method of the `DeleteSession` class.
+
+ This method is responsible for testing the success case when the `delete_session` method
+ is called in check mode.
+ It verifies that the `exit_json` method of the `f_module` object is called with the
+ appropriate arguments when the session is successfully deleted.
+
+ Parameters:
+ - ome_connection_session_mock (MagicMock): A mock object representing the
+ `ome_connection_session` object.
+
+ Returns:
+ None
+ """
+ f_module = MagicMock()
+ session_obj = self.module.OMESession(f_module)
+ session_obj.get_session_status = MagicMock(return_value=True)
+ session_obj.delete_session()
+ f_module.exit_json.assert_called_once_with(msg=CHANGES_FOUND_MSG, changed=True)
+
+ def test_delete_session_success_check_mode_no_changes(self, ome_connection_session_mock):
+ """
+ Test the success case of deleting a session in check mode when no changes are expected.
+
+ Args:
+ ome_connection_session_mock (MagicMock): A mock object representing the ome
+ connection session.
+
+ This function tests the scenario where the deletion of a session is successful in check
+ mode and no changes are expected. It sets up the necessary mock objects and asserts that
+ the `exit_json` method of the `f_module` object is called once with the `msg` parameter
+ set to `NO_CHANGES_FOUND_MSG`.
+
+ Returns:
+ None
+ """
+ f_module = MagicMock()
+ session_obj = self.module.OMESession(f_module)
+ session_obj.instance.invoke_request.return_value.status_code = 201
+ session_obj.delete_session()
+ f_module.exit_json.assert_called_once_with(msg=NO_CHANGES_FOUND_MSG)
+
+ def test_delete_session_success(self, ome_connection_session_mock):
+ """
+ Test the successful deletion of a session.
+
+ This test function verifies the behavior of the `DeleteSession` class when a session is
+ successfully deleted. It mocks the `ome_connection_session_mock` object and sets up the
+ necessary parameters for the `f_module` object. It then creates an instance of the
+ `DeleteSession` class with the mocked `ome_connection_session_mock` and the
+ `f_module` object.
+
+ The `invoke_request` method of the `ome` object of the `session_obj` is also mocked
+        to return a response with a status code of 204. The `exit_json` method of the `f_module`
+ object is mocked as well.
+
+        The `delete_session` method of the `session_obj` is called to perform the deletion of the session.
+ Finally, the `exit_json` method of the `f_module` object is asserted to have been called
+ with the expected arguments, including the success message and the changed flag set to
+ `True`.
+
+ Parameters:
+ - ome_connection_session_mock (MagicMock): A mocked object representing the
+ `ome_connection_session_mock` object.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "abcd", "hostname": "X.X.X.X", "x_auth_token": "token"}, check_mode=False)
+ session_obj = self.module.OMESession(f_module)
+ session_obj.get_session_status = MagicMock(return_value=200)
+ session_obj.instance.invoke_request.return_value.status_code = 204
+ f_module.exit_json = MagicMock()
+ session_obj.delete_session()
+ f_module.exit_json.assert_called_once_with(msg=DELETE_SUCCESS_MSG, changed=True)
+
+ def test_delete_session_success_02(self, ome_connection_session_mock):
+ """
+        Test session deletion when the delete request does not return HTTP 204.
+
+        This test function mocks `get_session_status` to report an existing session and sets up
+        the necessary parameters for the `f_module` object. It then creates an instance of the
+        `OMESession` class with the mocked `f_module` object.
+
+        The `invoke_request` method of the `session_obj` instance is mocked to return a response
+        with a status code of 400.
+
+        The `delete_session` method of the `session_obj` is called, and the test asserts that it
+        returns a falsy value because the deletion did not succeed.
+
+ Parameters:
+ - ome_connection_session_mock (MagicMock): A mocked object representing the
+ `ome_connection_session_mock` object.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "abcd", "hostname": "X.X.X.X", "x_auth_token": "token"}, check_mode=False)
+ session_obj = self.module.OMESession(f_module)
+ session_obj.get_session_status = MagicMock(return_value=200)
+ session_obj.instance.invoke_request.return_value.status_code = 400
+ obj = session_obj.delete_session()
+ assert not obj
+
+ def test_delete_session_http_error(self, ome_connection_session_mock):
+ """
+ Test the behavior of the `DeleteSession` class when an HTTP error occurs during the
+ deletion of a session.
+
+ This test case creates a mock `f_module` object with the necessary parameters and
+ initializes a `DeleteSession` object with the mock `ome_connection_session_mock` and the
+ `f_module` object. It then sets up the necessary mock functions and side effects to
+ simulate an HTTP error during the deletion of a session. Finally, it executes the
+        `delete_session()` method of the `OMESession` object and asserts that an
+ `AnsibleFailJSonException` is raised with the expected failure message and error
+ information.
+
+ Parameters:
+ - ome_connection_session_mock (MagicMock): A mock object representing the
+ `ome_connection_session_mock` parameter.
+
+ Raises:
+ - AssertionError: If the expected failure message or error information is not present
+ in the raised exception.
+
+ Returns:
+ None
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "abcd", "hostname": "X.X.X.X"}, check_mode=False)
+ session_obj = self.module.OMESession(f_module)
+ session_obj.get_session_status = MagicMock(return_value=200)
+ json_str = to_text(json.dumps({"data": "out"}))
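+        # Simulate an HTTPError from the appliance while the session is being deleted.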
+ session_obj.instance.invoke_request.side_effect = HTTPError(HTTPS_PATH, 200,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str))
+ try:
+ session_obj.delete_session()
+ except AnsibleFailJSonException as ex:
+ assert ex.fail_msg == "Unable to 'delete' a session."
+ assert ex.fail_kwargs == {'error_info': {'data': 'out'}, 'failed': True}
+
+ def test_delete_session_check_mode_false_no_changes(self, ome_connection_session_mock):
+ """
+ Test the scenario where the delete session is executed in check mode with `check_mode` set
+ to False and no changes are expected.
+
+ Args:
+ ome_connection_session_mock (MagicMock): A mock object representing the ome
+ connection session.
+
+ Returns:
+ None
+
+ This function creates a mock module object with the specified parameters and
+        initializes the `OMESession` object with the mocked module object. It
+ sets the status code of the invoke request to 201. It then asserts that the `exit_json`
+ method of the module object is called once with the `msg` parameter set to the
+ `NO_CHANGES_FOUND_MSG` constant.
+ """
+ f_module = self.get_module_mock(
+ params={"session_id": "abcd", "hostname": "X.X.X.X"}, check_mode=False)
+ session_obj = self.module.OMESession(f_module)
+ session_obj.instance.invoke_request.return_value.status_code = 201
+ f_module.exit_json = MagicMock()
+ session_obj.delete_session()
+ f_module.exit_json.assert_called_once_with(msg=NO_CHANGES_FOUND_MSG)
+
+ def test_get_session_status_http_error(self, ome_connection_session_mock):
+ """
+        Test the behavior of `get_session_status` when the underlying request raises an HTTP
+        error.
+
+        This test case creates a mock `f_module` object with the necessary parameters and
+        initializes an `OMESession` object. It then configures the mocked `invoke_request` to
+        raise an `HTTPError` with a 400 status code and asserts that `get_session_status`
+        propagates the `HTTPError` to the caller.
+
+        Parameters:
+        - ome_connection_session_mock (MagicMock): A mock object representing the
+          `ome_connection_session_mock` parameter.
+
+        Raises:
+        - HTTPError: Raised by the mocked `invoke_request` and expected to propagate out of
+          `get_session_status`.
+
+ Returns:
+ None
+ """
+ params = {"session_id": "abcd", "hostname": "X.X.X.X"}
+ f_module = self.get_module_mock(
+ params=params, check_mode=False)
+ session_obj = self.module.OMESession(f_module)
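+        # A 400 HTTPError raised by invoke_request should propagate out of get_session_status.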
+ session_obj.instance.invoke_request.side_effect = HTTPError(HTTPS_PATH, 400, '', {}, None)
+ with pytest.raises(HTTPError):
+ session_obj.get_session_status(SESSION_URL, params["session_id"])
+
+ @pytest.mark.parametrize("exc_type",
+ [URLError, HTTPError, SSLValidationError, ConnectionError, TypeError, ValueError])
+ def test_ome_session_main_exception_handling_case(self, exc_type, ome_default_args, mocker):
+ """
+ Test the exception handling of the `ome_session_main` module.
+
+ This function tests the exception handling of the `ome_session_main` module by mocking
+ different exceptions and verifying the expected behavior.
+
+ Parameters:
+ - exc_type (Exception): The type of exception to be raised.
+ - ome_default_args (dict): The default arguments for the module.
+ - mocker (MockerFixture): The mocker fixture for mocking functions.
+
+ Returns:
+ None
+
+ Raises:
+ AssertionError: If the expected result does not match the actual result.
+
+ Notes:
+ - The function uses the `pytest.mark.parametrize` decorator to parameterize the test
+ cases.
+ - The `exc_type` parameter represents the type of exception to be raised.
+ - The `ome_default_args` parameter contains the default arguments for the module.
+ - The `mocker` parameter is used to mock functions and simulate different exceptions.
+ - The function calls the `_run_module` method with the `ome_default_args` to execute
+ the module.
+ - The function verifies the expected result based on the raised exception type.
+
+ """
+ json_str = to_text(json.dumps({"data": "out"}))
+ if exc_type in [HTTPError, SSLValidationError]:
+ mocker.patch(MODULE_PATH + "OMESession.delete_session",
+ side_effect=exc_type(HTTPS_PATH, 400,
+ HTTP_ERROR,
+ {"accept-type": APPLICATION_JSON},
+ StringIO(json_str)))
+ else:
+ ome_default_args.update({"state": "absent", "session_id": "abcd",
+ "x_auth_token": "token123"})
+ mocker.patch(MODULE_PATH + "OMESession.delete_session",
+ side_effect=exc_type('test'))
+ result = self._run_module(ome_default_args)
+ if exc_type == URLError:
+ assert result['unreachable'] is True
+ else:
+ assert result['failed'] is True
+ assert 'msg' in result
diff --git a/ansible_collections/dellemc/powerflex/.github/CODEOWNERS b/ansible_collections/dellemc/powerflex/.github/CODEOWNERS
index f3093ce14..e4209b68b 100644
--- a/ansible_collections/dellemc/powerflex/.github/CODEOWNERS
+++ b/ansible_collections/dellemc/powerflex/.github/CODEOWNERS
@@ -15,4 +15,4 @@
# Trisha Datta (trisha-dell)
# for all files:
-* @kuttattz @Bhavneet-Sharma @Jennifer-John @meenakshidembi691 @Pavan-Mudunuri @trisha-dell @felixs88 @sachin-apa
+* @Bhavneet-Sharma @Jennifer-John @meenakshidembi691 @trisha-dell @felixs88 @sachin-apa
diff --git a/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml b/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml
index 058c434e2..76f953872 100644
--- a/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml
+++ b/ansible_collections/dellemc/powerflex/.github/workflows/ansible-test.yml
@@ -14,7 +14,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- ansible-version: [stable-2.14, stable-2.15, stable-2.16, devel]
+ ansible-version: [stable-2.15, stable-2.16, stable-2.17, devel]
steps:
- name: Check out code
uses: actions/checkout@v3
@@ -50,13 +50,15 @@ jobs:
matrix:
python: ['3.9', '3.10', '3.11']
ansible:
- - stable-2.14
- stable-2.15
- stable-2.16
+ - stable-2.17
- devel
exclude:
- ansible: stable-2.16
python: '3.9'
+ - ansible: stable-2.17
+ python: '3.9'
- ansible: devel
python: '3.9'
@@ -81,13 +83,15 @@ jobs:
matrix:
python: ['3.9', '3.10', '3.11']
ansible:
- - stable-2.14
- stable-2.15
- stable-2.16
+ - stable-2.17
- devel
exclude:
- ansible: stable-2.16
python: '3.9'
+ - ansible: stable-2.17
+ python: '3.9'
- ansible: devel
python: '3.9'
runs-on: ubuntu-latest
@@ -109,12 +113,14 @@ jobs:
fail-fast: false
matrix:
python-version: ["3.9", "3.10", "3.11"]
- ansible-version: [stable-2.14, stable-2.15, stable-2.16, devel]
+ ansible-version: [stable-2.15, stable-2.16, stable-2.17, devel]
exclude:
# Ansible-core 2.16 is supported only from Python 3.10 onwards
- python-version: "3.9"
ansible-version: stable-2.16
- python-version: "3.9"
+ ansible-version: stable-2.17
+ - python-version: "3.9"
ansible-version: devel
steps:
diff --git a/ansible_collections/dellemc/powerflex/CHANGELOG.rst b/ansible_collections/dellemc/powerflex/CHANGELOG.rst
index 6224280f0..981bbfc43 100644
--- a/ansible_collections/dellemc/powerflex/CHANGELOG.rst
+++ b/ansible_collections/dellemc/powerflex/CHANGELOG.rst
@@ -4,6 +4,16 @@ Dellemc.PowerFlex Change Logs
.. contents:: Topics
+v2.5.0
+======
+
+Minor Changes
+-------------
+
+- Fixed the roles to support attaching the MDM cluster to the gateway.
+- The storage pool module has been enhanced to support more features.
+- Added support for PowerFlex Onyx version (4.6.x).
+
v2.4.0
======
diff --git a/ansible_collections/dellemc/powerflex/FILES.json b/ansible_collections/dellemc/powerflex/FILES.json
index ac7c75c78..ba53dc1d6 100644
--- a/ansible_collections/dellemc/powerflex/FILES.json
+++ b/ansible_collections/dellemc/powerflex/FILES.json
@@ -32,7 +32,7 @@
"name": ".github/CODEOWNERS",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64591e526136e05232234eaf058ee461ca6ff91ea32a07ead2da6c0e54dd917f",
+ "chksum_sha256": "6d71584c96824a971b8a3a52da96dada30159239f0429b35f10e2e0eb1d6a71b",
"format": 1
},
{
@@ -95,7 +95,7 @@
"name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5804cd1364b07aa6ebe9d2d8b29598dd815c33471f6760da29039c40a6beadba",
+ "chksum_sha256": "6ee73bce211a732615ebb9e1fa631136c0e643d5f2bce1cb96ba7693de0904e6",
"format": 1
},
{
@@ -109,7 +109,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "453a68b618853e7feccf984745d38fb5566aab5d3de884790f5fa85c28347993",
+ "chksum_sha256": "42311210c546ae7974fb2278599b4a88072515b26ad366ac30d087b7da5fe1cf",
"format": 1
},
{
@@ -130,7 +130,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b4e0d601cddc58a5b325e2543789d29ea69f5dd362c080a16c77b0b3239a439",
+ "chksum_sha256": "dcbd65f6b2fbc4835b2b17f27723477430d200713a818bf7a40ef49ea27b34ee",
"format": 1
},
{
@@ -151,7 +151,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a9799578efac17952b5672ebb2d3a4f9541aa524ede37aa2ffe0372c0399fd8",
+ "chksum_sha256": "406b3d8a1383caa4f2380e97981db04a1b6e524644d14c28c349839a856d0fa4",
"format": 1
},
{
@@ -200,49 +200,49 @@
"name": "docs/CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f054a45c8a3b7032987d66180a9c5cc852af935e07f633489976b8f3d2b6755f",
+ "chksum_sha256": "d35d0f6c46fa37f4525387d514a656ec2896a0926953865f42c9422347ee75ed",
"format": 1
},
{
"name": "docs/INSTALLATION.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "782332edfc5dfac225338eec316fcb80264c8a80d64356b3849fa4d1063f4eb7",
+ "chksum_sha256": "9e6612ef91ba62f6bf3858f1818a43c2a9ddc93ce5ad2526ffd0f003472f5d68",
"format": 1
},
{
"name": "docs/ISSUE_TRIAGE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db3b92f11a5d38287ead84d7baf84655736fd7c377e88bd3fc29f44ea46ff57e",
+ "chksum_sha256": "ed45f202bf22a3d68b49e52dc20078bd210b4d4d6e5eccbcca7dac3aeafb92c1",
"format": 1
},
{
"name": "docs/MAINTAINERS.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0758ca62988538fa1b83403308bfac6637b523dd20e769b889a2c54561ae1512",
+ "chksum_sha256": "f1338cc336eb09f38805ce990c9205bdcab3fbcf1c4b55637953d92ba0972bb4",
"format": 1
},
{
"name": "docs/MAINTAINER_GUIDE.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "475e92f2e159eaa82f4666d9fd91907000790bea8417fa720d9f57328e101078",
+ "chksum_sha256": "777194d83ecf45ef4b13311bfa52a862b6d4d9d3195cecc40f0636cc7ad00356",
"format": 1
},
{
"name": "docs/Release Notes.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "461b82fb097263724e23d0477036a9fbd7ed46f7a8ad14ff7bfc90e4dc6555a6",
+ "chksum_sha256": "45572c8e81821c53b3fa7f6f273858dbc954c78dcd87bc90f15270314e6ff62f",
"format": 1
},
{
"name": "docs/SECURITY.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94ff66c47cb36e079846fd744ad870f627535e64326691b0421cad93feaffca2",
+ "chksum_sha256": "1bfb50b1b361321514beda96e3017aba1ddb9f137cddedf9e0acdd165a7af82a",
"format": 1
},
{
@@ -263,98 +263,98 @@
"name": "docs/modules/device.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "724112e62b9e42bf54860d5d6490df28db02f48a1470b222ddb44a7ad830ef8c",
+ "chksum_sha256": "286cc8f42910b29f45f45d514d18d1c274010c7bf1290ab258363cfa97947981",
"format": 1
},
{
"name": "docs/modules/fault_set.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e5cf661716df94032a49f43d5ce8d751dea569def8ac99e26c5cfada44f4f61",
+ "chksum_sha256": "6983109628b14b6cf780b138d1a24fb4b810d9e038d141cea805b8ee1ddaf6f4",
"format": 1
},
{
"name": "docs/modules/info.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d7cbe381aa23de4ce4acb228213a916f7ac5898ccf95105995134abf2496f3a",
+ "chksum_sha256": "83e21122599db39633103002e6fd4056952ae51b7aec81931dc2c48aa4ef6efd",
"format": 1
},
{
"name": "docs/modules/mdm_cluster.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ffc87301f4e67b79451450f964c6559088696c246210342c65678d3b6823eaa",
+ "chksum_sha256": "af22c975649b7ac3c1dd5c529e23bb9e124cbe1070fd7c4a93352abd3297fa22",
"format": 1
},
{
"name": "docs/modules/protection_domain.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a31481e55cbcd48e2de17c5f909958a48a641c9407ca97ac81159d5a732b2769",
+ "chksum_sha256": "e4491d39e1ba456eb36027a550011f75ae8c2bb9c4db8ed6fac3ff4c2a78d7ec",
"format": 1
},
{
"name": "docs/modules/replication_consistency_group.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a7020f015b38a75b76608685358c0d40f1994e942e23728ba563ba0ad76d92d3",
+ "chksum_sha256": "01eb098a7f1fc4177ccc1401f1afbea57ee93e02bfd0ca6c00777fe9b9b2bd45",
"format": 1
},
{
"name": "docs/modules/replication_pair.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1493e8c1d08acd7c51ee0423e0a688b3ee5801c915fdd3ecbf4c40461516fef7",
+ "chksum_sha256": "68a488ec3acf28312649942c9e5813af0e4b32a37e3b718a8ed0f14665368d91",
"format": 1
},
{
"name": "docs/modules/resource_group.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d38e031f9d39e1c92241fc635abfa1cae0a8081dd77a794b03f7784d98d9eb05",
+ "chksum_sha256": "3f44d80f59a115994e3eb8b746cc79c791596fa716c89610204303cf7c71853c",
"format": 1
},
{
"name": "docs/modules/sdc.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd486d97fd31a569846b33d38336a705e451f884a2ecd9197a90b01312e48a94",
+ "chksum_sha256": "01f3325b74a8cd65e3765d0b028d3459be105d9eb119c44abc8efa77106e2ee4",
"format": 1
},
{
"name": "docs/modules/sds.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35b848c6fc91ff8af4608d360dc36a1b7a8134712eafd23b6b3c25c1cb4c1d86",
+ "chksum_sha256": "13c99c52ff4b228d8efb5fe53597b42a8e33eb46ce3d8a21bb1adda204b18985",
"format": 1
},
{
"name": "docs/modules/snapshot.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "df9b5ac178c0a16ba79a5d57a97e4dd0dfbb4e332af9864d8a1b90aa35227ff0",
+ "chksum_sha256": "8492bdabc052db257394148318299ee2702f1588042e2afe6a2530c91488f445",
"format": 1
},
{
"name": "docs/modules/snapshot_policy.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b71c242c4cad07bd71731058093532976a02f9bc93ac658e65def287971cdf2",
+ "chksum_sha256": "6be6d8af2027e62e6a25fc72c3fc6d3aa7cee0e4afa310e66598f55bc783c446",
"format": 1
},
{
"name": "docs/modules/storagepool.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2954cea5c6999667466427d000d016ed770a0c9357dde997449b222b28ee8ea6",
+ "chksum_sha256": "7710d1d6ff490c4c9bf3198af7f77a285266252bc45db801930dbf5cb31b6b8f",
"format": 1
},
{
"name": "docs/modules/volume.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8515171be935508a35837ac2e8f58c5c3ee6f284a0f822b4d74128d2803d93f2",
+ "chksum_sha256": "b710062afc775994b4898765cc8434000216be778989ec86ba67ad4a4a0465fd",
"format": 1
},
{
@@ -368,14 +368,14 @@
"name": "meta/execution-environment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dca92e407d7f5bb562d233cf4872326fb7d2b60a07530934b5633c75dfa3ef1f",
+ "chksum_sha256": "4ecb20f07e2aaee8a0797f300826a400eaa575f21f566502a3b13973a154ce72",
"format": 1
},
{
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "272017e3158d619273cbce9badfb648a524232f573aa80e5933541215490d9b3",
+ "chksum_sha256": "aaef590e35a44c510ce8192aafaf8d8cdfd8c431ea47678a6c6e97061953e616",
"format": 1
},
{
@@ -480,7 +480,7 @@
"name": "playbooks/modules/storagepool.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7059aed35251235a82b26324e6fab795fc260c72851d72affc54295b8733e94a",
+ "chksum_sha256": "4d8cf1538c775aaa943a167598461c6c69b17705e09c6667f2a6914e04064c42",
"format": 1
},
{
@@ -592,7 +592,7 @@
"name": "plugins/doc_fragments/powerflex.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe81e9901c08fa0086544e591ea7dda990ef30905ce80ed61ffd3f138b8e57ca",
+ "chksum_sha256": "b3df2bd769b95473c8c3650614cca915b4e8556416d66b66786109564a36233f",
"format": 1
},
{
@@ -662,7 +662,7 @@
"name": "plugins/module_utils/storage/dell/utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9cdf312c0aea0c6686bcf9d1121049e222050d11a1be6c51fcbe9dab64892e8",
+ "chksum_sha256": "3e2c156f1c9646ac425c9cfb3c4355bce9e3d266279c1a72bf784da12ceecfcb",
"format": 1
},
{
@@ -760,7 +760,7 @@
"name": "plugins/modules/storagepool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "efd6c30ca7e5d8d61c13e3e1c154d28d80c8962e256c7dc4a0114f34e41c678d",
+ "chksum_sha256": "ad0464fb536badce506c0f8b9d54706dd73add6936d422d9f0c9b95fad4afc77",
"format": 1
},
{
@@ -1236,7 +1236,7 @@
"name": "roles/powerflex_config/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10f2358dbee525cf86fc27e1496b394bfaeb6ddcdce7af7accb194315861444b",
+ "chksum_sha256": "c2aa104693b08805ffad25ea7ad593881cb008947332b23a3841d7772f092f63",
"format": 1
},
{
@@ -1404,7 +1404,7 @@
"name": "roles/powerflex_gateway/tasks/install_keepalived.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5012b69295e36ac759ab5fa4aa16cf0b2ac7a336964aa75b9e3f8473c4f0e64",
+ "chksum_sha256": "e7c9afa1618df5a94af53dbacfaa54c6a171e66b40e0f3bc3aaad172708cfb1e",
"format": 1
},
{
@@ -1600,7 +1600,7 @@
"name": "roles/powerflex_lia/tasks/install_lia.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ace8d8c2789df5acea0bc156f0db8730471c2996ee7ca0194211a63708c72eb",
+ "chksum_sha256": "e5056e59fd0fe45e444c6c675c1598e034f2e7857951659c1a6868444e371ac0",
"format": 1
},
{
@@ -1761,7 +1761,7 @@
"name": "roles/powerflex_mdm/tasks/install_powerflex4x_mdm.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c56bea05dbe554fec279bb36958e567bd784461f451e0d1b1e2342f833c8b15",
+ "chksum_sha256": "46e80b52c93a896554ca448cd07df5847429955684c3ab4b3266f5a405b01fe3",
"format": 1
},
{
@@ -1859,7 +1859,7 @@
"name": "roles/powerflex_sdc/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9cee69ae196e0f0abecc93111ffeda653e64c7ee46e3cd7d413ae96bb96879e0",
+ "chksum_sha256": "b2ddcb9b937bd6067ced4504b8bb7662478ea8eeeb7aed39b68804a1796fadb7",
"format": 1
},
{
@@ -2062,7 +2062,7 @@
"name": "roles/powerflex_sdc/tasks/install_sdc.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "891ab050e6db8b216367d2075f31356aec6684f686e9a909c50924f70ede0e14",
+ "chksum_sha256": "b95f888155f80899c127aec413c9955511ee7c39e23f8b95bf87807b7d26fd82",
"format": 1
},
{
@@ -2076,7 +2076,7 @@
"name": "roles/powerflex_sdc/tasks/register_esxi_sdc.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed36076c66cd63ccefb2b3deee053948a973e06525e02122da1000536533ea63",
+ "chksum_sha256": "506e46f2c44f7cf505b3e9099cf22ed808c9a48ed7b17dcc07badcdacc20a3fc",
"format": 1
},
{
@@ -2286,7 +2286,7 @@
"name": "roles/powerflex_sdr/tasks/add_sdr.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ef7dde0476382d5348a15a3f59870c4623789c200a4710eb9e7db3ce205c3c3",
+ "chksum_sha256": "6760d118ec10bf4a9494106859eead19c0794d947551f5cf13d1b6e10a33d006",
"format": 1
},
{
@@ -2300,7 +2300,7 @@
"name": "roles/powerflex_sdr/tasks/remove_sdr.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2852a831ab356a6d98ffc7c4168ab07555371253333dcf020fe651a06d1aa56c",
+ "chksum_sha256": "8dde871cd8d0459063a717a090523bc7ea6afe025048275256599f99663cfe62",
"format": 1
},
{
@@ -2440,7 +2440,7 @@
"name": "roles/powerflex_sds/tasks/install_sds.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb7b2291ea143accdb28777ab6bd4b5929ebd0f569891d9b47ce13ad8b0b9b76",
+ "chksum_sha256": "8156904dc2f87c9ef2d553f6b4c55d93767e9e96fe249f690c11d5a83e577b22",
"format": 1
},
{
@@ -2619,6 +2619,13 @@
"format": 1
},
{
+ "name": "roles/powerflex_tb/tasks/connect_mdm_cluster.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2f971a96331dbf2ba033ee2c3ef1828cb1440cc69e86301c7648d2c1dc7a9ca6",
+ "format": 1
+ },
+ {
"name": "roles/powerflex_tb/tasks/install_tb.yml",
"ftype": "file",
"chksum_type": "sha256",
@@ -2636,7 +2643,7 @@
"name": "roles/powerflex_tb/tasks/install_tb4x.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40b6cf736e196e90e599dc4808970ebd64b9a1f848ad7ea0c29184408ecb6ea8",
+ "chksum_sha256": "cf82243b080e35c3d04e519d1140559474258722dd8eecd4a555f208364fc69f",
"format": 1
},
{
@@ -2650,7 +2657,7 @@
"name": "roles/powerflex_tb/tasks/set_tb_ips.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f6c75067f5575f48ada843abd9456ace5582fdc9f8e0d5483ea46724a0f35f0",
+ "chksum_sha256": "ae2a84b434ef8c1ed46af20bd506d84e7f3871098bc4e96cb69f8a626da3c013",
"format": 1
},
{
@@ -2850,24 +2857,17 @@
"format": 1
},
{
- "name": "tests/sanity/ignore-2.14.txt",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "71fd11298139d7839c2d0cf9c6933ad687b3a6bf7ca09862ea3b984e43bf72a4",
- "format": 1
- },
- {
"name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71fd11298139d7839c2d0cf9c6933ad687b3a6bf7ca09862ea3b984e43bf72a4",
+ "chksum_sha256": "a147975e23ec4159543142fdfd6993febe9b847f213ccf68226d3f14803c9987",
"format": 1
},
{
"name": "tests/sanity/ignore-2.16.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ae1170098d5b400a80d561fbaef8149b6707489c5f1424b64d418b80f1cecb9",
+ "chksum_sha256": "4a88db7e60ce66fe91fb6839b1bce91d4e2a0073e09a59b022b3e71ab5f7330b",
"format": 1
},
{
@@ -3056,7 +3056,7 @@
"name": "tests/unit/plugins/module_utils/mock_storagepool_api.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e7e3dfc7f6ac68a53092f5ba3292ec4c7c861f6972ca9c290f223ef10c8afad",
+ "chksum_sha256": "e4f59aab1f7a3c8a10fa218e5d19bea3584736400767c36b93a9a8967b8b6e16",
"format": 1
},
{
@@ -3105,7 +3105,7 @@
"name": "tests/unit/plugins/modules/test_mdm_cluster.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4a242b57ed85421cb8823e0814484d077407f00c761e23169542ac34cc9aa0d3",
+ "chksum_sha256": "e4dc16742a2d276a678746695798c8d08ba7d1309152cc4c70fc8dbe7b3d55bf",
"format": 1
},
{
@@ -3161,7 +3161,7 @@
"name": "tests/unit/plugins/modules/test_storagepool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c6bad9def6e6b32b7358bca2c4494be3c077fe49b47b08fc2e0c7305fcdb685",
+ "chksum_sha256": "f686969427a273790d3ec2b4376968492b5eb4851396fefd8228f4e82694b8a0",
"format": 1
},
{
diff --git a/ansible_collections/dellemc/powerflex/MANIFEST.json b/ansible_collections/dellemc/powerflex/MANIFEST.json
index 9e4656fef..38b48b7a3 100644
--- a/ansible_collections/dellemc/powerflex/MANIFEST.json
+++ b/ansible_collections/dellemc/powerflex/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "dellemc",
"name": "powerflex",
- "version": "2.4.0",
+ "version": "2.5.0",
"authors": [
"Akash Shendge <ansible.team@dell.com>",
"Arindam Datta <ansible.team@dell.com>",
@@ -24,16 +24,16 @@
],
"license_file": null,
"dependencies": {},
- "repository": "https://github.com/dell/ansible-powerflex/tree/2.4.0",
- "documentation": "https://github.com/dell/ansible-powerflex/tree/2.4.0/docs",
- "homepage": "https://github.com/dell/ansible-powerflex/tree/2.4.0",
+ "repository": "https://github.com/dell/ansible-powerflex/tree/main",
+ "documentation": "https://github.com/dell/ansible-powerflex/tree/main/docs",
+ "homepage": "https://github.com/dell/ansible-powerflex/tree/main",
"issues": "https://www.dell.com/community/Automation/bd-p/Automation"
},
"file_manifest_file": {
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e3524ca4d32c06f7162058bfe2e094e72e1b205ab39316a94dde334b0d59992",
+ "chksum_sha256": "51f896ec559ff97298cc533c294c99efe947cbfefff775b717b3448199bde426",
"format": 1
},
"format": 1
diff --git a/ansible_collections/dellemc/powerflex/README.md b/ansible_collections/dellemc/powerflex/README.md
index 6bb8ee215..b4002a0ec 100644
--- a/ansible_collections/dellemc/powerflex/README.md
+++ b/ansible_collections/dellemc/powerflex/README.md
@@ -1,64 +1,89 @@
# Ansible Modules for Dell Technologies PowerFlex
+[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-v2.0%20adopted-ff69b4.svg)](https://github.com/dell/ansible-powerflex/blob/main/docs/CODE_OF_CONDUCT.md)
+[![License](https://img.shields.io/github/license/dell/ansible-powerflex)](https://github.com/dell/ansible-powerflex/blob/main/LICENSE)
+[![Python version](https://img.shields.io/badge/python-3.9.6+-blue.svg)](https://www.python.org/downloads/)
+[![Ansible version](https://img.shields.io/badge/ansible-2.15.6+-blue.svg)](https://pypi.org/project/ansible/)
+[![PyPowerFlex](https://img.shields.io/github/v/release/dell/python-powerflex?include_prereleases&label=PyPowerFlex&style=flat-square)](https://github.com/dell/python-powerflex/releases)
+[![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/dell/ansible-powerflex?include_prereleases&label=latest&style=flat-square)](https://github.com/dell/ansible-powerflex/releases)
+[![codecov](https://codecov.io/gh/dell/ansible-powerflex/branch/main/graph/badge.svg)](https://app.codecov.io/gh/dell/ansible-powerflex)
+
The Ansible Modules for Dell Technologies (Dell) PowerFlex allow Data Center and IT administrators to use RedHat Ansible to automate and orchestrate the provisioning and management of Dell PowerFlex storage systems.
The capabilities of the Ansible modules include managing SDCs, volumes, snapshots, snapshot policies, storage pools, replication consistency groups, replication pairs, SDSs, devices, protection domains, the MDM cluster and Fault Sets, and gathering high-level facts from the storage system. The options available are list, show, create, modify and delete. These tasks can be executed by running simple playbooks written in yaml syntax. The modules are written so that all the operations are idempotent, so making multiple identical requests has the same effect as making a single request.
## Table of contents
-* [Code of conduct](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/CODE_OF_CONDUCT.md)
-* [Maintainer guide](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/MAINTAINER_GUIDE.md)
-* [Committer guide](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/COMMITTER_GUIDE.md)
-* [Contributing guide](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/CONTRIBUTING.md)
-* [Branching strategy](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/BRANCHING.md)
-* [List of adopters](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/ADOPTERS.md)
-* [Maintainers](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/MAINTAINERS.md)
-* [Support](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/SUPPORT.md)
+* [Code of conduct](https://github.com/dell/ansible-powerflex/blob/main/docs/CODE_OF_CONDUCT.md)
+* [Maintainer guide](https://github.com/dell/ansible-powerflex/blob/main/docs/MAINTAINER_GUIDE.md)
+* [Committer guide](https://github.com/dell/ansible-powerflex/blob/main/docs/COMMITTER_GUIDE.md)
+* [Contributing guide](https://github.com/dell/ansible-powerflex/blob/main/docs/CONTRIBUTING.md)
+* [Branching strategy](https://github.com/dell/ansible-powerflex/blob/main/docs/BRANCHING.md)
+* [List of adopters](https://github.com/dell/ansible-powerflex/blob/main/docs/ADOPTERS.md)
+* [Maintainers](https://github.com/dell/ansible-powerflex/blob/main/docs/MAINTAINERS.md)
+* [Support](https://github.com/dell/ansible-powerflex/blob/main/docs/SUPPORT.md)
* [License](#license)
-* [Security](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/SECURITY.md)
+* [Security](https://github.com/dell/ansible-powerflex/blob/main/docs/SECURITY.md)
* [Prerequisites](#prerequisites)
* [List of Ansible modules for Dell PowerFlex](#list-of-ansible-modules-for-dell-powerflex)
* [Installation and execution of Ansible modules for Dell PowerFlex](#installation-and-execution-of-ansible-modules-for-dell-powerflex)
* [Releasing, Maintenance and Deprecation](#releasing-maintenance-and-deprecation)
-## License
-The Ansible collection for PowerFlex is released and licensed under the GPL-3.0 license. See [LICENSE](https://github.com/dell/ansible-powerflex/blob/2.4.0/LICENSE) for the full terms. Ansible modules and modules utilities that are part of the Ansible collection for PowerFlex are released and licensed under the Apache 2.0 license. See [MODULE-LICENSE](https://github.com/dell/ansible-powerflex/blob/2.4.0/MODULE-LICENSE) for the full terms.
-## Prerequisites
+
+## Requirements
| **Ansible Modules** | **PowerFlex/VxFlex OS Version** | **SDK version** | **Python version** | **Ansible** |
|---------------------|-----------------------|-------|--------------------|--------------------------|
-| v2.4.0 |3.6 <br> 4.0 <br> 4.5 | 1.11.0 | 3.9.x <br> 3.10.x <br> 3.11.x | 2.14 <br> 2.15 <br> 2.16 |
+| v2.5.0 | 3.6 <br> 4.0 <br> 4.5 <br> APEX Block Storage for Microsoft Azure <br> APEX Block Storage for AWS | 1.12.0 | 3.9.x <br> 3.10.x <br> 3.11.x <br> 3.12.x | 2.15 <br> 2.16 <br> 2.17 |
* Please follow PyPowerFlex installation instructions on [PyPowerFlex Documentation](https://github.com/dell/python-powerflex)
-
-## Idempotency
-The modules are written in such a way that all requests are idempotent and hence fault-tolerant. It essentially means that the result of a successfully performed request is independent of the number of times it is executed.
-
-## List of Ansible modules for Dell PowerFlex
- * [Info module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/info.rst)
- * [Snapshot module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/snapshot.rst)
- * [SDC module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/sdc.rst)
- * [Storage pool module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/storagepool.rst)
- * [Volume module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/volume.rst)
- * [SDS module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/sds.rst)
- * [Device Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/device.rst)
- * [Protection Domain Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/protection_domain.rst)
- * [MDM Cluster Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/mdm_cluster.rst)
- * [Replication Consistency Group Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/replication_consistency_group.rst)
- * [Replication Pair Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/replication_pair.rst)
- * [Snapshot Policy Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/snapshot_policy.rst)
- * [Fault Sets Module](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/modules/fault_set.rst)
## Installation and execution of Ansible modules for Dell PowerFlex
-The installation and execution steps of Ansible modules for Dell PowerFlex can be found [here](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/INSTALLATION.md).
+The installation and execution steps of Ansible modules for Dell PowerFlex can be found [here](https://github.com/dell/ansible-powerflex/blob/main/docs/INSTALLATION.md).
+
+## Use Cases
+Refer to the [example playbooks](https://github.com/dell/ansible-powerflex/tree/main/playbooks) for how the collection can be used with [modules](https://github.com/dell/ansible-powerflex/tree/main/playbooks/modules) and [roles](https://github.com/dell/ansible-powerflex/tree/main/playbooks/roles).
-## Releasing, Maintenance and Deprecation
+## Testing
+The following tests are run on the ansible-powerflex collection:
+- Unit tests
+- Integration tests
+## Support
+Refer to the [Support](https://github.com/dell/ansible-powerflex/blob/main/docs/SUPPORT.md) documentation for more information on the support available from Dell Technologies.
+
+## Release Notes, Maintenance and Deprecation
Ansible Modules for Dell Technologies PowerFlex follows [Semantic Versioning](https://semver.org/).
A new version will be released regularly if significant changes (bug fixes or new features) are made in the collection.
-Released code versions are located on "release" branches with names of the form "release-x.y.z" where x.y.z corresponds to the version number. More information on branching strategy followed can be found [here](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/BRANCHING.md).
+Released code versions are located on "release" branches with names of the form "release-x.y.z" where x.y.z corresponds to the version number. More information on branching strategy followed can be found [here](https://github.com/dell/ansible-powerflex/blob/main/docs/BRANCHING.md).
+
+Ansible Modules for Dell Technologies PowerFlex deprecation cycle is aligned with that of [Ansible](https://docs.ansible.com/ansible/latest/dev_guide/module_lifecycle.html).
-Ansible Modules for Dell Technologies PowerFlex deprecation cycle is aligned with that of [Ansible](https://docs.ansible.com/ansible/latest/dev_guide/module_lifecycle.html). \ No newline at end of file
+See [change logs](https://github.com/dell/ansible-powerflex/blob/main/CHANGELOG.rst) for more information on what is new in the releases.
+
+## Related Information
+
+### Idempotency
+The modules are written in such a way that all requests are idempotent and hence fault-tolerant. This essentially means that the result of a successfully performed request is independent of the number of times it is executed.
+
+### List of Ansible modules for Dell PowerFlex
+ * [Info module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/info.rst)
+ * [Snapshot module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/snapshot.rst)
+ * [SDC module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/sdc.rst)
+ * [Storage pool module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/storagepool.rst)
+ * [Volume module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/volume.rst)
+ * [SDS module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/sds.rst)
+ * [Device Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/device.rst)
+ * [Protection Domain Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/protection_domain.rst)
+ * [MDM Cluster Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/mdm_cluster.rst)
+ * [Replication Consistency Group Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/replication_consistency_group.rst)
+ * [Replication Pair Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/replication_pair.rst)
+ * [Snapshot Policy Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/snapshot_policy.rst)
+ * [Fault Sets Module](https://github.com/dell/ansible-powerflex/blob/main/docs/modules/fault_set.rst)
+
+
+## License
+The Ansible collection for PowerFlex is released and licensed under the GPL-3.0 license. See [LICENSE](https://github.com/dell/ansible-powerflex/blob/main/LICENSE) for the full terms. Ansible modules and modules utilities that are part of the Ansible collection for PowerFlex are released and licensed under the Apache 2.0 license. See [MODULE-LICENSE](https://github.com/dell/ansible-powerflex/blob/main/MODULE-LICENSE) for the full terms. \ No newline at end of file
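As a quick orientation for reviewers, here is a minimal sketch of invoking one of the modules listed in the README. The connection parameters match the collection's standard doc-fragment options that appear elsewhere in this diff; the pool and protection domain names are placeholders, not values taken from the source.

```yaml
# Illustrative sketch only -- object names below are placeholders.
- name: Manage a PowerFlex storage pool
  hosts: localhost
  connection: local
  gather_facts: false
  tasks:
    - name: Ensure the storage pool exists
      dellemc.powerflex.storagepool:
        hostname: "{{ hostname }}"              # PowerFlex gateway/manager address
        username: "{{ username }}"
        password: "{{ password }}"
        validate_certs: "{{ validate_certs }}"
        storage_pool_name: "ansible_demo_pool"            # placeholder
        protection_domain_name: "demo_protection_domain"  # placeholder
        media_type: "HDD"
        state: "present"
```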
diff --git a/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml b/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml
index a4fb3c6c8..15db60f2a 100644
--- a/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml
+++ b/ansible_collections/dellemc/powerflex/changelogs/changelog.yaml
@@ -163,3 +163,10 @@ releases:
minor_changes:
- Added support for executing Ansible PowerFlex modules and roles on AWS environment.
release_date: '2024-04-30'
+ 2.5.0:
+ changes:
+ minor_changes:
+ - The storage pool module has been enhanced to support more features.
+ - Fixed the roles to support attaching the MDM cluster to the gateway.
+      - Added support for PowerFlex Onyx version (4.6.x).
+ release_date: '2024-05-31'
diff --git a/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md b/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md
index 84531753a..12ce41fec 100644
--- a/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md
+++ b/ansible_collections/dellemc/powerflex/docs/CONTRIBUTING.md
@@ -10,7 +10,7 @@ You may obtain a copy of the License at
# How to contribute
-Become one of the contributors to this project! We thrive to build a welcoming and open community for anyone who wants to use the project or contribute to it. There are just a few small guidelines you need to follow. To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/2.4.0/CODE_OF_CONDUCT.md).
+Become one of the contributors to this project! We strive to build a welcoming and open community for anyone who wants to use the project or contribute to it. There are just a few small guidelines you need to follow. To help us create a safe and positive community experience for all, we require all participants to adhere to the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/main/CODE_OF_CONDUCT.md).
## Table of contents
@@ -76,7 +76,7 @@ Triage helps ensure that issues resolve quickly by:
If you don't have the knowledge or time to code, consider helping with _issue triage_. The Ansible modules for Dell PowerFlex community will thank you for saving them time by spending some of yours.
-Read more about the ways you can [Triage issues](https://github.com/dell/ansible-powerflex/blob/2.4.0/ISSUE_TRIAGE.md).
+Read more about the ways you can [Triage issues](https://github.com/dell/ansible-powerflex/blob/main/ISSUE_TRIAGE.md).
## Your first contribution
@@ -89,7 +89,7 @@ When you're ready to contribute, it's time to create a pull request.
## Branching
-* [Branching Strategy for Ansible modules for Dell PowerFlex](https://github.com/dell/ansible-powerflex/blob/2.4.0/BRANCHING.md)
+* [Branching Strategy for Ansible modules for Dell PowerFlex](https://github.com/dell/ansible-powerflex/blob/main/BRANCHING.md)
## Signing your commits
@@ -144,7 +144,7 @@ Make sure that the title for your pull request uses the same format as the subje
### Quality gates for pull requests
-GitHub Actions are used to enforce quality gates when a pull request is created or when any commit is made to the pull request. These GitHub Actions enforce our minimum code quality requirement for any code that get checked into the repository. If any of the quality gates fail, it is expected that the contributor will look into the check log, understand the problem and resolve the issue. If help is needed, please feel free to reach out the maintainers of the project for [support](https://github.com/dell/ansible-powerflex/blob/2.4.0/SUPPORT.md).
+GitHub Actions are used to enforce quality gates when a pull request is created or when any commit is made to the pull request. These GitHub Actions enforce our minimum code quality requirement for any code that gets checked into the repository. If any of the quality gates fail, it is expected that the contributor will look into the check log, understand the problem and resolve the issue. If help is needed, please feel free to reach out to the maintainers of the project for [support](https://github.com/dell/ansible-powerflex/blob/main/SUPPORT.md).
#### Code sanitization
diff --git a/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md b/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md
index c2b8df3ba..f3e0e1c01 100644
--- a/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md
+++ b/ansible_collections/dellemc/powerflex/docs/INSTALLATION.md
@@ -41,7 +41,7 @@ You may obtain a copy of the License at
* Download the latest tar build from any of the available distribution channel [Ansible Galaxy](https://galaxy.ansible.com/dellemc/powerflex) /[Automation Hub](https://console.redhat.com/ansible/automation-hub/repo/published/dellemc/powerflex) and use this command to install the collection anywhere in your system:
- ansible-galaxy collection install dellemc-powerflex-2.4.0.tar.gz -p <install_path>
+ ansible-galaxy collection install dellemc-powerflex-2.5.0.tar.gz -p <install_path>
* Set the environment variable:
@@ -68,7 +68,7 @@ You may obtain a copy of the License at
## Ansible modules execution
-The Ansible server must be configured with Python library for PowerFlex to run the Ansible playbooks. The [Documents](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/) provide information on different Ansible modules along with their functions and syntax. The parameters table in the Product Guide provides information on various parameters which needs to be configured before running the modules.
+The Ansible server must be configured with the Python library for PowerFlex to run the Ansible playbooks. The [Documents](https://github.com/dell/ansible-powerflex/blob/main/docs/) provide information on the different Ansible modules along with their functions and syntax. The parameters table in the Product Guide provides information on the various parameters which need to be configured before running the modules.
## SSL certificate validation
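Beyond installing the downloaded tarball as shown above, a collection is commonly pinned in a requirements file and installed with `ansible-galaxy collection install -r requirements.yml -p <install_path>`. The sketch below is illustrative; the Galaxy source and the version constraint are assumptions, not taken from this diff.

```yaml
# requirements.yml -- illustrative sketch; the version pin is an assumption.
collections:
  - name: dellemc.powerflex
    version: ">=2.5.0"
```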
diff --git a/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md b/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md
index 50d4665ef..317216157 100644
--- a/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md
+++ b/ansible_collections/dellemc/powerflex/docs/ISSUE_TRIAGE.md
@@ -41,10 +41,10 @@ This section describes the various issue templates and the expected content.
Should explain what happened, what was expected and how to reproduce it together with any additional information that may help giving a complete picture of what happened such as screenshots, output and any environment related information that's applicable and/or maybe related to the reported problem:
- - Ansible Version: [e.g. 2.14]
+ - Ansible Version: [e.g. 2.15]
- Python Version [e.g. 3.11]
- - Ansible modules for Dell PowerFlex Version: [e.g. 2.4.0]
- - PowerFlex SDK version: [e.g. PyPowerFlex 1.11.0]
+ - Ansible modules for Dell PowerFlex Version: [e.g. 2.5.0]
+ - PowerFlex SDK version: [e.g. PyPowerFlex 1.12.0]
- Any other additional information...
#### Feature requests
diff --git a/ansible_collections/dellemc/powerflex/docs/MAINTAINERS.md b/ansible_collections/dellemc/powerflex/docs/MAINTAINERS.md
index 4679f6d73..6f0c6100e 100644
--- a/ansible_collections/dellemc/powerflex/docs/MAINTAINERS.md
+++ b/ansible_collections/dellemc/powerflex/docs/MAINTAINERS.md
@@ -9,10 +9,7 @@ You may obtain a copy of the License at
-->
# Maintainers
-
-* Ananthu Kuttattu (kuttattz)
* Bhavneet Sharma (Bhavneet-Sharma)
* Jennifer John (Jennifer-John)
* Meenakshi Dembi (meenakshidembi691)
-* Pavan Mudunuri (Pavan-Mudunuri)
* Trisha Datta (trisha-dell)
diff --git a/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md b/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md
index 5f982c2f9..2832c9791 100644
--- a/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md
+++ b/ansible_collections/dellemc/powerflex/docs/MAINTAINER_GUIDE.md
@@ -27,7 +27,7 @@ If a candidate is approved, a Maintainer contacts the candidate to invite them t
## Maintainer policies
* Lead by example
-* Follow the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/2.4.0/CODE_OF_CONDUCT.md) and the guidelines in the [Contributing](https://github.com/dell/ansible-powerflex/blob/2.4.0/CONTRIBUTING.md) and [Committer](https://github.com/dell/ansible-powerflex/blob/2.4.0/COMMITTER_GUIDE.md) guides
+* Follow the [Code of Conduct](https://github.com/dell/ansible-powerflex/blob/main/CODE_OF_CONDUCT.md) and the guidelines in the [Contributing](https://github.com/dell/ansible-powerflex/blob/main/CONTRIBUTING.md) and [Committer](https://github.com/dell/ansible-powerflex/blob/main/COMMITTER_GUIDE.md) guides
* Promote a friendly and collaborative environment within our community
* Be actively engaged in discussions, answering questions, updating defects, and reviewing pull requests
* Criticize code, not people. Ideally, tell the contributor a better way to do what they need.
diff --git a/ansible_collections/dellemc/powerflex/docs/Release Notes.md b/ansible_collections/dellemc/powerflex/docs/Release Notes.md
index 1a2552326..36cf06f97 100644
--- a/ansible_collections/dellemc/powerflex/docs/Release Notes.md
+++ b/ansible_collections/dellemc/powerflex/docs/Release Notes.md
@@ -1,6 +1,6 @@
**Ansible Modules for Dell Technologies PowerFlex**
=========================================
-### Release notes 2.4.0
+### Release notes 2.5.0
> © 2024 Dell Inc. or its subsidiaries. All rights reserved. Dell
> and other trademarks are trademarks of Dell Inc. or its
@@ -28,7 +28,7 @@ Table 1. Revision history
| Revision | Date | Description |
|----------|-----------------|-------------------------------------------------------------|
-| 01 | April 2024 | Current release of Ansible Modules for Dell PowerFlex 2.4.0 |
+| 01 | May 2024 | Current release of Ansible Modules for Dell PowerFlex 2.5.0 |
Product description
-------------------
@@ -44,10 +44,8 @@ each of the entities.
New features and enhancements
-----------------------------
Along with the previous release deliverables, this release supports following features -
-- Added support for executing Ansible PowerFlex modules and roles on AWS environment.
-- Added support for resource group provisioning to validate, deploy, edit, add nodes and delete a resource group.
-- The Info module is enhanced to list out all the firmware repository.
-- Added support for PowerFlex ansible modules and roles on Azure.
+- The storage pool module has been enhanced to support additional pool settings (zero padding, capacity alert thresholds, I/O priority policies, persistent checksum and more) and check mode.
+- Fixed the roles to support attaching the MDM cluster to the gateway.
Known issues
------------
@@ -63,11 +61,11 @@ Limitations
Distribution
------------
The software package is available for download from the [Ansible Modules
-for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/2.4.0) page.
+for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/main) page.
Documentation
-------------
-The documentation is available on [Ansible Modules for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/2.4.0/docs)
+The documentation is available on [Ansible Modules for PowerFlex GitHub](https://github.com/dell/ansible-powerflex/tree/main/docs)
page. It includes the following:
- README
diff --git a/ansible_collections/dellemc/powerflex/docs/SECURITY.md b/ansible_collections/dellemc/powerflex/docs/SECURITY.md
index a7eab1ba4..b2de265bb 100644
--- a/ansible_collections/dellemc/powerflex/docs/SECURITY.md
+++ b/ansible_collections/dellemc/powerflex/docs/SECURITY.md
@@ -12,7 +12,7 @@ You may obtain a copy of the License at
The Ansible modules for Dell PowerFlex repository is inspected for security vulnerabilities via Black Duck scans and static code analysis.
-In addition to this, there are various security checks that get executed against a branch when a pull request is created/updated. Please refer to [pull request](https://github.com/dell/ansible-powerflex/blob/2.4.0/docs/CONTRIBUTING.md#Pull-requests) for more information.
+In addition to this, there are various security checks that get executed against a branch when a pull request is created/updated. Please refer to [pull request](https://github.com/dell/ansible-powerflex/blob/main/docs/CONTRIBUTING.md#Pull-requests) for more information.
## Reporting a vulnerability
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/device.rst b/ansible_collections/dellemc/powerflex/docs/modules/device.rst
index cbeb0f813..9eccc2627 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/device.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/device.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst b/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst
index 191ab73ca..d1d5fb321 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/fault_set.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/info.rst b/ansible_collections/dellemc/powerflex/docs/modules/info.rst
index fd674804f..49c1f0f77 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/info.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/info.rst
@@ -23,9 +23,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst b/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst
index fa73ae5d6..4c64170f3 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/mdm_cluster.rst
@@ -23,9 +23,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst b/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst
index 0bd532b9a..10acf4b23 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/protection_domain.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst b/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst
index d8d144077..f60ce5bdb 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/replication_consistency_group.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst b/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst
index 7c883c6fd..9da3699fc 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/replication_pair.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst b/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst
index a72918d83..3a0d9cd34 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/resource_group.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst b/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst
index 7b0871b30..6d4003f69 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/sdc.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/sds.rst b/ansible_collections/dellemc/powerflex/docs/modules/sds.rst
index 188fe9f9a..cd2730ceb 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/sds.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/sds.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst b/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst
index e09e80069..f44b3272f 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/snapshot.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst b/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst
index dd683c921..21823e96d 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/snapshot_policy.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst b/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst
index f9f3f271f..1cc9717bc 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/storagepool.rst
@@ -21,9 +21,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
@@ -80,6 +78,163 @@ Parameters
Enable/Disable RMcache on a specific storage pool.
+ enable_zero_padding (optional, bool, None)
+ Enable/Disable zero padding on a specific storage pool.
+
+
+ rep_cap_max_ratio (optional, int, None)
+ Set replication journal capacity of a storage pool.
+
+
+ enable_rebalance (optional, bool, None)
+ Enable/Disable rebalance on a specific storage pool.
+
+
+ spare_percentage (optional, int, None)
+ Set the spare percentage of a specific storage pool.
+
+
+ rmcache_write_handling_mode (optional, str, Cached)
+ Set RM cache write handling mode of a storage pool.
+
+ *Passthrough* Writes skip the cache and are stored in storage only.
+
+ *Cached* Writes are stored in both cache and storage (the default).
+
+ Caching is only performed for IOs whose size is a multiple of 4k bytes.
+
+
+ enable_rebuild (optional, bool, None)
+ Enable/Disable rebuild of a specific storage pool.
+
+
+ enable_fragmentation (optional, bool, None)
+ Enable/Disable fragmentation of a specific storage pool.
+
+
+ parallel_rebuild_rebalance_limit (optional, int, None)
+ Set rebuild/rebalance parallelism limit of a storage pool.
+
+
+ persistent_checksum (optional, dict, None)
+ Enable/Disable persistent checksum of a specific storage pool.
+
+
+ enable (optional, bool, None)
+ Enable / disable persistent checksum.
+
+
+ validate_on_read (optional, bool, None)
+ Validate checksum upon reading data.
+
+
+ builder_limit (optional, int, 3072)
+ Bandwidth limit in KB/s for the checksum building process.
+
+ Valid range is 1024 to 10240.
+
+
+
+ protected_maintenance_mode_io_priority_policy (optional, dict, None)
+ Set protected maintenance mode I/O priority policy of a storage pool.
+
+
+ policy (optional, str, limitNumOfConcurrentIos)
+ The I/O priority policy for protected maintenance mode.
+
+      ``unlimited`` Protected maintenance mode IOPS are not limited.
+
+      ``limitNumOfConcurrentIos`` Limit the number of allowed concurrent protected maintenance mode migration I/Os to the value defined for *concurrent_ios_per_device*.
+
+      ``favorAppIos`` Always limit the number of allowed concurrent protected maintenance mode migration I/Os to the value defined for *concurrent_ios_per_device*.
+
+      If application I/Os are in progress, the bandwidth of protected maintenance mode migration I/Os should also be limited to the value defined for the *bw_limit_per_device*.
+
+
+ concurrent_ios_per_device (optional, int, None)
+ The maximum number of concurrent protected maintenance mode migration I/Os per device.
+
+ Valid range is 1 to 20.
+
+
+ bw_limit_per_device (optional, int, None)
+ The maximum bandwidth of protected maintenance mode migration I/Os, in KB per second, per device.
+
+ Valid range is 1024 to 1048576.
+
+
+
+ vtree_migration_io_priority_policy (optional, dict, None)
+ Set the I/O priority policy for V-Tree migration for a specific Storage Pool.
+
+
+ policy (optional, str, None)
+      The I/O priority policy for V-Tree migration.
+
+      ``limitNumOfConcurrentIos`` Limit the number of allowed concurrent V-Tree migration I/Os (default) to the value defined for *concurrent_ios_per_device*.
+
+      ``favorAppIos`` Always limit the number of allowed concurrent V-Tree migration I/Os to the value defined for *concurrent_ios_per_device*.
+
+      If application I/Os are in progress, the bandwidth of V-Tree migration I/Os should also be limited to the value defined for the *bw_limit_per_device*.
+
+
+ concurrent_ios_per_device (optional, int, None)
+ The maximum number of concurrent V-Tree migration I/Os per device.
+
+ Valid range is 1 to 20
+
+
+ bw_limit_per_device (optional, int, None)
+ The maximum bandwidth of V-Tree migration I/Os, in KB per second, per device.
+
+ Valid range is 1024 to 25600.
+
+
+
+ rebalance_io_priority_policy (optional, dict, None)
+ Set the rebalance I/O priority policy for a Storage Pool.
+
+
+ policy (optional, str, favorAppIos)
+ Policy to use for rebalance I/O priority.
+
+ ``unlimited`` Rebalance I/Os are not limited.
+
+ ``limitNumOfConcurrentIos`` Limit the number of allowed concurrent rebalance I/Os.
+
+ ``favorAppIos`` Limit the number and bandwidth of rebalance I/Os when application I/Os are in progress.
+
+
+ concurrent_ios_per_device (optional, int, None)
+ The maximum number of concurrent rebalance I/Os per device.
+
+ Valid range is 1 to 20.
+
+
+ bw_limit_per_device (optional, int, None)
+ The maximum bandwidth of rebalance I/Os, in KB/s, per device.
+
+ Valid range is 1024 to 1048576.
+
+
+
+ cap_alert_thresholds (optional, dict, None)
+ Set the threshold for triggering capacity usage alerts.
+
+    Alert thresholds are calculated from each Storage Pool capacity after deducting the defined amount of spare capacity.
+
+
+ high_threshold (optional, int, None)
+ Threshold of the non-spare capacity of the Storage Pool that will trigger a high-priority alert, expressed as a percentage.
+
+ This value must be lower than the *critical_threshold*.
+
+
+ critical_threshold (optional, int, None)
+ Threshold of the non-spare capacity of the Storage Pool that will trigger a critical-priority alert, expressed as a percentage.
+
+
+
state (True, str, None)
State of the storage pool.
@@ -122,7 +277,7 @@ Notes
.. note::
- TRANSITIONAL media type is supported only during modification.
- - The *check_mode* is not supported.
+ - The *check_mode* is supported.
- The modules present in the collection named as 'dellemc.powerflex' are built to support the Dell PowerFlex storage platform.
@@ -153,37 +308,75 @@ Examples
storage_pool_id: "abcd1234ab12r"
state: "present"
- - name: Create a new storage pool by name
- dellemc.powerflex.storagepool:
- hostname: "{{hostname}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- storage_pool_name: "ansible_test_pool"
- protection_domain_id: "1c957da800000000"
- media_type: "HDD"
- state: "present"
-
- - name: Modify a storage pool by name
+ - name: Create a new Storage pool
dellemc.powerflex.storagepool:
- hostname: "{{hostname}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- storage_pool_name: "ansible_test_pool"
- protection_domain_id: "1c957da800000000"
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_certs }}"
+ storage_pool_name: "{{ pool_name }}"
+ protection_domain_name: "{{ protection_domain_name }}"
+ cap_alert_thresholds:
+ high_threshold: 30
+ critical_threshold: 50
+ media_type: "TRANSITIONAL"
+ enable_zero_padding: true
+ rep_cap_max_ratio: 40
+ rmcache_write_handling_mode: "Passthrough"
+ spare_percentage: 80
+ enable_rebalance: false
+ enable_fragmentation: false
+ enable_rebuild: false
use_rmcache: true
use_rfcache: true
+ parallel_rebuild_rebalance_limit: 3
+ protected_maintenance_mode_io_priority_policy:
+ policy: "unlimited"
+ rebalance_io_priority_policy:
+ policy: "unlimited"
+ vtree_migration_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 10
+ persistent_checksum:
+ enable: false
state: "present"
- - name: Rename storage pool by id
+ - name: Modify a Storage pool by name
dellemc.powerflex.storagepool:
- hostname: "{{hostname}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- storage_pool_id: "abcd1234ab12r"
- storage_pool_new_name: "new_ansible_pool"
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_certs }}"
+ storage_pool_name: "{{ pool_name }}"
+ protection_domain_name: "{{ protection_domain_name }}"
+ storage_pool_new_name: "pool_name_new"
+ cap_alert_thresholds:
+ high_threshold: 50
+ critical_threshold: 70
+ enable_zero_padding: false
+ rep_cap_max_ratio: 60
+ rmcache_write_handling_mode: "Passthrough"
+ spare_percentage: 90
+ enable_rebalance: true
+ enable_fragmentation: true
+ enable_rebuild: true
+ use_rmcache: true
+ use_rfcache: true
+ parallel_rebuild_rebalance_limit: 6
+ protected_maintenance_mode_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 4
+ rebalance_io_priority_policy:
+ policy: "favorAppIos"
+ concurrent_ios_per_device: 10
+ bw_limit_per_device: 4096
+ vtree_migration_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 10
+ persistent_checksum:
+ enable: true
+ validate_on_read: true
+ builder_limit: 1024
state: "present"
@@ -260,4 +453,5 @@ Authors
- Arindam Datta (@dattaarindam) <ansible.team@dell.com>
- P Srinivas Rao (@srinivas-rao5) <ansible.team@dell.com>
+- Trisha Datta (@trisha-dell) <ansible.team@dell.com>
diff --git a/ansible_collections/dellemc/powerflex/docs/modules/volume.rst b/ansible_collections/dellemc/powerflex/docs/modules/volume.rst
index 16dbf2b25..1132bec40 100644
--- a/ansible_collections/dellemc/powerflex/docs/modules/volume.rst
+++ b/ansible_collections/dellemc/powerflex/docs/modules/volume.rst
@@ -23,9 +23,7 @@ Requirements
The below requirements are needed on the host that executes this module.
- A Dell PowerFlex storage system version 3.6 or later.
-- Ansible-core 2.14 or later.
-- PyPowerFlex 1.11.0.
-- Python 3.9, 3.10 or 3.11.
+- PyPowerFlex 1.12.0.
diff --git a/ansible_collections/dellemc/powerflex/meta/execution-environment.yml b/ansible_collections/dellemc/powerflex/meta/execution-environment.yml
index 54e87f9c9..2b747b779 100644
--- a/ansible_collections/dellemc/powerflex/meta/execution-environment.yml
+++ b/ansible_collections/dellemc/powerflex/meta/execution-environment.yml
@@ -1,5 +1,5 @@
---
-version: 1
+version: 3
dependencies:
# Absolute/relative path of requirements.yml
galaxy: requirements.yml
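The `version: 1` to `version: 3` bump tracks the ansible-builder schema change. Only `galaxy: requirements.yml` is visible in this hunk, so the rest of the definition below is an assumed sketch of a typical version-3 layout; the `python` and `system` entries and their file names are illustrative, not taken from the diff.

```yaml
---
version: 3
dependencies:
  galaxy: requirements.yml   # from this diff
  python: requirements.txt   # assumed: Python dependencies such as PyPowerFlex
  system: bindep.txt         # assumed: system packages needed at image build time
```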
diff --git a/ansible_collections/dellemc/powerflex/meta/runtime.yml b/ansible_collections/dellemc/powerflex/meta/runtime.yml
index dd41c3e91..6711a25cc 100644
--- a/ansible_collections/dellemc/powerflex/meta/runtime.yml
+++ b/ansible_collections/dellemc/powerflex/meta/runtime.yml
@@ -1,5 +1,5 @@
---
-requires_ansible: ">=2.14.0"
+requires_ansible: ">=2.15.0"
plugin_routing:
modules:
dellemc_powerflex_gatherfacts:
diff --git a/ansible_collections/dellemc/powerflex/playbooks/modules/storagepool.yml b/ansible_collections/dellemc/powerflex/playbooks/modules/storagepool.yml
index 7e7860a0a..7d407b610 100644
--- a/ansible_collections/dellemc/powerflex/playbooks/modules/storagepool.yml
+++ b/ansible_collections/dellemc/powerflex/playbooks/modules/storagepool.yml
@@ -20,7 +20,29 @@
validate_certs: "{{ validate_certs }}"
storage_pool_name: "{{ pool_name }}"
protection_domain_name: "{{ protection_domain_name }}"
- media_type: "HDD"
+ cap_alert_thresholds:
+ high_threshold: 30
+ critical_threshold: 50
+ media_type: "TRANSITIONAL"
+ enable_zero_padding: true
+ rep_cap_max_ratio: 40
+ rmcache_write_handling_mode: "Passthrough"
+ spare_percentage: 80
+ enable_rebalance: false
+ enable_fragmentation: false
+ enable_rebuild: false
+ use_rmcache: true
+ use_rfcache: true
+ parallel_rebuild_rebalance_limit: 3
+ protected_maintenance_mode_io_priority_policy:
+ policy: "unlimited"
+ rebalance_io_priority_policy:
+ policy: "unlimited"
+ vtree_migration_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 10
+ persistent_checksum:
+ enable: false
state: "present"
- name: Set pool id
@@ -47,15 +69,40 @@
storage_pool_new_name: "{{ pool_name }}"
state: "present"
-
- name: Modify a Storage pool by name
dellemc.powerflex.storagepool:
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
validate_certs: "{{ validate_certs }}"
- storage_pool_name: "ansible_test_pool"
+ storage_pool_name: "{{ pool_name }}"
protection_domain_name: "{{ protection_domain_name }}"
+ storage_pool_new_name: "pool_name_new"
+ cap_alert_thresholds:
+ high_threshold: 50
+ critical_threshold: 70
+ enable_zero_padding: false
+ rep_cap_max_ratio: 60
+ rmcache_write_handling_mode: "Passthrough"
+ spare_percentage: 90
+ enable_rebalance: true
+ enable_fragmentation: true
+ enable_rebuild: true
use_rmcache: true
use_rfcache: true
+ parallel_rebuild_rebalance_limit: 6
+ protected_maintenance_mode_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 4
+ rebalance_io_priority_policy:
+ policy: "favorAppIos"
+ concurrent_ios_per_device: 10
+ bw_limit_per_device: 4096
+ vtree_migration_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 10
+ persistent_checksum:
+ enable: true
+ validate_on_read: true
+ builder_limit: 1024
state: "present"
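Because this release changes the storage pool module's note from "check_mode is not supported" to "supported", the updated playbook tasks can now be dry-run. The snippet below is an illustrative sketch using Ansible's standard task-level `check_mode` keyword; the variables mirror the playbook above and the `spare_percentage` value is an arbitrary example.

```yaml
- name: Preview storage pool changes without applying them
  dellemc.powerflex.storagepool:
    hostname: "{{ hostname }}"
    username: "{{ username }}"
    password: "{{ password }}"
    validate_certs: "{{ validate_certs }}"
    storage_pool_name: "{{ pool_name }}"
    protection_domain_name: "{{ protection_domain_name }}"
    spare_percentage: 85          # arbitrary example value
    state: "present"
  check_mode: true                # report what would change; nothing is modified
```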
diff --git a/ansible_collections/dellemc/powerflex/plugins/doc_fragments/powerflex.py b/ansible_collections/dellemc/powerflex/plugins/doc_fragments/powerflex.py
index 32b17a4d6..b6d8626ae 100644
--- a/ansible_collections/dellemc/powerflex/plugins/doc_fragments/powerflex.py
+++ b/ansible_collections/dellemc/powerflex/plugins/doc_fragments/powerflex.py
@@ -52,9 +52,7 @@ class ModuleDocFragment(object):
default: 120
requirements:
- A Dell PowerFlex storage system version 3.6 or later.
- - Ansible-core 2.14 or later.
- - PyPowerFlex 1.10.0.
- - Python 3.9, 3.10 or 3.11.
+ - PyPowerFlex 1.12.0.
notes:
- The modules present in the collection named as 'dellemc.powerflex'
are built to support the Dell PowerFlex storage platform.
diff --git a/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py b/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py
index 50f41666c..da603e648 100644
--- a/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py
+++ b/ansible_collections/dellemc/powerflex/plugins/module_utils/storage/dell/utils.py
@@ -83,10 +83,10 @@ def ensure_required_libs(module):
exception=PKG_RSRC_IMP_ERR)
if not HAS_POWERFLEX_SDK:
- module.fail_json(msg=missing_required_lib("PyPowerFlex V 1.10.0 or above"),
+ module.fail_json(msg=missing_required_lib("PyPowerFlex V 1.12.0 or above"),
exception=POWERFLEX_SDK_IMP_ERR)
- min_ver = '1.11.0'
+ min_ver = '1.12.0'
try:
curr_version = pkg_resources.require("PyPowerFlex")[0].version
supported_version = (parse_version(curr_version) >= parse_version(min_ver))
diff --git a/ansible_collections/dellemc/powerflex/plugins/modules/storagepool.py b/ansible_collections/dellemc/powerflex/plugins/modules/storagepool.py
index 9c8bb1d4a..5aca87b4e 100644
--- a/ansible_collections/dellemc/powerflex/plugins/modules/storagepool.py
+++ b/ansible_collections/dellemc/powerflex/plugins/modules/storagepool.py
@@ -1,6 +1,6 @@
#!/usr/bin/python
-# Copyright: (c) 2021, Dell Technologies
+# Copyright: (c) 2021-24, Dell Technologies
# Apache License version 2.0 (see MODULE-LICENSE or http://www.apache.org/licenses/LICENSE-2.0.txt)
"""Ansible module for managing Dell Technologies (Dell) PowerFlex storage pool"""
@@ -28,6 +28,7 @@ extends_documentation_fragment:
author:
- Arindam Datta (@dattaarindam) <ansible.team@dell.com>
- P Srinivas Rao (@srinivas-rao5) <ansible.team@dell.com>
+- Trisha Datta (@trisha-dell) <ansible.team@dell.com>
options:
storage_pool_name:
@@ -76,6 +77,160 @@ options:
description:
- Enable/Disable RMcache on a specific storage pool.
type: bool
+ enable_zero_padding:
+ description:
+ - Enable/Disable zero padding on a specific storage pool.
+ type: bool
+ rep_cap_max_ratio:
+ description:
+ - Set replication journal capacity of a storage pool.
+ type: int
+ enable_rebalance:
+ description:
+ - Enable/Disable rebalance on a specific storage pool.
+ type: bool
+ spare_percentage:
+ description:
+ - Set the spare percentage of a specific storage pool.
+ type: int
+  rmcache_write_handling_mode:
+ description:
+ - Set RM cache write handling mode of a storage pool.
+ - I(Passthrough) Writes skip the cache and are stored in storage only.
+ - I(Cached) Writes are stored in both cache and storage (the default).
+ - Caching is only performed for IOs whose size is a multiple of 4k bytes.
+ type: str
+ choices: ['Cached', 'Passthrough']
+ default: 'Cached'
+ enable_rebuild:
+ description:
+ - Enable/Disable rebuild of a specific storage pool.
+ type: bool
+ enable_fragmentation:
+ description:
+ - Enable/Disable fragmentation of a specific storage pool.
+ type: bool
+ parallel_rebuild_rebalance_limit:
+ description:
+ - Set rebuild/rebalance parallelism limit of a storage pool.
+ type: int
+ persistent_checksum:
+ description:
+ - Enable/Disable persistent checksum of a specific storage pool.
+ type: dict
+ suboptions:
+ enable:
+ description:
+ - Enable / disable persistent checksum.
+ type: bool
+ validate_on_read:
+ description:
+ - Validate checksum upon reading data.
+ type: bool
+ builder_limit:
+ description:
+ - Bandwidth limit in KB/s for the checksum building process.
+ - Valid range is 1024 to 10240.
+ default: 3072
+ type: int
+ protected_maintenance_mode_io_priority_policy:
+ description:
+ - Set protected maintenance mode I/O priority policy of a storage pool.
+ type: dict
+ suboptions:
+ policy:
+ description:
+ - The I/O priority policy for protected maintenance mode.
+        - C(unlimited) Protected maintenance mode IOPS are not limited.
+        - C(limitNumOfConcurrentIos) Limit the number of allowed concurrent protected maintenance mode
+          migration I/Os to the value defined for I(concurrent_ios_per_device).
+        - C(favorAppIos) Always limit the number of allowed concurrent protected maintenance mode
+          migration I/Os to the value defined for I(concurrent_ios_per_device).
+        - If application I/Os are in progress, the bandwidth of protected maintenance mode
+          migration I/Os should also be limited to the value defined for the I(bw_limit_per_device).
+ type: str
+ choices: ['unlimited', 'limitNumOfConcurrentIos', 'favorAppIos']
+ default: 'limitNumOfConcurrentIos'
+ concurrent_ios_per_device:
+ description:
+ - The maximum number of concurrent protected maintenance mode migration I/Os per device.
+ - Valid range is 1 to 20.
+ type: int
+ bw_limit_per_device:
+ description:
+ - The maximum bandwidth of protected maintenance mode migration I/Os,
+ in KB per second, per device.
+ - Valid range is 1024 to 1048576.
+ type: int
+ vtree_migration_io_priority_policy:
+ description:
+ - Set the I/O priority policy for V-Tree migration for a specific Storage Pool.
+ type: dict
+ suboptions:
+ policy:
+ description:
+        - The I/O priority policy for V-Tree migration.
+        - C(limitNumOfConcurrentIos) Limit the number of allowed concurrent V-Tree
+          migration I/Os (default) to the value defined for I(concurrent_ios_per_device).
+        - C(favorAppIos) Always limit the number of allowed concurrent
+          V-Tree migration I/Os to the value defined for I(concurrent_ios_per_device).
+        - If application I/Os are in progress, the bandwidth of V-Tree migration I/Os
+          should also be limited to the value defined for the I(bw_limit_per_device).
+ type: str
+ choices: ['limitNumOfConcurrentIos', 'favorAppIos']
+ concurrent_ios_per_device:
+ description:
+ - The maximum number of concurrent V-Tree migration I/Os per device.
+ - Valid range is 1 to 20
+ type: int
+ bw_limit_per_device:
+ description:
+ - The maximum bandwidth of V-Tree migration I/Os,
+ in KB per second, per device.
+ - Valid range is 1024 to 25600.
+ type: int
+ rebalance_io_priority_policy:
+ description:
+ - Set the rebalance I/O priority policy for a Storage Pool.
+ type: dict
+ suboptions:
+ policy:
+ description:
+ - Policy to use for rebalance I/O priority.
+ - C(unlimited) Rebalance I/Os are not limited.
+ - C(limitNumOfConcurrentIos) Limit the number of allowed concurrent rebalance I/Os.
+ - C(favorAppIos) Limit the number and bandwidth of rebalance I/Os when application I/Os are in progress.
+ type: str
+ choices: ['unlimited', 'limitNumOfConcurrentIos', 'favorAppIos']
+ default: 'favorAppIos'
+ concurrent_ios_per_device:
+ description:
+ - The maximum number of concurrent rebalance I/Os per device.
+ - Valid range is 1 to 20.
+ type: int
+ bw_limit_per_device:
+ description:
+ - The maximum bandwidth of rebalance I/Os, in KB/s, per device.
+ - Valid range is 1024 to 1048576.
+ type: int
+ cap_alert_thresholds:
+ description:
+ - Set the threshold for triggering capacity usage alerts.
+    - Alert thresholds are calculated from each Storage Pool
+ capacity after deducting the defined amount of spare capacity.
+ type: dict
+ suboptions:
+ high_threshold:
+ description:
+ - Threshold of the non-spare capacity of the Storage Pool that will trigger a
+ high-priority alert, expressed as a percentage.
+ - This value must be lower than the I(critical_threshold).
+ type: int
+ critical_threshold:
+ description:
+ - Threshold of the non-spare capacity of the Storage Pool that will trigger a
+ critical-priority alert, expressed as a percentage.
+ type: int
state:
description:
- State of the storage pool.
@@ -84,7 +239,7 @@ options:
required: true
notes:
- TRANSITIONAL media type is supported only during modification.
- - The I(check_mode) is not supported.
+ - The I(check_mode) is supported.
'''
EXAMPLES = r'''
@@ -107,37 +262,75 @@ EXAMPLES = r'''
storage_pool_id: "abcd1234ab12r"
state: "present"
-- name: Create a new storage pool by name
- dellemc.powerflex.storagepool:
- hostname: "{{hostname}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- storage_pool_name: "ansible_test_pool"
- protection_domain_id: "1c957da800000000"
- media_type: "HDD"
- state: "present"
-
-- name: Modify a storage pool by name
+- name: Create a new Storage pool
dellemc.powerflex.storagepool:
- hostname: "{{hostname}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- storage_pool_name: "ansible_test_pool"
- protection_domain_id: "1c957da800000000"
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_certs }}"
+ storage_pool_name: "{{ pool_name }}"
+ protection_domain_name: "{{ protection_domain_name }}"
+ cap_alert_thresholds:
+ high_threshold: 30
+ critical_threshold: 50
+ media_type: "TRANSITIONAL"
+ enable_zero_padding: true
+ rep_cap_max_ratio: 40
+ rmcache_write_handling_mode: "Passthrough"
+ spare_percentage: 80
+ enable_rebalance: false
+ enable_fragmentation: false
+ enable_rebuild: false
use_rmcache: true
use_rfcache: true
+ parallel_rebuild_rebalance_limit: 3
+ protected_maintenance_mode_io_priority_policy:
+ policy: "unlimited"
+ rebalance_io_priority_policy:
+ policy: "unlimited"
+ vtree_migration_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 10
+ persistent_checksum:
+ enable: false
state: "present"
-- name: Rename storage pool by id
+- name: Modify a Storage pool by name
dellemc.powerflex.storagepool:
- hostname: "{{hostname}}"
- username: "{{username}}"
- password: "{{password}}"
- validate_certs: "{{validate_certs}}"
- storage_pool_id: "abcd1234ab12r"
- storage_pool_new_name: "new_ansible_pool"
+ hostname: "{{ hostname }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_certs }}"
+ storage_pool_name: "{{ pool_name }}"
+ protection_domain_name: "{{ protection_domain_name }}"
+ storage_pool_new_name: "pool_name_new"
+ cap_alert_thresholds:
+ high_threshold: 50
+ critical_threshold: 70
+ enable_zero_padding: false
+ rep_cap_max_ratio: 60
+ rmcache_write_handling_mode: "Passthrough"
+ spare_percentage: 90
+ enable_rebalance: true
+ enable_fragmentation: true
+ enable_rebuild: true
+ use_rmcache: true
+ use_rfcache: true
+ parallel_rebuild_rebalance_limit: 6
+ protected_maintenance_mode_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 4
+ rebalance_io_priority_policy:
+ policy: "favorAppIos"
+ concurrent_ios_per_device: 10
+ bw_limit_per_device: 4096
+ vtree_migration_io_priority_policy:
+ policy: "limitNumOfConcurrentIos"
+ concurrent_ios_per_device: 10
+ persistent_checksum:
+ enable: true
+ validate_on_read: true
+ builder_limit: 1024
state: "present"
'''
@@ -558,75 +751,50 @@ storage_pool_details:
'''
from ansible.module_utils.basic import AnsibleModule
-from ansible_collections.dellemc.powerflex.plugins.module_utils.storage.dell\
+from ansible_collections.dellemc.powerflex.plugins.module_utils.storage.dell.libraries.powerflex_base \
+ import PowerFlexBase
+from ansible_collections.dellemc.powerflex.plugins.module_utils.storage.dell.libraries.configuration \
+ import Configuration
+from ansible_collections.dellemc.powerflex.plugins.module_utils.storage.dell \
import utils
LOG = utils.get_logger('storagepool')
-class PowerFlexStoragePool(object):
+class PowerFlexStoragePool(PowerFlexBase):
"""Class with StoragePool operations"""
def __init__(self):
""" Define all parameters required by this module"""
-
- self.module_params = utils.get_powerflex_gateway_host_parameters()
- self.module_params.update(get_powerflex_storagepool_parameters())
-
""" initialize the ansible module """
- mut_ex_args = [['storage_pool_name', 'storage_pool_id'],
- ['protection_domain_name', 'protection_domain_id'],
- ['storage_pool_id', 'protection_domain_name'],
- ['storage_pool_id', 'protection_domain_id']]
-
- required_one_of_args = [['storage_pool_name', 'storage_pool_id']]
- self.module = AnsibleModule(argument_spec=self.module_params,
- supports_check_mode=False,
- mutually_exclusive=mut_ex_args,
- required_one_of=required_one_of_args)
+ mutually_exclusive = [['storage_pool_name', 'storage_pool_id'],
+ ['protection_domain_name', 'protection_domain_id'],
+ ['storage_pool_id', 'protection_domain_name'],
+ ['storage_pool_id', 'protection_domain_id']]
- utils.ensure_required_libs(self.module)
-
- try:
- self.powerflex_conn = utils.get_powerflex_gateway_host_connection(
- self.module.params)
- LOG.info('Got the PowerFlex system connection object instance')
- except Exception as e:
- LOG.error(str(e))
- self.module.fail_json(msg=str(e))
-
- def get_protection_domain(self, protection_domain_name=None,
- protection_domain_id=None):
- """Get protection domain details
- :param protection_domain_name: Name of the protection domain
- :param protection_domain_id: ID of the protection domain
- :return: Protection domain details
- """
- name_or_id = protection_domain_id if protection_domain_id \
- else protection_domain_name
- try:
- filter_fields = {}
- if protection_domain_id:
- filter_fields = {'id': protection_domain_id}
- if protection_domain_name:
- filter_fields = {'name': protection_domain_name}
+ required_one_of = [['storage_pool_name', 'storage_pool_id']]
- pd_details = self.powerflex_conn.protection_domain.get(
- filter_fields=filter_fields)
- if pd_details:
- return pd_details[0]
+ ansible_module_params = {
+ 'argument_spec': get_powerflex_storagepool_parameters(),
+ 'supports_check_mode': True,
+ 'mutually_exclusive': mutually_exclusive,
+ 'required_one_of': required_one_of
+ }
+ super().__init__(AnsibleModule, ansible_module_params)
- if not pd_details:
- err_msg = "Unable to find the protection domain with {0}. " \
- "Please enter a valid protection domain" \
- " name/id.".format(name_or_id)
- self.module.fail_json(msg=err_msg)
+ utils.ensure_required_libs(self.module)
+ self.result = dict(
+ changed=False,
+ storage_pool_details={}
+ )
- except Exception as e:
- errormsg = "Failed to get the protection domain {0} with" \
- " error {1}".format(name_or_id, str(e))
- LOG.error(errormsg)
- self.module.fail_json(msg=errormsg)
+ def get_protection_domain(
+ self, protection_domain_name=None, protection_domain_id=None
+ ):
+ """Get the details of a protection domain in a given PowerFlex storage
+ system"""
+ return Configuration(self.powerflex_conn, self.module).get_protection_domain(
+ protection_domain_name=protection_domain_name, protection_domain_id=protection_domain_id)
def get_storage_pool(self, storage_pool_id=None, storage_pool_name=None,
pd_id=None):
@@ -648,7 +816,7 @@ class PowerFlexStoragePool(object):
filter_fields.update({'protectionDomainId': pd_id})
pool_details = self.powerflex_conn.storage_pool.get(
filter_fields=filter_fields)
- if pool_details:
+ if pool_details != []:
if len(pool_details) > 1:
err_msg = "More than one storage pool found with {0}," \
@@ -666,10 +834,9 @@ class PowerFlexStoragePool(object):
protection_domain_id=pd_id)['name']
# adding protection domain name in the pool details
pool_details['protectionDomainName'] = pd_name
- else:
- pool_details = None
+ return pool_details
- return pool_details
+ return None
except Exception as e:
errormsg = "Failed to get the storage pool {0} with error " \
@@ -698,192 +865,206 @@ class PowerFlexStoragePool(object):
self.module.fail_json(
msg="Please provide protection domain details for "
"creation of a storage pool")
- self.powerflex_conn.storage_pool.create(
- media_type=media_type,
- protection_domain_id=pd_id, name=pool_name,
- use_rfcache=use_rfcache, use_rmcache=use_rmcache)
+ if not self.module.check_mode:
+ pool_id = self.powerflex_conn.storage_pool.create(
+ media_type=media_type,
+ protection_domain_id=pd_id, name=pool_name,
+ use_rfcache=use_rfcache, use_rmcache=use_rmcache)['id']
+
+ return self.get_storage_pool(storage_pool_id=pool_id,
+ pd_id=pd_id)
- return True
except Exception as e:
errormsg = "Failed to create the storage pool {0} with error " \
"{1}".format(pool_name, str(e))
LOG.error(errormsg)
self.module.fail_json(msg=errormsg)
- def modify_storage_pool(self, pool_id, modify_dict):
- """
- Modify the parameters of the storage pool.
- :param modify_dict: Dict containing parameters which are to be
- modified
- :param pool_id: Id of the pool.
- :return: True, if the operation is successful.
- """
-
- try:
-
- if 'new_name' in modify_dict:
- self.powerflex_conn.storage_pool.rename(
- pool_id, modify_dict['new_name'])
- if 'use_rmcache' in modify_dict:
- self.powerflex_conn.storage_pool.set_use_rmcache(
- pool_id, modify_dict['use_rmcache'])
- if 'use_rfcache' in modify_dict:
- self.powerflex_conn.storage_pool.set_use_rfcache(
- pool_id, modify_dict['use_rfcache'])
- if 'media_type' in modify_dict:
- self.powerflex_conn.storage_pool.set_media_type(
- pool_id, modify_dict['media_type'])
- return True
-
- except Exception as e:
- err_msg = "Failed to update the storage pool {0} with error " \
- "{1}".format(pool_id, str(e))
- LOG.error(err_msg)
- self.module.fail_json(msg=err_msg)
-
- def verify_params(self, pool_details, pd_name, pd_id):
+ def verify_protection_domain(self, pool_details):
"""
:param pool_details: Details of the storage pool
:param pd_name: Name of the protection domain
:param pd_id: Id of the protection domain
"""
- if pd_id and pd_id != pool_details['protectionDomainId']:
- self.module.fail_json(msg="Entered protection domain id does not"
- " match with the storage pool's "
- "protection domain id. Please enter "
- "a correct protection domain id.")
-
- if pd_name and pd_name != pool_details['protectionDomainName']:
- self.module.fail_json(msg="Entered protection domain name does"
- " not match with the storage pool's "
- "protection domain name. Please enter"
- " a correct protection domain name.")
-
- def perform_module_operation(self):
- """ Perform different actions on Storage Pool based on user input
- in the playbook """
-
- pool_name = self.module.params['storage_pool_name']
- pool_id = self.module.params['storage_pool_id']
- pool_new_name = self.module.params['storage_pool_new_name']
- state = self.module.params['state']
pd_name = self.module.params['protection_domain_name']
pd_id = self.module.params['protection_domain_id']
- use_rmcache = self.module.params['use_rmcache']
- use_rfcache = self.module.params['use_rfcache']
- media_type = self.module.params['media_type']
- if media_type == "TRANSITIONAL":
- media_type = 'Transitional'
-
- result = dict(
- storage_pool_details={}
- )
- changed = False
- pd_details = None
- if pd_name or pd_id:
- pd_details = self.get_protection_domain(
- protection_domain_id=pd_id,
- protection_domain_name=pd_name)
- if pd_details:
- pd_id = pd_details['id']
-
- if pool_name is not None and (len(pool_name.strip()) == 0):
+ if pool_details is not None:
+ if pd_id and pd_id != pool_details['protectionDomainId']:
+ self.module.fail_json(msg="Entered protection domain id does not"
+ " match with the storage pool's "
+ "protection domain id. Please enter "
+ "a correct protection domain id.")
+
+ if pd_name and pd_name != pool_details['protectionDomainName']:
+ self.module.fail_json(msg="Entered protection domain name does"
+ " not match with the storage pool's "
+ "protection domain name. Please enter"
+ " a correct protection domain name.")
+
+ def verify_storage_pool_name(self):
+ if (self.module.params['storage_pool_name'] is not None and
+ (len(self.module.params['storage_pool_name'].strip()) == 0)) or \
+ (self.module.params['storage_pool_new_name'] is not None and
+ (len(self.module.params['storage_pool_new_name'].strip()) == 0)):
self.module.fail_json(
- msg="Empty or white spaced string provided in "
- "storage_pool_name. Please provide valid storage"
+ msg="Empty or white spaced string provided for "
+ "storage pool name. Provide valid storage"
" pool name.")
- # Get the details of the storage pool.
- pool_details = self.get_storage_pool(storage_pool_id=pool_id,
- storage_pool_name=pool_name,
- pd_id=pd_id)
- if pool_name and pool_details:
- pool_id = pool_details['id']
- self.verify_params(pool_details, pd_name, pd_id)
-
- # create a storage pool
- if state == 'present' and not pool_details:
- LOG.info("Creating new storage pool")
- if pool_id:
- self.module.fail_json(
- msg="storage_pool_name is missing & name required to "
- "create a storage pool. Please enter a valid "
- "storage_pool_name.")
- if pool_new_name is not None:
- self.module.fail_json(
- msg="storage_pool_new_name is passed during creation. "
- "storage_pool_new_name is not allowed during "
- "creation of a storage pool.")
- changed = self.create_storage_pool(
- pool_name, pd_id, media_type, use_rfcache, use_rmcache)
- if changed:
- pool_id = self.get_storage_pool(storage_pool_id=pool_id,
- storage_pool_name=pool_name,
- pd_id=pd_id)['id']
-
- # modify the storage pool parameters
- if state == 'present' and pool_details:
- # check if the parameters are to be updated or not
- if pool_new_name is not None and len(pool_new_name.strip()) == 0:
- self.module.fail_json(
- msg="Empty/White spaced name is not allowed during "
- "renaming of a storage pool. Please enter a valid "
- "storage pool new name.")
- modify_dict = to_modify(pool_details, use_rmcache, use_rfcache,
- pool_new_name, media_type)
- if bool(modify_dict):
- LOG.info("Modify attributes of storage pool")
- changed = self.modify_storage_pool(pool_id, modify_dict)
-
- # Delete a storage pool
- if state == 'absent' and pool_details:
- msg = "Deleting storage pool is not supported through" \
- " ansible module."
- LOG.error(msg)
- self.module.fail_json(msg=msg)
-
- # Show the updated storage pool details
- if state == 'present':
- pool_details = self.get_storage_pool(storage_pool_id=pool_id)
- # fetching Id from pool details to address a case where
- # protection domain is not passed
- pd_id = pool_details['protectionDomainId']
- pd_name = self.get_protection_domain(
- protection_domain_id=pd_id)['name']
- # adding protection domain name in the pool details
- pool_details['protectionDomainName'] = pd_name
- result['storage_pool_details'] = pool_details
- result['changed'] = changed
-
- self.module.exit_json(**result)
-
-
-def to_modify(pool_details, use_rmcache, use_rfcache, new_name, media_type):
- """
- Check whether a parameter is required to be updated.
-
- :param media_type: Type of the media supported by the pool.
- :param pool_details: Details of the storage pool
- :param use_rmcache: Enable/Disable RMcache on pool
- :param use_rfcache: Enable/Disable RFcache on pool
- :param new_name: New name for the storage pool
- :return: dict, containing parameters to be modified
- """
- pool_name = pool_details['name']
- pool_use_rfcache = pool_details['useRfcache']
- pool_use_rmcache = pool_details['useRmcache']
- pool_media_type = pool_details['mediaType']
- modify_params = {}
-
- if new_name is not None and pool_name != new_name:
- modify_params['new_name'] = new_name
- if use_rfcache is not None and pool_use_rfcache != use_rfcache:
- modify_params['use_rfcache'] = use_rfcache
- if use_rmcache is not None and pool_use_rmcache != use_rmcache:
- modify_params['use_rmcache'] = use_rmcache
- if media_type is not None and media_type != pool_media_type:
- modify_params['media_type'] = media_type
- return modify_params
+ def set_persistent_checksum(self, pool_details, pool_params):
+ try:
+ if pool_params['persistent_checksum']['enable']:
+ if pool_details['persistentChecksumEnabled'] is not True:
+ self.powerflex_conn.storage_pool.set_persistent_checksum(
+ storage_pool_id=pool_details['id'],
+ enable=pool_params['persistent_checksum']['enable'],
+ validate=pool_params['persistent_checksum']['validate_on_read'],
+ builder_limit=pool_params['persistent_checksum']['builder_limit'])
+ else:
+ self.powerflex_conn.storage_pool.modify_persistent_checksum(
+ storage_pool_id=pool_details['id'],
+ validate=pool_params['persistent_checksum']['validate_on_read'],
+ builder_limit=pool_params['persistent_checksum']['builder_limit'])
+
+ pool_details = self.get_storage_pool(storage_pool_id=pool_details['id'])
+ return pool_details
+
+ except Exception as e:
+ err_msg = "Failed to set persistent checksum with error " \
+ "{0}".format(str(e))
+ LOG.error(err_msg)
+ self.module.fail_json(msg=err_msg)
+
+ def to_modify_persistent_checksum(self, pool_details, pool_params):
+ checksum_dict = dict()
+ if pool_params['persistent_checksum']['enable'] is not None and \
+ pool_params['persistent_checksum']['enable'] != pool_details['persistentChecksumEnabled']:
+ checksum_dict['enable'] = pool_params['persistent_checksum']['enable']
+
+ if pool_params['persistent_checksum']['validate_on_read'] is not None and \
+ pool_params['persistent_checksum']['validate_on_read'] != pool_details['persistentChecksumValidateOnRead'] and \
+ pool_params['persistent_checksum']['enable'] is True:
+ checksum_dict['validate_on_read'] = pool_params['persistent_checksum']['validate_on_read']
+
+ if pool_params['persistent_checksum']['builder_limit'] is not None and \
+ pool_params['persistent_checksum']['builder_limit'] != pool_details['persistentChecksumBuilderLimitKb'] and \
+ pool_params['persistent_checksum']['enable'] is True:
+ checksum_dict['builder_limit'] = pool_params['persistent_checksum']['builder_limit']
+
+ return checksum_dict
+
+ def to_modify_rebalance_io_priority_policy(self, pool_details, pool_params):
+
+ policy_dict = {
+ 'policy': None,
+ 'concurrent_ios': None,
+ 'bw_limit': None
+ }
+ modify = False
+ if pool_params['rebalance_io_priority_policy']['policy'] is not None and \
+ pool_params['rebalance_io_priority_policy']['policy'] != pool_details['rebalanceIoPriorityPolicy']:
+ policy_dict['policy'] = pool_params['rebalance_io_priority_policy']['policy']
+ modify = True
+
+ if pool_params['rebalance_io_priority_policy']['concurrent_ios_per_device'] is not None and \
+ pool_params['rebalance_io_priority_policy']['concurrent_ios_per_device'] != pool_details['rebalanceIoPriorityNumOfConcurrentIosPerDevice']:
+ policy_dict['concurrent_ios'] = str(pool_params['rebalance_io_priority_policy']['concurrent_ios_per_device'])
+
+ if pool_params['rebalance_io_priority_policy']['bw_limit_per_device'] is not None and \
+ pool_params['rebalance_io_priority_policy']['bw_limit_per_device'] != pool_details['rebalanceIoPriorityBwLimitPerDeviceInKbps']:
+ policy_dict['bw_limit'] = str(pool_params['rebalance_io_priority_policy']['bw_limit_per_device'])
+
+ if policy_dict['policy'] is None and (policy_dict['concurrent_ios'] is not None or policy_dict['bw_limit'] is not None):
+ policy_dict['policy'] = pool_details['rebalanceIoPriorityPolicy']
+ modify = True
+
+ if modify is True:
+ return policy_dict
+ else:
+ return None
+
+ def to_modify_vtree_migration_io_priority_policy(self, pool_details, pool_params):
+ policy_dict = {
+ 'policy': None,
+ 'concurrent_ios': None,
+ 'bw_limit': None
+ }
+ modify = False
+ if pool_params['vtree_migration_io_priority_policy']['policy'] is not None and \
+ pool_params['vtree_migration_io_priority_policy']['policy'] != pool_details['vtreeMigrationIoPriorityPolicy']:
+ policy_dict['policy'] = pool_params['vtree_migration_io_priority_policy']['policy']
+ modify = True
+
+ if pool_params['vtree_migration_io_priority_policy']['concurrent_ios_per_device'] is not None and \
+ pool_params['vtree_migration_io_priority_policy']['concurrent_ios_per_device'] != \
+ pool_details['vtreeMigrationIoPriorityNumOfConcurrentIosPerDevice']:
+ policy_dict['concurrent_ios'] = str(pool_params['vtree_migration_io_priority_policy']['concurrent_ios_per_device'])
+
+ if pool_params['vtree_migration_io_priority_policy']['bw_limit_per_device'] is not None and \
+ pool_params['vtree_migration_io_priority_policy']['bw_limit_per_device'] != \
+ pool_details['vtreeMigrationIoPriorityBwLimitPerDeviceInKbps']:
+ policy_dict['bw_limit'] = str(pool_params['vtree_migration_io_priority_policy']['bw_limit_per_device'])
+
+ if policy_dict['policy'] is None and (policy_dict['concurrent_ios'] is not None or policy_dict['bw_limit'] is not None):
+ policy_dict['policy'] = pool_details['vtreeMigrationIoPriorityPolicy']
+ modify = True
+
+ if modify is True:
+ return policy_dict
+ else:
+ return None
+
+ def to_modify_protected_maintenance_mode_io_priority_policy(self, pool_details, pool_params):
+
+ policy_dict = {
+ 'policy': None,
+ 'concurrent_ios': None,
+ 'bw_limit': None
+ }
+ modify = False
+ if pool_params['protected_maintenance_mode_io_priority_policy']['policy'] is not None and \
+ pool_params['protected_maintenance_mode_io_priority_policy']['policy'] != pool_details['protectedMaintenanceModeIoPriorityPolicy']:
+ policy_dict['policy'] = pool_params['protected_maintenance_mode_io_priority_policy']['policy']
+ modify = True
+
+ if pool_params['protected_maintenance_mode_io_priority_policy']['concurrent_ios_per_device'] is not None and \
+ pool_params['protected_maintenance_mode_io_priority_policy']['concurrent_ios_per_device'] != \
+ pool_details['protectedMaintenanceModeIoPriorityNumOfConcurrentIosPerDevice']:
+ policy_dict['concurrent_ios'] = str(pool_params['protected_maintenance_mode_io_priority_policy']['concurrent_ios_per_device'])
+
+ if pool_params['protected_maintenance_mode_io_priority_policy']['bw_limit_per_device'] is not None and \
+ pool_params['protected_maintenance_mode_io_priority_policy']['bw_limit_per_device'] != \
+ pool_details['protectedMaintenanceModeIoPriorityBwLimitPerDeviceInKbps']:
+ policy_dict['bw_limit'] = str(pool_params['protected_maintenance_mode_io_priority_policy']['bw_limit_per_device'])
+
+ if policy_dict['policy'] is None and (policy_dict['concurrent_ios'] is not None or policy_dict['bw_limit'] is not None):
+ policy_dict['policy'] = pool_details['protectedMaintenanceModeIoPriorityPolicy']
+ modify = True
+
+ if modify is True:
+ return policy_dict
+ else:
+ return None
+
+ def to_modify_capacity_alert_thresholds(self, pool_details, pool_params, thresholds):
+ modify = False
+ threshold = dict()
+ if pool_params['cap_alert_thresholds']['high_threshold'] is not None and pool_params['cap_alert_thresholds'][
+ 'high_threshold'] != pool_details['capacityAlertHighThreshold']:
+ threshold['high'] = str(pool_params['cap_alert_thresholds']['high_threshold'])
+ modify = True
+ if pool_params['cap_alert_thresholds']['critical_threshold'] is not None and \
+ pool_params['cap_alert_thresholds']['critical_threshold'] != pool_details[
+ 'capacityAlertCriticalThreshold']:
+ threshold['critical'] = str(pool_params['cap_alert_thresholds']['critical_threshold'])
+ modify = True
+ if modify is True:
+ if 'high' not in threshold:
+ threshold['high'] = str(pool_details['capacityAlertHighThreshold'])
+ if 'critical' not in threshold:
+ threshold['critical'] = str(pool_details['capacityAlertCriticalThreshold'])
+
+ return threshold
def get_powerflex_storagepool_parameters():
@@ -898,15 +1079,464 @@ def get_powerflex_storagepool_parameters():
choices=['HDD', 'SSD', 'TRANSITIONAL']),
use_rfcache=dict(required=False, type='bool'),
use_rmcache=dict(required=False, type='bool'),
+ enable_zero_padding=dict(type='bool'),
+ rep_cap_max_ratio=dict(type='int'),
+ rmcache_write_handling_mode=dict(choices=['Cached', 'Passthrough'], default='Cached'),
+ spare_percentage=dict(type='int'),
+ enable_rebalance=dict(type='bool'),
+ enable_fragmentation=dict(type='bool'),
+ enable_rebuild=dict(type='bool'),
storage_pool_new_name=dict(required=False, type='str'),
+ parallel_rebuild_rebalance_limit=dict(type='int'),
+ cap_alert_thresholds=dict(type='dict', options=dict(
+ high_threshold=dict(type='int'),
+ critical_threshold=dict(type='int'))),
+ protected_maintenance_mode_io_priority_policy=dict(type='dict', options=dict(
+ policy=dict(choices=['unlimited', 'limitNumOfConcurrentIos', 'favorAppIos'], default='limitNumOfConcurrentIos'),
+ concurrent_ios_per_device=dict(type='int'),
+ bw_limit_per_device=dict(type='int'))),
+ rebalance_io_priority_policy=dict(type='dict', options=dict(
+ policy=dict(choices=['unlimited', 'limitNumOfConcurrentIos', 'favorAppIos'], default='favorAppIos'),
+ concurrent_ios_per_device=dict(type='int'),
+ bw_limit_per_device=dict(type='int'))),
+ vtree_migration_io_priority_policy=dict(type='dict', options=dict(
+ policy=dict(choices=['limitNumOfConcurrentIos', 'favorAppIos']),
+ concurrent_ios_per_device=dict(type='int'),
+ bw_limit_per_device=dict(type='int'))),
+ persistent_checksum=dict(type='dict', options=dict(
+ enable=dict(type='bool'),
+ validate_on_read=dict(type='bool'),
+ builder_limit=dict(type='int', default=3072))),
state=dict(required=True, type='str', choices=['present', 'absent']))
+class StoragePoolExitHandler():
+ def handle(self, pool_obj, pool_details):
+ if pool_details:
+ pool_details = pool_obj.get_storage_pool(storage_pool_id=pool_details['id'])
+ pool_obj.result['storage_pool_details'] = pool_details
+
+ pool_obj.module.exit_json(**pool_obj.result)
+
+
+class StoragePoolDeleteHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ if pool_params['state'] == 'absent' and pool_details:
+ msg = "Deleting storage pool is not supported through" \
+ " ansible module."
+ LOG.error(msg)
+ pool_obj.module.fail_json(msg=msg)
+
+ StoragePoolExitHandler().handle(pool_obj, pool_details)
+
+
+class StoragePoolModifyPersistentChecksumHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['persistent_checksum'] is not None:
+ checksum_dict = pool_obj.to_modify_persistent_checksum(
+ pool_details=pool_details,
+ pool_params=pool_params)
+ if checksum_dict != {}:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.set_persistent_checksum(
+ pool_details=pool_details,
+ pool_params=pool_params)
+ pool_obj.result['changed'] = True
+
+ StoragePoolDeleteHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify Persistent Checksum failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifyRebalanceIOPriorityPolicyHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['rebalance_io_priority_policy'] is not None:
+ policy_dict = pool_obj.to_modify_rebalance_io_priority_policy(
+ pool_details=pool_details,
+ pool_params=pool_params
+ )
+ if policy_dict is not None:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.rebalance_io_priority_policy(
+ storage_pool_id=pool_details['id'],
+ policy=policy_dict['policy'],
+ concurrent_ios_per_device=policy_dict['concurrent_ios'],
+ bw_limit_per_device=policy_dict['bw_limit'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyPersistentChecksumHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify rebalance IO Priority Policy failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolSetVtreeMigrationIOPriorityPolicyHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['vtree_migration_io_priority_policy'] is not None:
+ policy_dict = pool_obj.to_modify_vtree_migration_io_priority_policy(
+ pool_details=pool_details,
+ pool_params=pool_params
+ )
+ if policy_dict is not None:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_vtree_migration_io_priority_policy(
+ storage_pool_id=pool_details['id'],
+ policy=policy_dict['policy'],
+ concurrent_ios_per_device=policy_dict['concurrent_ios'],
+ bw_limit_per_device=policy_dict['bw_limit'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyRebalanceIOPriorityPolicyHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Set Vtree Migration I/O Priority Policy operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolSetProtectedMaintenanceModeIOPriorityPolicyHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['protected_maintenance_mode_io_priority_policy'] is not None:
+ policy_dict = pool_obj.to_modify_protected_maintenance_mode_io_priority_policy(
+ pool_details=pool_details,
+ pool_params=pool_params
+ )
+ if policy_dict is not None:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_protected_maintenance_mode_io_priority_policy(
+ storage_pool_id=pool_details['id'],
+ policy=policy_dict['policy'],
+ concurrent_ios_per_device=policy_dict['concurrent_ios'],
+ bw_limit_per_device=policy_dict['bw_limit'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolSetVtreeMigrationIOPriorityPolicyHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Set Protected Maintenance Mode IO Priority Policy operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifyCapacityAlertThresholdsHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['cap_alert_thresholds'] is not None:
+ threshold = pool_obj.to_modify_capacity_alert_thresholds(pool_details=pool_details,
+ pool_params=pool_params,
+ thresholds=pool_params[
+ 'cap_alert_thresholds'])
+ if threshold != {}:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_cap_alert_thresholds(
+ storage_pool_id=pool_details['id'],
+ cap_alert_high_threshold=threshold['high'],
+ cap_alert_critical_threshold=threshold['critical'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolSetProtectedMaintenanceModeIOPriorityPolicyHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify Capacity Alert Thresholds operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifyRebuildRebalanceParallelismLimitHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['parallel_rebuild_rebalance_limit'] is not None and \
+ pool_params['parallel_rebuild_rebalance_limit'] != pool_details['numOfParallelRebuildRebalanceJobsPerDevice']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_rebuild_rebalance_parallelism_limit(
+ pool_details['id'], str(pool_params['parallel_rebuild_rebalance_limit']))
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyCapacityAlertThresholdsHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify Rebuild/Rebalance Parallelism Limit operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifyRMCacheWriteHandlingModeHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['rmcache_write_handling_mode'] is not None and \
+ pool_params['rmcache_write_handling_mode'] != pool_details['rmcacheWriteHandlingMode']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_rmcache_write_handling_mode(
+ pool_details['id'], pool_params['rmcache_write_handling_mode'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyRebuildRebalanceParallelismLimitHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify RMCache Write Handling Mode failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifySparePercentageHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['spare_percentage'] is not None and pool_params['spare_percentage'] != pool_details['sparePercentage']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_spare_percentage(
+ pool_details['id'], str(pool_params['spare_percentage']))
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyRMCacheWriteHandlingModeHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify Spare Percentage operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolEnableFragmentationHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['enable_fragmentation'] is not None and pool_params['enable_fragmentation'] != pool_details['fragmentationEnabled']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_fragmentation_enabled(
+ pool_details['id'], pool_params['enable_fragmentation'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifySparePercentageHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+
+ error_msg = (f"Enable/Disable Fragmentation operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolEnableRebuildHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['enable_rebuild'] is not None and pool_params['enable_rebuild'] != pool_details['rebuildEnabled']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_rebuild_enabled(
+ pool_details['id'], pool_params['enable_rebuild'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolEnableFragmentationHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Enable/Disable Rebuild operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolEnableRebalanceHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['enable_rebalance'] is not None and pool_params['enable_rebalance'] != pool_details['rebalanceEnabled']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_rebalance_enabled(
+ pool_details['id'], pool_params['enable_rebalance'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolEnableRebuildHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Enable/Disable Rebalance failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifyRepCapMaxRatioHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['rep_cap_max_ratio'] is not None and pool_params['rep_cap_max_ratio'] != pool_details['replicationCapacityMaxRatio']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_rep_cap_max_ratio(
+ pool_details['id'], str(pool_params['rep_cap_max_ratio']))
+ pool_obj.result['changed'] = True
+
+ StoragePoolEnableRebalanceHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify Replication Capacity max ratio operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolEnableZeroPaddingHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['enable_zero_padding'] is not None and pool_params['enable_zero_padding'] != pool_details['zeroPaddingEnabled']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_zero_padding_policy(
+ pool_details['id'], pool_params['enable_zero_padding'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyRepCapMaxRatioHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Enable/Disable zero padding operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolUseRFCacheHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['use_rfcache'] is not None and pool_params['use_rfcache'] != pool_details['useRfcache']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_use_rfcache(
+ pool_details['id'], pool_params['use_rfcache'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolEnableZeroPaddingHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify RF cache operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolUseRMCacheHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['use_rmcache'] is not None and pool_params['use_rmcache'] != pool_details['useRmcache']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_use_rmcache(
+ pool_details['id'], pool_params['use_rmcache'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolUseRFCacheHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify RM cache operation failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolRenameHandler():
+ def handle(self, pool_obj, pool_params, pool_details):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if pool_params['storage_pool_new_name'] is not None and pool_params['storage_pool_new_name'] != pool_details['name']:
+ if not pool_obj.module.check_mode:
+ pool_obj.powerflex_conn.storage_pool.rename(pool_details['id'], pool_params['storage_pool_new_name'])
+ pool_obj.result['changed'] = True
+
+ StoragePoolUseRMCacheHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify storage pool name failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolModifyMediaTypeHandler():
+ def handle(self, pool_obj, pool_params, pool_details, media_type):
+ try:
+ if pool_params['state'] == 'present' and pool_details:
+ if media_type is not None and media_type != pool_details['mediaType']:
+ if not pool_obj.module.check_mode:
+ pool_details = pool_obj.powerflex_conn.storage_pool.set_media_type(
+ pool_details['id'], media_type)
+ pool_obj.result['changed'] = True
+
+ StoragePoolRenameHandler().handle(pool_obj, pool_params, pool_details)
+
+ except Exception as e:
+ error_msg = (f"Modify Media Type failed "
+ f"with error {str(e)}")
+ LOG.error(error_msg)
+ pool_obj.module.fail_json(msg=error_msg)
+
+
+class StoragePoolCreateHandler():
+ def handle(self, pool_obj, pool_params, pool_details, pd_id, media_type):
+ if pool_params['state'] == 'present' and pool_details is None:
+ if not pool_obj.module.check_mode:
+ LOG.info("Creating new storage pool")
+ if pool_params['storage_pool_id']:
+ pool_obj.module.fail_json(
+ msg="storage_pool_name is missing & name required to "
+ "create a storage pool. Please enter a valid "
+ "storage_pool_name.")
+
+ pool_details = pool_obj.create_storage_pool(
+ pool_name=pool_params['storage_pool_name'],
+ pd_id=pd_id,
+ media_type=media_type,
+ use_rfcache=pool_params['use_rfcache'],
+ use_rmcache=pool_params['use_rmcache'])
+
+ pool_obj.result['changed'] = True
+
+ StoragePoolModifyMediaTypeHandler().handle(pool_obj, pool_params, pool_details, media_type)
+
+
+class StoragePoolHandler():
+ def handle(self, pool_obj, pool_params):
+ pool_obj.verify_storage_pool_name()
+ media_type = pool_params['media_type']
+ if media_type == "TRANSITIONAL":
+ media_type = 'Transitional'
+ pd_id = None
+ if pool_params['protection_domain_id'] or pool_params['protection_domain_name']:
+ pd_id = pool_obj.get_protection_domain(
+ protection_domain_id=pool_params['protection_domain_id'],
+ protection_domain_name=pool_params['protection_domain_name'])['id']
+ pool_details = pool_obj.get_storage_pool(storage_pool_id=pool_params['storage_pool_id'],
+ storage_pool_name=pool_params['storage_pool_name'],
+ pd_id=pd_id)
+ pool_obj.verify_protection_domain(pool_details=pool_details)
+ StoragePoolCreateHandler().handle(pool_obj, pool_params, pool_details, pd_id, media_type)
+
+
def main():
- """ Create PowerFlex Storage Pool object and perform action on it
+ """ Create PowerFlex storage pool object and perform action on it
based on user input from playbook"""
obj = PowerFlexStoragePool()
- obj.perform_module_operation()
+ StoragePoolHandler().handle(obj, obj.module.params)
if __name__ == '__main__':
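Since the rewritten module now registers supports_check_mode=True (see the updated notes above), a dry run can preview what would change before anything is applied. A minimal sketch, assuming the same connection variables as in the EXAMPLES section and a pre-existing pool; the threshold and policy values below are illustrative only:

- name: Preview storage pool modification without applying it
  dellemc.powerflex.storagepool:
    hostname: "{{ hostname }}"
    username: "{{ username }}"
    password: "{{ password }}"
    validate_certs: "{{ validate_certs }}"
    storage_pool_name: "{{ pool_name }}"
    protection_domain_name: "{{ protection_domain_name }}"
    cap_alert_thresholds:
      high_threshold: 60
      critical_threshold: 80
    rebalance_io_priority_policy:
      policy: "limitNumOfConcurrentIos"
      concurrent_ios_per_device: 5
    state: "present"
  check_mode: true
  register: pool_preview

- name: Show whether the module would report a change
  ansible.builtin.debug:
    var: pool_preview.changed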
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml
index 67bad8013..f1611a6b9 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_config/tasks/main.yml
@@ -46,6 +46,14 @@
delegate_to: "{{ powerflex_config_mdm_primary_hostname }}"
when: powerflex_config_array_version == '4'
+- name: Add certificate file for PowerFlex version 4.x
+ ansible.builtin.command: scli --add_certificate --certificate_file /opt/emc/scaleio/mdm/cfg/mgmt_ca.pem
+ run_once: true
+ register: powerflex_config_add_certificate
+ changed_when: powerflex_config_add_certificate.rc == 0
+ delegate_to: "{{ powerflex_config_mdm_primary_hostname }}"
+ when: powerflex_config_array_version == '4'
+
- name: Login to MDM for PowerFlex version 4.x
ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_gateway/tasks/install_keepalived.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_gateway/tasks/install_keepalived.yml
index df6fd9dac..9b106ab1b 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_gateway/tasks/install_keepalived.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_gateway/tasks/install_keepalived.yml
@@ -25,4 +25,4 @@
mode: '0600'
owner: root
group: root
- notify: restart keepalived
+ notify: Restart keepalived
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_lia/tasks/install_lia.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_lia/tasks/install_lia.yml
index 4b987b80e..f5aca7086 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_lia/tasks/install_lia.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_lia/tasks/install_lia.yml
@@ -20,3 +20,27 @@
- name: Include install_powerflex.yml
ansible.builtin.include_tasks: ../../powerflex_common/tasks/install_powerflex.yml
+
+- name: Check if /opt/emc/scaleio/lia/cfg/conf.txt exists
+ ansible.builtin.stat:
+ path: /opt/emc/scaleio/lia/cfg/conf.txt
+ register: powerflex_lia_conf_file_stat
+
+- name: Update lia_mtls_only
+ ansible.builtin.lineinfile:
+ path: /opt/emc/scaleio/lia/cfg/conf.txt
+ regexp: '^lia_mtls_only=1$'
+ state: absent
+ when: powerflex_lia_conf_file_stat.stat.exists
+
+- name: Replace lia_token value
+ ansible.builtin.replace:
+ path: /opt/emc/scaleio/lia/cfg/conf.txt
+ regexp: '^lia_token=.*$'
+ replace: 'lia_token={{ powerflex_lia_token }}'
+ when: powerflex_lia_conf_file_stat.stat.exists
+
+- name: Kill LIA process so config changes take effect
+ ansible.builtin.command: pkill lia
+ register: powerflex_lia_pkill_output
+ changed_when: powerflex_lia_pkill_output.rc == 0
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_mdm/tasks/install_powerflex4x_mdm.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_mdm/tasks/install_powerflex4x_mdm.yml
index 67164337d..5a36649bf 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_mdm/tasks/install_powerflex4x_mdm.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_mdm/tasks/install_powerflex4x_mdm.yml
@@ -51,6 +51,13 @@
run_once: true
delegate_to: "{{ powerflex_mdm_primary_hostname }}"
+- name: Add certificate file for PowerFlex version 4.x
+ ansible.builtin.command: scli --add_certificate --certificate_file /opt/emc/scaleio/mdm/cfg/mgmt_ca.pem
+ run_once: true
+ register: powerflex_mdm_add_certificate
+ changed_when: powerflex_mdm_add_certificate.rc == 0
+ delegate_to: "{{ powerflex_mdm_primary_hostname }}"
+
- name: Login to primary MDM node
register: powerflex_mdm_secondary_login
ansible.builtin.command: >
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/defaults/main.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/defaults/main.yml
index 5801c0ced..fc239219b 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/defaults/main.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/defaults/main.yml
@@ -17,5 +17,4 @@ powerflex_sdc_name: sdc_test
powerflex_sdc_performance_profile: Compact
file_glob_name: sdc
i_am_sure: 1
-powerflex_sdc_esxi_guid: "d422ecab-af6f-4e0c-a059-333ac89cfb42"
powerflex_role_environment:
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml
index 27c82db27..25c6e8109 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/install_sdc.yml
@@ -70,6 +70,6 @@
mode: "0600"
owner: "root"
group: "root"
- notify: restart scini
+ notify: Restart scini
when:
- ansible_distribution not in ['WindowsOS', 'SLES', 'VMkernel']
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/register_esxi_sdc.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/register_esxi_sdc.yml
index 0b06fc8cd..52a8004f5 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/register_esxi_sdc.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdc/tasks/register_esxi_sdc.yml
@@ -1,8 +1,14 @@
---
+- name: Generate random GUID
+ register: powerflex_sdc_esxi_guid
+ ansible.builtin.shell: >
+ uuidgen
+ changed_when: powerflex_sdc_esxi_guid.rc == 0
+
- name: Register SDC and Set MDM IP addresses
register: powerflex_sdc_register_set_sdc_mdm
ansible.builtin.shell: >
- esxcli system module parameters set -m scini -p "IoctlIniGuidStr={{ powerflex_sdc_esxi_guid }}
+ esxcli system module parameters set -m scini -p "IoctlIniGuidStr={{ powerflex_sdc_esxi_guid.stdout }}
IoctlMdmIPStr={{ powerflex_sdc_mdm_ips }} bBlkDevIsPdlActive=1 blkDevPdlTimeoutMillis=60000"
changed_when: powerflex_sdc_register_set_sdc_mdm.rc == 0
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml
index f7cbfa378..ba365ce90 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/add_sdr.yml
@@ -54,7 +54,7 @@
rescue:
- name: Generate login certificate using primary_mdm_ip
ansible.builtin.command: >
- scli --generate_login_certificate --management_system_ip {{ powerflex_sdr_primary_mdm_ip }} --username {{ username }}
+ scli --generate_login_certificate --management_system_ip {{ powerflex_sdr_mdm_primary_ip }} --username {{ username }}
--password {{ password }} --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
run_once: true
register: powerflex_sdr_generate_login_certificate_mdm_ip
@@ -62,6 +62,14 @@
delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
when: powerflex_sdr_array_version != "3"
+- name: Add certificate file for PowerFlex version 4.x
+ ansible.builtin.command: scli --add_certificate --certificate_file /opt/emc/scaleio/mdm/cfg/mgmt_ca.pem
+ run_once: true
+ register: powerflex_sdr_add_certificate
+ changed_when: powerflex_sdr_add_certificate.rc == 0
+ delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
+ when: powerflex_sdr_array_version != '3'
+
- name: Login to MDM for PowerFlex version 4.x
ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/remove_sdr.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/remove_sdr.yml
index 3bf33b6ea..17ec5755a 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/remove_sdr.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sdr/tasks/remove_sdr.yml
@@ -29,17 +29,42 @@
no_log: true
when: powerflex_sdr_array_version == "3"
-- name: Login to mdm for PowerFlex version 4.x
- ansible.builtin.command: >
- scli --login --management_system_ip {{ hostname }}
- --username admin
- --password "{{ password }}"
- --approve_certificate
+- name: Generate login certificate for PowerFlex version 4.x
+ block:
+ - name: Generate login certificate using management_system_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ hostname }} --username {{ username }} --password {{ password }}
+ --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_sdr_generate_login_certificate
+ changed_when: powerflex_sdr_generate_login_certificate.rc == 0
+ delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
+ when: powerflex_sdr_array_version != "3"
+ rescue:
+ - name: Generate login certificate using primary_mdm_ip
+ ansible.builtin.command: >
+ scli --generate_login_certificate --management_system_ip {{ powerflex_sdr_mdm_primary_ip }} --username {{ username }}
+ --password {{ password }} --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }} --insecure
+ run_once: true
+ register: powerflex_sdr_generate_login_certificate_mdm_ip
+ changed_when: powerflex_sdr_generate_login_certificate_mdm_ip.rc == 0
+ delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
+ when: powerflex_sdr_array_version != "3"
+
+- name: Add certificate file for PowerFlex version 4.x
+ ansible.builtin.command: scli --add_certificate --certificate_file /opt/emc/scaleio/mdm/cfg/mgmt_ca.pem
run_once: true
- register: powerflex_initial_login
+ register: powerflex_sdr_add_certificate
+ changed_when: powerflex_sdr_add_certificate.rc == 0
+ delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
+ when: powerflex_sdr_array_version != '3'
+
+- name: Login to MDM for PowerFlex version 4.x
+ ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
+ run_once: true
+ register: powerflex_sdr_login_output
+ changed_when: powerflex_sdr_login_output.rc == 0
delegate_to: "{{ powerflex_sdr_mdm_primary_hostname }}"
- changed_when: powerflex_initial_login.rc == 0
- no_log: true
when: powerflex_sdr_array_version != "3"
- name: Output msg of previous task login to mdm
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml
index 010aee075..e6abdbd1e 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_sds/tasks/install_sds.yml
@@ -59,6 +59,14 @@
delegate_to: "{{ powerflex_sds_primary_mdm_hostname }}"
when: powerflex_sds_array_version != "3"
+- name: Add certificate file for PowerFlex version 4.x
+ ansible.builtin.command: scli --add_certificate --certificate_file /opt/emc/scaleio/mdm/cfg/mgmt_ca.pem
+ run_once: true
+ register: powerflex_sds_add_certificate
+ changed_when: powerflex_sds_add_certificate.rc == 0
+ delegate_to: "{{ powerflex_sds_primary_mdm_hostname }}"
+ when: powerflex_sds_array_version != '3'
+
- name: Login to MDM for PowerFlex version 4.x
ansible.builtin.command: scli --login --p12_path /opt/emc/scaleio/mdm/cfg/cli_certificate.p12 --p12_password {{ password }}
run_once: true
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/connect_mdm_cluster.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/connect_mdm_cluster.yml
new file mode 100644
index 000000000..e166a9316
--- /dev/null
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/connect_mdm_cluster.yml
@@ -0,0 +1,131 @@
+---
+- name: Get login token
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}:{{ port }}/api/gatewayLogin"
+ validate_certs: "{{ validate_certs }}"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ method: GET
+ force_basic_auth: true
+ delegate_to: "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+ run_once: true
+ no_log: true
+ register: powerflex_tb_login_token_response
+
+- name: Get LIA credentials
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}:{{ port }}/api/V1/Credential?filter=eq,label,DELL_POWERFLEX_LIA"
+ method: GET
+ validate_certs: "{{ validate_certs }}"
+ headers:
+ Authorization: "Bearer {{ powerflex_tb_login_token_response.json }}"
+ delegate_to: "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+ run_once: true
+ register: powerflex_tb_lia_credentials
+
+- name: Set credential ID
+ ansible.builtin.set_fact:
+ powerflex_tb_lia_credential_id: "{{ powerflex_tb_lia_credentials.json.credentialList[0].credential.id }}"
+ when: powerflex_tb_lia_credentials is defined and powerflex_tb_lia_credentials.json.credentialList | length > 0
+
+- name: Create LIA credentials if not exists
+ when: powerflex_tb_lia_credentials.json.credentialList | length == 0
+ block:
+ - name: Create LIA credentials payload
+ ansible.builtin.set_fact:
+ powerflex_tb_lia_credential:
+ credential:
+ type: "ManagementSystemCredential"
+ label: "DELL_POWERFLEX_LIA"
+ password: "{{ powerflex_lia_token }}"
+ liaPassword: "{{ powerflex_lia_token }}"
+
+ - name: Create LIA credentials
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}:{{ port }}/api/V1/Credential"
+ validate_certs: "{{ validate_certs }}"
+ method: POST
+ headers:
+ Authorization: "Bearer {{ powerflex_tb_login_token_response.json }}"
+ Content-Type: "application/json"
+ body: "{{ powerflex_tb_lia_credential | to_json }}"
+ register: powerflex_tb_lia_create_credentials
+ delegate_to: "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+ run_once: true
+ changed_when: powerflex_tb_lia_create_credentials.status == 200
+
+ - name: Set credential ID
+ ansible.builtin.set_fact:
+ powerflex_tb_lia_credential_id: "{{ powerflex_tb_lia_create_credentials.json.credential.id }}"
+ when: powerflex_tb_lia_create_credentials is defined
+
+- name: Set discovery request payload
+ ansible.builtin.set_fact:
+ discovery_request_payload:
+ discoveryRequestList:
+ DiscoverIPRangeDeviceRequest:
+ - deviceManagementSystemCredRef: "{{ powerflex_tb_lia_credential_id }}"
+ deviceManagementSystemIPAddress: "{{ powerflex_tb_device_management_system_ip_address }}"
+ deviceManagementSystemId: "{{ powerflex_tb_system_id }}"
+ deviceType: "powerflex_management_system"
+ hostName: "block-legacy-gateway"
+ unmanaged: false
+ reserved: false
+ serverPoolId: ""
+ autoConfigureAlerts: true
+ snmpConfigure: true
+ when: powerflex_tb_lia_credentials is defined
+
+- name: Create connect mdm cluster payload
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}:{{ port }}/api/V1/DiscoveryRequest"
+ validate_certs: "{{ validate_certs }}"
+ method: POST
+ headers:
+ Authorization: "Bearer {{ powerflex_tb_login_token_response.json }}"
+ Content-Type: "application/json"
+ body: "{{ discovery_request_payload | to_json }}"
+ status_code: 202
+ when: powerflex_tb_lia_credentials is defined
+ delegate_to: "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+ run_once: true
+ register: powerflex_tb_connect_mdm_cluster_output
+ changed_when: powerflex_tb_connect_mdm_cluster_output.status == 202
+
+- name: Extract Job ID from connect mdm cluster output
+ ansible.builtin.set_fact:
+ powerflex_tb_connect_mdm_cluster_job_id: "{{ powerflex_tb_connect_mdm_cluster_output.link | regex_search('Job-[a-zA-Z0-9-]+') }}"
+ when: powerflex_tb_connect_mdm_cluster_output is defined
+
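+# Poll the job status until it leaves IN_PROGRESS (up to 5 retries, 10 seconds apart);
+# the task fails if the job reports FAILED.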
+- name: Track job
+ register: powerflex_tb_connect_mdm_cluster_job_status
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}:{{ port }}/API/V1/JobHistory/{{ powerflex_tb_connect_mdm_cluster_job_id }}/status"
+ validate_certs: "{{ validate_certs }}"
+ method: GET
+ headers:
+ Authorization: "Bearer {{ powerflex_tb_login_token_response.json }}"
+ status_code: 200
+ run_once: true
+ retries: 5
+ delay: 10
+ delegate_to: "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+ when: powerflex_tb_connect_mdm_cluster_job_id is defined
+ until: powerflex_tb_connect_mdm_cluster_job_status.json != "IN_PROGRESS"
+ failed_when: powerflex_tb_connect_mdm_cluster_job_status.json == "FAILED"
+ changed_when: powerflex_tb_connect_mdm_cluster_job_status.json == "SUCCESSFUL"
+
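+# Retry the gateway /api/login call until it returns HTTP 200, confirming the gateway
+# API is reachable again once the cluster has been connected.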
+- name: Wait for API login call to be successful
+ register: powerflex_tb_api_login
+ ansible.builtin.uri:
+ url: "https://{{ hostname }}:{{ port }}/api/login"
+ user: "{{ username }}"
+ password: "{{ password }}"
+ validate_certs: "{{ validate_certs }}"
+ method: GET
+ delegate_to: "{{ lookup('ansible.builtin.env', 'RUNON', default='localhost') }}"
+ run_once: true
+ ignore_errors: true
+ retries: 5
+ delay: 10
+ until: powerflex_tb_api_login.status == 200
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb4x.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb4x.yml
index d34857ba4..9ae3bc1c1 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb4x.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/install_tb4x.yml
@@ -20,6 +20,10 @@
changed_when: powerflex_tb_login_output.rc == 0
delegate_to: "{{ powerflex_tb_mdm_primary_hostname }}"
+- name: Extract System ID
+ ansible.builtin.set_fact:
+ powerflex_tb_system_id: "{{ powerflex_tb_login_output.stdout | regex_search('System ID is (\\w+)', '\\1') | first }}"
+
- name: Add primary TB
ansible.builtin.command: >
scli --add_standby_mdm
@@ -67,3 +71,6 @@
changed_when: powerflex_tb_cluster_to_five_output.rc == 0
delegate_to: "{{ powerflex_tb_mdm_primary_hostname }}"
when: powerflex_tb_mdm_cluster_mode[0] != "5_node" and powerflex_tb_cluster_mode == "FiveNodes"
+
+- name: Connect MDM cluster
+ ansible.builtin.include_tasks: connect_mdm_cluster.yml
diff --git a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/set_tb_ips.yml b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/set_tb_ips.yml
index 34c0144d5..f2c534f77 100644
--- a/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/set_tb_ips.yml
+++ b/ansible_collections/dellemc/powerflex/roles/powerflex_tb/tasks/set_tb_ips.yml
@@ -11,19 +11,25 @@
powerflex_tb_mdm_secondary_ip: "{{ hostvars[groups['mdm'][1]]['ansible_host'] }}"
powerflex_tb_mdm_secondary_hostname: "{{ hostvars[groups['mdm'][1]]['inventory_hostname'] }}"
-- name: Set fact - powerflex_tb_mdm_tertiary_ip
- ansible.builtin.set_fact:
- powerflex_tb_mdm_tertiary_ip: "{{ hostvars[groups['tb'][2]]['ansible_host'] }}"
- powerflex_tb_mdm_tertiary_hostname: "{{ hostvars[groups['tb'][2]]['inventory_hostname'] }}"
- when: "powerflex_tb_mdm_count | int > 2"
-
- name: Set fact - powerflex_tb_primary
ansible.builtin.set_fact:
powerflex_tb_primary_ip: "{{ hostvars[groups['tb'][0]]['ansible_host'] }}"
powerflex_tb_primary_hostname: "{{ hostvars[groups['tb'][0]]['inventory_hostname'] }}"
+- name: Set fact - powerflex_tb_device_management_system_ip_address
+ ansible.builtin.set_fact:
+ powerflex_tb_device_management_system_ip_address: "{{ powerflex_tb_mdm_primary_ip }}, {{ powerflex_tb_mdm_secondary_ip }}, {{ powerflex_tb_primary_ip }}"
+
- name: Set fact - powerflex_tb_primary
ansible.builtin.set_fact:
powerflex_tb_secondary_ip: "{{ hostvars[groups['tb'][1]]['ansible_host'] }}"
powerflex_tb_secondary_hostname: "{{ hostvars[groups['tb'][1]]['inventory_hostname'] }}"
+ powerflex_tb_device_management_system_ip_address: "{{ powerflex_tb_device_management_system_ip_address }}, {{ powerflex_tb_secondary_ip }}"
when: "powerflex_tb_count | int > 1"
+
+- name: Set fact - powerflex_tb_mdm_tertiary_ip
+ ansible.builtin.set_fact:
+ powerflex_tb_mdm_tertiary_ip: "{{ hostvars[groups['tb'][2]]['ansible_host'] }}"
+ powerflex_tb_mdm_tertiary_hostname: "{{ hostvars[groups['tb'][2]]['inventory_hostname'] }}"
+ powerflex_tb_device_management_system_ip_address: "{{ powerflex_tb_device_management_system_ip_address }}, {{ powerflex_tb_mdm_tertiary_ip }}"
+ when: "powerflex_tb_mdm_count | int > 2"
diff --git a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.14.txt b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.14.txt
deleted file mode 100644
index 571402121..000000000
--- a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.14.txt
+++ /dev/null
@@ -1,45 +0,0 @@
-plugins/modules/sds.py import-2.7
-plugins/modules/sds.py import-3.5
-plugins/modules/sds.py compile-2.7
-plugins/modules/sds.py compile-3.5
-plugins/modules/info.py import-2.7
-plugins/modules/info.py import-3.5
-plugins/modules/info.py compile-2.7
-plugins/modules/fault_set.py import-2.7
-plugins/modules/fault_set.py import-3.5
-plugins/modules/fault_set.py compile-2.7
-plugins/modules/fault_set.py compile-3.5
-plugins/module_utils/storage/dell/libraries/configuration.py import-2.7
-plugins/module_utils/storage/dell/libraries/configuration.py import-3.5
-plugins/module_utils/storage/dell/libraries/configuration.py compile-2.7
-plugins/module_utils/storage/dell/libraries/configuration.py compile-3.5
-plugins/modules/device.py validate-modules:missing-gplv3-license
-plugins/modules/sdc.py validate-modules:missing-gplv3-license
-plugins/modules/sds.py validate-modules:missing-gplv3-license
-plugins/modules/snapshot.py validate-modules:missing-gplv3-license
-plugins/modules/storagepool.py validate-modules:missing-gplv3-license
-plugins/modules/volume.py validate-modules:missing-gplv3-license
-plugins/modules/info.py validate-modules:missing-gplv3-license
-plugins/modules/protection_domain.py validate-modules:missing-gplv3-license
-plugins/modules/mdm_cluster.py validate-modules:missing-gplv3-license
-plugins/modules/replication_consistency_group.py validate-modules:missing-gplv3-license
-plugins/modules/replication_pair.py validate-modules:missing-gplv3-license
-plugins/modules/snapshot_policy.py validate-modules:missing-gplv3-license
-plugins/modules/fault_set.py validate-modules:missing-gplv3-license
-plugins/modules/snapshot_policy.py compile-2.7
-plugins/modules/snapshot_policy.py compile-3.5
-plugins/modules/snapshot_policy.py import-2.7
-plugins/modules/snapshot_policy.py import-3.5
-plugins/modules/sdc.py import-2.7
-plugins/modules/sdc.py import-3.5
-plugins/modules/sdc.py compile-2.7
-plugins/modules/sdc.py compile-3.5
-tests/unit/plugins/module_utils/mock_device_api.py compile-2.7
-tests/unit/plugins/module_utils/mock_device_api.py compile-3.5
-plugins/modules/replication_consistency_group.py import-2.7
-plugins/modules/replication_consistency_group.py import-3.5
-plugins/modules/replication_consistency_group.py compile-2.7
-plugins/modules/replication_consistency_group.py compile-3.5
-plugins/modules/resource_group.py validate-modules:missing-gplv3-license
-plugins/modules/resource_group.py compile-2.7
-plugins/modules/resource_group.py import-2.7
diff --git a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.15.txt b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.15.txt
index 571402121..438f0a7bd 100644
--- a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.15.txt
+++ b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.15.txt
@@ -43,3 +43,5 @@ plugins/modules/replication_consistency_group.py compile-3.5
plugins/modules/resource_group.py validate-modules:missing-gplv3-license
plugins/modules/resource_group.py compile-2.7
plugins/modules/resource_group.py import-2.7
+plugins/modules/storagepool.py compile-2.7
+plugins/modules/storagepool.py import-2.7
diff --git a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.16.txt b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.16.txt
index 0dbde689c..5f24f9f25 100644
--- a/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.16.txt
+++ b/ansible_collections/dellemc/powerflex/tests/sanity/ignore-2.16.txt
@@ -29,3 +29,5 @@ plugins/modules/info.py import-2.7
plugins/modules/resource_group.py validate-modules:missing-gplv3-license
plugins/modules/resource_group.py compile-2.7
plugins/modules/resource_group.py import-2.7
+plugins/modules/storagepool.py compile-2.7
+plugins/modules/storagepool.py import-2.7
diff --git a/ansible_collections/dellemc/powerflex/tests/unit/plugins/module_utils/mock_storagepool_api.py b/ansible_collections/dellemc/powerflex/tests/unit/plugins/module_utils/mock_storagepool_api.py
index 87af1d6eb..3f0a89581 100644
--- a/ansible_collections/dellemc/powerflex/tests/unit/plugins/module_utils/mock_storagepool_api.py
+++ b/ansible_collections/dellemc/powerflex/tests/unit/plugins/module_utils/mock_storagepool_api.py
@@ -22,13 +22,44 @@ class MockStoragePoolApi:
"use_rmcache": None,
"use_rfcache": None,
"media_type": None,
+ "enable_zero_padding": None,
+ "rep_cap_max_ratio": None,
+ "rmcache_write_handling_mode": None,
+ "spare_percentage": None,
+ "enable_rebalance": None,
+ "enable_fragmentation": None,
+ "enable_rebuild": None,
+ "parallel_rebuild_rebalance_limit": None,
+ "cap_alert_thresholds": {
+ "high_threshold": 30,
+ "critical_threshold": 50
+ },
+ "protected_maintenance_mode_io_priority_policy": {
+ "policy": None,
+ "concurrent_ios_per_device": None,
+ "bw_limit_per_device": None
+ },
+ "rebalance_io_priority_policy": {
+ "policy": None,
+ "concurrent_ios_per_device": None,
+ "bw_limit_per_device": None
+ },
+ "vtree_migration_io_priority_policy": {
+ "policy": None,
+ "concurrent_ios_per_device": None,
+ "bw_limit_per_device": None},
+ "persistent_checksum": {
+ "enable": None,
+ "validate_on_read": None,
+ "builder_limit": None
+ },
'state': None
}
STORAGE_POOL_GET_LIST = [
{
- 'protectionDomainId': '4eeb304600000000',
- 'protectionDomainName': 'test_pd',
+ 'protectionDomainId': "7bd6457000000000",
+ 'protectionDomainName': "test_pd_1",
'rebuildEnabled': True,
'dataLayout': 'MediumGranularity',
'persistentChecksumState': 'Protected',
@@ -98,8 +129,8 @@ class MockStoragePoolApi:
STORAGE_POOL_GET_MULTI_LIST = [
{
- 'protectionDomainId': '4eeb304600000000',
- 'protectionDomainName': 'test_pd',
+ 'protectionDomainId': "7bd6457000000000",
+ 'protectionDomainName': "test_pd_1",
'rebuildEnabled': True,
'dataLayout': 'MediumGranularity',
'persistentChecksumState': 'Protected',
@@ -166,7 +197,7 @@ class MockStoragePoolApi:
'id': 'test_pool_id_1'
},
{
- 'protectionDomainId': '4eeb304600000002',
+ 'protectionDomainId': "7bd6457000000000",
'protectionDomainName': 'test_pd_1',
'rebuildEnabled': True,
'dataLayout': 'MediumGranularity',
@@ -239,6 +270,30 @@ class MockStoragePoolApi:
PROTECTION_DETAILS_1 = [{"id": "4eeb304600000001", "name": "test_pd_name"}]
+ PROTECTION_DOMAIN = {
+ "protectiondomain": [
+ {
+ "id": "7bd6457000000000",
+ "name": "test_pd_1",
+ "protectionDomainState": "Active",
+ "overallIoNetworkThrottlingInKbps": 20480,
+ "rebalanceNetworkThrottlingInKbps": 10240,
+ "rebuildNetworkThrottlingInKbps": 10240,
+ "vtreeMigrationNetworkThrottlingInKbps": 10240,
+ "rfcacheEnabled": "false",
+ "rfcacheMaxIoSizeKb": 128,
+ "rfcacheOpertionalMode": "None",
+ "rfcachePageSizeKb": 64,
+ "storagePools": [
+ {
+ "id": "8d1cba1700000000",
+ "name": "pool1"
+ }
+ ]
+ }
+ ]
+ }
+
STORAGE_POOL_STATISTICS = {
'backgroundScanFixedReadErrorCount': 0,
'pendingMovingOutBckRebuildJobs': 0,
@@ -616,10 +671,26 @@ class MockStoragePoolApi:
"get_multi_details": "More than one storage pool found",
"create_wo_pd": "Please provide protection domain details",
"create_transitional": "TRANSITIONAL media type is not supported during creation.",
- "create_pool_name_empty": "Empty or white spaced string provided in storage_pool_name.",
+ "create_pool_name_empty": "Empty or white spaced string provided for storage pool name. Provide valid storage pool name",
"create_pool_new_name": "storage_pool_new_name is passed during creation.",
- "rename_storage_pool_empty": "Empty/White spaced name is not allowed during renaming of a storage pool.",
- "delete_storage_pool": "Deleting storage pool is not supported through ansible module."
+ "rename_storage_pool_empty": "Empty or white spaced string provided for storage pool name. Provide valid storage pool name",
+ "delete_storage_pool": "Deleting storage pool is not supported through ansible module.",
+ "rename_pool": "Modify storage pool name failed",
+ "modify_pool_rmcache": "Modify RM cache operation failed",
+ "modify_pool_rfcache": "Modify RF cache operation failed",
+ "modify_pool_zero_padding_enabled": "Enable/Disable zero padding operation failed",
+ "modify_pool_rep_cap_max_ratio": "Modify Replication Capacity max ratio operation failed",
+ "modify_pool_enable_rebalance": "Enable/Disable Rebalance failed",
+ "modify_pool_enable_rebuild": "Enable/Disable Rebuild operation failed",
+ "modify_pool_enable_fragmentation": "Enable/Disable Fragmentation operation failed",
+ "modify_pool_spare_percentage": "Modify Spare Percentage operation failed",
+ "modify_pool_rmcache_write_handling_mode": "Modify RMCache Write Handling Mode failed",
+ "modify_pool_rebuild_rebalance_parallelism_limit": "Modify Rebuild/Rebalance Parallelism Limit operation failed",
+ "modify_pool_capacity_alert_thresholds": "Modify Capacity Alert Thresholds operation failed",
+ "modify_pool_protected_maintenance_mode_io_priority_policy": "Set Protected Maintenance Mode IO Priority Policy operation failed",
+ "modify_pool_vtree_migration_io_priority_policy": "Set Vtree Migration I/O Priority Policy operation failed",
+ "modify_pool_rebalance_io_priority_policy": "Modify rebalance IO Priority Policy failed",
+ "modify_pool_persistent_checksum": "Modify Persistent Checksum failed"
}
@staticmethod
diff --git a/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_mdm_cluster.py b/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_mdm_cluster.py
index f8f3cdc2f..653fcb298 100644
--- a/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_mdm_cluster.py
+++ b/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_mdm_cluster.py
@@ -1,4 +1,4 @@
-# Copyright: (c) 2022, Dell Technologies
+# Copyright: (c) 2024, Dell Technologies
# Apache License version 2.0 (see MODULE-LICENSE or http://www.apache.org/licenses/LICENSE-2.0.txt)
@@ -634,3 +634,145 @@ class TestPowerflexMDMCluster():
)
mdm_cluster_module_mock.perform_module_operation()
assert MockMdmClusterApi.new_name_add_mdm_failed_response() in mdm_cluster_module_mock.module.fail_json.call_args[1]['msg']
+
+ def test_change_cluster_mode(self, mdm_cluster_module_mock):
+ self.get_module_args.update({
+ "cluster_mode": "FiveNodes",
+ "mdm": [
+ {
+ "mdm_name": MockMdmClusterApi.MDM_NAME_STB_MGR,
+ "mdm_id": None,
+ "mdm_type": "Secondary"
+ },
+ {
+ "mdm_id": MockMdmClusterApi.STB_TB_MDM_ID,
+ "mdm_name": None,
+ "mdm_type": "TieBreaker"
+ }
+ ],
+ "mdm_state": "absent-in-cluster",
+ "state": "present"
+ })
+ mdm_cluster_module_mock.module.params = self.get_module_args
+ mdm_cluster_resp = MockSDKResponse(MockMdmClusterApi.THREE_MDM_CLUSTER_DETAILS)
+ mdm_cluster_module_mock.powerflex_conn.system.get_mdm_cluster_details = MagicMock(
+ return_value=mdm_cluster_resp.__dict__['data']
+ )
+ mdm_cluster_module_mock.validate_parameters = MagicMock(return_value=None)
+ mdm_cluster_module_mock.powerflex_conn.system.switch_cluster_mode = MagicMock()
+ mdm_cluster_module_mock.perform_module_operation()
+ assert mdm_cluster_module_mock.module.exit_json.call_args[1]['changed'] is True
+
+ def test_change_cluster_mode_with_name(self, mdm_cluster_module_mock):
+ self.get_module_args.update({
+ "cluster_mode": "FiveNodes",
+ "mdm": [
+ {
+ "mdm_name": MockMdmClusterApi.MDM_NAME_STB_MGR,
+ "mdm_id": MockMdmClusterApi.MDM_ID,
+ "mdm_type": "Secondary"
+ },
+ {
+ "mdm_id": MockMdmClusterApi.STB_TB_MDM_ID,
+ "mdm_name": MockMdmClusterApi.MDM_NAME,
+ "mdm_type": "TieBreaker"
+ }
+ ],
+ "mdm_state": "absent-in-cluster",
+ "state": "present"
+ })
+ mdm_cluster_module_mock.module.params = self.get_module_args
+ mdm_cluster_resp = MockSDKResponse(MockMdmClusterApi.THREE_MDM_CLUSTER_DETAILS)
+ mdm_cluster_module_mock.powerflex_conn.system.get_mdm_cluster_details = MagicMock(
+ return_value=mdm_cluster_resp.__dict__['data']
+ )
+ mdm_cluster_module_mock.validate_parameters = MagicMock(return_value=None)
+ mdm_cluster_module_mock.powerflex_conn.system.switch_cluster_mode = MagicMock()
+ mdm_cluster_module_mock.perform_module_operation()
+ assert mdm_cluster_module_mock.module.exit_json.call_args[1]['changed'] is True
+
+ def test_cluster_reduce_mode_absent(self, mdm_cluster_module_mock):
+ self.get_module_args.update({
+ "cluster_mode": "FiveNodes",
+ "mdm": [
+ {
+ "mdm_name": MockMdmClusterApi.MDM_NAME_STB_MGR,
+ "mdm_id": None,
+ "mdm_type": "Secondary"
+ },
+ {
+ "mdm_id": None,
+ "mdm_name": MockMdmClusterApi.MDM_NAME,
+ "mdm_type": "TieBreaker"
+ }
+ ],
+ "mdm_state": "absent-in-cluster",
+ "state": "present"
+ })
+ mdm_cluster_module_mock.module.params = self.get_module_args
+ mdm_cluster_resp = MockSDKResponse(MockMdmClusterApi.THREE_MDM_CLUSTER_DETAILS)
+ mdm_cluster_module_mock.powerflex_conn.system.get_mdm_cluster_details = MagicMock(
+ return_value=mdm_cluster_resp.__dict__['data']
+ )
+ mdm_cluster_module_mock.is_mdm_name_id_exists = MagicMock(return_value=None)
+ mdm_cluster_module_mock.validate_parameters = MagicMock(return_value=None)
+ mdm_cluster_module_mock.powerflex_conn.system.switch_cluster_mode = MagicMock()
+ mdm_cluster_module_mock.perform_module_operation()
+ assert mdm_cluster_module_mock.module.exit_json.call_args[1]['changed'] is True
+
+ def test_cluster_expand_list_tb(self, mdm_cluster_module_mock):
+ self.get_module_args.update({
+ "cluster_mode": "FiveNodes",
+ "mdm": [
+ {
+ "mdm_name": MockMdmClusterApi.MDM_NAME_STB_MGR,
+ "mdm_id": None,
+ "mdm_type": "Secondary"
+ },
+ {
+ "mdm_id": None,
+ "mdm_name": MockMdmClusterApi.MDM_NAME,
+ "mdm_type": "TieBreaker"
+ }
+ ],
+ "mdm_state": "present-in-cluster",
+ "state": "present"
+ })
+ mdm_cluster_module_mock.module.params = self.get_module_args
+ mdm_cluster_resp = MockSDKResponse(MockMdmClusterApi.THREE_MDM_CLUSTER_DETAILS)
+ mdm_cluster_module_mock.powerflex_conn.system.get_mdm_cluster_details = MagicMock(
+ return_value=mdm_cluster_resp.__dict__['data']
+ )
+ mdm_cluster_module_mock.validate_parameters = MagicMock(return_value=None)
+ mdm_cluster_module_mock.powerflex_conn.system.switch_cluster_mode = MagicMock()
+ mdm_cluster_module_mock.perform_module_operation()
+ assert mdm_cluster_module_mock.module.exit_json.call_args[1]['changed'] is True
+
+ def test_cluster_expand_list_tb_mdm_none(self, mdm_cluster_module_mock):
+ self.get_module_args.update({
+ "cluster_mode": "FiveNodes",
+ "mdm": [
+ {
+ "mdm_name": MockMdmClusterApi.MDM_NAME_STB_MGR,
+ "mdm_id": None,
+ "mdm_type": "Secondary"
+ },
+ {
+ "mdm_id": None,
+ "mdm_name": MockMdmClusterApi.MDM_NAME,
+ "mdm_type": "TieBreaker"
+ }
+ ],
+ "mdm_state": "present-in-cluster",
+ "state": "present"
+ })
+ mdm_cluster_module_mock.module.params = self.get_module_args
+ mdm_cluster_resp = MockSDKResponse(MockMdmClusterApi.THREE_MDM_CLUSTER_DETAILS)
+ mdm_cluster_module_mock.powerflex_conn.system.get_mdm_cluster_details = MagicMock(
+ return_value=mdm_cluster_resp.__dict__['data']
+ )
+ mdm_cluster_module_mock.is_mdm_name_id_exists = MagicMock(return_value=None)
+ mdm_cluster_module_mock.validate_parameters = MagicMock(return_value=None)
+ mdm_cluster_module_mock.powerflex_conn.system.switch_cluster_mode = MagicMock()
+ mdm_cluster_module_mock.perform_module_operation()
+ assert mdm_cluster_module_mock.module.exit_json.call_args[1]['changed'] is True
diff --git a/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_storagepool.py b/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_storagepool.py
index 6780ed7ad..c60e1fd01 100644
--- a/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_storagepool.py
+++ b/ansible_collections/dellemc/powerflex/tests/unit/plugins/modules/test_storagepool.py
@@ -4,29 +4,26 @@
"""Unit Tests for storage pool module on PowerFlex"""
+
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
-
import pytest
-from mock.mock import MagicMock
# pylint: disable=unused-import
from ansible_collections.dellemc.powerflex.tests.unit.plugins.module_utils.libraries import initial_mock
-from ansible_collections.dellemc.powerflex.tests.unit.plugins.module_utils.mock_storagepool_api import MockStoragePoolApi
+from mock.mock import MagicMock
+from ansible_collections.dellemc.powerflex.tests.unit.plugins.module_utils.mock_storagepool_api \
+ import MockStoragePoolApi
from ansible_collections.dellemc.powerflex.tests.unit.plugins.module_utils.mock_api_exception \
import MockApiException
-from ansible_collections.dellemc.powerflex.plugins.module_utils.storage.dell \
- import utils
from ansible_collections.dellemc.powerflex.tests.unit.plugins.module_utils.libraries.powerflex_unit_base \
import PowerFlexUnitBase
-
-utils.get_logger = MagicMock()
-utils.get_powerflex_gateway_host_connection = MagicMock()
-utils.PowerFlexClient = MagicMock()
-
-from ansible.module_utils import basic
-basic.AnsibleModule = MagicMock()
-from ansible_collections.dellemc.powerflex.plugins.modules.storagepool import PowerFlexStoragePool
+from ansible_collections.dellemc.powerflex.plugins.modules.storagepool \
+ import PowerFlexStoragePool
+from ansible_collections.dellemc.powerflex.plugins.module_utils.storage.dell \
+ import utils
+from ansible_collections.dellemc.powerflex.plugins.modules.storagepool import \
+ StoragePoolHandler
class TestPowerflexStoragePool(PowerFlexUnitBase):
@@ -37,23 +34,22 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
def module_object(self):
return PowerFlexStoragePool
- def test_get_storagepool_details(self, powerflex_module_mock):
- self.get_module_args.update({
- "storage_pool_name": "test_pool",
- "state": "present"
- })
- powerflex_module_mock.module.params = self.get_module_args
- storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ def test_get_storage_pool_response(self, powerflex_module_mock):
+ self.set_module_params(
+ powerflex_module_mock,
+ self.get_module_args,
+ {
+ "storage_pool_name": "test_pool",
+ "state": "present"
+ })
powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
- return_value=storagepool_resp
- )
- storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
- powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
- return_value=storagepool_statistics_resp
- )
- powerflex_module_mock.perform_module_operation()
+ return_value=MockStoragePoolApi.STORAGE_POOL_GET_LIST)
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ StoragePoolHandler().handle(
+ powerflex_module_mock, powerflex_module_mock.module.params)
powerflex_module_mock.powerflex_conn.storage_pool.get.assert_called()
- powerflex_module_mock.powerflex_conn.storage_pool.get_statistics.assert_called()
def test_get_storagepool_details_multi(self, powerflex_module_mock):
self.get_module_args.update({
@@ -71,7 +67,7 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('get_multi_details'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_get_storagepool_details_with_exception(self, powerflex_module_mock):
self.get_module_args.update({
@@ -88,50 +84,7 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
powerflex_module_mock.create_storage_pool = MagicMock(return_value=None)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('get_details'),
- powerflex_module_mock, invoke_perform_module=True)
-
- @pytest.mark.parametrize("params", [
- {"pd_id": "4eeb304600000000"},
- {"pd_name": "test"},
- ])
- def test_get_protection_domain(self, powerflex_module_mock, params):
- pd_id = params.get("pd_id", None)
- pd_name = params.get("pd_name", None)
- powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
- return_value=MockStoragePoolApi.PROTECTION_DETAILS
- )
- pd_details = powerflex_module_mock.get_protection_domain(pd_name, pd_id)
- assert MockStoragePoolApi.PROTECTION_DETAILS[0] == pd_details
-
- def test_get_protection_domain_exception(self, powerflex_module_mock):
- self.set_module_params(
- powerflex_module_mock,
- self.get_module_args,
- {
- "storage_pool_name": "test_pool",
- "protection_domain_id": "4eeb304600000001",
- "state": "present"
- })
- powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
- side_effect=MockApiException)
- self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('get_pd_exception'),
- powerflex_module_mock, invoke_perform_module=True)
-
- def test_get_protection_domain_non_exist(self, powerflex_module_mock):
- self.set_module_params(
- powerflex_module_mock,
- self.get_module_args,
- {
- "storage_pool_name": "test_pool",
- "protection_domain_id": "4eeb304600000001",
- "state": "present"
- })
- powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
- return_value=None)
- self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('get_pd_non_exist'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_get_storagepool_details_with_invalid_pd_id(self, powerflex_module_mock):
self.get_module_args.update({
@@ -152,78 +105,33 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('invalid_pd_id'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_create_storagepool_response(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": "test_pool",
- "protection_domain_name": "test_pd_name",
+ "protection_domain_name": "test_pd_1",
"media_type": "HDD",
"state": "present"
})
powerflex_module_mock.module.params = self.get_module_args
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
- return_value=MockStoragePoolApi.PROTECTION_DETAILS_1)
+ return_value=pd_resp['protectiondomain'])
powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
return_value=[]
)
powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
return_value=[]
)
- powerflex_module_mock.powerflex_conn.storage_pool.create = MagicMock(
- return_value=None
- )
- resp = powerflex_module_mock.create_storage_pool(pool_name="test_pool",
- pd_id=MockStoragePoolApi.PROTECTION_DETAILS_1[0]['id'],
- media_type="HDD")
- assert resp is True
+ StoragePoolHandler().handle(
+ powerflex_module_mock, powerflex_module_mock.module.params)
powerflex_module_mock.powerflex_conn.storage_pool.create.assert_called()
- def test_create_storagepool_only_pool_id(self, powerflex_module_mock):
- self.get_module_args.update({
- "storage_pool_id": "test_pool_id",
- "protection_domain_name": "test_pd_name",
- "media_type": "HDD",
- "state": "present"
- })
- powerflex_module_mock.module.params = self.get_module_args
- powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
- return_value=MockStoragePoolApi.PROTECTION_DETAILS_1)
- powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
- return_value=[]
- )
- powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
- return_value=[]
- )
- self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('create_pool_id'),
- powerflex_module_mock, invoke_perform_module=True)
-
- def test_create_storagepool_new_name(self, powerflex_module_mock):
- self.get_module_args.update({
- "storage_pool_name": "test_pool",
- "storage_pool_new_name": "pool_new_name",
- "protection_domain_name": "test_pd_name",
- "media_type": "HDD",
- "state": "present"
- })
- powerflex_module_mock.module.params = self.get_module_args
- powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
- return_value=MockStoragePoolApi.PROTECTION_DETAILS_1)
- powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
- return_value=[]
- )
- powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
- return_value=[]
- )
- self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('create_pool_new_name'),
- powerflex_module_mock, invoke_perform_module=True)
-
def test_create_storagepool_empty_name(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": " ",
- "protection_domain_name": "test_pd_name",
+ "protection_domain_name": "test_pd_1",
"media_type": "HDD",
"state": "present"
})
@@ -232,7 +140,7 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
return_value=MockStoragePoolApi.PROTECTION_DETAILS_1)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('create_pool_name_empty'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_create_storagepool_wo_pd(self, powerflex_module_mock):
self.get_module_args.update({
@@ -251,12 +159,12 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('create_wo_pd'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_create_storagepool_transitional_exception(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": "test_pool",
- "protection_domain_name": "test_pd_name",
+ "protection_domain_name": "test_pd_1",
"media_type": "TRANSITIONAL",
"state": "present"
})
@@ -274,7 +182,7 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('create_transitional'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_create_storagepool_exception(self, powerflex_module_mock):
self.get_module_args.update({
@@ -297,15 +205,43 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
)
self.capture_fail_json_call(
MockStoragePoolApi.get_exception_response('create_storage_pool'),
- powerflex_module_mock, invoke_perform_module=True)
+ powerflex_module_mock, StoragePoolHandler)
def test_modify_storagepool_details(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
"storage_pool_new_name": "new_ansible_pool",
"use_rfcache": True,
"use_rmcache": True,
- "media_type": "TRANSITIONAL",
+ "cap_alert_thresholds": {
+ "high_threshold": 30,
+ "critical_threshold": 50
+ },
+ "enable_zero_padding": True,
+ "rep_cap_max_ratio": 40,
+ "rmcache_write_handling_mode": "Passthrough",
+ "spare_percentage": 80,
+ "enable_rebalance": False,
+ "enable_fragmentation": False,
+ "enable_rebuild": False,
+ "parallel_rebuild_rebalance_limit": 3,
+ "protected_maintenance_mode_io_priority_policy": {
+ "policy": "unlimited",
+ "concurrent_ios_per_device": 1,
+ "bw_limit_per_device": 1024},
+ "rebalance_io_priority_policy": {
+ "policy": "limitNumOfConcurrentIos",
+ "concurrent_ios_per_device": 10,
+ "bw_limit_per_device": 1024},
+ "vtree_migration_io_priority_policy": {
+ "policy": "limitNumOfConcurrentIos",
+ "concurrent_ios_per_device": 10,
+ "bw_limit_per_device": 1024},
+ "persistent_checksum": {
+ "enable": True,
+ "validate_on_read": True,
+ "builder_limit": 1024},
"state": "present"
})
powerflex_module_mock.module.params = self.get_module_args
@@ -313,20 +249,58 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
return_value=storagepool_resp
)
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
return_value=storagepool_statistics_resp
)
- powerflex_module_mock.perform_module_operation()
+ StoragePoolHandler().handle(
+ powerflex_module_mock, powerflex_module_mock.module.params)
powerflex_module_mock.powerflex_conn.storage_pool.rename.assert_called()
powerflex_module_mock.powerflex_conn.storage_pool.set_use_rmcache.assert_called()
powerflex_module_mock.powerflex_conn.storage_pool.set_use_rfcache.assert_called()
- powerflex_module_mock.powerflex_conn.storage_pool.set_media_type.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_fragmentation_enabled.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_persistent_checksum.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rebuild_rebalance_parallelism_limit.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rmcache_write_handling_mode.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.rebalance_io_priority_policy.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_vtree_migration_io_priority_policy.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_protected_maintenance_mode_io_priority_policy.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_cap_alert_thresholds.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_zero_padding_policy.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_spare_percentage.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rebuild_enabled.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rebalance_enabled.assert_called()
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rep_cap_max_ratio.assert_called()
- def test_rename_storagepool_exception(self, powerflex_module_mock):
+ def test_delete_storagepool_exception(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": "test_pool",
- "storage_pool_new_name": "new_ansible_pool",
+ "state": "absent"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('delete_storage_pool'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_name_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "storage_pool_new_name": "test_pool_new",
"state": "present"
})
powerflex_module_mock.module.params = self.get_module_args
@@ -334,6 +308,9 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
return_value=storagepool_resp
)
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
return_value=storagepool_statistics_resp
@@ -342,13 +319,14 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
side_effect=MockApiException
)
self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('rename_storage_pool'),
- powerflex_module_mock, invoke_perform_module=True)
+ MockStoragePoolApi.get_exception_response('rename_pool'),
+ powerflex_module_mock, StoragePoolHandler)
- def test_rename_storagepool_empty_exception(self, powerflex_module_mock):
+ def test_modify_rmcache_exception(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": "test_pool",
- "storage_pool_new_name": " ",
+ "protection_domain_name": "test_pd_1",
+ "use_rmcahe": True,
"state": "present"
})
powerflex_module_mock.module.params = self.get_module_args
@@ -356,28 +334,395 @@ class TestPowerflexStoragePool(PowerFlexUnitBase):
powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
return_value=storagepool_resp
)
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
return_value=storagepool_statistics_resp
)
+ powerflex_module_mock.powerflex_conn.storage_pool.set_use_rmcache = MagicMock(
+ side_effect=MockApiException
+ )
self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('rename_storage_pool_empty'),
- powerflex_module_mock, invoke_perform_module=True)
+ MockStoragePoolApi.get_exception_response('modify_pool_rmcache'),
+ powerflex_module_mock, StoragePoolHandler)
- def test_delete_storagepool_exception(self, powerflex_module_mock):
+ def test_modify_rfcache_exception(self, powerflex_module_mock):
self.get_module_args.update({
"storage_pool_name": "test_pool",
- "state": "absent"
+ "protection_domain_name": "test_pd_1",
+ "use_rfcahe": True,
+ "state": "present"
})
powerflex_module_mock.module.params = self.get_module_args
storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
return_value=storagepool_resp
)
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
return_value=storagepool_statistics_resp
)
+ powerflex_module_mock.powerflex_conn.storage_pool.set_use_rfcache = MagicMock(
+ side_effect=MockApiException
+ )
self.capture_fail_json_call(
- MockStoragePoolApi.get_exception_response('delete_storage_pool'),
- powerflex_module_mock, invoke_perform_module=True)
+ MockStoragePoolApi.get_exception_response('modify_pool_rfcache'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_enable_zero_padding_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "enable_zero_padding": False,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_zero_padding_policy = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_enable_zero_padding'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_rep_cap_max_ratio_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "rep_cap_max_ratio": 10,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rep_cap_max_ratio = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_rep_cap_max_ratio'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_enable_rebalance_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "enable_rebalance": False,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rebalance_enabled = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_enable_rebalance'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_enable_rebuild_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "enable_rebuild": False,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rebuild_enabled = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_enable_rebuild'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_enable_fragmentation_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "enable_fragmentaion": False,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_fragmentation_enabled = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_enable_fragmentation'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_spare_percentage_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "spare_percentage": 20,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_spare_percentage = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_spare_percentage'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_rmcache_write_handling_mode_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "rmcache_write_handling_mode": "Cached",
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rmcache_write_handling_mode = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_rmcache_write_handling_mode'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_rebuild_rebalance_parallelism_limit_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "parallel_rebuild_rebalance_limit": 4,
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_rebuild_rebalance_parallelism_limit = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_rebuild_rebalance_parallelism_limit'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_capacity_alert_thresholds_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "capacity_alert_thresholds": {
+ "high_threshold": 60,
+ "critical_threshold": 70
+ },
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_capacity_alert_thresholds = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_capacity_alert_thresholds'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_protected_maintenance_mode_io_priority_policy_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "protected_maintenance_mode_io_priority_policy": {
+ "policy": "unlimited",
+ "concurrent_ios_per_device": 1,
+ "bw_limit_per_device": 1024},
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_protected_maintenance_mode_io_priority_policy = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_protected_maintenance_mode_io_priority_policy'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_vtree_migration_io_priority_policy_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "vtree_migration_io_priority_policy": {
+ "policy": "favorAppIos",
+ "concurrent_ios_per_device": 1,
+ "bw_limit_per_device": 1024},
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_vtree_migration_io_priority_policy = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_vtree_migration_io_priority_policy'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_rebalance_io_priority_policy_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "rebalance_io_priority_policy": {
+ "policy": "favorAppIos",
+ "concurrent_ios_per_device": 1,
+ "bw_limit_per_device": 1024},
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.rebalance_io_priority_policy = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_rebalance_io_priority_policy'),
+ powerflex_module_mock, StoragePoolHandler)
+
+ def test_modify_persistent_checksum_exception(self, powerflex_module_mock):
+ self.get_module_args.update({
+ "storage_pool_name": "test_pool",
+ "protection_domain_name": "test_pd_1",
+ "persistent_checksum": {
+ "enable": True,
+ "validate_on_read": True,
+ "builder_limit": 1024},
+ "state": "present"
+ })
+ powerflex_module_mock.module.params = self.get_module_args
+ storagepool_resp = MockStoragePoolApi.STORAGE_POOL_GET_LIST
+ powerflex_module_mock.powerflex_conn.storage_pool.get = MagicMock(
+ return_value=storagepool_resp
+ )
+ pd_resp = MockStoragePoolApi.PROTECTION_DOMAIN
+ powerflex_module_mock.powerflex_conn.protection_domain.get = MagicMock(
+ return_value=pd_resp['protectiondomain'])
+ storagepool_statistics_resp = MockStoragePoolApi.STORAGE_POOL_STATISTICS
+ powerflex_module_mock.powerflex_conn.storage_pool.get_statistics = MagicMock(
+ return_value=storagepool_statistics_resp
+ )
+ powerflex_module_mock.powerflex_conn.storage_pool.set_persistent_checksum = MagicMock(
+ side_effect=MockApiException
+ )
+ self.capture_fail_json_call(
+ MockStoragePoolApi.get_exception_response('modify_pool_persistent_checksum'),
+ powerflex_module_mock, StoragePoolHandler)
diff --git a/ansible_collections/ieisystem/inmanage/.github/settings.yml b/ansible_collections/ieisystem/inmanage/.github/settings.yml
new file mode 100644
index 000000000..b27b575f0
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/.github/settings.yml
@@ -0,0 +1,5 @@
+# DO NOT MODIFY
+
+# Settings: https://probot.github.io/apps/settings/
+# Pull settings from https://github.com/ansible-collections/.github/blob/master/.github/settings.yml
+_extends: ".github"
diff --git a/ansible_collections/ieisystem/inmanage/.github/workflows/ansible-test.yml b/ansible_collections/ieisystem/inmanage/.github/workflows/ansible-test.yml
new file mode 100644
index 000000000..52759559b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/.github/workflows/ansible-test.yml
@@ -0,0 +1,54 @@
+name: CI
+on:
+ pull_request:
+ push:
+ # Run CI once per day (at 06:00 UTC)
+ # This ensures that, even if there haven't been commits, we are still testing against the latest version of ansible-test for each ansible-base version
+ schedule:
+ - cron: '0 6 * * *'
+
+jobs:
+ sanity:
+ name: Sanity (${{ matrix.ansible }})
+ strategy:
+ matrix:
+ ansible:
+ - stable-2.14
+ - stable-2.15
+ - stable-2.16
+ - stable-2.17
+ - devel
+ python-version:
+ - '3.9'
+ - '3.10'
+ - '3.11'
+ - '3.12'
+ exclude:
+ - ansible: stable-2.14
+ python-version: '3.12'
+ - ansible: stable-2.15
+ python-version: '3.12'
+ - ansible: stable-2.16
+ python-version: '3.9'
+ - ansible: stable-2.17
+ python-version: '3.9'
+ - ansible: devel
+ python-version: '3.9'
+ runs-on: ubuntu-latest
+ steps:
+
+ - name: Check out code
+ uses: actions/checkout@v1
+ with:
+ path: ansible_collections/ieisystem/inmanage
+
+ - name: Set up Python (${{ matrix.python-version }})
+ uses: actions/setup-python@v1
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install ansible-base (${{ matrix.ansible }})
+ run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible }}.tar.gz --disable-pip-version-check
+
+ - name: Run sanity tests
+ run: ansible-test sanity --docker -v --color
diff --git a/ansible_collections/ieisystem/inmanage/CHANGELOG.rst b/ansible_collections/ieisystem/inmanage/CHANGELOG.rst
new file mode 100644
index 000000000..784876c36
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/CHANGELOG.rst
@@ -0,0 +1,200 @@
+====================================================
+CHANGE THIS IN changelogs/config.yaml! Release Notes
+====================================================
+
+.. contents:: Topics
+
+
+v1.2.1
+======
+
+Bugfixes
+--------
+
+- Change the example ``gather_facts: no`` to ``false`` (https://github.com/ieisystem/ieisystem.inmanage/pull/18).
+- Delete the unwanted links.yml file (https://github.com/ieisystem/ieisystem.inmanage/pull/19).
+
+v1.2.0
+======
+
+Removed Features (previously deprecated)
+----------------------------------------
+
+- add_ldisk - Delete the ``add_ldisk.info`` parameter. Use ``ieisystem.inmanage.pdisk_info`` instead (https://github.com/ieisystem/ieisystem.inmanage/pull/15).
+- edit_ldisk - Delete the ``edit_ldisk.info`` parameter. Use ``ieisystem.inmanage.ldisk_info`` instead (https://github.com/ieisystem/ieisystem.inmanage/pull/15).
+- edit_pdisk - Delete the ``edit_pdisk.info`` parameter. Use ``ieisystem.inmanage.pdisk_info`` instead (https://github.com/ieisystem/ieisystem.inmanage/pull/15).
+
+Bugfixes
+--------
+
+- Add Ansible 2.17 tests to our CI test scripts.
+- Delete the unnecessary __init__.py files in modules and module_utils.
+- Modify the add_ldisk, edit_ldisk, and edit_pdisk module.rst documents (https://github.com/ieisystem/ieisystem.inmanage/pull/16).
+
+v1.1.1
+======
+
+Minor Changes
+-------------
+
+- Fix the capitalization of the first letter in the edit_alert_policy module description.
+- Fix typos and grammatical errors in the documentation.
+- Update the README.md information about installing supported modules.
+
+v1.1.0
+======
+
+Minor Changes
+-------------
+
+- Fix issues in the modules that do not meet the Ansible module format and documentation requirements.
+- Modify the CI test file to add Ansible 2.16 and Python 3.12 tests.
+- Modify the collection description in the README.md file.
+
+v1.0.0
+======
+
+Major Changes
+-------------
+
+- Add all modules.
+
+Minor Changes
+-------------
+
+- Change the referenced SDK name to inManage.
+- Change the value of tags in galaxy.yml.
+- Change the version to 1.0.0.
+- Modify the release plan link in the README file.
+
+New Modules
+-----------
+
+- ad_group - Manage active directory group information
+- ad_group_info - Get active directory group information
+- ad_info - Get active directory information
+- adapter_info - Get adapter information
+- add_ldisk - Create logical disk
+- alert_policy_info - Get alert policy
+- audit_log_info - Get BMC audit log information
+- auto_capture_info - Get auto capture screen information
+- backplane_info - Get disk backplane information
+- backup - Backup server settings
+- bios_export - Export BIOS config
+- bios_import - Import BIOS config
+- bios_info - Get BIOS setup
+- bmc_info - Get BMC information
+- boot_image_info - Get bmc boot image information
+- boot_option_info - Get BIOS boot options
+- clear_audit_log - Clear BMC audit log
+- clear_event_log - Clear event log
+- clear_system_log - Clear BMC system log
+- collect_blackbox - Collect blackbox log
+- collect_log - Collect logs
+- connect_media_info - Get remote images redirection information
+- cpu_info - Get CPU information
+- del_session - Delete session
+- dns_info - Get dns information
+- download_auto_screenshot - Download auto screenshots
+- download_manual_screenshot - Download manual screenshots
+- edit_ad - Set active directory information
+- edit_alert_policy - Set alert policy
+- edit_auto_capture - Set auto capture screen
+- edit_bios - Set BIOS setup attributes
+- edit_boot_image - Set bmc boot image
+- edit_boot_option - Set BIOS boot options
+- edit_connect_media - Start/Stop virtual media Image
+- edit_dns - Set dns information
+- edit_event_log_policy - Set event log policy
+- edit_fan - Set fan information
+- edit_fru - Set fru settings
+- edit_ipv4 - Set ipv4 information
+- edit_ipv6 - Set ipv6 information
+- edit_kvm - Set KVM
+- edit_ldap - Set ldap information
+- edit_ldisk - Set logical disk
+- edit_log_setting - Set bmc system and audit log setting
+- edit_m6_log_setting - Set bmc system and audit log setting
+- edit_manual_capture - Set manual capture screen
+- edit_media_instance - Set Virtual Media Instance
+- edit_ncsi - Set ncsi information
+- edit_network - Set network information
+- edit_network_bond - Set network bond
+- edit_network_link - Set network link
+- edit_ntp - Set NTP
+- edit_pdisk - Set physical disk
+- edit_power_budget - Set power budget information
+- edit_power_restore - Set power restore information
+- edit_power_status - Set power status information
+- edit_preserve_config - Set preserve config
+- edit_psu_config - Set psu config information
+- edit_psu_peak - Set psu peak information
+- edit_restore_factory_default - Set preserve config
+- edit_service - Set service settings
+- edit_smtp - Set SMTP information
+- edit_smtp_com - Set SMTP information
+- edit_smtp_dest - Set SMTP information
+- edit_snmp - Set snmp
+- edit_snmp_trap - Set snmp trap
+- edit_threshold - Set threshold information
+- edit_uid - Set UID
+- edit_virtual_media - Set virtual media
+- edit_vlan - Set vlan information
+- event_log_info - Get event log information
+- event_log_policy_info - Get event log policy information
+- fan_info - Get fan information
+- fru_info - Get fru information
+- fw_version_info - Get firmware version information
+- gpu_info - Get GPU information
+- hard_disk_info - Get hard disk information
+- hba_info - Get HBA information
+- kvm_info - Get KVM information
+- ldap_group - Manage ldap group information
+- ldap_group_info - Get ldap group information
+- ldap_info - Get ldap information
+- ldisk_info - Get logical disks information
+- log_setting_info - Get bmc log setting information
+- media_instance_info - Get Virtual Media Instance information
+- mem_info - Get memory information
+- ncsi_info - Get ncsi information
+- network_bond_info - Get network bond information
+- network_info - Get network information
+- network_link_info - Get network link information
+- ntp_info - Get NTP information
+- onboard_disk_info - Get onboard disks information
+- pcie_info - Get PCIE information
+- pdisk_info - Get physical disks information
+- power_budget_info - Get power budget information
+- power_consumption_info - Get power consumption information
+- power_restore_info - Get power restore information
+- power_status_info - Get power status information
+- preserve_config_info - Get preserve config information
+- psu_config_info - Get psu config information
+- psu_info - Get psu information
+- psu_peak_info - Get psu peak information
+- raid_info - Get RAID/HBA card and controller information
+- reset_bmc - BMC reset
+- reset_kvm - KVM reset
+- restore - Restore server settings
+- self_test_info - Get self test information
+- sensor_info - Get sensor information
+- server_info - Get server status information
+- service_info - Get service information
+- session_info - Get online session information
+- smtp_info - Get SMTP information
+- snmp_info - Get snmp get/set information
+- snmp_trap_info - Get snmp trap information
+- support_info - Get support information
+- system_log_info - Get BMC system log information
+- temp_info - Get temp information
+- threshold_info - Get threshold information
+- uid_info - Get UID information
+- update_cpld - Update CPLD
+- update_fw - Update firmware
+- update_psu - Update PSU
+- user - Manage user
+- user_group - Manage user group
+- user_group_info - Get user group information
+- user_info - Get user information
+- virtual_media_info - Get Virtual Media information
+- volt_info - Get volt information
diff --git a/ansible_collections/ieisystem/inmanage/CODE_OF_CONDUCT.md b/ansible_collections/ieisystem/inmanage/CODE_OF_CONDUCT.md
new file mode 100644
index 000000000..0164155b8
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/CODE_OF_CONDUCT.md
@@ -0,0 +1,3 @@
+# Community Code of Conduct
+
+Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
diff --git a/ansible_collections/ieisystem/inmanage/FILES.json b/ansible_collections/ieisystem/inmanage/FILES.json
new file mode 100644
index 000000000..cae971560
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/FILES.json
@@ -0,0 +1,1972 @@
+{
+ "files": [
+ {
+ "name": ".",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/inmanage.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "674d33cbdf8ddfd68b4884adefa1d4ec6e4e8f8565c82516db536148f1e63080",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/volt_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "35e11277637a095fddc146c0cd09e2328e1d23adeacd4c82e11ce25bcb2a6a97",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/virtual_media_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ea6f4a789bb7c0aac75fe40423bdc21aff07566934cc4aca5d557d850069253f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/user_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c83c1dbd00c068c5693d84e64a56cb1267d6e4c8c015751a2cdf579d5cac2c9d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/user_group_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "467200e70a20631b08a1c6849b937ccd455a85f425705def7db6e5cef5c537ca",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/user_group.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8455a0e4f76ca807a25283a35d13108a6817554a51b9ffa7e909ea4578e6406e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/user.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a1bf384485164ac2ea5a12362a8cf009ec6c08796d107af9fe19b9ad46a0917e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/update_psu.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "27a01340519e2cab4aa42bb6ad64b7c48d423294b94e82285cd423a9bb034f7c",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/update_fw.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2d7725050206c34d1bc96c06d9424c65f5a06bb5db50daad9605445ad4508c06",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/update_cpld.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3375918f14014feb55a40eaedbdb3ab843a68eb37b2a2426605e879e87811175",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/uid_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "34b6b217d5f0b8f88b4ada18532a46ce8375b25e05e0de16ce761b781a5b3869",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/threshold_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "edc36a4d148ac2a54cbbb7954407b5e92cb32262bbe48222f4be520a19b7660d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/temp_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e479345ca0dd5b7bb6a622c176d7ac944fd2198fb9f9faa46b440e992ad85af1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/system_log_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b3ea90af4d763f9fc2e6b247bfc53e28569eb0c21b881d46a644d10812b01d9b",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/support_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e5f426d4843b13007778981662892b07b513166b67af978115eeba1e0dbdd084",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/snmp_trap_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "39c509d2045924b661be86c8dc2063a07bfd69dd6e7ed1c9e26dcaf278a397a8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/snmp_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8e5391275302a9db5e816516173ca06d44df57866a833f7387f2e0a7af639169",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/smtp_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a5cd04d75040bbfc5f228bdde374754a6065ec23a0be6ab4acabcfb4e0df8cdb",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/session_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2fff989fe31bd8748c753defa873c0b689bedd71aa0269bf307729e3f6b34032",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/service_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d6da3c3b34cf7df4441c4c897cbf582f58b3a85e68d97624b121d581d3260b3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/server_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "be394d0394a5522e84d49c8ca91154e02df6fecb52be08e085f7880505bb5852",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/sensor_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8c7b38763bdc27709f19d4f5eb6b2f8bf4f0620675e599ae889366cc91f2e534",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/self_test_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6fc2aad1286b56f1242809b8ba9d3a8a558ef49ad0885545daab8713eac47490",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/restore.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2c587ddb2cbaf9318340840265439f155d8cbd3fe9a3e6f3ca2aba80206908e5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/reset_kvm.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4150e999b3606aca59d4925811b363be38d3d78e58def3ef14db2b3a17517adb",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/reset_bmc.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9f901152036ae56ec18f74efc145a2b3961631ca30c10ed0334d5222b03b5280",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/raid_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5736d8a65443b13041417951214b9f5e7e1945dd428d48b7c4eb15a37535938a",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/psu_peak_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "86a388c6a9489d4fe2e7b1e0994d12097caa82648ee839507fc8157fb67aecf2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/psu_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e88f3c5bfb01e551de7c6dc582d2c493a44a17c6d18e680229ee3c031174d540",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/psu_config_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5301dbc916dec77d67c7fbc0437b20a1d76e8b18e8e6ea6906dd2977dfd39e3f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/preserve_config_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "470b56e39bff7ae0093963e83795c55aee7390414e4fce70207035ad61a396ba",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/power_status_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "42ea84d4402dc768cbb2a015ba5ab47325e8596154047de524c962cf595a1f5d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/power_restore_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cd82b86d6e69710f1a139a1c747749d4d8d1cd27921e023c86d44596384c19b3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/power_consumption_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "29b9b7114adebf19f0ed3f952b607a9eae1dd9d039b8b98510c81153938b8dd7",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/power_budget_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cdcd0edd62261d7476b71f8242d48672da7c5bcdaea14f0786ccf800d93a18dc",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/pdisk_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "934ff4333ac7e6785a65348cde7a57bad2b0f4b0263cf199c15919b6e0c68cde",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/pcie_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "34cf2c3417cd4c8136713d2507d8de0422f8c8b49867baea947e7a78eb5dc36d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/onboard_disk_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ae9d2174c1fbf2b68e6473e6be7a72309418a110e6ba7a47566596d405449eb2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ntp_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d7db8c45d27248471e38278e458df9ce0bec12b8436a9c2f2768aa7ce634bac1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_link_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "28913f0873c96943cc0899e890275e4c1db0fd16b2c54ddb6d1f3437a5b3e2a2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72d70839dbd18c48283e26c35ed9082d7096f7afb33a19618d8506667b9f8b28",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/network_bond_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "39db41e6b0db5dc17650784c43de783d9e93e0bce7cf02f64838e6a7acc751b1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ncsi_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8dd66cc2dcae0a32d801d9ec9ce8c29dcd5e0bd0fab471204f16260c6b9a8ff1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/mem_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e6ef4d7c3abd7c18d7d854dfa1d9d3b5ecbdb014d5ce23d336090570dc0980ec",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/media_instance_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "185f571ea14cac6182b973aea4cd2cf617b2b3dbf62058305cd533f6a3455712",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/log_setting_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79b63d4b934304595b4676b9feec9b10d637e6fb131b67404cbb5e667096b695",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ldisk_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "53d93e9998aa7613a21d52023e94511cdfef45275d576b4221757431532a5bf7",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ldap_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ae485f5dd2bc147c4925b54a8152370bcbbe551d60a59955c7a62517e7b69254",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ldap_group_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "29b3145d96d156110105e11b19b6a738ae098285ba10f1897cb183ec1336f98f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ldap_group.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0424370cf3e5d9e99c9845bc74b5eaaa9c6acb1664601709171e7bdcd95d0ea5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/kvm_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9c4785c813855398f6966a6be3d8012cdc01e8cc5c08990c55b0682e48a2e3fe",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/hba_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5ca67398d3c5dd70d8890b069238e48bbcccfbc352bf16b9232531b2cf0c4626",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/hard_disk_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a771b0327a81aa14820de5f4a48b5f1933d136aa0a6e8299bffc3b7db73d340c",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/gpu_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "965d48b08bbb1494c6e19312ac056700563f2ed86cd0bd0ebabb54044b8fae34",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/fw_version_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1b9589b792459cdc6e26ea847b40fe630756f8bc1d9d0e4714f0d750170217a8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/fru_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3ff8d8a4ba4605595f16a44e78bae9650d5b9d705ab7d37ceb55b45127bba153",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/fan_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d07520d2c3103db0d3eb607a8252612434b88b6036fe53d4bdd7f7867343d917",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/event_log_policy_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e8d89ef273f21f52243b6fdc93ca1998f824a64c0efc55ee38f7eb95ee0e771",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/event_log_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7f238935cad92c35ad87ac6ae595df91942b01820863f5e5d17855abe3f65791",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_vlan.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4c3aa9680f471e7b3e105484afbe6cacc13ffacd1ca542fbdef2193ce91be796",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_virtual_media.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "91537c68142cfa7ed851b554fb13b5c22ec69f3066c0c77ec6fd7e189815995d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_uid.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d2bc2dd761810a1b1752a801c12802fc2835fd76f3b80bf08bf61c0f3bf8f16a",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_threshold.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6708029f9e5fdf8089e7918f147fb4514f4240db816454a6dde91c2fc56de003",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_snmp_trap.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4851a078dd08ab946188235a2270423f1a5226ccdb965086316120fde6897f8d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_snmp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "59b86e9af5afa660a736bc17dade5b753d10904cdb8b0e0143f9caae89eee185",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_smtp_dest.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fd828e0f6b89edf18d2d25f3e8177ab6d069bdb5269d962b9bfb99e31c72388e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_smtp_com.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "03b490198e76af34f8bc48e87b0db4d1cb003931a64b931a18d64849cea7b781",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_smtp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4a03d161f46728d605411eb14f98ec141955ae6ec6ac019f6d484d0e7294a415",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_service.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "43207d255826b12f5da2f2318a3cecb3a4995da2f986b05dcc5fd663c2d7f8bb",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_restore_factory_default.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "29a475a280a052ca00112358bbb60b083ac4fdca8ba6609f618ef655036cb8ed",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_psu_peak.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0d2bbb08ec0c3bd3d4b25a39071b3cfaea40f07f5dc40adfd8c0613b7f8b58e9",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_psu_config.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7f3ec8d7aa771e3672da66e4256a23d3a538d1afc60c39cea52984ddcf897cc5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_preserve_config.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9a8a6c699b9210bbbf1142595625a4dd765d78d7e70d3d40b272ac6e033a506f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_power_status.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5ded259ca31ef59efd9a940f4331ab1b5d2c29c4c174c013ec7702c846c65476",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_power_restore.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fc084f5bb1f72be5aed54eb4a75ae9b795536da520bea54b49202d3f84f3b052",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_power_budget.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8db0c5d90c95396307cd4c1d54cf048a90bf76b39be4e60fec37d6b4fecbe20a",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_pdisk.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cf103a1515f29f27d538c709e56996d896928d5cacf72e3a489e12b2de6464e4",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ntp.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d55a781d84a5674ececaf09aea306782d75b900adb92762c94ec2840ef47e56d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_network_link.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f0d8c5fe45e1791cbe4583f2c795d3ec5b98e7f9823c0925b3b655852b063071",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_network_bond.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ec1de92e0932d6947408c386fcfcf0a2abbd18cb30f57506265cd56eecbbaf2d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_network.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "721d698be67981aacb265416a20a3a2f42b020cd011dbba5bcee8908778f61e0",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ncsi.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "92817952bb8c03f2622c79038958dc0880ecf7f907fe41b01219f9582b7822d8",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_media_instance.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "97b9bb2dc58f41d94a5372e49f34db83cabc1a48223d43e344896aac7551c51a",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_manual_capture.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a5ca9fdd06f10448b88856c711b0de781227294490269d554b58fce576ae2388",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_m6_log_setting.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2d29db568ec9be01b27bb15acff72a8cfed7dc8abf76707a5ac491ae016b4bd3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_log_setting.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9185d2d71a8ee13ec9dd48022d14b85ca6a29ab5a3f88f6c99fdfd21b4e613cc",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ldisk.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "43a349945404413b504e4c52034b9584ee602d2297e14fa2e39f3da71931f078",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ldap.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9a9f06ffa860f1528aae3cca4b876ef17b83beff05b123c513d900df87bc3d44",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_kvm.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "268ae719de0140f0f14346bcaaff3bfadd3330635a448378b167d3e177e77bb2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ipv6.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "97505ff3a5f5ce8899e9c289b986f612e4dfcd91dffae86f0542936c1f04e86d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ipv4.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c956b56e968fa5a1b946d66ed63830be54fc055de652314ad2ee62ebbe41188d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_fru.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1fc6a12d8aa5660be28457da2845328de1dd2b0722ae2432ddcb31d084ff990e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_fan.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6c34b0aaecf3ead5539583f0cdcda8cf02e1701995c6b79d150724cacbf1a52d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_event_log_policy.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "acd52414ed42dfc6a47c3f6114af9d278e22e7f85105620f45dcef8d71da2f2f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_dns.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "721940f75023edefccfd4cb3f05ace0868753b8b9901315fa195bd4651e818e9",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_connect_media.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "879c4009f1fa579f86a75e8c6ed18950cb80517b245bab2cd6100dc10fe943a1",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_boot_option.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d71e61e54ac5df198777ffd16716b45a9a3eced6a4a1f1b907cc240dfc55448c",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_boot_image.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b02510cfe7e4820d80fa70827bd36655d21819533e836e2fedf3c74c6865e6e6",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_bios.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "16db3e8118f5e1544b451821fa44efc9c70069935c1c44e7ad5ea04b22844af7",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_auto_capture.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2504ad423942ce38f96201cb6b7aa58e21266d6ff5eaa9c64fadafa8194af9ba",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_alert_policy.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cdc1b63413437cbdc11bc2382e8f092df81f8667ed20f59e20acf7a429c05f44",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/edit_ad.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bd7850f714cac63c5aead4232f71c0e6b9fb241f8a3f15a2b83735ad8b969bc5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/download_manual_screenshot.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0e7532104efadb0e80d39ccbd459d37f8755ab5a3a63636f3867163c36caddaa",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/download_auto_screenshot.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "97c125e3f8904553eeacf4536024d9e2ed7abc36d77f6fbb10a187a67f3378f3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/dns_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "af1f6772351c97741e4292213aa7fdacde11802bb3c81961189a35a5af1561d2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/del_session.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e39a9c7b390e97425d8c557024c726130b93dd8eae77e53401a9b2f4cea57d86",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/cpu_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d452193f879ab53d60fd9fcf6d1a64ed1ed77abf3c6e68c7d08f7367f2f4474a",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/connect_media_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7483b222b066faaf08df830f7eb7ce6648a4555eb44bb2a0cdccebee2d372ce9",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/collect_log.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "693abb738a5b7e87bdd140b72571b68cb290a6e23d981a23676111dec18d05cf",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/collect_blackbox.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c9a08a714d2ccff7f8e4af6dc4a93c45e88ddc535f9741143459d062f3865c07",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/clear_system_log.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6670289703da645716056999a8b21ffc0b657dcb6127b0be8fbeaf779e63c4b5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/clear_event_log.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c1a94335d7a31bf2fbdc0eb51aa175434f4dd674e85f7806a034d268e90f625f",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/clear_audit_log.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8b4d46966c05b1648bcc9f58c163648f3567dbc1104cfb88e87b1579d0fe4d03",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/boot_option_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d29db881729c6a8c4ef5b8c1a9f5e421418302244de2a13b0f696822ea7a35a7",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/boot_image_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "276903501b0993ca78bd295d58245f8e050fc1cb6c94d0266c83838a3641dc33",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/bmc_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e0732cd18a69ee5474a26e102cf9db70e9b1b12415c66fd8e002d212e2a41cc",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/bios_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96b520a361c1ece1834433d1cf9b31e439370f4a833e7e7abcede105cab602ad",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/bios_import.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2430678493f5e87117156774b595c859fca66313604f1e6e1aae0514ad62b961",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/bios_export.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "300a7fc7f2c01b358b2996dca98a8d0c57a2d493343d7969a56889c942c9c58e",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/backup.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1c8a872c695751664b651a5e6eaf29413cc8f99319f4108ea6e3691a561819f5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/backplane_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e4231bca87bce49e0254b15d746fb9e9fedd269094cead1a6773cc2a151b6d4",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/auto_capture_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d3b35380567cfbb15b3a0913754342224afc7a425b97fed4f6eb0a3ee9a43049",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/audit_log_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e18970d86f2252ea78cb492cd555b1b52584197e995820ee66c3677c446cd26d",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/alert_policy_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fd8e3d3a1efcc2754985df6fd2d2db7f1a465463fc4389002a4e487e2467b5f5",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ad_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c6ea0653f534f3e44695b24489a078dc23f8273d8d93c8386314a4d6d5622802",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ad_group_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5464158ac246b7a79e0cacaec30548537a97d98c34591c88505e37c483977d81",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/ad_group.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d4d104c235db5573cf2528001e6e6a623db78fc03748bc4235e55717e0147bd2",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/add_ldisk.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f202afa43a6efa26a8a319ae036bf617cbf71a91a47c581eaa715e720b498742",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/adapter_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b72ecb368fca72aab815fe97cfcee980b5072c594cb4c14a5dca6d868d8ac06f",
+ "format": 1
+ },
+ {
+ "name": "plugins/doc_fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/doc_fragments/inmanage.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9838cd82c6b8fd1ec9d76607bbed78f0b221925d86c87190370ac7b9a0e449b8",
+ "format": 1
+ },
+ {
+ "name": "meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "meta/runtime.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db022ab321c223ac1a684a5c02ee490cc22278bed6b31f5a2348c9c1bf65ebea",
+ "format": 1
+ },
+ {
+ "name": "docs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/volt_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e179050f335d1ecbaed7f95dab946d88140650b69872f4b382877a65d85d68f3",
+ "format": 1
+ },
+ {
+ "name": "docs/virtual_media_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e8c0f3fb5d416f5e7abc0e51bd6fe086f634e9eafcce28f1b487bad135fde209",
+ "format": 1
+ },
+ {
+ "name": "docs/user_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2f22be44e61d77a60cea697edcaf4a97f01263cb7767743a337cc7eb3e892393",
+ "format": 1
+ },
+ {
+ "name": "docs/user_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "008f01b7653848973c9e7af44b7d0c162f703f04000739811be9b1fac770848d",
+ "format": 1
+ },
+ {
+ "name": "docs/user_group_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "14a7ad4fd40e4d9f9e968c35db3cc1cbce0fd31f3d9b3ef98ef78c826a7e6301",
+ "format": 1
+ },
+ {
+ "name": "docs/user_group_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e0669bb23b37abe33cabc796e2cd67db9abe1a2e4eb41d492d67e886c9bb2e32",
+ "format": 1
+ },
+ {
+ "name": "docs/update_psu_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "afb2e964bb48145e0cf80d0828efc5221e2fdb41f66572b0c0f19c1859246dee",
+ "format": 1
+ },
+ {
+ "name": "docs/update_fw_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2640d5584bb304dabd6c5b4f87902110b0b57cf4fa8d776b2592103e4ba37e44",
+ "format": 1
+ },
+ {
+ "name": "docs/update_cpld_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9aff3240f970e73249a891eb39beadaf39345bb6b56267487f5c887ecd034feb",
+ "format": 1
+ },
+ {
+ "name": "docs/uid_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8d685f0416278eacc0e4e64d409d993ed9a5176575dc7751f94aac5570d2ff0b",
+ "format": 1
+ },
+ {
+ "name": "docs/threshold_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "04fe22044102b753d2198e1e5d34ee9af7e5f28f3330f9fd07b33d0dbbf578ef",
+ "format": 1
+ },
+ {
+ "name": "docs/temp_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3500b0ef3ad1c219f9806abad4b66348a83b1ca38821eb5726a80685ebd4f098",
+ "format": 1
+ },
+ {
+ "name": "docs/system_log_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ecb3fc7e0d02097c06341471b929ced00de4089ee7e5af82ba8041126f46088c",
+ "format": 1
+ },
+ {
+ "name": "docs/support_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "82fa7d07dda555e259cef2e42935590dc4673b55849fae12394dce451f062e98",
+ "format": 1
+ },
+ {
+ "name": "docs/snmp_trap_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cc094d2eecdb64ce0f100622ecfd61fd852bf962ae33f532d9991d8016092227",
+ "format": 1
+ },
+ {
+ "name": "docs/snmp_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3b4a744afa3d8b533e841375072d28537c8bc3caaa0e42630a5e8e58d7a93a1c",
+ "format": 1
+ },
+ {
+ "name": "docs/smtp_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f7587ffb91ba46e3a0b2587e13f03f0f7dc4c552ebbea74f258536d654b58c0c",
+ "format": 1
+ },
+ {
+ "name": "docs/session_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6b9ca0950fe0644137cf8729192760afa62c76104d1c6829396ba2f4ed8c46b9",
+ "format": 1
+ },
+ {
+ "name": "docs/service_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d608e82d48fe70067f5a41f6f6a3bcd1f736f8c9607d6da34652620adf6cc180",
+ "format": 1
+ },
+ {
+ "name": "docs/server_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "074e0ad76084ae25687342991b9928f7856d0086a88780fbe1afa88d0a3d874e",
+ "format": 1
+ },
+ {
+ "name": "docs/sensor_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c9dfb88145e0f3cfbd35b918179954f1533483845e618a00238cfced60346b49",
+ "format": 1
+ },
+ {
+ "name": "docs/self_test_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "504224bf2d2921f4f9127acfde7f25c14c53b3c9beb5626b0c26ec784c507baf",
+ "format": 1
+ },
+ {
+ "name": "docs/restore_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "50edc320631677cab94d570e5569db72921f7e16f38e1d247fe1b81f64567d29",
+ "format": 1
+ },
+ {
+ "name": "docs/reset_kvm_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bd9c55da8ac2a220ca482f61bc7f643dcc55918b76de84b4227cc575436d4a9c",
+ "format": 1
+ },
+ {
+ "name": "docs/reset_bmc_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e7a97ee210f7b67786e73e2061eaf09dd53e42c5336f8d79ff166dfa08c7612e",
+ "format": 1
+ },
+ {
+ "name": "docs/raid_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8ae4ac6a413595885a4bb3eec0e64f4a2ade5705cae09e93d2d8f0fc74e7dbe6",
+ "format": 1
+ },
+ {
+ "name": "docs/psu_peak_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c533e18a6e17940e5715aeb653c1f9f2277c6ad922568a23928d58fcb0ec48f7",
+ "format": 1
+ },
+ {
+ "name": "docs/psu_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "570eaf0df16fbfbab47a067ebde80d4cca3ef31d62b90918c6f55013b7803db5",
+ "format": 1
+ },
+ {
+ "name": "docs/psu_config_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "490822e0a1389ecc8af7e6dbd7308671a537cde722130ebb3bd2c7017b464098",
+ "format": 1
+ },
+ {
+ "name": "docs/preserve_config_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72e6346b9f48b949110d763ad748e7ff3edb46293340dad5a6697a4cce19e7d1",
+ "format": 1
+ },
+ {
+ "name": "docs/power_status_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "123d66430dc44f0b490fa330e3fe5c485000e6c5dadddccf7fa599ea319775f1",
+ "format": 1
+ },
+ {
+ "name": "docs/power_restore_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7856fd51685baca7332320a2eeb20413167a36fce4bb23bf8a59187bb8d1d7e9",
+ "format": 1
+ },
+ {
+ "name": "docs/power_consumption_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a9a41334f8be277b2f0fdd9bd6bb80ab43ebdff8f2ebd8060c42df995af55acb",
+ "format": 1
+ },
+ {
+ "name": "docs/power_budget_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3de2c26fff3a878734c452870e503a26720d6e14aa67b15e786850ec968b2533",
+ "format": 1
+ },
+ {
+ "name": "docs/pdisk_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "01cd176a614127adcd2ff51c79dc3f09180a49b926bcb6e6bd8e5b7c0f7cd0ca",
+ "format": 1
+ },
+ {
+ "name": "docs/pcie_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "39149963084cd465d4133cf38ba31e7ff90662250814c6a5f9f9335bed2ba696",
+ "format": 1
+ },
+ {
+ "name": "docs/onboard_disk_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "350f9f08300a424507174713892261e3fe44565b1e9faa8eaf2af731c6e00234",
+ "format": 1
+ },
+ {
+ "name": "docs/ntp_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d85c95e603f713ee9846fac6c0901f43c9a83917f9acf6b9c2588f9fdcb3a05f",
+ "format": 1
+ },
+ {
+ "name": "docs/network_link_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aff29d93bb7ee426e2fef7ad558585767b8f170f7ba78036b6f0629d59c66f6e",
+ "format": 1
+ },
+ {
+ "name": "docs/network_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8aca51459244dae9c3f8203a673197bae70c6516e385473c6197909e8a0a4306",
+ "format": 1
+ },
+ {
+ "name": "docs/network_bond_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3f65c4997fd3dba65caeae7baddbfe77fdbfb3596608753e34b0ba3a4c880ac9",
+ "format": 1
+ },
+ {
+ "name": "docs/ncsi_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c998595ec25f01c5b9dbca11f4c9be0e66195613341c86464096edac1f901b78",
+ "format": 1
+ },
+ {
+ "name": "docs/mem_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a33b9802ad02bd84612f0549154e429d9ba3c23372a4f5a91b1e037376c9066c",
+ "format": 1
+ },
+ {
+ "name": "docs/media_instance_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a06f0df7c3648cb0bde13222a8c0869c845d626c9c2f1b8bcc111d99b8e82476",
+ "format": 1
+ },
+ {
+ "name": "docs/log_setting_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d6101c3154062835e7d9a645276e1572528b3f7dc158279f4eb333ab8cbf047",
+ "format": 1
+ },
+ {
+ "name": "docs/ldisk_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3ed53e9236c2208a8ac150c2adab5d81b6b448cc57570afe65e9aedeea04a87a",
+ "format": 1
+ },
+ {
+ "name": "docs/ldap_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "15e7789037d9478e46e3974b299932105b6bb6bb7837df9fd1df6e9296660eca",
+ "format": 1
+ },
+ {
+ "name": "docs/ldap_group_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "916d4212ef62d942d311aa1d4bca61044607f94fba2e00b1f4adb2828f76bd7a",
+ "format": 1
+ },
+ {
+ "name": "docs/ldap_group_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b3f93a6b0238d903296b8361d0104e6ed0179447c68675e35fe1acd74111f23a",
+ "format": 1
+ },
+ {
+ "name": "docs/kvm_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "367f23f4ceb45b6c1b168e84519ba925b19bdec0da83d93360a58453d8d61ee7",
+ "format": 1
+ },
+ {
+ "name": "docs/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4391a6a8bc5a6376e5c6daa2aee41b46a16c028f5fc8b4c1f0a0462adf988898",
+ "format": 1
+ },
+ {
+ "name": "docs/hba_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "22cb937877cbb2dd9096beabf0918be8fa88537a99bf4763577c5d1ed81fa190",
+ "format": 1
+ },
+ {
+ "name": "docs/hard_disk_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "985637f6f62f92aa32f8eff4ad0c5b323ae3f066319e9b430b9b4a21096f7133",
+ "format": 1
+ },
+ {
+ "name": "docs/gpu_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "59b750a1cbb333e72af499668f34599b78743f95b782136ee0ea51eeee70b5e7",
+ "format": 1
+ },
+ {
+ "name": "docs/fw_version_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "01f43eeb1b5a439fcd0c99f8217a67f46c4c334351299915159d9b28a5fa481f",
+ "format": 1
+ },
+ {
+ "name": "docs/fru_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "089de696dddfcdbc0d9a0b653512d8d54b5f6f2285235b8f0a22696c3e784342",
+ "format": 1
+ },
+ {
+ "name": "docs/fan_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9e0bc2d17e460c53e7e459b408f987b73b2bb4b1ffd50a9b4e5b1a7106afcedd",
+ "format": 1
+ },
+ {
+ "name": "docs/event_log_policy_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b10820647f708349e7ad298fa7e45efb0970ed37fa0210705aab9e9fd112b715",
+ "format": 1
+ },
+ {
+ "name": "docs/event_log_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e63056de24eab7d203d1ea1b6f1d76f4b5148cb007fd8b42e5f0617e66c79b2e",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_vlan_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0598a8b7f7fa4006eb4994c8c0ff088a49a55a843809980e420fca23d999eabc",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_virtual_media_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ad12263c71cf2b21be7857fa5fa77d9bb19bd6c6e74f8e3104644c3ace5d0a69",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_uid_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "95de2c567b124855224fba2655e74036936f43c5ca9e466b0639d66493273835",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_threshold_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3b9cc8e0bb941fd407cfced17cf70747a4cdec93b119d8912cd2eeba5d6cf992",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_snmp_trap_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c81fbbcadc0cc543a3272b1dd16366994fa96dd34a94fb4847e0e93d9e0374b7",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_snmp_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "30510f9b2957cc099352e3305c26243b20f6ba5317ac7fa659e6c6cbc6e22453",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_smtp_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7b2fd715860b1ecdb8766e7e6746fa4376223c53ec50cc55365d74ece737fb91",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_smtp_dest_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2e2d9cff0e56dcb573294173a6673528ff0290259771f96cb663d2e1b297ae3b",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_smtp_com_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e2682dfdfa1c5b612fd22880934215253e6e567b892e3258698e7be9f7c2e682",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_service_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "539f136935f01baeae05ce853277d43cf89835518fd6117152b1efa3806f01f0",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_restore_factory_default_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5cbff38cecd47a9c2da7f852381f7f776f73f451ab478b63f862d16d87072d18",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_psu_peak_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "23c1e859ada7858b5b25daf19dde55edb631147827ce7a83bcceeee967bc22c8",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_psu_config_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c9fcc01d964bc6cf49e2b51486a7d8e413e61af74a2035e27a3edc6d7773caac",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_preserve_config_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "426d16bc86e81d9d5feffb1e060dd47d84158091fa0f1670e0800e07a666f113",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_power_status_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2cd6d3e08e615e5b697194ae99384781d2b430fd2d545d734716b582acbeb45b",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_power_restore_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7ea829726762a9c0902285a87076a7f1aa05437c3e32420ce31a7ed7571e88ab",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_power_budget_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "042427ff7c118776db9da8f49c3c45409fa2cafd368f6c8c6e07ad3f39136ba8",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_pdisk_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "117bc47bda44c0e94bae6d255462e5094656f398d9c4c7928a08e9a08e6de8a2",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ntp_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "66bd0321204ba88154351807a155a96044ab992288d649ac408ec8a7c2256874",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_network_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1694d59269a0a8dbb8134f95e933e2d1174adf2876183ca8b20c3033c80799aa",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_network_link_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0d7f47e3c5331b3c8996386ad88abad5b605f23936555becda7086cb3db38354",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_network_bond_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e578a77f93d99306ced829b0e38a10b70ae47457bf3dff3d4c4fee2b222a9418",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ncsi_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7a72c216935f6f2e4fd1e64dcc81faba1809a17aecd14fa2bed3e1238dcae11b",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_media_instance_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "af1df23a72b3dd96a5be73c3f5a1bee52e53eac8a819beed2c9a54bc65c77fd3",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_manual_capture_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c0e5786f0b093e413d3655622ac535abd80b4d081251d4c80ef12a0bcb6da5af",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_m6_log_setting_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4ef34cb041a60bb2cbf84d781cbaa15178c39e1e06ed12519ff9d70a8b106644",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_log_setting_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "982a3b9d5c35401420d8e715aa558a4a4322fe896a2c37c7b327b791ff3dd82f",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ldisk_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "685e7d3b90a816300cf0c7f45bb7a40c8bc02a35406d7a378caa3fca84654de3",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ldap_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a0000523d9288537a447df8c7e677d499562aa6f5b72d82bc1488f4810ead42e",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_kvm_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c4ed56b43287be8b53a1ddbba48046036a19e84728975a72eb09c71cc867e67e",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ipv6_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1ca8a6e0008b230a29d79c10b5c609b5ceded6343e7a39c716e412420cd3ebcc",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ipv4_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2b1c50ca731ec4581ebe61961405c179bb53e2031f46fbd36476d4fe523da613",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_fru_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0b66278e5434be03f7c4d26405544f9acaf7d3bec0a41b519d16fea4688f92dd",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_fan_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "955096ca90f786accf125cdd4986f60a19722b5b4129c77be1d723f685533fbc",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_event_log_policy_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "11171ff559dcf7fb7b21c96d7ba878a9a44d59b7e83ad3ff564b2bf3ff981482",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_dns_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "83ffd520cb18800f0eab4a0c16cedff05df38faad0fc24898a8afc529559fb4e",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_connect_media_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "90c8858b811876e612c638b63f4cc659109200998e6fff6538571ec40ce289f9",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_boot_option_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "af8426febf04a18f63ce41480caf2187d60709d462a8a47734d6e9dd36caf0a3",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_boot_image_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "03e88b57407dc613f543deba52a6d99c99e0ca208bda6e609f6e220186ed312e",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_bios_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e52e1aa956a91d997c6d02bcfbbf2c9df098ae223b62880e67cab3fe0d39d549",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_auto_capture_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3059e2b3f70c5c7c2484ce343a8f173d72051822800c89683710633f07114033",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_alert_policy_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "69357ca0f639b0ef7ed145d0849f79fca4645641406f31dddba504464106c285",
+ "format": 1
+ },
+ {
+ "name": "docs/edit_ad_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7155cd64a32adf44ac23e54e766da69783ffdf73cd14b2f5df0994e318393f38",
+ "format": 1
+ },
+ {
+ "name": "docs/download_manual_screenshot_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "96d9d6ccc42174abb044cc36b348b110df01f269df552d4122b4001947265bc6",
+ "format": 1
+ },
+ {
+ "name": "docs/download_auto_screenshot_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "25545b1c46429dc95c96e5570579cee42eca02f1d06b2eb8e7e3aea549deb070",
+ "format": 1
+ },
+ {
+ "name": "docs/dns_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b56cca1f510c0ae3cb0028c5d192cea449ff968647186a982edaf92c3c3a1723",
+ "format": 1
+ },
+ {
+ "name": "docs/del_session_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d0b12377666d295608dba1560be067a42940fb7a8f00a95004d62f7e9630b3ff",
+ "format": 1
+ },
+ {
+ "name": "docs/cpu_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "886b8e5d793d40912c76c6a3f9eebca1feb6e7e7b092189988e43d3ad8eacd5c",
+ "format": 1
+ },
+ {
+ "name": "docs/connect_media_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cf9f4c72ac92ae5439eac23df6b103998ae31e836f374a7e695cf2b2cbf3d648",
+ "format": 1
+ },
+ {
+ "name": "docs/collect_log_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "64a28d51d255d614d12aa64accd99789a817a20511d25652ff8bbb093c45dcc0",
+ "format": 1
+ },
+ {
+ "name": "docs/collect_blackbox_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6651f67d32ba0b8d091071f7e3130b8fa4e85aaa0df7458cce10ef43b3029187",
+ "format": 1
+ },
+ {
+ "name": "docs/clear_system_log_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7c1e176fa570aefba5592de6b896fe99a0902b0ffa0d28accea03765c454b8be",
+ "format": 1
+ },
+ {
+ "name": "docs/clear_event_log_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b3dd92e593af2add73e273a371eea0a70d4e9bb0026e0f4057b163a7e75e2b5d",
+ "format": 1
+ },
+ {
+ "name": "docs/clear_audit_log_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2b202f1e8fbdb576e0d45c7e4ca730d4ce13d40efdedad64846d89e602bff183",
+ "format": 1
+ },
+ {
+ "name": "docs/boot_option_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c48fb41f43812ae4408abe50f0f4004b70c156f7f37bf0d6869e2a93994bbe29",
+ "format": 1
+ },
+ {
+ "name": "docs/boot_image_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5c4e724f2ac6b11311ea81e0d02c31c681c9c0993c1a66e8237b7e812fcb6a71",
+ "format": 1
+ },
+ {
+ "name": "docs/bmc_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9aa44d89747e7a99bcbd3929a8e294b7ee5ee6ec5007c544b5c7ceb529db4126",
+ "format": 1
+ },
+ {
+ "name": "docs/bios_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "009acee898ae94772d90cf4a4c6dd61c2ab0cac661ff26b9467a30739567af0d",
+ "format": 1
+ },
+ {
+ "name": "docs/bios_import_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "03ceb7843b8b74de734f68737a4c54d999d4a8b15ad564f9db405cb2330f1153",
+ "format": 1
+ },
+ {
+ "name": "docs/bios_export_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "608db6425ad9914782e0b178b400fc121999b397314bcee5d7b302a8dc1f572b",
+ "format": 1
+ },
+ {
+ "name": "docs/backup_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d60bb2008d2ef66f06371f0716681681a8818fe322185bb3fe6a8aeb593f6962",
+ "format": 1
+ },
+ {
+ "name": "docs/backplane_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8f942ab36cca1d873571ffbb84fa57dcc2375c65c21d1bb447f7f6feddf73f3b",
+ "format": 1
+ },
+ {
+ "name": "docs/auto_capture_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "93d771bbd209c2cc3de644be6497a370bbf449919f0bddd515f078f5c2cc9a42",
+ "format": 1
+ },
+ {
+ "name": "docs/audit_log_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e2bd4e6c8890583a063bcf718a38704225b1dca9c6d8d6070babfb906e138474",
+ "format": 1
+ },
+ {
+ "name": "docs/alert_policy_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8c446a5273f138792a20457ca9bfebaae9f3e2d636f0c596716d6653f1d0309b",
+ "format": 1
+ },
+ {
+ "name": "docs/ad_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "225177902f41f1a2705ee571b8017ffcea9a55f775b061731968b882a94e905b",
+ "format": 1
+ },
+ {
+ "name": "docs/ad_group_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a9009fffd63bd5220756b94d25b56cd25417ce5a6318cd792de55c92e678f16e",
+ "format": 1
+ },
+ {
+ "name": "docs/ad_group_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "36abd8d5d233f1eeca2b966a25836e968ffa1e336e3799079220ab76f703d124",
+ "format": 1
+ },
+ {
+ "name": "docs/add_ldisk_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4f052142cc9eaffe23aa43bd893eeb6ed664166ddd61f8df79dd54af0e210a38",
+ "format": 1
+ },
+ {
+ "name": "docs/adapter_info_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9bbd89f6f65ef508af44e810223ae37a668385e88624eb6a2c1a74e7a277d71e",
+ "format": 1
+ },
+ {
+ "name": "changelogs",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments/.keep",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "changelogs/config.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "93e6532aa24bd6b494fb52a349736e1595e133e7b2c6ff8197dbf35c31879d69",
+ "format": 1
+ },
+ {
+ "name": "changelogs/changelog.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6eda1c1af43be8a8e5fc17b95b4689c5a3a117ec7d1d7d3d18a9544101c1b19e",
+ "format": 1
+ },
+ {
+ "name": ".github",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": ".github/workflows",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": ".github/workflows/ansible-test.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bd598ce9c34e9c78ed5d1a3e1e4b13c07fac7ff33b359688242ee1576c95033a",
+ "format": 1
+ },
+ {
+ "name": ".github/settings.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cb31353134cff7d91b546a03cc6fec7caaf0dba62079ea66776e2994461e6c7b",
+ "format": 1
+ },
+ {
+ "name": "requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ef4f74801ee735132cee77b4d649b81c7f95233d14aa409f565250d903134313",
+ "format": 1
+ },
+ {
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cad2d451c9cae4492ec532ac7996628c08b587740f4a5bfb5f329f26594cf67c",
+ "format": 1
+ },
+ {
+ "name": "LICENSE",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "format": 1
+ },
+ {
+ "name": "CODE_OF_CONDUCT.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "14ec928234a7ed52bf8b458d31e4862335111e477e4dbe7fb543686c24115140",
+ "format": 1
+ },
+ {
+ "name": "CHANGELOG.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bcfdccaaa849ed861f7735ad47c489cbd7254b1c83240857820d13820bde4d9d",
+ "format": 1
+ }
+ ],
+ "format": 1
+} \ No newline at end of file
diff --git a/ansible_collections/ieisystem/inmanage/LICENSE b/ansible_collections/ieisystem/inmanage/LICENSE
new file mode 100644
index 000000000..f288702d2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/LICENSE
@@ -0,0 +1,674 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/ansible_collections/ieisystem/inmanage/MANIFEST.json b/ansible_collections/ieisystem/inmanage/MANIFEST.json
new file mode 100644
index 000000000..a54b7638e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/MANIFEST.json
@@ -0,0 +1,36 @@
+{
+ "collection_info": {
+ "namespace": "ieisystem",
+ "name": "inmanage",
+ "version": "2.0.0",
+ "authors": [
+ "Baoshan Wang <wangbaoshan@ieisystem.com>"
+ ],
+ "readme": "README.md",
+ "tags": [
+ "ieisystem",
+ "inmanage",
+ "server",
+ "manager",
+ "tools"
+ ],
+ "description": "ieisystem Server supports ansible management device.",
+ "license": [
+ "GPL-2.0-or-later"
+ ],
+ "license_file": null,
+ "dependencies": {},
+ "repository": "https://github.com/ieisystem/ieisystem.inmanage",
+ "documentation": "https://ieisystem.github.io/ieisystem.inmanage-docs/index.html",
+ "homepage": "https://github.com/ieisystem/ieisystem.inmanage",
+ "issues": "https://github.com/ieisystem/ieisystem.inmanage/issues"
+ },
+ "file_manifest_file": {
+ "name": "FILES.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0bb56c1b70e10b4035ac5d0f93771aa095317a35e95b3ac42e8ecf042f40687a",
+ "format": 1
+ },
+ "format": 1
+} \ No newline at end of file
diff --git a/ansible_collections/ieisystem/inmanage/README.md b/ansible_collections/ieisystem/inmanage/README.md
new file mode 100644
index 000000000..eb3621f11
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/README.md
@@ -0,0 +1,92 @@
+# ieisystem InManage Collection
+
+This repo contains the `ieisystem.inmanage` Ansible Collection. The collection provides basic management of common IEIT servers through their RESTful interface, including server BMC configuration, firmware upgrades, BIOS configuration, and other functions.
+
+
+## Tested with Ansible
+
+Tested with the current Ansible 2.10 releases and the current development version of Ansible. Ansible versions before 2.10 are not supported.
+
+## External requirements
+
+These modules are written for Ansible 2.10 and later.
+
+Main steps:
+
+* Install Ansible 2.10 or later
+* Install the inManage SDK: `pip install inManage`
+
+These modules require the following to be installed on the control node:
+
+* Python 3.7 or later
+* [Ansible](http://www.ansible.com) 2.10 or later
+* The ieisystem [inManage](https://github.com/ieisystem/inManage) SDK, version 1.1.0 or later
+
+## Included content
+
+Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/ieisystem/inmanage)
+
+## Using this collection
+
+Before using this collection, you need to install it with the `ansible-galaxy` CLI:
+
+ ansible-galaxy collection install ieisystem.inmanage
+
+You can also include it in a `requirements.yml` file and install it via `ansible-galaxy collection install -r requirements.yml` using the format:
+
+```yaml
+collections:
+- name: ieisystem.inmanage
+```
+
+See the [ieisystem.inmanage documentation](https://ieisystem.github.io/ieisystem.inmanage-docs/index.html) for more details.
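+
+If you just want to try a module from the collection, the following playbook is a minimal sketch (untested). The `host`, `username`, and `password` options and the `bmc_host`/`bmc_user`/`bmc_pass` variables are assumptions for illustration only; check the module documentation above for the exact parameter names.
+
+```yaml
+- name: Query BMC information with ieisystem.inmanage
+  hosts: localhost
+  gather_facts: false
+  tasks:
+    - name: Get BMC information
+      ieisystem.inmanage.bmc_info:
+        host: "{{ bmc_host }}"
+        username: "{{ bmc_user }}"
+        password: "{{ bmc_pass }}"
+      register: bmc_result
+
+    - name: Show the collected BMC information
+      ansible.builtin.debug:
+        var: bmc_result
+```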
+
+## Contributing to this collection
+
+If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATH`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
+
+You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible ieisystem.inmanage Guide](https://ieisystem.github.io/ieisystem.inmanage-docs/index.html).
+
+### Running tests
+
+See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
+
+### Communication
+
+You can contact the maintainer at wangbaoshan@ieisystem.com.
+
+
+### Publishing New Version
+
+Basic instructions without release branches:
+
+1. Create `changelogs/fragments/<version>.yml` with a `release_summary:` section (which must be a string, not a list); see the sketch after this list.
+2. Run `antsibull-changelog release --collection-flatmap yes`
+3. Make sure `CHANGELOG.rst` and `changelogs/changelog.yaml` are added to git, and the deleted fragments have been removed.
+4. Tag the commit with `<version>`. Push changes and tag to the main repository.
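+
+As an illustration of step 1, a hypothetical fragment file such as `changelogs/fragments/2.0.1.yml` could look like the sketch below; the file name is chosen for illustration, and `release_summary` must be a single string, not a list.
+
+```yaml
+# Hypothetical changelogs/fragments/2.0.1.yml (name chosen for illustration)
+release_summary: |
+  Maintenance release with documentation fixes and minor bugfixes.
+```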
+
+## Release notes
+
+See the [changelog](https://github.com/ieisystem/ieisystem.inmanage/blob/main/CHANGELOG.rst).
+
+## Roadmap
+
+See [this issue](https://github.com/ieisystem/ieisystem.inmanage/issues/2) for information on releasing, versioning and deprecation.
+
+In general, we plan to release a major version every year, and minor versions every three months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
+
+
+## More information
+
+- [Ansible Collection overview](https://github.com/ansible-collections/overview)
+- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
+- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
+- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
+
+## Licensing
+
+GNU General Public License v3.0 or later.
+
+See [COPYING](https://www.gnu.org/licenses/gpl-3.0.txt) for the full text.
diff --git a/ansible_collections/ieisystem/inmanage/changelogs/changelog.yaml b/ansible_collections/ieisystem/inmanage/changelogs/changelog.yaml
new file mode 100644
index 000000000..9a9b61b38
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/changelogs/changelog.yaml
@@ -0,0 +1,453 @@
+ancestor: null
+releases:
+ 1.0.0:
+ changes:
+ major_changes:
+ - add all modules.
+ minor_changes:
+ - Change the referenced sdk name to inManage.
+ - Change the value of tags in galaxy.xml.
+ - Change the version to 1.0.0.
+ - Modify the release plan link in the readme file.
+ fragments:
+ - 1-add_modules.yaml
+ - 2-edit_version1.0.0.yaml
+ - 3-edit_readme.yaml
+ - 4-edit_inmanage_sdk.yaml
+ - 5-edit_galaxy_xml.yaml
+ modules:
+ - description: Manage active directory group information
+ name: ad_group
+ namespace: ''
+ - description: Get active directory group information
+ name: ad_group_info
+ namespace: ''
+ - description: Get active directory information
+ name: ad_info
+ namespace: ''
+ - description: Get adapter information
+ name: adapter_info
+ namespace: ''
+ - description: Create logical disk
+ name: add_ldisk
+ namespace: ''
+ - description: Get alert policy
+ name: alert_policy_info
+ namespace: ''
+ - description: Get BMC audit log information
+ name: audit_log_info
+ namespace: ''
+ - description: Get auto capture screen information
+ name: auto_capture_info
+ namespace: ''
+ - description: Get disk backplane information
+ name: backplane_info
+ namespace: ''
+ - description: Backup server settings
+ name: backup
+ namespace: ''
+ - description: Export BIOS config
+ name: bios_export
+ namespace: ''
+ - description: Import BIOS config
+ name: bios_import
+ namespace: ''
+ - description: Get BIOS setup
+ name: bios_info
+ namespace: ''
+ - description: Get BMC information
+ name: bmc_info
+ namespace: ''
+ - description: Get bmc boot image information
+ name: boot_image_info
+ namespace: ''
+ - description: Get BIOS boot options
+ name: boot_option_info
+ namespace: ''
+ - description: Clear BMC audit log
+ name: clear_audit_log
+ namespace: ''
+ - description: Clear event log
+ name: clear_event_log
+ namespace: ''
+ - description: Clear BMC system log
+ name: clear_system_log
+ namespace: ''
+ - description: Collect blackbox log
+ name: collect_blackbox
+ namespace: ''
+ - description: Collect logs
+ name: collect_log
+ namespace: ''
+ - description: Get remote images redirection information
+ name: connect_media_info
+ namespace: ''
+ - description: Get CPU information
+ name: cpu_info
+ namespace: ''
+ - description: Delete session
+ name: del_session
+ namespace: ''
+ - description: Get dns information
+ name: dns_info
+ namespace: ''
+ - description: Download auto screenshots
+ name: download_auto_screenshot
+ namespace: ''
+ - description: Download manual screenshots
+ name: download_manual_screenshot
+ namespace: ''
+ - description: Set active directory information
+ name: edit_ad
+ namespace: ''
+ - description: Set alert policy
+ name: edit_alert_policy
+ namespace: ''
+ - description: Set auto capture screen
+ name: edit_auto_capture
+ namespace: ''
+ - description: Set BIOS setup attributes
+ name: edit_bios
+ namespace: ''
+ - description: Set bmc boot image
+ name: edit_boot_image
+ namespace: ''
+ - description: Set BIOS boot options
+ name: edit_boot_option
+ namespace: ''
+ - description: Start/Stop virtual media Image
+ name: edit_connect_media
+ namespace: ''
+ - description: Set dns information
+ name: edit_dns
+ namespace: ''
+ - description: Set event log policy
+ name: edit_event_log_policy
+ namespace: ''
+ - description: Set fan information
+ name: edit_fan
+ namespace: ''
+ - description: Set fru settings
+ name: edit_fru
+ namespace: ''
+ - description: Set ipv4 information
+ name: edit_ipv4
+ namespace: ''
+ - description: Set ipv6 information
+ name: edit_ipv6
+ namespace: ''
+ - description: Set KVM
+ name: edit_kvm
+ namespace: ''
+ - description: Set ldap information
+ name: edit_ldap
+ namespace: ''
+ - description: Set logical disk
+ name: edit_ldisk
+ namespace: ''
+ - description: Set bmc system and audit log setting
+ name: edit_log_setting
+ namespace: ''
+ - description: Set bmc system and audit log setting
+ name: edit_m6_log_setting
+ namespace: ''
+ - description: Set manual capture screen
+ name: edit_manual_capture
+ namespace: ''
+ - description: Set Virtual Media Instance
+ name: edit_media_instance
+ namespace: ''
+ - description: Set ncsi information
+ name: edit_ncsi
+ namespace: ''
+ - description: Set network information
+ name: edit_network
+ namespace: ''
+ - description: Set network bond
+ name: edit_network_bond
+ namespace: ''
+ - description: Set network link
+ name: edit_network_link
+ namespace: ''
+ - description: Set NTP
+ name: edit_ntp
+ namespace: ''
+ - description: Set physical disk
+ name: edit_pdisk
+ namespace: ''
+ - description: Set power budget information
+ name: edit_power_budget
+ namespace: ''
+ - description: Set power restore information
+ name: edit_power_restore
+ namespace: ''
+ - description: Set power status information
+ name: edit_power_status
+ namespace: ''
+ - description: Set preserve config
+ name: edit_preserve_config
+ namespace: ''
+ - description: Set psu config information
+ name: edit_psu_config
+ namespace: ''
+ - description: Set psu peak information
+ name: edit_psu_peak
+ namespace: ''
+ - description: Set preserver config
+ name: edit_restore_factory_default
+ namespace: ''
+ - description: Set service settings
+ name: edit_service
+ namespace: ''
+ - description: Set SMTP information
+ name: edit_smtp
+ namespace: ''
+ - description: Set SMTP information
+ name: edit_smtp_com
+ namespace: ''
+ - description: Set SMTP information
+ name: edit_smtp_dest
+ namespace: ''
+ - description: Set snmp
+ name: edit_snmp
+ namespace: ''
+ - description: Set snmp trap
+ name: edit_snmp_trap
+ namespace: ''
+ - description: Set threshold information
+ name: edit_threshold
+ namespace: ''
+ - description: Set UID
+ name: edit_uid
+ namespace: ''
+ - description: Set virtual media
+ name: edit_virtual_media
+ namespace: ''
+ - description: Set vlan information
+ name: edit_vlan
+ namespace: ''
+ - description: Get event log information
+ name: event_log_info
+ namespace: ''
+ - description: Get event log policy information
+ name: event_log_policy_info
+ namespace: ''
+ - description: Get fan information
+ name: fan_info
+ namespace: ''
+ - description: Get fru information
+ name: fru_info
+ namespace: ''
+ - description: Get firmware version information
+ name: fw_version_info
+ namespace: ''
+ - description: Get GPU information
+ name: gpu_info
+ namespace: ''
+ - description: Get hard disk information
+ name: hard_disk_info
+ namespace: ''
+ - description: Get CPU information
+ name: hba_info
+ namespace: ''
+ - description: Get KVM information
+ name: kvm_info
+ namespace: ''
+ - description: Manage ldap group information
+ name: ldap_group
+ namespace: ''
+ - description: Get ldap group information
+ name: ldap_group_info
+ namespace: ''
+ - description: Get ldap information
+ name: ldap_info
+ namespace: ''
+ - description: Get logical disks information
+ name: ldisk_info
+ namespace: ''
+ - description: Get bmc log setting information
+ name: log_setting_info
+ namespace: ''
+ - description: Get Virtual Media Instance information
+ name: media_instance_info
+ namespace: ''
+ - description: Get memory information
+ name: mem_info
+ namespace: ''
+ - description: Get ncsi information
+ name: ncsi_info
+ namespace: ''
+ - description: Get network bond information
+ name: network_bond_info
+ namespace: ''
+ - description: Get network information
+ name: network_info
+ namespace: ''
+ - description: Get network link information
+ name: network_link_info
+ namespace: ''
+ - description: Get NTP information
+ name: ntp_info
+ namespace: ''
+ - description: Get onboard disks information
+ name: onboard_disk_info
+ namespace: ''
+ - description: Get PCIE information
+ name: pcie_info
+ namespace: ''
+ - description: Get physical disks information
+ name: pdisk_info
+ namespace: ''
+ - description: Get power budget information
+ name: power_budget_info
+ namespace: ''
+ - description: Get power consumption information
+ name: power_consumption_info
+ namespace: ''
+ - description: Get power restore information
+ name: power_restore_info
+ namespace: ''
+ - description: Get power status information
+ name: power_status_info
+ namespace: ''
+ - description: Get preserve config information
+ name: preserve_config_info
+ namespace: ''
+ - description: Get psu config information
+ name: psu_config_info
+ namespace: ''
+ - description: Get psu information
+ name: psu_info
+ namespace: ''
+ - description: Get psu peak information
+ name: psu_peak_info
+ namespace: ''
+ - description: Get RAID/HBA card and controller information
+ name: raid_info
+ namespace: ''
+ - description: BMC reset
+ name: reset_bmc
+ namespace: ''
+ - description: KVM reset
+ name: reset_kvm
+ namespace: ''
+ - description: Restore server settings
+ name: restore
+ namespace: ''
+ - description: Get self test information
+ name: self_test_info
+ namespace: ''
+ - description: Get sensor information
+ name: sensor_info
+ namespace: ''
+ - description: Get server status information
+ name: server_info
+ namespace: ''
+ - description: Get service information
+ name: service_info
+ namespace: ''
+ - description: Get online session information
+ name: session_info
+ namespace: ''
+ - description: Get SMTP information
+ name: smtp_info
+ namespace: ''
+ - description: Get SNMP get/set information
+ name: snmp_info
+ namespace: ''
+ - description: Get SNMP trap information
+ name: snmp_trap_info
+ namespace: ''
+ - description: Get support information
+ name: support_info
+ namespace: ''
+ - description: Get BMC system log information
+ name: system_log_info
+ namespace: ''
+ - description: Get temperature information
+ name: temp_info
+ namespace: ''
+ - description: Get threshold information
+ name: threshold_info
+ namespace: ''
+ - description: Get UID information
+ name: uid_info
+ namespace: ''
+ - description: Update CPLD
+ name: update_cpld
+ namespace: ''
+ - description: Update firmware
+ name: update_fw
+ namespace: ''
+ - description: Update PSU
+ name: update_psu
+ namespace: ''
+ - description: Manage user
+ name: user
+ namespace: ''
+ - description: Manage user group
+ name: user_group
+ namespace: ''
+ - description: Get user group information
+ name: user_group_info
+ namespace: ''
+ - description: Get user information
+ name: user_info
+ namespace: ''
+ - description: Get Virtual Media information
+ name: virtual_media_info
+ namespace: ''
+ - description: Get voltage information
+ name: volt_info
+ namespace: ''
+ release_date: '2023-11-08'
+ 1.1.0:
+ changes:
+ minor_changes:
+ - Fix issues in the modules that do not meet the Ansible module format and
+ documentation requirements.
+ - Modify the CI test file to add Ansible 2.16 and Python 3.12 tests.
+ - Modify the collection description in the README.md file.
+ fragments:
+ - 7-edit_readme_and_CI.yaml
+ - 8-edit_documentation.yml
+ release_date: '2024-03-12'
+ 1.1.1:
+ changes:
+ minor_changes:
+ - Fix the edit_alert_policy module description so that it starts with a capital
+ letter.
+ - Fix typos and grammatical errors in the documentation.
+ - Update the README.md information about installing supported modules.
+ fragments:
+ - 10-edit_doc_spell.yaml
+ - 11-edit_doc_first_letter.yaml
+ release_date: '2024-03-25'
+ 1.2.0:
+ changes:
+ bugfixes:
+ - Add Ansible 2.17 tests to the CI test scripts.
+ - Delete the unneeded __init__.py files in modules and module_utils.
+ - Modify the add_ldisk, edit_ldisk, and edit_pdisk module .rst documents (https://github.com/ieisystem/ieisystem.inmanage/pull/16).
+ removed_features:
+ - add_ldisk - Delete the ``add_ldisk.info`` parameter. Use ``ieisystem.inmanage.pdisk_info``
+ instead (https://github.com/ieisystem/ieisystem.inmanage/pull/15).
+ - edit_ldisk - Delete the ``edit_ldisk.info`` parameter. Use ``ieisystem.inmanage.ldisk_info``
+ instead (https://github.com/ieisystem/ieisystem.inmanage/pull/15).
+ - edit_pdisk - Delete the ``edit_pdisk.info`` parameter. Use ``ieisystem.inmanage.pdisk_info``
+ instead (https://github.com/ieisystem/ieisystem.inmanage/pull/15).
+ fragments:
+ - 13-delete_init_file.yml
+ - 14-ansible2.17_added_for_CI_testing.yml
+ - 15-remove_modules_info.yml
+ - 16-edit_docs.yml
+ release_date: '2024-04-17'
+ 1.2.1:
+ changes:
+ bugfixes:
+ - Change the example ``gather_facts: no`` to ``false`` (https://github.com/ieisystem/ieisystem.inmanage/pull/18).
+ - Delete the unwanted links.yml file (https://github.com/ieisystem/ieisystem.inmanage/pull/19).
+ fragments:
+ - 18-edit_example_gather_facts.yml
+ - 19-delete_links.yml
+ release_date: '2024-04-24'
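
For readers affected by the 1.2.0 removal of the ``info`` parameters above, a minimal migration sketch (task names and the registered variable are illustrative; the add_ldisk parameters mirror the example in its module document later in this diff):

.. code-block:: yaml

    - name: "Get physical disk information (replaces the removed add_ldisk.info parameter)"
      ieisystem.inmanage.pdisk_info:
        provider: "{{ inmanage }}"
      register: pdisk_result

    - name: "Create the logical disk once the slot layout is known"
      ieisystem.inmanage.add_ldisk:
        ctrl_id: 0
        level: 1
        size: 1
        access: 1
        r: 1
        w: 1
        io: 1
        cache: 1
        init: 2
        select: 10
        slot: 0,1
        provider: "{{ inmanage }}"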
diff --git a/ansible_collections/ieisystem/inmanage/changelogs/config.yaml b/ansible_collections/ieisystem/inmanage/changelogs/config.yaml
new file mode 100644
index 000000000..08c3ba7cc
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/changelogs/config.yaml
@@ -0,0 +1,29 @@
+changelog_filename_template: ../CHANGELOG.rst
+changelog_filename_version_depth: 0
+changes_file: changelog.yaml
+changes_format: combined
+keep_fragments: false
+mention_ancestor: true
+new_plugins_after_name: removed_features
+notesdir: fragments
+prelude_section_name: release_summary
+prelude_section_title: Release Summary
+sections:
+- - major_changes
+ - Major Changes
+- - minor_changes
+ - Minor Changes
+- - breaking_changes
+ - Breaking Changes / Porting Guide
+- - deprecated_features
+ - Deprecated Features
+- - removed_features
+ - Removed Features (previously deprecated)
+- - security_fixes
+ - Security Fixes
+- - bugfixes
+ - Bugfixes
+- - known_issues
+ - Known Issues
+title: ieisystem.inmanage
+trivial_section_name: trivial
diff --git a/ansible_collections/ieisystem/inmanage/changelogs/fragments/.keep b/ansible_collections/ieisystem/inmanage/changelogs/fragments/.keep
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/changelogs/fragments/.keep
diff --git a/ansible_collections/ieisystem/inmanage/docs/ad_group_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ad_group_info_module.rst
new file mode 100644
index 000000000..797a4c122
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ad_group_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ad_group_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ad_group_info -- Get Active Directory group information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ad_group_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get Active Directory group information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ad group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get active directory group information"
+ ieisystem.inmanage.ad_group_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
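
As the parameter table above notes, ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when they are not set in the task. A minimal sketch, assuming those variables are exported on the controller before running the playbook (the host value is illustrative):

.. code-block:: yaml

    # Credentials are taken from ANSIBLE_NET_USERNAME / ANSIBLE_NET_PASSWORD.
    - name: Ad group info via environment credentials
      hosts: inmanage
      connection: local
      gather_facts: false
      tasks:
        - name: "Get Active Directory group information"
          ieisystem.inmanage.ad_group_info:
            host: "{{ ansible_ssh_host }}"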
diff --git a/ansible_collections/ieisystem/inmanage/docs/ad_group_module.rst b/ansible_collections/ieisystem/inmanage/docs/ad_group_module.rst
new file mode 100644
index 000000000..b751c17fc
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ad_group_module.rst
@@ -0,0 +1,431 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ad_group_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ad_group -- Manage Active Directory group information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ad_group`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Manage Active Directory group information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-domain"></div>
+ <b>domain</b>
+ <a class="ansibleOptionLink" href="#parameter-domain" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Enter the Role Group Domain. This is the domain where the role group is located.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm"></div>
+ <b>kvm</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>This field provides KVM access for the AD-authenticated role group user.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-name"></div>
+ <b>name</b>
+ <a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Enter the Role Group Name. This name identifies the role group in Active Directory.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-pri"></div>
+ <b>pri</b>
+ <a class="ansibleOptionLink" href="#parameter-pri" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>administrator</li>
+ <li>user</li>
+ <li>operator</li>
+ <li>oem</li>
+ <li>none</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enter the Role Group Privilege. This is the level of privilege to be assigned for this role group.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>present</b>&nbsp;&larr;</div></li>
+ <li>absent</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether the Active Directory group should exist or not, taking action if the state is different from what is stated.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-vm"></div>
+ <b>vm</b>
+ <a class="ansibleOptionLink" href="#parameter-vm" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>This field provides VMedia access for the AD-authenticated role group user.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ad group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add active directory group information"
+ ieisystem.inmanage.ad_group:
+ state: "present"
+ name: "wbs"
+ domain: "test.com"
+ pri: "administrator"
+ kvm: "enable"
+ vm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set active directory group information"
+ ieisystem.inmanage.ad_group:
+ state: "present"
+ name: "wbs"
+ pri: "user"
+ kvm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete active directory group information"
+ ieisystem.inmanage.ad_group:
+ state: "absent"
+ name: "wbs"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
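
A possible follow-up, not taken from the module documentation: because ad_group does not support check mode, one way to confirm a change is to read the configuration back with ad_group_info and register the result (variable names are illustrative):

.. code-block:: yaml

    - name: "Set Active Directory group information"
      ieisystem.inmanage.ad_group:
        state: "present"
        name: "wbs"
        pri: "user"
        kvm: "disable"
        provider: "{{ inmanage }}"

    - name: "Read the role group settings back"
      ieisystem.inmanage.ad_group_info:
        provider: "{{ inmanage }}"
      register: ad_group_result

    - name: "Show the query result"
      ansible.builtin.debug:
        var: ad_group_result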
diff --git a/ansible_collections/ieisystem/inmanage/docs/ad_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ad_info_module.rst
new file mode 100644
index 000000000..3e414f7ae
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ad_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ad_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ad_info -- Get Active Directory information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ad_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get Active Directory information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ad test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get active directory information"
+ ieisystem.inmanage.ad_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
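
A minimal sketch of consuming the return values documented above (``changed``, ``message``, and ``state`` are returned on every run); the registered variable name is illustrative:

.. code-block:: yaml

    - name: "Get Active Directory information"
      ieisystem.inmanage.ad_info:
        provider: "{{ inmanage }}"
      register: ad_result

    - name: "Report the module status"
      ansible.builtin.debug:
        msg: "state={{ ad_result.state }}, message={{ ad_result.message }}"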
diff --git a/ansible_collections/ieisystem/inmanage/docs/adapter_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/adapter_info_module.rst
new file mode 100644
index 000000000..16de82e45
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/adapter_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.adapter_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.adapter_info -- Get adapter information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.adapter_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get adapter information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Adapter test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get adapter information"
+ ieisystem.inmanage.adapter_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/add_ldisk_module.rst b/ansible_collections/ieisystem/inmanage/docs/add_ldisk_module.rst
new file mode 100644
index 000000000..9a3eeca5f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/add_ldisk_module.rst
@@ -0,0 +1,589 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.add_ldisk_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.add_ldisk -- Create logical disk
++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.add_ldisk`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Create logical disk on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-accelerator"></div>
+ <b>accelerator</b>
+ <a class="ansibleOptionLink" href="#parameter-accelerator" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Driver accelerator, 1 - 1h, 2 - 2h, 3 - 3h.</div>
+ <div>Required when controller type is PMC.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-access"></div>
+ <b>access</b>
+ <a class="ansibleOptionLink" href="#parameter-access" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Access Policy, 1 - Read Write, 2 - Read Only, 3 - Blocked.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-cache"></div>
+ <b>cache</b>
+ <a class="ansibleOptionLink" href="#parameter-cache" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Drive Cache, 1 - Unchanged, 2 - Enabled, 3 - Disabled.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ctrl_id"></div>
+ <b>ctrl_id</b>
+ <a class="ansibleOptionLink" href="#parameter-ctrl_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Raid controller ID.</div>
+ <div>Required when controller type is LSI, PMC or MV.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-init"></div>
+ <b>init</b>
+ <a class="ansibleOptionLink" href="#parameter-init" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Init State, 1 - No Init, 2 - Quick Init, 3 - Full Init.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-io"></div>
+ <b>io</b>
+ <a class="ansibleOptionLink" href="#parameter-io" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ </ul>
+ </td>
+ <td>
+ <div>IO Policy, 1 - Direct IO, 2 - Cached IO.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-level"></div>
+ <b>level</b>
+ <a class="ansibleOptionLink" href="#parameter-level" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>5</li>
+ <li>6</li>
+ <li>10</li>
+ </ul>
+ </td>
+ <td>
+ <div>RAID Level, 0 - RAID0, 1 - RAID1, 5 - RAID5, 6 - RAID6, 10 - RAID10.</div>
+ <div>Required when controller type is LSI or PMC.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-r"></div>
+ <b>r</b>
+ <a class="ansibleOptionLink" href="#parameter-r" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ </ul>
+ </td>
+ <td>
+ <div>Read Policy, 1 - Read Ahead, 2 - No Read Ahead.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-select"></div>
+ <b>select</b>
+ <a class="ansibleOptionLink" href="#parameter-select" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Select Size, from 1 to 100.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-size"></div>
+ <b>size</b>
+ <a class="ansibleOptionLink" href="#parameter-size" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ <li>5</li>
+ </ul>
+ </td>
+ <td>
+ <div>Strip Size, 0 - 32k, 1 - 64k, 2 - 128k, 3 - 256k, 4 - 512k, 5 - 1024k.</div>
+ <div>Required when controller type is LSI, PMC or MV.</div>
+ <div>When the controller type is MV, size is [0, 1].</div>
+ <div>When the controller type is LSI or PMC, size is [1, 2, 3, 4, 5].</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-slot"></div>
+ <b>slot</b>
+ <a class="ansibleOptionLink" href="#parameter-slot" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=integer</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Slot number; multiple slot numbers can be given, such as 0, 1, 2.</div>
+ <div>Required when controller type is LSI or PMC.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-vname"></div>
+ <b>vname</b>
+ <a class="ansibleOptionLink" href="#parameter-vname" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Virtual drive name.</div>
+ <div>Required when controller type is PMC or server model is M7.</div>
+ <div>Required when controller type is MV.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-w"></div>
+ <b>w</b>
+ <a class="ansibleOptionLink" href="#parameter-w" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Write Policy, 1 - Write Through, 2 - Write Back, 3 - Write caching ok if bad BBU.</div>
+ <div>Required when controller type is LSI.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Add ldisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add LSI ldisk"
+ ieisystem.inmanage.add_ldisk:
+ ctrl_id: 0
+ level: 1
+ size: 1
+ access: 1
+ r: 1
+ w: 1
+ io: 1
+ cache: 1
+ init: 2
+ select: 10
+ slot: 0,1
+ provider: "{{ inmanage }}"
+
+ - name: "Add PMC ldisk"
+ ieisystem.inmanage.add_ldisk:
+ ctrl_id: 0
+ level: 1
+ size: 1
+ accelerator: 1
+ slot: 0,1
+ vname: "test"
+ provider: "{{ inmanage }}"
+
+ - name: "Add MV ldisk"
+ ieisystem.inmanage.add_ldisk:
+ ctrl_id: 0
+ size: 1
+ vname: "test"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
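
The numeric codes in the add_ldisk parameter table encode the RAID settings, so a RAID5 volume with a 256k strip on an LSI controller would look roughly like the sketch below (slot numbers and the select value are illustrative; level 5 = RAID5, size 3 = 256k, access 1 = Read Write, r 1 = Read Ahead, w 2 = Write Back, io 1 = Direct IO, cache 2 = Enabled, init 2 = Quick Init):

.. code-block:: yaml

    - name: "Add a RAID5 ldisk with a 256k strip on an LSI controller"
      ieisystem.inmanage.add_ldisk:
        ctrl_id: 0
        level: 5
        size: 3
        access: 1
        r: 1
        w: 2
        io: 1
        cache: 2
        init: 2
        select: 100
        slot: 0,1,2
        provider: "{{ inmanage }}"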
diff --git a/ansible_collections/ieisystem/inmanage/docs/alert_policy_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/alert_policy_info_module.rst
new file mode 100644
index 000000000..82936dfc2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/alert_policy_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.alert_policy_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.alert_policy_info -- Get alert policy
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.alert_policy_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get alert policy on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Alert test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get alert policy"
+ ieisystem.inmanage.alert_policy_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/audit_log_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/audit_log_info_module.rst
new file mode 100644
index 000000000..05d7e0a8e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/audit_log_info_module.rst
@@ -0,0 +1,358 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.audit_log_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.audit_log_info -- Get BMC audit log information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.audit_log_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BMC audit log information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-audit_file"></div>
+ <b>audit_file</b>
+ <a class="ansibleOptionLink" href="#parameter-audit_file" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Store logs to a file.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-count"></div>
+ <b>count</b>
+ <a class="ansibleOptionLink" href="#parameter-count" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Get the specified number of most recent log entries.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-log_time"></div>
+ <b>log_time</b>
+ <a class="ansibleOptionLink" href="#parameter-log_time" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Get logs generated after the specified date and time. The time format is YYYY-MM-DDTHH:MM+HH:MM, for example 2019-06-27T12:30+08:00.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: BMC audit log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc audit log information"
+ ieisystem.inmanage.audit_log_info:
+ log_time: "2020-06-01T12:30+08:00"
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc audit log information"
+ ieisystem.inmanage.audit_log_info:
+ count: 30
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc audit log information"
+ ieisystem.inmanage.audit_log_info:
+ audit_file: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/auto_capture_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/auto_capture_info_module.rst
new file mode 100644
index 000000000..9d375c6e2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/auto_capture_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.auto_capture_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.auto_capture_info -- Get auto capture screen information
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.auto_capture_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get auto capture screen information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get auto capture screen information"
+ ieisystem.inmanage.auto_capture_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/backplane_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/backplane_info_module.rst
new file mode 100644
index 000000000..67a9118d8
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/backplane_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.backplane_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.backplane_info -- Get disk backplane information
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.backplane_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get disk backplane information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Disk backplane test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get backplane information"
+ ieisystem.inmanage.backplane_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/backup_module.rst b/ansible_collections/ieisystem/inmanage/docs/backup_module.rst
new file mode 100644
index 000000000..1b972ba72
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/backup_module.rst
@@ -0,0 +1,355 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.backup_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.backup -- Backup server settings
++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.backup`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Backup server settings on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-bak_file"></div>
+ <b>bak_file</b>
+ <a class="ansibleOptionLink" href="#parameter-bak_file" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Backup file or backup folder path.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-item"></div>
+ <b>item</b>
+ <a class="ansibleOptionLink" href="#parameter-item" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>all</li>
+ <li>network</li>
+ <li>service</li>
+ <li>ntp</li>
+ <li>snmptrap</li>
+ <li>dns</li>
+ <li>smtp</li>
+ <li>ad</li>
+ <li>ldap</li>
+ <li>user</li>
+ <li>bios</li>
+ <li>kvm</li>
+ <li>ipmi</li>
+ <li>authentication</li>
+ <li>syslog</li>
+ <li>ncsi</li>
+ </ul>
+ </td>
+ <td>
+ <div>Export item.</div>
+ <div>The values for M5 modules are &#x27;all&#x27;, &#x27;network&#x27;, &#x27;service&#x27;, &#x27;ntp&#x27;, &#x27;snmptrap&#x27;, &#x27;dns&#x27;, &#x27;smtp&#x27;, &#x27;ad&#x27;, &#x27;ldap&#x27;, &#x27;user&#x27;, &#x27;bios&#x27;.</div>
+ <div>The values for M6 modules are &#x27;all&#x27;, &#x27;network&#x27;, &#x27;service&#x27;, &#x27;ntp&#x27;, &#x27;snmptrap&#x27;, &#x27;kvm&#x27;, &#x27;ipmi&#x27;, &#x27;authentication&#x27;, &#x27;syslog&#x27;.</div>
+ <div>The values for M7 modules are &#x27;all&#x27;, &#x27;network&#x27;, &#x27;service&#x27;, &#x27;syslog&#x27;, &#x27;ncsi&#x27;.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Backup test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Backup server settings"
+ ieisystem.inmanage.backup:
+ bak_file: "/home/wbs/"
+ item: "all"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/bios_export_module.rst b/ansible_collections/ieisystem/inmanage/docs/bios_export_module.rst
new file mode 100644
index 000000000..70e269042
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/bios_export_module.rst
@@ -0,0 +1,318 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.bios_export_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.bios_export -- Export BIOS config
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.bios_export`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Export BIOS config on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>File suffix must be .json or .conf, in FILEURI format, for example &quot;/directory/filename&quot;.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: BIOS test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Export bios config"
+ ieisystem.inmanage.bios_export:
+ file_url: "/home/wbs/bios.conf"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/bios_import_module.rst b/ansible_collections/ieisystem/inmanage/docs/bios_import_module.rst
new file mode 100644
index 000000000..8d77a7591
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/bios_import_module.rst
@@ -0,0 +1,318 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.bios_import_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.bios_import -- Import BIOS config
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.bios_import`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Import BIOS config on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>File suffix must be .json or .conf, in FILEURI format, for example &quot;/directory/filename&quot;.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: BIOS test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Import bios config"
+ ieisystem.inmanage.bios_import:
+ file_url: "/home/wbs/bios.conf"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/bios_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/bios_info_module.rst
new file mode 100644
index 000000000..8ed8881f9
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/bios_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.bios_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.bios_info -- Get BIOS setup
+++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.bios_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BIOS setup on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bios test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bios setup"
+ ieisystem.inmanage.bios_info:
+ provider: "{{ inmanage }}"
+
+
+
+
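+The return value table below only lists ``changed``, ``message`` and ``state``, so the
+simplest way to see the BIOS setup data a particular BMC actually returns is to register
+the result and print it whole. A minimal sketch; ``bios_setup`` is an arbitrary variable
+name.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get BIOS setup and register the result"
+    ieisystem.inmanage.bios_info:
+      provider: "{{ inmanage }}"
+    register: bios_setup
+
+  - name: "Show the raw module output"
+    ansible.builtin.debug:
+      var: bios_setup
+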
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/bmc_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/bmc_info_module.rst
new file mode 100644
index 000000000..914714b86
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/bmc_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.bmc_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.bmc_info -- Get BMC information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.bmc_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BMC information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bmc info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get BMC information"
+ ieisystem.inmanage.bmc_info:
+ provider: "{{ inmanage }}"
+
+
+
+
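+Connection details can also be passed directly through the top-level ``host``,
+``username`` and ``password`` parameters instead of the ``provider`` dictionary. A
+minimal sketch, reusing the variables defined in the play above:
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get BMC information without a provider dict"
+    ieisystem.inmanage.bmc_info:
+      host: "{{ ansible_ssh_host }}"
+      username: "{{ username }}"
+      password: "{{ password }}"
+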
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/boot_image_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/boot_image_info_module.rst
new file mode 100644
index 000000000..e2def2b0c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/boot_image_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.boot_image_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.boot_image_info -- Get BMC boot image information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.boot_image_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BMC boot image information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Boot image test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc boot image information"
+ ieisystem.inmanage.boot_image_info:
+ provider: "{{ inmanage }}"
+
+
+
+
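+As with the other info modules in this collection, registering the result is the easiest
+way to inspect what the BMC returns beyond the documented ``changed``, ``message`` and
+``state`` fields. A minimal sketch; ``image_info`` is an arbitrary variable name.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get BMC boot image information and register the result"
+    ieisystem.inmanage.boot_image_info:
+      provider: "{{ inmanage }}"
+    register: image_info
+
+  - name: "Show the raw module output"
+    ansible.builtin.debug:
+      var: image_info
+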
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/boot_option_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/boot_option_info_module.rst
new file mode 100644
index 000000000..9359e782b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/boot_option_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.boot_option_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.boot_option_info -- Get BIOS boot options
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.boot_option_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BIOS boot options on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Boot test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bios boot option"
+ ieisystem.inmanage.boot_option_info:
+ provider: "{{ inmanage }}"
+
+
+
+
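+Because ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and
+``ANSIBLE_NET_PASSWORD`` environment variables when they are not set in the task, the
+credentials can also be supplied through the task ``environment`` keyword. This is an
+unverified sketch; confirm that your control node exposes these variables to the module
+process before relying on it.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get BIOS boot options using environment credentials"
+    ieisystem.inmanage.boot_option_info:
+      host: "{{ ansible_ssh_host }}"
+    environment:
+      ANSIBLE_NET_USERNAME: "{{ username }}"
+      ANSIBLE_NET_PASSWORD: "{{ password }}"
+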
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/clear_audit_log_module.rst b/ansible_collections/ieisystem/inmanage/docs/clear_audit_log_module.rst
new file mode 100644
index 000000000..30cff724c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/clear_audit_log_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.clear_audit_log_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.clear_audit_log -- Clear BMC audit log
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.clear_audit_log`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Clear BMC audit log on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Clear BMC audit log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Clear BMC audit log "
+ ieisystem.inmanage.clear_audit_log:
+ provider: "{{ inmanage }}"
+
+
+
+
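+Because the module does not support check mode, the audit log is cleared as soon as the
+task runs. One way to guard against accidental execution is to gate the task on an
+explicit extra variable; ``confirm_clear_audit_log`` below is a hypothetical name, not
+something the collection defines.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Clear BMC audit log only when explicitly confirmed"
+    ieisystem.inmanage.clear_audit_log:
+      provider: "{{ inmanage }}"
+    when: confirm_clear_audit_log | default(false) | bool
+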
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/clear_event_log_module.rst b/ansible_collections/ieisystem/inmanage/docs/clear_event_log_module.rst
new file mode 100644
index 000000000..252bee81c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/clear_event_log_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.clear_event_log_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.clear_event_log -- Clear event log
++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.clear_event_log`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Clear event log on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Clear event log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Clear event log"
+ ieisystem.inmanage.clear_event_log:
+ provider: "{{ inmanage }}"
+
+
+
+
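+Since clearing the event log cannot be previewed with check mode, it can be useful to
+record what the module reported. A minimal sketch; ``clear_result`` is an arbitrary
+variable name.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Clear event log and keep the result"
+    ieisystem.inmanage.clear_event_log:
+      provider: "{{ inmanage }}"
+    register: clear_result
+
+  - name: "Show the outcome"
+    ansible.builtin.debug:
+      msg: "state={{ clear_result.state }}, message={{ clear_result.message }}"
+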
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/clear_system_log_module.rst b/ansible_collections/ieisystem/inmanage/docs/clear_system_log_module.rst
new file mode 100644
index 000000000..46e6bebab
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/clear_system_log_module.rst
@@ -0,0 +1,334 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.clear_system_log_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.clear_system_log -- Clear BMC system log
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.clear_system_log`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Clear BMC system log on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-level"></div>
+ <b>level</b>
+ <a class="ansibleOptionLink" href="#parameter-level" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>alert</b>&nbsp;&larr;</div></li>
+ <li>critical</li>
+ <li>error</li>
+ <li>notice</li>
+ <li>warning</li>
+ <li>debug</li>
+ <li>emergency</li>
+ <li>info</li>
+ <li>all</li>
+ </ul>
+ </td>
+ <td>
+ <div>Log level.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Clear BMC system log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Clear BMC system log"
+ ieisystem.inmanage.clear_system_log:
+ level: "alert"
+ provider: "{{ inmanage }}"
+
+ - name: "Clear BMC system log"
+ ieisystem.inmanage.clear_system_log:
+ level: "all"
+ provider: "{{ inmanage }}"
+
+
+
+
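+The ``level`` parameter takes a single value per call, so clearing several specific
+levels can be expressed with a loop over the documented choices. A minimal sketch;
+passing ``level: all`` in one task, as in the example above, is the simpler option when
+everything should be removed.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Clear selected BMC system log levels"
+    ieisystem.inmanage.clear_system_log:
+      level: "{{ item }}"
+      provider: "{{ inmanage }}"
+    loop:
+      - error
+      - critical
+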
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/collect_blackbox_module.rst b/ansible_collections/ieisystem/inmanage/docs/collect_blackbox_module.rst
new file mode 100644
index 000000000..b7d89736a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/collect_blackbox_module.rst
@@ -0,0 +1,319 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.collect_blackbox_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.collect_blackbox -- Collect blackbox log
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.collect_blackbox`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Collect blackbox log on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>File download path.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Collect blackbox test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Collect blackbox log"
+ ieisystem.inmanage.collect_blackbox:
+ file_url: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+
+
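+The generated example above passes credentials through the ``provider`` dictionary. Because ``host``, ``username``, and ``password`` are also exposed as top-level options (see the parameter table), the same task can set them directly. The fragment below is an editorial sketch reusing the same assumed inventory variables; it is not part of the generated documentation.
+
+.. code-block:: yaml+jinja
+
+
+    # Hypothetical variant: connection details as top-level options
+    # instead of the provider dictionary.
+    - name: "Collect blackbox log without a provider dict"
+      ieisystem.inmanage.collect_blackbox:
+        file_url: "/home/wbs/wbs.log"
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+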
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/collect_log_module.rst b/ansible_collections/ieisystem/inmanage/docs/collect_log_module.rst
new file mode 100644
index 000000000..2e1243c33
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/collect_log_module.rst
@@ -0,0 +1,318 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.collect_log_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.collect_log -- Collect logs
+++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.collect_log`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Collect logs on ieisystem Server; the operation takes about 5 minutes.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>File download path or path with filename, for example filepath/filename.tar.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Collect test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Collect logs"
+ ieisystem.inmanage.collect_log:
+ file_url: "/home/wbs/test.tar"
+ provider: "{{ inmanage }}"
+
+
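+Since log collection takes several minutes and the module documents ``changed``, ``message``, and ``state`` return values, registering the result can help with troubleshooting. The fragment below is a hedged sketch; the ``collect_result`` variable name is illustrative only.
+
+.. code-block:: yaml+jinja
+
+
+    # Hypothetical follow-up: capture the documented return values and print them.
+    - name: "Collect logs"
+      ieisystem.inmanage.collect_log:
+        file_url: "/home/wbs/test.tar"
+        provider: "{{ inmanage }}"
+      register: collect_result
+
+    - name: "Show collect_log result"
+      ansible.builtin.debug:
+        msg: "state={{ collect_result.state }}, message={{ collect_result.message }}"
+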
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/connect_media_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/connect_media_info_module.rst
new file mode 100644
index 000000000..edd668f54
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/connect_media_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.connect_media_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.connect_media_info -- Get remote images redirection information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.connect_media_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get remote images redirection information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Connect media test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get remote images redirection information"
+ ieisystem.inmanage.connect_media_info:
+ provider: "{{ inmanage }}"
+
+
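+As a read-only module that supports ``check_mode``, its value is usually in the data it returns. The sketch below registers the result and prints it; the ``media_info`` variable name is an editorial assumption, not part of the generated example.
+
+.. code-block:: yaml+jinja
+
+
+    # Hypothetical usage: keep the returned data for later tasks.
+    - name: "Get remote images redirection information"
+      ieisystem.inmanage.connect_media_info:
+        provider: "{{ inmanage }}"
+      register: media_info
+
+    - name: "Show redirection information"
+      ansible.builtin.debug:
+        var: media_info
+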
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/cpu_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/cpu_info_module.rst
new file mode 100644
index 000000000..88f73e1ee
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/cpu_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.cpu_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.cpu_info -- Get CPU information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.cpu_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get CPU information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: CPU test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get cpu information"
+ ieisystem.inmanage.cpu_info:
+ provider: "{{ inmanage }}"
+
+
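+Because the module supports ``check_mode``, it can normally be run unchanged under ``--check``, and its documented ``changed``, ``message``, and ``state`` fields can be captured for later tasks. The fragment below is a hedged sketch; the ``cpu_result`` variable name is assumed.
+
+.. code-block:: yaml+jinja
+
+
+    # Hypothetical usage: register the result so later tasks can inspect it.
+    - name: "Get cpu information"
+      ieisystem.inmanage.cpu_info:
+        provider: "{{ inmanage }}"
+      register: cpu_result
+
+    - name: "Show CPU query state"
+      ansible.builtin.debug:
+        msg: "{{ cpu_result.state }}"
+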
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/del_session_module.rst b/ansible_collections/ieisystem/inmanage/docs/del_session_module.rst
new file mode 100644
index 000000000..9171f4e40
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/del_session_module.rst
@@ -0,0 +1,318 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.del_session_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.del_session -- Delete session
+++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.del_session`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Delete session on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-sid"></div>
+ <b>sid</b>
+ <a class="ansibleOptionLink" href="#parameter-sid" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Session Id, input &quot;all&quot; to delete all sessions.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Delete session test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Delete session"
+ ieisystem.inmanage.del_session:
+ sid: "223"
+ provider: "{{ inmanage }}"
+
+
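+The ``sid`` parameter documentation notes that the value ``all`` removes every session. A hedged variant of the generated example using that documented value:
+
+.. code-block:: yaml+jinja
+
+
+    # Illustrative variant: clear every session rather than a single ID,
+    # using the special value "all" described in the parameter table.
+    - name: "Delete all sessions"
+      ieisystem.inmanage.del_session:
+        sid: "all"
+        provider: "{{ inmanage }}"
+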
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/dns_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/dns_info_module.rst
new file mode 100644
index 000000000..228eb7586
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/dns_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.dns_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.dns_info -- Get DNS information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.dns_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get DNS information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: DNS test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get dns information"
+ ieisystem.inmanage.dns_info:
+ provider: "{{ inmanage }}"
+
+
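+The parameter table notes that ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when they are not set in the task. The sketch below relies on that fallback and only supplies ``host``; it is an editorial illustration, not part of the generated documentation.
+
+.. code-block:: yaml+jinja
+
+
+    # Hypothetical variant: credentials come from ANSIBLE_NET_USERNAME and
+    # ANSIBLE_NET_PASSWORD on the controller, so only host is passed.
+    - name: "Get DNS information using environment-variable credentials"
+      ieisystem.inmanage.dns_info:
+        host: "{{ ansible_ssh_host }}"
+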
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/download_auto_screenshot_module.rst b/ansible_collections/ieisystem/inmanage/docs/download_auto_screenshot_module.rst
new file mode 100644
index 000000000..4bab4b073
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/download_auto_screenshot_module.rst
@@ -0,0 +1,318 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.download_auto_screenshot_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.download_auto_screenshot -- Download auto screenshots
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.download_auto_screenshot`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Download auto screenshots on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Screen capture file path.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Download auto screenshots"
+ ieisystem.inmanage.download_auto_screenshot:
+ file_url: "/home/wbs/screen"
+ provider: "{{ inmanage }}"
+
+
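+The module documents ``changed``, ``message``, and ``state`` return values, so the outcome of the download can be checked after the task. The fragment below is a hedged sketch; the ``screenshot_result`` variable name is an editorial assumption.
+
+.. code-block:: yaml+jinja
+
+
+    # Hypothetical follow-up: report the outcome of the screenshot download.
+    - name: "Download auto screenshots"
+      ieisystem.inmanage.download_auto_screenshot:
+        file_url: "/home/wbs/screen"
+        provider: "{{ inmanage }}"
+      register: screenshot_result
+
+    - name: "Show download result"
+      ansible.builtin.debug:
+        msg: "{{ screenshot_result.message }}"
+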
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/download_manual_screenshot_module.rst b/ansible_collections/ieisystem/inmanage/docs/download_manual_screenshot_module.rst
new file mode 100644
index 000000000..e147eae6b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/download_manual_screenshot_module.rst
@@ -0,0 +1,318 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.download_manual_screenshot_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.download_manual_screenshot -- Download manual screenshots
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.download_manual_screenshot`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Download manual screenshots on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Screen capture file path.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Download manual screenshots"
+ ieisystem.inmanage.download_manual_screenshot:
+ file_url: "/home/wbs/screen"
+ provider: "{{ inmanage }}"
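+
+Per the parameter table above, ``host``, ``username``, and ``password`` can also be supplied as top-level module options, with ``username`` and ``password`` falling back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when omitted. A minimal sketch of that form, assuming those variables are exported on the controller:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Download manual screenshots without a provider dict"
+      ieisystem.inmanage.download_manual_screenshot:
+        file_url: "/home/wbs/screen"
+        host: "{{ ansible_ssh_host }}"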
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ad_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ad_module.rst
new file mode 100644
index 000000000..5ff1e742f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ad_module.rst
@@ -0,0 +1,459 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ad_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ad -- Set Active Directory information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ad`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set Active Directory information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-addr1"></div>
+ <b>addr1</b>
+ <a class="ansibleOptionLink" href="#parameter-addr1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>Domain Controller Server Address1. Enter the IP address of the Active Directory server.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-addr2"></div>
+ <b>addr2</b>
+ <a class="ansibleOptionLink" href="#parameter-addr2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>Domain Controller Server Address2. Enter the IP address of the Active Directory server.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-addr3"></div>
+ <b>addr3</b>
+ <a class="ansibleOptionLink" href="#parameter-addr3" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>Domain Controller Server Address3. Enter the IP address of the Active Directory server.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-code"></div>
+ <b>code</b>
+ <a class="ansibleOptionLink" href="#parameter-code" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Secret Password. Specify the Password of the administrator.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-domain"></div>
+ <b>domain</b>
+ <a class="ansibleOptionLink" href="#parameter-domain" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>User Domain Name. Specify the domain name for the user, for example MyDomain.com.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-enable"></div>
+ <b>enable</b>
+ <a class="ansibleOptionLink" href="#parameter-enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Active Directory Authentication Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-name"></div>
+ <b>name</b>
+ <a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Secret Username. Specify the Username of an administrator of the Active Directory Server.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ssl_enable"></div>
+ <b>ssl_enable</b>
+ <a class="ansibleOptionLink" href="#parameter-ssl_enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Active Directory SSL Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-timeout"></div>
+ <b>timeout</b>
+ <a class="ansibleOptionLink" href="#parameter-timeout" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>The Time Out configuration (15-300).</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ad test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set active directory information"
+ ieisystem.inmanage.edit_ad:
+ enable: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set active directory information"
+ ieisystem.inmanage.edit_ad:
+ enable: "enable"
+ name: "test"
+ code: "123456"
+ timeout: 120
+ domain: "test.com"
+ addr1: "100.2.2.2"
+ addr2: "100.2.2.3"
+ addr3: "100.2.2.4"
+ provider: "{{ inmanage }}"
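+
+``timeout`` applies only to M5 models, so an equivalent task for an M6 machine would simply omit it. The sketch below (illustrative values) also shows the ``ssl_enable`` switch documented above, added to the same ``tasks`` list:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Set active directory information with SSL"
+      ieisystem.inmanage.edit_ad:
+        enable: "enable"
+        ssl_enable: "enable"
+        name: "test"
+        code: "123456"
+        domain: "test.com"
+        addr1: "100.2.2.2"
+        provider: "{{ inmanage }}"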
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_alert_policy_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_alert_policy_module.rst
new file mode 100644
index 000000000..35c858d1b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_alert_policy_module.rst
@@ -0,0 +1,430 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_alert_policy_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_alert_policy -- Set alert policy
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_alert_policy`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set alert policy on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-channel"></div>
+ <b>channel</b>
+ <a class="ansibleOptionLink" href="#parameter-channel" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>shared</li>
+ <li>dedicated</li>
+ </ul>
+ </td>
+ <td>
+ <div>LAN Channel.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-destination"></div>
+ <b>destination</b>
+ <a class="ansibleOptionLink" href="#parameter-destination" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>Alert destination. The address of the server that receives the trap information sent by Trap.</div>
+ <div>When type is snmp, specify an IP address.</div>
+ <div>When type is email, specify a username.</div>
+ <div>When type is snmpdomain, specify a domain.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-id"></div>
+ <b>id</b>
+ <a class="ansibleOptionLink" href="#parameter-id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Alert id. Customize the channel for sending alarms in Trap.</div>
+ <div>The values for M5 modules are 1, 2, 3.</div>
+ <div>The values for M6 modules are 1, 2, 3, 4.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                        <div>Alert policy status. Whether to enable the receiving end for messages sent in trap mode.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-trap_port"></div>
+ <b>trap_port</b>
+ <a class="ansibleOptionLink" href="#parameter-trap_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>SNMP trap port (1-65535).</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-type"></div>
+ <b>type</b>
+ <a class="ansibleOptionLink" href="#parameter-type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>snmp</li>
+ <li>email</li>
+ <li>snmpdomain</li>
+ </ul>
+ </td>
+ <td>
+ <div>Alert Type.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Alert policy test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set alert policy"
+ ieisystem.inmanage.edit_alert_policy:
+ id: 1
+ status: "enable"
+ type: "snmp"
+ destination: "100.2.2.2"
+ channel: "shared"
+ provider: "{{ inmanage }}"
+
+ - name: "Set alert policy"
+ ieisystem.inmanage.edit_alert_policy:
+ id: 1
+ status: "disable"
+ provider: "{{ inmanage }}"
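+
+``destination`` is interpreted according to ``type``: an IP address for ``snmp``, a username for ``email``, and a domain for ``snmpdomain`` (``type`` itself is M5-only). A sketch of an email-style policy with illustrative values:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Set alert policy with an email destination"
+      ieisystem.inmanage.edit_alert_policy:
+        id: 2
+        status: "enable"
+        type: "email"
+        destination: "admin"
+        provider: "{{ inmanage }}"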
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_auto_capture_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_auto_capture_module.rst
new file mode 100644
index 000000000..f34e444c3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_auto_capture_module.rst
@@ -0,0 +1,322 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_auto_capture_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_auto_capture -- Set auto capture screen
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_auto_capture`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set auto capture screen on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Capture status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set auto capture screen"
+ ieisystem.inmanage.edit_auto_capture:
+ status: "enable"
+ provider: "{{ inmanage }}"
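+
+Turning the feature back off uses the same task with the other documented choice:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Disable auto capture screen"
+      ieisystem.inmanage.edit_auto_capture:
+        status: "disable"
+        provider: "{{ inmanage }}"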
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_bios_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_bios_module.rst
new file mode 100644
index 000000000..619bc496c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_bios_module.rst
@@ -0,0 +1,377 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_bios_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_bios -- Set BIOS setup attributes
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_bios`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set BIOS setup attributes on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-attribute"></div>
+ <b>attribute</b>
+ <a class="ansibleOptionLink" href="#parameter-attribute" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>BIOS setup option.</div>
+ <div>Required when <em>list=False</em> and <em>file_url=None</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                        <div>BIOS option file. <em>attribute</em> must be used together with <em>value</em>; both are mutually exclusive with <em>file_url</em>.</div>
+                        <div>Format, &quot;/directory/filename&quot;.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-list"></div>
+ <b>list</b>
+ <a class="ansibleOptionLink" href="#parameter-list" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>no</b>&nbsp;&larr;</div></li>
+ <li>yes</li>
+ </ul>
+ </td>
+ <td>
+ <div>Show attribute name and configurable value.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-value"></div>
+ <b>value</b>
+ <a class="ansibleOptionLink" href="#parameter-value" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>BIOS setup option value.</div>
+ <div>Required when <em>list=False</em> and <em>file_url=None</em>.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bios test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set bios setup"
+ ieisystem.inmanage.edit_bios:
+ attribute: "VMX"
+ value: "Disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set bios setup"
+ ieisystem.inmanage.edit_bios:
+ attribute: "VMX"
+ value: "Enable"
+ provider: "{{ inmanage }}"
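+
+The ``list`` and ``file_url`` options documented above provide two further modes: listing the attributes that can be configured, and applying a whole BIOS option file instead of a single ``attribute``/``value`` pair. A sketch of both, with a placeholder file path:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "List BIOS setup attributes and configurable values"
+      ieisystem.inmanage.edit_bios:
+        list: true
+        provider: "{{ inmanage }}"
+
+    - name: "Apply BIOS settings from an option file"
+      ieisystem.inmanage.edit_bios:
+        file_url: "/home/wbs/bios.conf"
+        provider: "{{ inmanage }}"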
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_boot_image_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_boot_image_module.rst
new file mode 100644
index 000000000..7b054299b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_boot_image_module.rst
@@ -0,0 +1,327 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_boot_image_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_boot_image -- Set bmc boot image
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_boot_image`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set bmc boot image on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-image"></div>
+ <b>image</b>
+ <a class="ansibleOptionLink" href="#parameter-image" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ <li>5</li>
+ </ul>
+ </td>
+ <td>
+ <div>BMC boot image.</div>
+ <div>0-Higher firmware version; 1-Image 1; 2-Image 2; 3-Lower firmware version; 4-Latest updated firmware; 5-Not latest updated firmware.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Boot image test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set bmc boot image"
+ ieisystem.inmanage.edit_boot_image:
+ image: 2
+ provider: "{{ inmanage }}"
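+
+Per the value legend above, other selections follow the same pattern; for instance, a sketch that boots the lower firmware version (image ``3``):
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Set bmc boot image to the lower firmware version"
+      ieisystem.inmanage.edit_boot_image:
+        image: 3
+        provider: "{{ inmanage }}"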
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_boot_option_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_boot_option_module.rst
new file mode 100644
index 000000000..b8197774b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_boot_option_module.rst
@@ -0,0 +1,365 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_boot_option_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_boot_option -- Set BIOS boot options
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_boot_option`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set BIOS boot options on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-device"></div>
+ <b>device</b>
+ <a class="ansibleOptionLink" href="#parameter-device" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>none</li>
+ <li>HDD</li>
+ <li>PXE</li>
+ <li>CD</li>
+ <li>BIOSSETUP</li>
+ </ul>
+ </td>
+ <td>
+ <div>Boot device.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-effective"></div>
+ <b>effective</b>
+ <a class="ansibleOptionLink" href="#parameter-effective" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Once</li>
+ <li>Continuous</li>
+ </ul>
+ </td>
+ <td>
+ <div>Effective, once or continuous.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mode"></div>
+ <b>mode</b>
+ <a class="ansibleOptionLink" href="#parameter-mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Legacy</li>
+ <li>UEFI</li>
+ </ul>
+ </td>
+ <td>
+ <div>Boot type.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Boot test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set bios boot option"
+ ieisystem.inmanage.edit_boot_option:
+ device: "PXE"
+ effective: "Once"
+ mode: "Legacy"
+ provider: "{{ inmanage }}"
+
+
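+The documented choices admit other combinations as well. The following is a hedged variation only
+(values are picked from the ``device``, ``effective`` and ``mode`` choices listed above, and the
+``inmanage`` provider dict is assumed to be defined as in the playbook header of the example):
+
+.. code-block:: yaml+jinja
+
+    # Illustrative task; values below come from the documented choices, not from a verified run.
+    - name: "Set UEFI boot from HDD continuously"
+      ieisystem.inmanage.edit_boot_option:
+        device: "HDD"
+        effective: "Continuous"
+        mode: "UEFI"
+        provider: "{{ inmanage }}"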
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_connect_media_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_connect_media_module.rst
new file mode 100644
index 000000000..ff63a5fe8
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_connect_media_module.rst
@@ -0,0 +1,360 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_connect_media_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_connect_media -- Start/Stop virtual media image
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_connect_media`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Start/Stop virtual media image on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-image_name"></div>
+ <b>image_name</b>
+ <a class="ansibleOptionLink" href="#parameter-image_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Image name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-image_type"></div>
+ <b>image_type</b>
+ <a class="ansibleOptionLink" href="#parameter-image_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>CD</li>
+ <li>FD</li>
+ <li>HD</li>
+ </ul>
+ </td>
+ <td>
+ <div>Virtual media type.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-op_type"></div>
+ <b>op_type</b>
+ <a class="ansibleOptionLink" href="#parameter-op_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>start</li>
+ <li>stop</li>
+ </ul>
+ </td>
+ <td>
+ <div>Start or stop media.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Connect media test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set remote image redirection"
+ ieisystem.inmanage.edit_connect_media:
+ image_type: "CD"
+ op_type: "start"
+ image_name: "aa.iso"
+ provider: "{{ inmanage }}"
+
+
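+A hedged companion task (same assumed ``inmanage`` provider dict; the three required parameters are
+kept and ``op_type`` is switched to the documented ``stop`` choice) could disconnect the image again:
+
+.. code-block:: yaml+jinja
+
+    # Illustrative task; "aa.iso" is the image name used in the example above.
+    - name: "Stop remote image redirection"
+      ieisystem.inmanage.edit_connect_media:
+        image_type: "CD"
+        op_type: "stop"
+        image_name: "aa.iso"
+        provider: "{{ inmanage }}"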
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_dns_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_dns_module.rst
new file mode 100644
index 000000000..6f5a028a1
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_dns_module.rst
@@ -0,0 +1,620 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_dns_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_dns -- Set DNS information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_dns`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set DNS information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_iface"></div>
+ <b>dns_iface</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_iface" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>DNS Interface, input like &#x27;eth0&#x27;, &#x27;eth1&#x27;, &#x27;bond0&#x27;.</div>
+ <div>Required when <em>dns_manual=auto</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_manual"></div>
+ <b>dns_manual</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_manual" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>manual</li>
+ <li>auto</li>
+ </ul>
+ </td>
+ <td>
+ <div>DNS Settings. Select whether the DNS interface will be configured manually or automatically.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_priority"></div>
+ <b>dns_priority</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_priority" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>4</li>
+ <li>6</li>
+ </ul>
+ </td>
+ <td>
+ <div>IP Priority.</div>
+ <div>Required when <em>dns_manual=auto</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_server1"></div>
+ <b>dns_server1</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_server1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>DNS Server1 IPv4 or IPv6 address.</div>
+ <div>Required when <em>dns_manual=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_server2"></div>
+ <b>dns_server2</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_server2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>DNS Server2 IPv4 or IPv6 address.</div>
+ <div>Required when <em>dns_manual=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_server3"></div>
+ <b>dns_server3</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_server3" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>DNS Server3 IPv4 or IPv6 address.</div>
+ <div>Required when <em>dns_manual=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dns_status"></div>
+ <b>dns_status</b>
+ <a class="ansibleOptionLink" href="#parameter-dns_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>DNS status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-domain_iface"></div>
+ <b>domain_iface</b>
+ <a class="ansibleOptionLink" href="#parameter-domain_iface" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Network Interface, input like &#x27;eth0_v4&#x27;, &#x27;eth0_v6&#x27;, &#x27;eth1_v4&#x27;, &#x27;eth1_v6&#x27;, &#x27;bond0_v4&#x27;, &#x27;bond0_v6&#x27;.</div>
+ <div>Required when <em>domain_manual=auto</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-domain_manual"></div>
+ <b>domain_manual</b>
+ <a class="ansibleOptionLink" href="#parameter-domain_manual" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>manual</li>
+ <li>auto</li>
+ </ul>
+ </td>
+ <td>
+ <div>Domain Settings. Select whether the domain interface will be configured manually or automatically.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-domain_name"></div>
+ <b>domain_name</b>
+ <a class="ansibleOptionLink" href="#parameter-domain_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Domain Name.</div>
+ <div>Required when <em>domain_manual=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host_cfg"></div>
+ <b>host_cfg</b>
+ <a class="ansibleOptionLink" href="#parameter-host_cfg" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>manual</li>
+ <li>auto</li>
+ </ul>
+ </td>
+ <td>
+ <div>Host Settings. Select whether the host name will be configured manually or automatically.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host_name"></div>
+ <b>host_name</b>
+ <a class="ansibleOptionLink" href="#parameter-host_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Host Name.</div>
+ <div>Required when <em>host_cfg=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-register_status1"></div>
+ <b>register_status1</b>
+ <a class="ansibleOptionLink" href="#parameter-register_status1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>BMC register status 1.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-register_status2"></div>
+ <b>register_status2</b>
+ <a class="ansibleOptionLink" href="#parameter-register_status2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>BMC register status 2.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-registration_method1"></div>
+ <b>registration_method1</b>
+ <a class="ansibleOptionLink" href="#parameter-registration_method1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>nsupdate</li>
+ <li>dhcp</li>
+ <li>hostname</li>
+ </ul>
+ </td>
+ <td>
+ <div>Registration method 1.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ <div>Required when <em>register_status1=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-registration_method2"></div>
+ <b>registration_method2</b>
+ <a class="ansibleOptionLink" href="#parameter-registration_method2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>nsupdate</li>
+ <li>dhcp</li>
+ <li>hostname</li>
+ </ul>
+ </td>
+ <td>
+ <div>Registration method 2.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ <div>Required when <em>register_status2=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: DNS test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set dns information"
+ ieisystem.inmanage.edit_dns:
+ dns_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set dns information"
+ ieisystem.inmanage.edit_dns:
+ dns_status: "enable"
+ host_cfg: "manual"
+ host_name: "123456789"
+ domain_manual: "auto"
+ domain_iface: "eth0_v4"
+ dns_manual: "manual"
+ dns_server1: "100.2.2.2"
+ dns_server2: "100.2.2.3"
+ dns_server3: "100.2.2.4"
+ provider: "{{ inmanage }}"
+
+ - name: "Set dns information"
+ ieisystem.inmanage.edit_dns:
+ dns_status: "enable"
+ host_cfg: "manual"
+ host_name: "123456789"
+ domain_manual: "manual"
+ domain_name: "test.com"
+ dns_manual: "auto"
+ dns_iface: "eth0"
+ dns_priority: "4"
+ provider: "{{ inmanage }}"
+
+
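+As the ``username`` and ``password`` descriptions above note, credentials fall back to the
+``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when they are not given
+in the task. A hedged sketch of that style (the exported variables and the ``host`` value are
+assumptions for illustration) needs only the connection host:
+
+.. code-block:: yaml+jinja
+
+    # Illustrative task; assumes ANSIBLE_NET_USERNAME and ANSIBLE_NET_PASSWORD are exported
+    # on the controller, so no provider dict or credential parameters are passed.
+    - name: "Set DNS status using environment-variable credentials"
+      ieisystem.inmanage.edit_dns:
+        dns_status: "disable"
+        host: "{{ ansible_ssh_host }}"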
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_event_log_policy_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_event_log_policy_module.rst
new file mode 100644
index 000000000..a7af95001
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_event_log_policy_module.rst
@@ -0,0 +1,322 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_event_log_policy_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_event_log_policy -- Set event log policy
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_event_log_policy`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set event log policy on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-policy"></div>
+ <b>policy</b>
+ <a class="ansibleOptionLink" href="#parameter-policy" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Linear</li>
+ <li>Circular</li>
+ </ul>
+ </td>
+ <td>
+ <div>Event log policy.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Event log policy test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set event log policy"
+ ieisystem.inmanage.edit_event_log_policy:
+ policy: "Linear"
+ provider: "{{ inmanage }}"
+
+
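+The only other documented ``policy`` choice is ``Circular``; a hedged variant of the same task (same
+assumed ``inmanage`` provider dict) would be:
+
+.. code-block:: yaml+jinja
+
+    # Illustrative task; "Circular" is the alternative choice listed in the parameter table.
+    - name: "Set event log policy to circular"
+      ieisystem.inmanage.edit_event_log_policy:
+        policy: "Circular"
+        provider: "{{ inmanage }}"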
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_fan_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_fan_module.rst
new file mode 100644
index 000000000..c34eb6cf7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_fan_module.rst
@@ -0,0 +1,359 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_fan_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_fan -- Set fan information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_fan`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set fan information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-fan_speed"></div>
+ <b>fan_speed</b>
+ <a class="ansibleOptionLink" href="#parameter-fan_speed" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Fan speed (duty ratio), in the range 1 - 100.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-id"></div>
+ <b>id</b>
+ <a class="ansibleOptionLink" href="#parameter-id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Fan ID. 255 applies to all fans; otherwise specify a single fan, 0 to n.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mode"></div>
+ <b>mode</b>
+ <a class="ansibleOptionLink" href="#parameter-mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Automatic</li>
+ <li>Manual</li>
+ </ul>
+ </td>
+ <td>
+ <div>Control mode, Manual or Automatic. Manual must be used together with fan_speed.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Fan test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set fan information"
+ ieisystem.inmanage.edit_fan:
+ mode: "Automatic"
+ provider: "{{ inmanage }}"
+
+ - name: "Set fan information"
+ ieisystem.inmanage.edit_fan:
+ mode: "Manual"
+ id: 1
+ fan_speed: 80
+ provider: "{{ inmanage }}"
+
+
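+Because ``id`` 255 addresses all fans and ``Manual`` mode must be paired with ``fan_speed`` (range
+1 - 100), a hedged all-fan variant (the duty ratio of 50 is chosen purely for illustration) could
+look like:
+
+.. code-block:: yaml+jinja
+
+    # Illustrative task; id 255 means "all fans" per the parameter table, the speed value is arbitrary.
+    - name: "Set all fans to a fixed duty ratio"
+      ieisystem.inmanage.edit_fan:
+        mode: "Manual"
+        id: 255
+        fan_speed: 50
+        provider: "{{ inmanage }}"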
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_fru_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_fru_module.rst
new file mode 100644
index 000000000..b3dfc9939
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_fru_module.rst
@@ -0,0 +1,352 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_fru_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_fru -- Set FRU settings
++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_fru`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set FRU settings on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-attribute"></div>
+ <b>attribute</b>
+ <a class="ansibleOptionLink" href="#parameter-attribute" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>CP</li>
+ <li>CS</li>
+ <li>PM</li>
+ <li>PPN</li>
+ <li>PS</li>
+ <li>PN</li>
+ <li>PV</li>
+ <li>PAT</li>
+ <li>BM</li>
+ <li>BPN</li>
+ <li>BS</li>
+ <li>BP</li>
+ </ul>
+ </td>
+ <td>
+ <div>CP is Chassis Part Number, CS is Chassis Serial, PM is Product Manufacturer.</div>
+ <div>PPN is Product Part Number, PS is Product Serial, PN is Product Name.</div>
+ <div>PV is Product Version, PAT is Product Asset Tag, BM is Board Mfg, BPN is Board Product Name.</div>
+ <div>BS is Board Serial, BP is Board Part Number.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-value"></div>
+ <b>value</b>
+ <a class="ansibleOptionLink" href="#parameter-value" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>The value to set for the selected attribute.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Fru test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set Fru"
+ ieisystem.inmanage.edit_fru:
+ attribute: "CP"
+ value: "test"
+ provider: "{{ inmanage }}"
+
+
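+Any of the documented ``attribute`` codes is written the same way; a hedged example for the Product
+Asset Tag (``PAT``, with a value made up for illustration) would be:
+
+.. code-block:: yaml+jinja
+
+    # Illustrative task; "PAT" is the Product Asset Tag code from the attribute list above.
+    - name: "Set Product Asset Tag"
+      ieisystem.inmanage.edit_fru:
+        attribute: "PAT"
+        value: "asset-0001"
+        provider: "{{ inmanage }}"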
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ipv4_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ipv4_module.rst
new file mode 100644
index 000000000..4a5364b2f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ipv4_module.rst
@@ -0,0 +1,427 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ipv4_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ipv4 -- Set ipv4 information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ipv4`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set ipv4 information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface_name"></div>
+ <b>interface_name</b>
+ <a class="ansibleOptionLink" href="#parameter-interface_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>eth0</li>
+ <li>eth1</li>
+ <li>bond0</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set interface name. Select the LAN interface to be configured.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv4_address"></div>
+ <b>ipv4_address</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv4_address" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>If DHCP is disabled, specify a static IPv4 address to be configured for the selected interface.</div>
+ <div>Required when <em>ipv4_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv4_dhcp_enable"></div>
+ <b>ipv4_dhcp_enable</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv4_dhcp_enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>dhcp</li>
+ <li>static</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable &#x27;Enable DHCP&#x27; to dynamically configure the IPv4 address using the Dynamic Host Configuration Protocol (DHCP).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv4_gateway"></div>
+ <b>ipv4_gateway</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv4_gateway" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>If DHCP is disabled, specify a static Default Gateway to be configured for the selected interface.</div>
+ <div>Required when <em>ipv4_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv4_status"></div>
+ <b>ipv4_status</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv4_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable or disable IPv4.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv4_subnet"></div>
+ <b>ipv4_subnet</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv4_subnet" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>If DHCP is disabled, specify a static Subnet Mask to be configured for the selected interface.</div>
+ <div>Required when <em>ipv4_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ipv4 test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ipv4 information"
+ ieisystem.inmanage.edit_ipv4:
+ interface_name: "eth0"
+ ipv4_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv4 information"
+ ieisystem.inmanage.edit_ipv4:
+ interface_name: "eth0"
+ ipv4_status: "enable"
+ ipv4_dhcp_enable: "dhcp"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv4 information"
+ ieisystem.inmanage.edit_ipv4:
+ interface_name: "eth0"
+ ipv4_status: "enable"
+ ipv4_dhcp_enable: "static"
+ ipv4_address: "100.2.36.10"
+ ipv4_subnet: "255.255.255.0"
+ ipv4_gateway: "100.2.36.1"
+ provider: "{{ inmanage }}"
+
+
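+When ``username`` and ``password`` are omitted from both the task and the provider dictionary, the option descriptions above state that the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables are used instead. A minimal sketch under that assumption, passing only ``host`` explicitly:
+
+.. code-block:: yaml+jinja
+
+    - name: "Set ipv4 information using environment credentials"
+      ieisystem.inmanage.edit_ipv4:
+        host: "{{ ansible_ssh_host }}"
+        interface_name: "eth0"
+        ipv4_status: "enable"
+        ipv4_dhcp_enable: "dhcp"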
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ipv6_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ipv6_module.rst
new file mode 100644
index 000000000..edb0082f1
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ipv6_module.rst
@@ -0,0 +1,444 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ipv6_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ipv6 -- Set ipv6 information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ipv6`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set ipv6 information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface_name"></div>
+ <b>interface_name</b>
+ <a class="ansibleOptionLink" href="#parameter-interface_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>eth0</li>
+ <li>eth1</li>
+ <li>bond0</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set interface name. Select the LAN interface to be configured.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv6_address"></div>
+ <b>ipv6_address</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv6_address" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>If DHCP is disabled, specify a static IPv6 address to be configured for the selected interface.</div>
+ <div>Required when <em>ipv6_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv6_dhcp_enable"></div>
+ <b>ipv6_dhcp_enable</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv6_dhcp_enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>dhcp</li>
+ <li>static</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable &#x27;Enable DHCP&#x27; to dynamically configure the IPv6 address using the Dynamic Host Configuration Protocol (DHCP).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv6_gateway"></div>
+ <b>ipv6_gateway</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv6_gateway" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>If DHCP is disabled, specify a static Default Gateway to be configured for the selected interface.</div>
+ <div>Required when <em>ipv6_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv6_index"></div>
+ <b>ipv6_index</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv6_index" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>IPv6 index (0-15).</div>
+ <div>Required when <em>ipv6_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv6_prefix"></div>
+ <b>ipv6_prefix</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv6_prefix" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>The subnet prefix length for the IPv6 settings (0-128).</div>
+ <div>Required when <em>ipv6_dhcp_enable=static</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ipv6_status"></div>
+ <b>ipv6_status</b>
+ <a class="ansibleOptionLink" href="#parameter-ipv6_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable or disable IPv6.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ipv6 test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ipv6 information"
+ ieisystem.inmanage.edit_ipv6:
+ interface_name: "eth0"
+ ipv6_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv6 information"
+ ieisystem.inmanage.edit_ipv6:
+ interface_name: "eth0"
+ ipv6_status: "enable"
+ ipv6_dhcp_enable: "dhcp"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv6 information"
+ ieisystem.inmanage.edit_ipv6:
+ interface_name: "eth0"
+ ipv6_status: "enable"
+ ipv6_dhcp_enable: "static"
+ ipv6_address: "::ffff:100:2:36:10"
+ ipv6_index: 12
+ ipv6_prefix: 16
+ ipv6_gateway: "::"
+ provider: "{{ inmanage }}"
+
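+Because ``ipv6_index`` (0-15) is required for each static address, a loop can configure several addresses, one index per entry. A sketch, assuming a hypothetical ``static_ipv6_addresses`` list variable is defined elsewhere:
+
+.. code-block:: yaml+jinja
+
+    - name: "Set several static ipv6 addresses"
+      ieisystem.inmanage.edit_ipv6:
+        interface_name: "eth0"
+        ipv6_status: "enable"
+        ipv6_dhcp_enable: "static"
+        ipv6_address: "{{ item }}"
+        ipv6_index: "{{ idx }}"
+        ipv6_prefix: 64
+        provider: "{{ inmanage }}"
+      loop: "{{ static_ipv6_addresses }}"
+      loop_control:
+        index_var: idx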
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_kvm_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_kvm_module.rst
new file mode 100644
index 000000000..b97d3094e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_kvm_module.rst
@@ -0,0 +1,543 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_kvm_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_kvm -- Set KVM
+++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_kvm`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set KVM on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-automatic_off"></div>
+ <b>automatic_off</b>
+ <a class="ansibleOptionLink" href="#parameter-automatic_off" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Automatically turn off the server monitor when KVM launches.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-client_type"></div>
+ <b>client_type</b>
+ <a class="ansibleOptionLink" href="#parameter-client_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>vnc</li>
+ <li>viewer</li>
+ </ul>
+ </td>
+ <td>
+ <div>Client Type.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-keyboard_language"></div>
+ <b>keyboard_language</b>
+ <a class="ansibleOptionLink" href="#parameter-keyboard_language" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>AD</li>
+ <li>DA</li>
+ <li>NL-BE</li>
+ <li>NL-NL</li>
+ <li>GB</li>
+ <li>US</li>
+ <li>FI</li>
+ <li>FR-BE</li>
+ <li>FR</li>
+ <li>DE</li>
+ <li>DE-CH</li>
+ <li>IT</li>
+ <li>JP</li>
+ <li>ON</li>
+ <li>PT</li>
+ <li>EC</li>
+ <li>SV</li>
+ <li>TR_F</li>
+ <li>TR_Q</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the Keyboard Language.</div>
+ <div>AD is Auto Detect, DA is Danish, NL-BE is Dutch Belgium, NL-NL is Dutch Netherlands.</div>
+ <div>GB is English UK, US is English US, FI is Finnish, FR-BE is French Belgium, FR is French France.</div>
+ <div>DE is German Germany, DE-CH is German Switzerland, IT is Italian, JP is Japanese.</div>
+ <div>NO is Norwegian, PT is Portuguese, ES is Spanish, SV is Swedish, TR_F is Turkish F, TR_Q is Turkish Q.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm_encryption"></div>
+ <b>kvm_encryption</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm_encryption" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Encrypt KVM packets.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-local_monitor_off"></div>
+ <b>local_monitor_off</b>
+ <a class="ansibleOptionLink" href="#parameter-local_monitor_off" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Server Monitor OFF Feature Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-media_attach"></div>
+ <b>media_attach</b>
+ <a class="ansibleOptionLink" href="#parameter-media_attach" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>attach</li>
+ <li>auto</li>
+ </ul>
+ </td>
+ <td>
+ <div>Two types of VM attach mode are available.</div>
+ <div>Attach immediately attaches Virtual Media to the server upon bootup.</div>
+ <div>Auto attaches Virtual Media to the server only when a virtual media session is started.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-non_secure"></div>
+ <b>non_secure</b>
+ <a class="ansibleOptionLink" href="#parameter-non_secure" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable/disable Non Secure Connection Type.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ <div>Required when <em>client_type=vnc</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-retry_count"></div>
+ <b>retry_count</b>
+ <a class="ansibleOptionLink" href="#parameter-retry_count" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Number of times to retry in case a KVM failure occurs. Retry count ranges from 1 to 20.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-retry_time_interval"></div>
+ <b>retry_time_interval</b>
+ <a class="ansibleOptionLink" href="#parameter-retry_time_interval" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>The retry time interval (5-30 seconds).</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ssh_vnc"></div>
+ <b>ssh_vnc</b>
+ <a class="ansibleOptionLink" href="#parameter-ssh_vnc" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable/disable VNC over SSH in BMC.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ <div>Required when <em>client_type=vnc</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-stunnel_vnc"></div>
+ <b>stunnel_vnc</b>
+ <a class="ansibleOptionLink" href="#parameter-stunnel_vnc" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable/disable VNC over Stunnel in BMC.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ <div>Required when <em>client_type=vnc</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: KVM test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set KVM"
+ ieisystem.inmanage.edit_kvm:
+ kvm_encryption: "enable"
+ media_attach: "auto"
+ keyboard_language: "AD"
+ retry_count: 13
+ retry_time_interval: 10
+ local_monitor_off: "enable"
+ automatic_off: "enable"
+ provider: "{{ inmanage }}"
+
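+On M6 models the option table above marks ``non_secure``, ``ssh_vnc``, and ``stunnel_vnc`` as required whenever ``client_type=vnc``. A minimal sketch of such a configuration; the chosen enable/disable values are illustrative only:
+
+.. code-block:: yaml+jinja
+
+    - name: "Set KVM with a VNC client on an M6 model"
+      ieisystem.inmanage.edit_kvm:
+        client_type: "vnc"
+        non_secure: "disable"
+        ssh_vnc: "enable"
+        stunnel_vnc: "disable"
+        kvm_encryption: "enable"
+        provider: "{{ inmanage }}"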
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ldap_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ldap_module.rst
new file mode 100644
index 000000000..32a4d8ccf
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ldap_module.rst
@@ -0,0 +1,526 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ldap_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ldap -- Set ldap information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ldap`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set ldap information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-address"></div>
+ <b>address</b>
+ <a class="ansibleOptionLink" href="#parameter-address" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Server Address.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-attr"></div>
+ <b>attr</b>
+ <a class="ansibleOptionLink" href="#parameter-attr" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>cn</li>
+ <li>uid</li>
+ </ul>
+ </td>
+ <td>
+ <div>Attribute of User Login.</div>
+ <div>The Attribute of User Login field indicates to the LDAP/E-Directory server which attribute should be used to identify the user.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-base"></div>
+ <b>base</b>
+ <a class="ansibleOptionLink" href="#parameter-base" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Search Base.</div>
+ <div>The Search Base allows the LDAP/E-Directory server to find which part of the external directory tree is to be searched.</div>
+ <div>This search base may be equivalent to the organization or the group of the external directory.</div>
+ <div>Search base is a string of 4 to 64 alphanumeric characters.</div>
+ <div>It must start with an alphabetical character.</div>
+ <div>Special symbols like dot (.), comma (,), hyphen (-), underscore (_), and equal-to (=) are allowed.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ca"></div>
+ <b>ca</b>
+ <a class="ansibleOptionLink" href="#parameter-ca" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>CA certificate file path.</div>
+ <div>Required when <em>encry=StartTLS</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ce"></div>
+ <b>ce</b>
+ <a class="ansibleOptionLink" href="#parameter-ce" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Certificate file path.</div>
+ <div>Required when <em>encry=StartTLS</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-cn"></div>
+ <b>cn</b>
+ <a class="ansibleOptionLink" href="#parameter-cn" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>ip</li>
+ <li>fqdn</li>
+ </ul>
+ </td>
+ <td>
+ <div>Common name type.</div>
+ <div>Required when <em>encry=StartTLS</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-code"></div>
+ <b>code</b>
+ <a class="ansibleOptionLink" href="#parameter-code" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Password. The Bind password is also used in the bind authentication operations between client and server.</div>
+ <div>Required when <em>enable=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dn"></div>
+ <b>dn</b>
+ <a class="ansibleOptionLink" href="#parameter-dn" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Bind DN. The Bind DN is used in bind operations, which authenticates the client to the server.</div>
+ <div>Bind DN is a string of 4 to 64 alphanumeric characters.</div>
+ <div>It must start with an alphabetical character.</div>
+ <div>Special symbols like dot (.), comma (,), hyphen (-), underscore (_), and equal-to (=) are allowed.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-enable"></div>
+ <b>enable</b>
+ <a class="ansibleOptionLink" href="#parameter-enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>LDAP/E-Directory Authentication Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-encry"></div>
+ <b>encry</b>
+ <a class="ansibleOptionLink" href="#parameter-encry" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>no</li>
+ <li>SSL</li>
+ <li>StartTLS</li>
+ </ul>
+ </td>
+ <td>
+ <div>Encryption Type.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-pk"></div>
+ <b>pk</b>
+ <a class="ansibleOptionLink" href="#parameter-pk" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Private Key file path.</div>
+ <div>Required when <em>encry=StartTLS</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_port"></div>
+ <b>server_port</b>
+ <a class="ansibleOptionLink" href="#parameter-server_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Server Port. Specify the LDAP/E-Directory Port.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ldap test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ldap information"
+ ieisystem.inmanage.edit_ldap:
+ enable: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ldap information"
+ ieisystem.inmanage.edit_ldap:
+ enable: "enable"
+ encry: "SSL"
+ address: "100.2.2.2"
+ server_port: 389
+ dn: "cn=manager,ou=login,dc=domain,dc=com"
+ code: "123456"
+ base: "cn=manager"
+ attr: "uid"
+ provider: "{{ inmanage }}"
+
+
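+The option table above marks ``ca``, ``ce``, ``cn``, and ``pk`` as required when ``encry=StartTLS``. A sketch of a StartTLS configuration; the certificate and key file paths are placeholders:
+
+.. code-block:: yaml+jinja
+
+    - name: "Set ldap information with StartTLS"
+      ieisystem.inmanage.edit_ldap:
+        enable: "enable"
+        encry: "StartTLS"
+        address: "100.2.2.2"
+        server_port: 389
+        dn: "cn=manager,ou=login,dc=domain,dc=com"
+        code: "123456"
+        base: "cn=manager"
+        attr: "uid"
+        cn: "ip"
+        ca: "/path/to/ca.pem"
+        ce: "/path/to/certificate.pem"
+        pk: "/path/to/private_key.pem"
+        provider: "{{ inmanage }}"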
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ldisk_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ldisk_module.rst
new file mode 100644
index 000000000..ec1a48078
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ldisk_module.rst
@@ -0,0 +1,378 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ldisk_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ldisk -- Set logical disk
++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ldisk`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set logical disk on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ctrl_id"></div>
+ <b>ctrl_id</b>
+ <a class="ansibleOptionLink" href="#parameter-ctrl_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Raid controller ID.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-duration"></div>
+ <b>duration</b>
+ <a class="ansibleOptionLink" href="#parameter-duration" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Duration range is 1-255, for physical drives under a PMC RAID controller.</div>
+ <div>Required when <em>option=LOC</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ldisk_id"></div>
+ <b>ldisk_id</b>
+ <a class="ansibleOptionLink" href="#parameter-ldisk_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Logical disk ID.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-option"></div>
+ <b>option</b>
+ <a class="ansibleOptionLink" href="#parameter-option" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>LOC</li>
+ <li>STL</li>
+ <li>FI</li>
+ <li>SFI</li>
+ <li>SI</li>
+ <li>DEL</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set operation options for a logical disk.</div>
+ <div>LOC is Locate Logical Drive, STL is Stop Locate Logical Drive.</div>
+ <div>FI is Fast Initialization, SFI is Slow/Full Initialization.</div>
+ <div>SI is Stop Initialization, DEL is Delete Logical Drive.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Edit ldisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit ldisk"
+ ieisystem.inmanage.edit_ldisk:
+ ctrl_id: 0
+ ldisk_id: 1
+ option: "LOC"
+ provider: "{{ inmanage }}"
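+
+ # Hedged sketch of the other option values; the ctrl_id and ldisk_id values
+ # below are illustrative assumptions, not values taken from the module docs.
+ - name: "Stop locating the logical drive"
+ ieisystem.inmanage.edit_ldisk:
+ ctrl_id: 0
+ ldisk_id: 1
+ option: "STL"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete the logical drive"
+ ieisystem.inmanage.edit_ldisk:
+ ctrl_id: 0
+ ldisk_id: 1
+ option: "DEL"
+ provider: "{{ inmanage }}"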
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_log_setting_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_log_setting_module.rst
new file mode 100644
index 000000000..9184d28fa
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_log_setting_module.rst
@@ -0,0 +1,474 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_log_setting_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_log_setting -- Set BMC system and audit log setting
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_log_setting`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set BMC system and audit log settings on ieisystem Server.
+- Only M5 models support this feature.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-audit_status"></div>
+ <b>audit_status</b>
+ <a class="ansibleOptionLink" href="#parameter-audit_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Audit Log Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-audit_type"></div>
+ <b>audit_type</b>
+ <a class="ansibleOptionLink" href="#parameter-audit_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>local</li>
+ <li>remote</li>
+ <li>both</li>
+ </ul>
+ </td>
+ <td>
+ <div>Audit log type.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_size"></div>
+ <b>file_size</b>
+ <a class="ansibleOptionLink" href="#parameter-file_size" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>File Size (3-65535 bytes), set when type is local (default is 30000).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-protocol_type"></div>
+ <b>protocol_type</b>
+ <a class="ansibleOptionLink" href="#parameter-protocol_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>UDP</li>
+ <li>TCP</li>
+ </ul>
+ </td>
+ <td>
+ <div>Protocol Type, set when type is remote.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-rotate_count"></div>
+ <b>rotate_count</b>
+ <a class="ansibleOptionLink" href="#parameter-rotate_count" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ </ul>
+ </td>
+ <td>
+ <div>Rotate Count, set when type is local; 0 deletes old files (default), 1 backs up old files.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_addr"></div>
+ <b>server_addr</b>
+ <a class="ansibleOptionLink" href="#parameter-server_addr" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Server Address, set when type is remote.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_port"></div>
+ <b>server_port</b>
+ <a class="ansibleOptionLink" href="#parameter-server_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Server Port (0-65535), set when type is remote.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>System Log Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-type"></div>
+ <b>type</b>
+ <a class="ansibleOptionLink" href="#parameter-type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>local</li>
+ <li>remote</li>
+ <li>both</li>
+ </ul>
+ </td>
+ <td>
+ <div>System log type.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Edit log setting test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit bmc system log setting"
+ ieisystem.inmanage.edit_log_setting:
+ status: "enable"
+ type: "both"
+ provider: "{{ inmanage }}"
+
+ - name: "Edit bmc audit log setting"
+ ieisystem.inmanage.edit_log_setting:
+ audit_status: "enable"
+ audit_type: "remote"
+ server_addr: "100.2.126.11"
+ server_port: 514
+ provider: "{{ inmanage }}"
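+
+ # Hedged sketch of a local audit log configuration; the file_size and
+ # rotate_count values are illustrative assumptions, not documented defaults.
+ - name: "Edit bmc audit log setting to local storage"
+ ieisystem.inmanage.edit_log_setting:
+ audit_status: "enable"
+ audit_type: "local"
+ file_size: 30000
+ rotate_count: 1
+ provider: "{{ inmanage }}"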
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_m6_log_setting_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_m6_log_setting_module.rst
new file mode 100644
index 000000000..e1bcf1e05
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_m6_log_setting_module.rst
@@ -0,0 +1,490 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_m6_log_setting_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_m6_log_setting -- Set BMC system and audit log setting
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_m6_log_setting`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set BMC system and audit log settings on ieisystem Server.
+- Only M6 models support this feature.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host_tag"></div>
+ <b>host_tag</b>
+ <a class="ansibleOptionLink" href="#parameter-host_tag" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>HostName</li>
+ <li>SerialNum</li>
+ <li>AssertTag</li>
+ </ul>
+ </td>
+ <td>
+ <div>System log host tag, set when <em>status=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-level"></div>
+ <b>level</b>
+ <a class="ansibleOptionLink" href="#parameter-level" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Critical</li>
+ <li>Warning</li>
+ <li>Info</li>
+ </ul>
+ </td>
+ <td>
+ <div>Events Level, set when <em>status=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-log_type"></div>
+ <b>log_type</b>
+ <a class="ansibleOptionLink" href="#parameter-log_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>idl</li>
+ <li>audit</li>
+ <li>both</li>
+ </ul>
+ </td>
+ <td>
+ <div>Remote Log Type, set when server_id is not none.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-protocol_type"></div>
+ <b>protocol_type</b>
+ <a class="ansibleOptionLink" href="#parameter-protocol_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>UDP</li>
+ <li>TCP</li>
+ </ul>
+ </td>
+ <td>
+ <div>Protocol Type, set when <em>status=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_addr"></div>
+ <b>server_addr</b>
+ <a class="ansibleOptionLink" href="#parameter-server_addr" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Server Address, set when server_id is not none.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_id"></div>
+ <b>server_id</b>
+ <a class="ansibleOptionLink" href="#parameter-server_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Syslog Server ID, set when <em>status=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_port"></div>
+ <b>server_port</b>
+ <a class="ansibleOptionLink" href="#parameter-server_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Server Port, set when server_id is not none.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>System Log Status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-test"></div>
+ <b>test</b>
+ <a class="ansibleOptionLink" href="#parameter-test" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>no</b>&nbsp;&larr;</div></li>
+ <li>yes</li>
+ </ul>
+ </td>
+ <td>
+ <div>Test remote log settings, set when server_id is not none.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Edit log setting test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit bmc system log setting"
+ ieisystem.inmanage.edit_m6_log_setting:
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Edit bmc audit log setting"
+ ieisystem.inmanage.edit_m6_log_setting:
+ status: "enable"
+ host_tag: "HostName"
+ level: "Info"
+ protocol_type: "TCP"
+ server_id: 0
+ server_addr: "100.2.126.11"
+ server_port: 514
+ log_type: "both"
+ provider: "{{ inmanage }}"
+
+ - name: "test bmc audit log"
+ ieisystem.inmanage.edit_m6_log_setting:
+ server_id: 0
+ test: True
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_manual_capture_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_manual_capture_module.rst
new file mode 100644
index 000000000..72f9e2bc1
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_manual_capture_module.rst
@@ -0,0 +1,322 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_manual_capture_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_manual_capture -- Set manual capture screen
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_manual_capture`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set manual capture screen on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-type"></div>
+ <b>type</b>
+ <a class="ansibleOptionLink" href="#parameter-type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>capture</li>
+ <li>delete</li>
+ </ul>
+ </td>
+ <td>
+ <div>Manual type.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set manual capture screen"
+ ieisystem.inmanage.edit_manual_capture:
+ type: "capture"
+ provider: "{{ inmanage }}"
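+
+ # Hedged sketch showing the other supported type value; it assumes a
+ # previously captured screen already exists on the BMC.
+ - name: "Delete manual capture screen"
+ ieisystem.inmanage.edit_manual_capture:
+ type: "delete"
+ provider: "{{ inmanage }}"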
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_media_instance_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_media_instance_module.rst
new file mode 100644
index 000000000..36a853fd3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_media_instance_module.rst
@@ -0,0 +1,502 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_media_instance_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_media_instance -- Set Virtual Media Instance
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_media_instance`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set Virtual Media Instance on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm_num_cd"></div>
+ <b>kvm_num_cd</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm_num_cd" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the number of Remote KVM CD/DVD devices supported for Virtual Media redirection.</div>
+ <div>The maximum supported number is 2 for HTML5 KVM and 4 for Java KVM.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm_num_fd"></div>
+ <b>kvm_num_fd</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm_num_fd" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the number of Remote KVM floppy devices supported for Virtual Media redirection.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm_num_hd"></div>
+ <b>kvm_num_hd</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm_num_hd" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the number of Remote KVM hard disk devices supported for Virtual Media redirection.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-num_cd"></div>
+ <b>num_cd</b>
+ <a class="ansibleOptionLink" href="#parameter-num_cd" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the number of CD/DVD devices supported for Virtual Media redirection.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-num_fd"></div>
+ <b>num_fd</b>
+ <a class="ansibleOptionLink" href="#parameter-num_fd" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the number of floppy devices supported for Virtual Media redirection.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-num_hd"></div>
+ <b>num_hd</b>
+ <a class="ansibleOptionLink" href="#parameter-num_hd" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the number of hard disk devices supported for Virtual Media redirection.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-power_save_mode"></div>
+ <b>power_save_mode</b>
+ <a class="ansibleOptionLink" href="#parameter-power_save_mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Enable</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Check this option to enable Power Save Mode in BMC.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-sd_media"></div>
+ <b>sd_media</b>
+ <a class="ansibleOptionLink" href="#parameter-sd_media" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Enable</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Check this option to enable SD Media support in BMC.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secure_channel"></div>
+ <b>secure_channel</b>
+ <a class="ansibleOptionLink" href="#parameter-secure_channel" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Enable</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Check this option to enable encryption of media redirection packets.</div>
+ <div>Only the M5/M6 models support this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Media instance test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set media instance"
+ ieisystem.inmanage.edit_media_instance:
+ num_fd: 1
+ num_cd: 1
+ num_hd: 1
+ kvm_num_fd: 1
+ kvm_num_cd: 1
+ kvm_num_hd: 1
+ sd_media: "Enable"
+ secure_channel: "Enable"
+ power_save_mode: "Enable"
+ provider: "{{ inmanage }}"
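+
+ # Hedged sketch: turning the optional media features off. This is an
+ # illustrative task, not an example from the module documentation.
+ - name: "Disable optional media features"
+ ieisystem.inmanage.edit_media_instance:
+ sd_media: "Disable"
+ secure_channel: "Disable"
+ power_save_mode: "Disable"
+ provider: "{{ inmanage }}"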
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ncsi_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ncsi_module.rst
new file mode 100644
index 000000000..b8fa63079
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ncsi_module.rst
@@ -0,0 +1,389 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ncsi_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ncsi -- Set ncsi information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ncsi`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set ncsi information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-channel_number"></div>
+ <b>channel_number</b>
+ <a class="ansibleOptionLink" href="#parameter-channel_number" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Channel number.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface_name"></div>
+ <b>interface_name</b>
+ <a class="ansibleOptionLink" href="#parameter-interface_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Interface name, for example eth0.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mode"></div>
+ <b>mode</b>
+ <a class="ansibleOptionLink" href="#parameter-mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>auto</li>
+ <li>manual</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>NCSI mode; auto means Auto Failover, manual means Manual Switch.</div>
+ <div>Only the M6 model supports the <code>Disable</code> setting.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-nic_type"></div>
+ <b>nic_type</b>
+ <a class="ansibleOptionLink" href="#parameter-nic_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>PHY</li>
+ <li>OCP</li>
+ <li>OCP1</li>
+ <li>PCIE</li>
+ <li>auto</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>NIC type.</div>
+ <div>Only the NF3280A6 and NF3180A6 models support the <code>Disable</code> setting; they do not support the <code>PHY</code> setting.</div>
+ <div>The M6 model only supports the <code>OCP</code>, <code>OCP1</code>, and <code>PCIE</code> settings.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: NCSI test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ncsi information"
+ ieisystem.inmanage.edit_ncsi:
+ mode: "manual"
+ nic_type: "PCIE"
+ interface_name: "eth0"
+ channel_number: 1
+ provider: "{{ inmanage }}"
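+
+ # Hypothetical variation (values are assumptions, not taken from the module docs):
+ # per the nic_type notes above, the M6 model accepts OCP/OCP1/PCIE, so the same
+ # task can target an OCP adapter instead of PCIE.
+ - name: "Set ncsi information on an M6 OCP adapter"
+ ieisystem.inmanage.edit_ncsi:
+ mode: "manual"
+ nic_type: "OCP"
+ interface_name: "eth0"
+ channel_number: 1
+ provider: "{{ inmanage }}"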
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_network_bond_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_network_bond_module.rst
new file mode 100644
index 000000000..0da1d87f5
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_network_bond_module.rst
@@ -0,0 +1,363 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_network_bond_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_network_bond -- Set network bond
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_network_bond`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set network bond on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auto_config"></div>
+ <b>auto_config</b>
+ <a class="ansibleOptionLink" href="#parameter-auto_config" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable this option to configure the interfaces in service configuration automatically.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-bond"></div>
+ <b>bond</b>
+ <a class="ansibleOptionLink" href="#parameter-bond" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Network bond status. If VLAN is enabled for slave interfaces, then bonding cannot be enabled.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface"></div>
+ <b>interface</b>
+ <a class="ansibleOptionLink" href="#parameter-interface" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>shared</li>
+ <li>dedicated</li>
+ <li>both</li>
+ </ul>
+ </td>
+ <td>
+ <div>Interface name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bond test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set network bond"
+ ieisystem.inmanage.edit_network_bond:
+ bond: "enable"
+ interface: "dedicated"
+ auto_config: "enable"
+ provider: "{{ inmanage }}"
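+
+ # Hedged illustration (an assumption, not from the module docs): the same module
+ # can disable bonding again on the dedicated interface; per the bond notes,
+ # bonding cannot be enabled while VLAN is active on the slave interfaces.
+ - name: "Disable network bond"
+ ieisystem.inmanage.edit_network_bond:
+ bond: "disable"
+ interface: "dedicated"
+ provider: "{{ inmanage }}"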
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_network_link_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_network_link_module.rst
new file mode 100644
index 000000000..f07d16bc2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_network_link_module.rst
@@ -0,0 +1,392 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_network_link_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_network_link -- Set network link
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_network_link`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set network link on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auto_nego"></div>
+ <b>auto_nego</b>
+ <a class="ansibleOptionLink" href="#parameter-auto_nego" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>This option allows the device to perform auto-configuration.</div>
+ <div>It is used to achieve the best mode of operation (speed and duplex) on the link.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-duplex_mode"></div>
+ <b>duplex_mode</b>
+ <a class="ansibleOptionLink" href="#parameter-duplex_mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>HALF</li>
+ <li>FULL</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select any one of the following duplex modes.</div>
+ <div>Required when <em>auto_nego=disable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface"></div>
+ <b>interface</b>
+ <a class="ansibleOptionLink" href="#parameter-interface" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>shared</li>
+ <li>dedicated</li>
+ <li>both</li>
+ </ul>
+ </td>
+ <td>
+ <div>Interface name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-link_speed"></div>
+ <b>link_speed</b>
+ <a class="ansibleOptionLink" href="#parameter-link_speed" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>10</li>
+ <li>100</li>
+ </ul>
+ </td>
+ <td>
+ <div>Link speed lists the supported capabilities of the network interface. It can be 10 or 100 Mbps.</div>
+ <div>Required when <em>auto_nego=disable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Link test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set network link"
+ ieisystem.inmanage.edit_network_link:
+ interface: "dedicated"
+ auto_nego: "enable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set network link"
+ ieisystem.inmanage.edit_network_link:
+ interface: "dedicated"
+ auto_nego: "disable"
+ link_speed: 100
+ duplex_mode: "FULL"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_network_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_network_module.rst
new file mode 100644
index 000000000..dfc07c571
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_network_module.rst
@@ -0,0 +1,343 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_network_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_network -- Set network information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_network`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+ - Set network information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface_name"></div>
+ <b>interface_name</b>
+ <a class="ansibleOptionLink" href="#parameter-interface_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>eth0</li>
+ <li>eth1</li>
+ <li>bond0</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set interface name. Select the LAN interface to be configured.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-lan_enable"></div>
+ <b>lan_enable</b>
+ <a class="ansibleOptionLink" href="#parameter-lan_enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable or disable this interface. If disabled, you cannot use this interface any more.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Network test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set network information"
+ ieisystem.inmanage.edit_network:
+ interface_name: "eth0"
+ lan_enable: "enable"
+ provider: "{{ inmanage }}"
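+
+ # Hedged illustration (an assumption, not from the module docs): the same module
+ # can disable an interface; per the lan_enable notes, a disabled interface can
+ # no longer be used.
+ - name: "Disable a LAN interface"
+ ieisystem.inmanage.edit_network:
+ interface_name: "eth1"
+ lan_enable: "disable"
+ provider: "{{ inmanage }}"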
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_ntp_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_ntp_module.rst
new file mode 100644
index 000000000..3dda355a3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_ntp_module.rst
@@ -0,0 +1,486 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_ntp_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_ntp -- Set NTP
+++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_ntp`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set NTP on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auto_date"></div>
+ <b>auto_date</b>
+ <a class="ansibleOptionLink" href="#parameter-auto_date" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Automatically synchronize the date.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-max_variety"></div>
+ <b>max_variety</b>
+ <a class="ansibleOptionLink" href="#parameter-max_variety" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP maximum jump time in minutes, from 1 to 60.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ntp_time"></div>
+ <b>ntp_time</b>
+ <a class="ansibleOptionLink" href="#parameter-ntp_time" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP time (YYYYmmddHHMMSS).</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server1"></div>
+ <b>server1</b>
+ <a class="ansibleOptionLink" href="#parameter-server1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP Server1 (IPv4 address, IPv6 address, or domain name); set when <em>auto_date=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server2"></div>
+ <b>server2</b>
+ <a class="ansibleOptionLink" href="#parameter-server2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP Server2 (IPv4 address, IPv6 address, or domain name); set when <em>auto_date=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server3"></div>
+ <b>server3</b>
+ <a class="ansibleOptionLink" href="#parameter-server3" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP Server3 (IPv4 address, IPv6 address, or domain name); set when <em>auto_date=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server4"></div>
+ <b>server4</b>
+ <a class="ansibleOptionLink" href="#parameter-server4" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP Server4 (IPv4 address, IPv6 address, or domain name); set when <em>auto_date=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server5"></div>
+ <b>server5</b>
+ <a class="ansibleOptionLink" href="#parameter-server5" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP Server5 (IPv4 address, IPv6 address, or domain name); set when <em>auto_date=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server6"></div>
+ <b>server6</b>
+ <a class="ansibleOptionLink" href="#parameter-server6" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP Server6 (IPv4 address, IPv6 address, or domain name); set when <em>auto_date=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-syn_cycle"></div>
+ <b>syn_cycle</b>
+ <a class="ansibleOptionLink" href="#parameter-syn_cycle" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>NTP sync cycle in minutes, from 5 to 1440.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-time_zone"></div>
+ <b>time_zone</b>
+ <a class="ansibleOptionLink" href="#parameter-time_zone" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>UTC time zone, chosen from {-12, -11.5, -11, ... , 11, 11.5, 12}.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: NTP test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ntp"
+ ieisystem.inmanage.edit_ntp:
+ auto_date: "enable"
+ server2: "time.nist.gov"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ntp"
+ ieisystem.inmanage.edit_ntp:
+ auto_date: "disable"
+ ntp_time: "20200609083600"
+ provider: "{{ inmanage }}"
+
+ - name: "set ntp"
+ ieisystem.inmanage.edit_ntp:
+ time_zone: "8"
+ provider: "{{ inmanage }}"
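+
+ # Hedged illustration (values are assumptions; max_variety is documented as
+ # M6 only): tune the sync cycle and maximum jump time together with automatic
+ # synchronization.
+ - name: "Set ntp sync cycle"
+ ieisystem.inmanage.edit_ntp:
+ auto_date: "enable"
+ server1: "time.nist.gov"
+ syn_cycle: 30
+ max_variety: 10
+ provider: "{{ inmanage }}"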
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_pdisk_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_pdisk_module.rst
new file mode 100644
index 000000000..f06d13f30
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_pdisk_module.rst
@@ -0,0 +1,482 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_pdisk_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_pdisk -- Set physical disk
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_pdisk`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set physical disk on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-action"></div>
+ <b>action</b>
+ <a class="ansibleOptionLink" href="#parameter-action" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>remove</li>
+ <li>global</li>
+ <li>dedicate</li>
+ </ul>
+ </td>
+ <td>
+ <div>Action while setting the physical drive hotspare.</div>
+ <div>Required when <em>option=HS</em>.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ctrl_id"></div>
+ <b>ctrl_id</b>
+ <a class="ansibleOptionLink" href="#parameter-ctrl_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>RAID controller ID.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-device_id"></div>
+ <b>device_id</b>
+ <a class="ansibleOptionLink" href="#parameter-device_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Physical drive id.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-duration"></div>
+ <b>duration</b>
+ <a class="ansibleOptionLink" href="#parameter-duration" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Duration range is 1-255, for physical drives under a PMC RAID controller.</div>
+ <div>Required when <em>option=LOC</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-encl"></div>
+ <b>encl</b>
+ <a class="ansibleOptionLink" href="#parameter-encl" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>yes</li>
+ <li>no</li>
+ </ul>
+ </td>
+ <td>
+ <div>IsEnclAffinity while setting the physical drive hotspare.</div>
+ <div>Required when <em>option=HS</em> and <em>action=dedicate</em>.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-logical_drivers"></div>
+ <b>logical_drivers</b>
+ <a class="ansibleOptionLink" href="#parameter-logical_drivers" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=integer</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Logical Drivers while setting the physical drive hotspare; input multiple Logical Driver indexes, such as 0, 1, 2, and so on.</div>
+ <div>Required when <em>option=HS</em> and <em>action=dedicate</em>.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-option"></div>
+ <b>option</b>
+ <a class="ansibleOptionLink" href="#parameter-option" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>UG</li>
+ <li>UB</li>
+ <li>OFF</li>
+ <li>FAIL</li>
+ <li>RBD</li>
+ <li>ON</li>
+ <li>JB</li>
+ <li>ES</li>
+ <li>EM</li>
+ <li>EN</li>
+ <li>ET</li>
+ <li>LOC</li>
+ <li>STL</li>
+ <li>HS</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set operation options for a physical disk.</div>
+ <div>UG is Unconfigured Good, UB is Unconfigured Bad.</div>
+ <div>OFF is offline, FAIL is Failed, RBD is Rebuild.</div>
+ <div>ON is Online, JB is JBOD, ES is Drive Erase stop.</div>
+ <div>EM is Drive Erase Simple, EN is Drive Erase Normal.</div>
+ <div>ET is Drive Erase Through, LOC is Locate, STL is Stop Locate.</div>
+ <div>HS is Hot spare.</div>
+ <div>Only the M5 model supports <code>HS</code> Settings.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-revertible"></div>
+ <b>revertible</b>
+ <a class="ansibleOptionLink" href="#parameter-revertible" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>yes</li>
+ <li>no</li>
+ </ul>
+ </td>
+ <td>
+ <div>IsRevertible while setting the physical drive hotspare.</div>
+ <div>Required when <em>option=HS</em> and <em>action=dedicate</em>.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Edit pdisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit pdisk"
+ ieisystem.inmanage.edit_pdisk:
+ ctrl_id: 0
+ device_id: 1
+ option: "LOC"
+ provider: "{{ inmanage }}"
+
+ - name: "M5 Edit pdisk"
+ ieisystem.inmanage.edit_pdisk:
+ ctrl_id: 0
+ device_id: 1
+ option: "HS"
+ action: "dedicate"
+ revertible: "yes"
+ encl: "yes"
+ logical_drivers: 1
+ provider: "{{ inmanage }}"
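+
+ # Hedged illustration (values are assumptions; duration is documented as M6
+ # only): locate a drive behind a PMC controller with a duration of 60, within
+ # the documented 1-255 range.
+ - name: "M6 Edit pdisk with locate duration"
+ ieisystem.inmanage.edit_pdisk:
+ ctrl_id: 0
+ device_id: 1
+ option: "LOC"
+ duration: 60
+ provider: "{{ inmanage }}"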
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_power_budget_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_power_budget_module.rst
new file mode 100644
index 000000000..c47d6ca36
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_power_budget_module.rst
@@ -0,0 +1,685 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_power_budget_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_power_budget -- Set power budget information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_power_budget`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set power budget information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-action"></div>
+ <b>action</b>
+ <a class="ansibleOptionLink" href="#parameter-action" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>add</li>
+ <li>delete</li>
+ <li>open</li>
+ <li>close</li>
+ </ul>
+ </td>
+ <td>
+ <div>Type of action.</div>
+ <div>Required when <em>range=False</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-domain"></div>
+ <b>domain</b>
+ <a class="ansibleOptionLink" href="#parameter-domain" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>system</li>
+ <li>cpu</li>
+ </ul>
+ </td>
+ <td>
+ <div>Domain id.</div>
+ <div>Required when <em>range=False</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-end1"></div>
+ <b>end1</b>
+ <a class="ansibleOptionLink" href="#parameter-end1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; end time, which must be greater than the start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-end2"></div>
+ <b>end2</b>
+ <a class="ansibleOptionLink" href="#parameter-end2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; end time, which must be greater than the start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-end3"></div>
+ <b>end3</b>
+ <a class="ansibleOptionLink" href="#parameter-end3" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; end time, which must be greater than the start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-end4"></div>
+ <b>end4</b>
+ <a class="ansibleOptionLink" href="#parameter-end4" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; end time, which must be greater than the start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-end5"></div>
+ <b>end5</b>
+ <a class="ansibleOptionLink" href="#parameter-end5" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; end time, which must be greater than the start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-except_action"></div>
+ <b>except_action</b>
+ <a class="ansibleOptionLink" href="#parameter-except_action" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+ <div>Exception action; 0 - do nothing, 1 - send alert, 2 - shut down the system, 3 - shut down the system and send an alert.</div>
+ <div>Only the M7 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-id"></div>
+ <b>id</b>
+ <a class="ansibleOptionLink" href="#parameter-id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>Policy id.</div>
+ <div>Required when <em>range=False</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-range"></div>
+ <b>range</b>
+ <a class="ansibleOptionLink" href="#parameter-range" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>no</b>&nbsp;&larr;</div></li>
+ <li>yes</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether to query the range of power budget watts instead of editing a policy.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-start1"></div>
+ <b>start1</b>
+ <a class="ansibleOptionLink" href="#parameter-start1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-start2"></div>
+ <b>start2</b>
+ <a class="ansibleOptionLink" href="#parameter-start2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-start3"></div>
+ <b>start3</b>
+ <a class="ansibleOptionLink" href="#parameter-start3" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-start4"></div>
+ <b>start4</b>
+ <a class="ansibleOptionLink" href="#parameter-start4" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-start5"></div>
+ <b>start5</b>
+ <a class="ansibleOptionLink" href="#parameter-start5" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Pause period when adding a policy; start time, from 0 to 24.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-watts"></div>
+ <b>watts</b>
+ <a class="ansibleOptionLink" href="#parameter-watts" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Power budget watts when adding a policy.</div>
+ <div>Required when <em>action=add</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-week1"></div>
+ <b>week1</b>
+ <a class="ansibleOptionLink" href="#parameter-week1" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Repetition period for the pause period when adding a policy.</div>
+ <div>Valid values are &#x27;Mon&#x27;, &#x27;Tue&#x27;, &#x27;Wed&#x27;, &#x27;Thur&#x27;, &#x27;Fri&#x27;, &#x27;Sat&#x27;, &#x27;Sun&#x27;, separated by commas, for example Mon, Wed, Fri.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-week2"></div>
+ <b>week2</b>
+ <a class="ansibleOptionLink" href="#parameter-week2" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Repetition period for the pause period when adding a policy.</div>
+ <div>Valid values are &#x27;Mon&#x27;, &#x27;Tue&#x27;, &#x27;Wed&#x27;, &#x27;Thur&#x27;, &#x27;Fri&#x27;, &#x27;Sat&#x27;, &#x27;Sun&#x27;, separated by commas, for example Mon, Wed, Fri.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-week3"></div>
+ <b>week3</b>
+ <a class="ansibleOptionLink" href="#parameter-week3" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Repetition period for the pause period when adding a policy.</div>
+ <div>Valid values are &#x27;Mon&#x27;, &#x27;Tue&#x27;, &#x27;Wed&#x27;, &#x27;Thur&#x27;, &#x27;Fri&#x27;, &#x27;Sat&#x27;, &#x27;Sun&#x27;, separated by commas, for example Mon, Wed, Fri.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-week4"></div>
+ <b>week4</b>
+ <a class="ansibleOptionLink" href="#parameter-week4" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Repetition period for the pause period when adding a policy.</div>
+ <div>Valid values are &#x27;Mon&#x27;, &#x27;Tue&#x27;, &#x27;Wed&#x27;, &#x27;Thur&#x27;, &#x27;Fri&#x27;, &#x27;Sat&#x27;, &#x27;Sun&#x27;, separated by commas, for example Mon, Wed, Fri.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-week5"></div>
+ <b>week5</b>
+ <a class="ansibleOptionLink" href="#parameter-week5" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Repetition period for the pause period when adding a policy.</div>
+ <div>Valid values are &#x27;Mon&#x27;, &#x27;Tue&#x27;, &#x27;Wed&#x27;, &#x27;Thur&#x27;, &#x27;Fri&#x27;, &#x27;Sat&#x27;, &#x27;Sun&#x27;, separated by commas, for example Mon, Wed, Fri.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Power budget test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get power budget range"
+ ieisystem.inmanage.edit_power_budget:
+ range: True
+ provider: "{{ inmanage }}"
+
+ - name: "add power budget"
+ ieisystem.inmanage.edit_power_budget:
+ action: "add"
+ id: 1
+ watts: 1500
+ start1: 2
+ end1: 5
+ week1:
+ - Mon
+ - Wed
+ - Fri
+ provider: "{{ inmanage }}"
+
+ - name: "Set power budget status to open"
+ ieisystem.inmanage.edit_power_budget:
+ action: "open"
+ id: 1
+ provider: "{{ inmanage }}"
+
+ - name: "Set power budget status to close"
+ ieisystem.inmanage.edit_power_budget:
+ action: "close"
+ id: 1
+ provider: "{{ inmanage }}"
+
+ - name: "Delete power budget"
+ ieisystem.inmanage.edit_power_budget:
+ action: "delete"
+ id: 1
+ provider: "{{ inmanage }}"
+
+
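+The ``add`` action combines the budget in ``watts`` with up to five pause periods (``start1``/``end1`` through ``start5``/``end5`` and the matching ``week1``-``week5`` lists); per the parameter table, an ``except_action`` can also be set on M7 models. A minimal sketch with illustrative values, assuming an M7 system and reusing the ``provider`` mapping from the play above:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Add power budget for the cpu domain (illustrative values)"
+      ieisystem.inmanage.edit_power_budget:
+        action: "add"
+        domain: "cpu"
+        id: 2
+        watts: 800
+        except_action: 1             # 1 - send alert; M7 models only, per the parameter table
+        start1: 8
+        end1: 18
+        week1:
+          - Sat
+          - Sun
+        provider: "{{ inmanage }}"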
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_power_restore_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_power_restore_module.rst
new file mode 100644
index 000000000..9040f60de
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_power_restore_module.rst
@@ -0,0 +1,323 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_power_restore_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_power_restore -- Set power restore information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_power_restore`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set power restore information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-option"></div>
+ <b>option</b>
+ <a class="ansibleOptionLink" href="#parameter-option" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>on</li>
+ <li>off</li>
+ <li>restore</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set power policy option.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Power restore test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set power restore information"
+ ieisystem.inmanage.edit_power_restore:
+ option: "on"
+ provider: "{{ inmanage }}"
+
+
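+Besides ``on``, the ``option`` parameter also accepts ``off`` and ``restore`` (see the choices above). A minimal sketch selecting the ``restore`` policy:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Set power restore policy to restore"
+      ieisystem.inmanage.edit_power_restore:
+        option: "restore"
+        provider: "{{ inmanage }}"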
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_power_status_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_power_status_module.rst
new file mode 100644
index 000000000..884533719
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_power_status_module.rst
@@ -0,0 +1,325 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_power_status_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_power_status -- Set power status information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_power_status`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set power status information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>On</li>
+ <li>ForceOff</li>
+ <li>ForcePowerCycle</li>
+ <li>ForceReset</li>
+ <li>GracefulShutdown</li>
+ </ul>
+ </td>
+ <td>
+ <div>Power status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Power status test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set power status information"
+ ieisystem.inmanage.edit_power_status:
+ state: "On"
+ provider: "{{ inmanage }}"
+
+
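+The ``state`` parameter also accepts ``ForceOff``, ``ForcePowerCycle``, ``ForceReset``, and ``GracefulShutdown`` (see the choices above). A minimal sketch requesting a graceful shutdown:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Gracefully shut down the server"
+      ieisystem.inmanage.edit_power_status:
+        state: "GracefulShutdown"
+        provider: "{{ inmanage }}"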
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_preserve_config_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_preserve_config_module.rst
new file mode 100644
index 000000000..82b31c32b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_preserve_config_module.rst
@@ -0,0 +1,373 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_preserve_config_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_preserve_config -- Set preserve config
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_preserve_config`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set preserve config on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-override"></div>
+ <b>override</b>
+ <a class="ansibleOptionLink" href="#parameter-override" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>authentication</li>
+ <li>dcmi</li>
+ <li>fru</li>
+ <li>hostname</li>
+ <li>ipmi</li>
+ <li>kvm</li>
+ <li>network</li>
+ <li>ntp</li>
+ <li>pef</li>
+ <li>sdr</li>
+ <li>sel</li>
+ <li>smtp</li>
+ <li>snmp</li>
+ <li>sol</li>
+ <li>ssh</li>
+ <li>syslog</li>
+ <li>user</li>
+ </ul>
+ </td>
+ <td>
+ <div>Configuration items that need to be retained.</div>
+ <div>Required when <em>setting=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-setting"></div>
+ <b>setting</b>
+ <a class="ansibleOptionLink" href="#parameter-setting" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>all</li>
+ <li>none</li>
+ <li>manual</li>
+ </ul>
+ </td>
+ <td>
+ <div>Preserve option; all - preserve all configuration, none - overwrite all configuration, manual - manually choose the items to preserve.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Preserve test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set preserve all"
+ ieisystem.inmanage.edit_preserve_config:
+ setting: "all"
+ provider: "{{ inmanage }}"
+
+ - name: "Set preserve none"
+ ieisystem.inmanage.edit_preserve_config:
+ setting: "none"
+ provider: "{{ inmanage }}"
+
+ - name: "Set preserve manual"
+ ieisystem.inmanage.edit_preserve_config:
+ setting: "manual"
+ override:
+ - fru
+ - ntp
+ - network
+ - user
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_psu_config_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_psu_config_module.rst
new file mode 100644
index 000000000..87d5b884f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_psu_config_module.rst
@@ -0,0 +1,339 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_psu_config_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_psu_config -- Set psu config information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_psu_config`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set psu config information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-id"></div>
+ <b>id</b>
+ <a class="ansibleOptionLink" href="#parameter-id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Power supply ID.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-switch"></div>
+ <b>switch</b>
+ <a class="ansibleOptionLink" href="#parameter-switch" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>active</li>
+ <li>standby</li>
+ <li>normal</li>
+ </ul>
+ </td>
+ <td>
+ <div>Power supply mode; active, standby, or normal.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Psu config test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set psu config information"
+ ieisystem.inmanage.edit_psu_config:
+ id: 1
+ switch: "active"
+ provider: "{{ inmanage }}"
+
+
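+Per the choices above, ``switch`` also accepts ``standby`` and ``normal``. A minimal sketch placing a second power supply (hypothetical ``id: 2``) into standby:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Set psu 2 to standby"
+      ieisystem.inmanage.edit_psu_config:
+        id: 2
+        switch: "standby"
+        provider: "{{ inmanage }}"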
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_psu_peak_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_psu_peak_module.rst
new file mode 100644
index 000000000..a4f8d9569
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_psu_peak_module.rst
@@ -0,0 +1,343 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_psu_peak_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_psu_peak -- Set psu peak information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_psu_peak`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set psu peak information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Power peak status.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-time"></div>
+ <b>time</b>
+ <a class="ansibleOptionLink" href="#parameter-time" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>Maximum random delay time in seconds; valid range is 1 to 600.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Psu peak test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set psu peak information"
+ ieisystem.inmanage.edit_psu_peak:
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set psu peak information"
+ ieisystem.inmanage.edit_psu_peak:
+ status: "enable"
+ time: 10
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
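+
+A minimal sketch of consuming these return values -- the first task registers the module result and the second surfaces ``state`` and ``message``; the variable name is illustrative:
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Set psu peak information and keep the result"
+    ieisystem.inmanage.edit_psu_peak:
+      status: "enable"
+      time: 60
+      provider: "{{ inmanage }}"
+    register: psu_peak_result
+
+  - name: "Report what the module returned"
+    ansible.builtin.debug:
+      msg: "state={{ psu_peak_result.state }}, message={{ psu_peak_result.message }}"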
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_restore_factory_default_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_restore_factory_default_module.rst
new file mode 100644
index 000000000..f258888cf
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_restore_factory_default_module.rst
@@ -0,0 +1,368 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_restore_factory_default_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_restore_factory_default -- Set preserve config
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_restore_factory_default`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set which configuration items to preserve when restoring factory defaults on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mode"></div>
+ <b>mode</b>
+ <a class="ansibleOptionLink" href="#parameter-mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>all</li>
+ <li>none</li>
+ <li>manual</li>
+ </ul>
+ </td>
+ <td>
+ <div>Restore factory defaults mode.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-override"></div>
+ <b>override</b>
+ <a class="ansibleOptionLink" href="#parameter-override" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>authentication</li>
+ <li>dcmi</li>
+ <li>fru</li>
+ <li>hostname</li>
+ <li>ipmi</li>
+ <li>kvm</li>
+ <li>network</li>
+ <li>ntp</li>
+ <li>pef</li>
+ <li>sdr</li>
+ <li>sel</li>
+ <li>smtp</li>
+ <li>snmp</li>
+ <li>sol</li>
+ <li>ssh</li>
+ <li>syslog</li>
+ <li>user</li>
+ </ul>
+ </td>
+ <td>
+ <div>Configuration items that need to be retained.</div>
+ <div>Required when <em>mode=manual</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Restore default test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set restore default auto"
+ ieisystem.inmanage.edit_restore_factory_default:
+ mode: "all"
+ provider: "{{ inmanage }}"
+
+ - name: "Set restore default manual"
+ ieisystem.inmanage.edit_restore_factory_default:
+ mode: "manual"
+ override:
+ - fru
+ - ntp
+ - network
+ - user
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_service_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_service_module.rst
new file mode 100644
index 000000000..3e9b9b68a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_service_module.rst
@@ -0,0 +1,428 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_service_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_service -- Set service settings
++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_service`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set service settings on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface"></div>
+ <b>interface</b>
+ <a class="ansibleOptionLink" href="#parameter-interface" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>eth0</li>
+ <li>eth1</li>
+ <li>both</li>
+ <li>bond0</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>Specifies the interface on which the service is running.</div>
+ <div>The user can choose any one of the available interfaces.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-non_secure_port"></div>
+ <b>non_secure_port</b>
+ <a class="ansibleOptionLink" href="#parameter-non_secure_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>Used to configure the non-secure port number for the service.</div>
+ <div>Port value ranges from 1 to 65535.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secure_port"></div>
+ <b>secure_port</b>
+ <a class="ansibleOptionLink" href="#parameter-secure_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>Used to configure the secure port number for the service.</div>
+ <div>Port value ranges from 1 to 65535.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-service_name"></div>
+ <b>service_name</b>
+ <a class="ansibleOptionLink" href="#parameter-service_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>web</li>
+ <li>kvm</li>
+ <li>cd-media</li>
+ <li>fd-media</li>
+ <li>hd-media</li>
+ <li>ssh</li>
+ <li>telnet</li>
+ <li>solssh</li>
+ <li>snmp</li>
+ <li>vnc</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>Displays the service name of the selected slot (read-only).</div>
+ <div>The <em>vnc</em> option is not supported in M5.</div>
+ <div>The <em>fd-media/telnet/snmp</em> option is not supported in M6.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>active</li>
+ <li>inactive</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>The desired state of the service, either active or inactive.</div>
+                                                    <div>Set to <em>active</em> to start an inactive service.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-timeout"></div>
+ <b>timeout</b>
+ <a class="ansibleOptionLink" href="#parameter-timeout" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>The session timeout value of the service, in seconds.</div>
+                                                    <div>The session timeout can be configured for the web, SSH, and Telnet services.</div>
+                                                    <div>The web timeout value ranges from 300 to 1800 seconds.</div>
+                                                    <div>The SSH and Telnet timeout values range from 60 to 1800 seconds.</div>
+                                                    <div>The timeout value should be a multiple of 60 seconds.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Edit service test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit kvm"
+ ieisystem.inmanage.edit_service:
+ service_name: "kvm"
+ state: "active"
+          timeout: 1200
+ provider: "{{ inmanage }}"
+
+
+
+
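+The ``timeout`` constraints described in the parameter table (web 300-1800 seconds, SSH/Telnet 60-1800 seconds, in multiples of 60) lead to tasks such as the sketch below, which could be added to the ``tasks`` list of the play above; the chosen value is only an illustration:
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Set the web session timeout"
+    ieisystem.inmanage.edit_service:
+      service_name: "web"
+      state: "active"
+      timeout: 600          # a multiple of 60 within the documented 300-1800 range for web
+      provider: "{{ inmanage }}"
+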
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_smtp_com_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_smtp_com_module.rst
new file mode 100644
index 000000000..b08cb1d22
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_smtp_com_module.rst
@@ -0,0 +1,578 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_smtp_com_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_smtp_com -- Set SMTP com information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_smtp_com`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set SMTP com information on ieisystem Server.
+- Only the M6 models support this feature.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-asset_tag"></div>
+ <b>asset_tag</b>
+ <a class="ansibleOptionLink" href="#parameter-asset_tag" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Product asset label.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-email"></div>
+ <b>email</b>
+ <a class="ansibleOptionLink" href="#parameter-email" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Sender email.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-event_level"></div>
+ <b>event_level</b>
+ <a class="ansibleOptionLink" href="#parameter-event_level" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Info</li>
+ <li>Warning</li>
+ <li>Critical</li>
+ </ul>
+ </td>
+ <td>
+ <div>Events above this level will be sent.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host_name"></div>
+ <b>host_name</b>
+ <a class="ansibleOptionLink" href="#parameter-host_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Server name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-serial_number"></div>
+ <b>serial_number</b>
+ <a class="ansibleOptionLink" href="#parameter-serial_number" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Serial number.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_auth"></div>
+ <b>server_auth</b>
+ <a class="ansibleOptionLink" href="#parameter-server_auth" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>SMTP server authentication.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_ip"></div>
+ <b>server_ip</b>
+ <a class="ansibleOptionLink" href="#parameter-server_ip" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>SMTP server IP.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_password"></div>
+ <b>server_password</b>
+ <a class="ansibleOptionLink" href="#parameter-server_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>SMTP server password, with a length of 4 to 64 characters; cannot contain &#x27; &#x27;(space).</div>
+ <div>Required when <em>server_auth=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_port"></div>
+ <b>server_port</b>
+ <a class="ansibleOptionLink" href="#parameter-server_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>SMTP server port; valid range is 1 to 65535.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_secure_port"></div>
+ <b>server_secure_port</b>
+ <a class="ansibleOptionLink" href="#parameter-server_secure_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>SMTP server secure port; valid range is 1 to 65535.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-server_username"></div>
+ <b>server_username</b>
+ <a class="ansibleOptionLink" href="#parameter-server_username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>SMTP server username, with a length of 4 to 64 characters.</div>
+                                                    <div>Must start with a letter and cannot contain &#x27;,&#x27;(comma), &#x27;:&#x27;(colon), &#x27; &#x27;(space), &#x27;;&#x27;(semicolon), or &#x27;\&#x27;(backslash).</div>
+ <div>Required when <em>server_auth=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-ssl_tls_enable"></div>
+ <b>ssl_tls_enable</b>
+ <a class="ansibleOptionLink" href="#parameter-ssl_tls_enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>SMTP SSL/TLS enable.</div>
+                                                    <div>Must be set to <em>disable</em> when <em>star_tls_enable=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-star_tls_enable"></div>
+ <b>star_tls_enable</b>
+ <a class="ansibleOptionLink" href="#parameter-star_tls_enable" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>SMTP STARTTLS enable.</div>
+                                                    <div>Must be set to <em>disable</em> when <em>ssl_tls_enable=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>SMTP Support.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-subject"></div>
+ <b>subject</b>
+ <a class="ansibleOptionLink" href="#parameter-subject" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>Email subject line.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Smtp com test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set smtp com information"
+ ieisystem.inmanage.edit_smtp_com:
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set smtp com information"
+ ieisystem.inmanage.edit_smtp_com:
+ status: "enable"
+ server_ip: "100.2.2.2"
+ email: "ieit@ieisystem.com"
+ server_auth: "enable"
+ server_username: "admin"
+ server_password: "1234qwer!@#$"
+ provider: "{{ inmanage }}"
+
+
+
+
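+``ssl_tls_enable`` and ``star_tls_enable`` cannot both be enabled (see the parameter table above), so switching the connection to STARTTLS looks like the sketch below when added to the ``tasks`` list of the play above; the server values are the same placeholders used in the example:
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Enable STARTTLS for outgoing alert mail"
+    ieisystem.inmanage.edit_smtp_com:
+      status: "enable"
+      server_ip: "100.2.2.2"
+      email: "ieit@ieisystem.com"
+      star_tls_enable: "enable"
+      ssl_tls_enable: "disable"   # must stay disabled while star_tls_enable is enabled
+      provider: "{{ inmanage }}"
+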
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_smtp_dest_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_smtp_dest_module.rst
new file mode 100644
index 000000000..5177ec8f2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_smtp_dest_module.rst
@@ -0,0 +1,383 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_smtp_dest_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_smtp_dest -- Set SMTP dest information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_smtp_dest`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set SMTP dest information on ieisystem Server.
+- Only the M6 models support this feature.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-address"></div>
+ <b>address</b>
+ <a class="ansibleOptionLink" href="#parameter-address" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Email address.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-description"></div>
+ <b>description</b>
+ <a class="ansibleOptionLink" href="#parameter-description" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Description information.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-id"></div>
+ <b>id</b>
+ <a class="ansibleOptionLink" href="#parameter-id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>Email destination ID.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-status"></div>
+ <b>status</b>
+ <a class="ansibleOptionLink" href="#parameter-status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>Whether to enable this email destination.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Smtp dest test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set smtp dest information"
+ ieisystem.inmanage.edit_smtp_dest:
+ id: 1
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set smtp dest information"
+ ieisystem.inmanage.edit_smtp_dest:
+ id: 1
+ status: "enable"
+ address: "100.2.2.2"
+          description: "test"
+ provider: "{{ inmanage }}"
+
+
+
+
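+Because ``id`` only accepts the destinations 1 through 4, all four destinations can be configured from a single looped task; a minimal sketch with placeholder addresses, suitable for the ``tasks`` list of the play above:
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Enable every email destination"
+    ieisystem.inmanage.edit_smtp_dest:
+      id: "{{ item.id }}"
+      status: "enable"
+      address: "{{ item.address }}"
+      provider: "{{ inmanage }}"
+    loop:
+      - { id: 1, address: "ops1@example.com" }
+      - { id: 2, address: "ops2@example.com" }
+      - { id: 3, address: "ops3@example.com" }
+      - { id: 4, address: "ops4@example.com" }
+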
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_smtp_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_smtp_module.rst
new file mode 100644
index 000000000..7390e5a44
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_smtp_module.rst
@@ -0,0 +1,587 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_smtp_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_smtp -- Set SMTP information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_smtp`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set SMTP information on ieisystem Server.
+- Only the M5 models support this feature.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-email"></div>
+ <b>email</b>
+ <a class="ansibleOptionLink" href="#parameter-email" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Sender email.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface"></div>
+ <b>interface</b>
+ <a class="ansibleOptionLink" href="#parameter-interface" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>eth0</li>
+ <li>eth1</li>
+ <li>bond0</li>
+ </ul>
+ </td>
+ <td>
+ <div>LAN Channel, eth0 is shared, eth1 is dedicated.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_auth"></div>
+ <b>primary_auth</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_auth" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Primary SMTP server authentication.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_ip"></div>
+ <b>primary_ip</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_ip" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Primary SMTP server IP.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_name"></div>
+ <b>primary_name</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Primary SMTP server name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_password"></div>
+ <b>primary_password</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>Primary SMTP server password, with a length of 4 to 64 characters; cannot contain &#x27; &#x27;(space).</div>
+ <div>Required when <em>primary_auth=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_port"></div>
+ <b>primary_port</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                    <div>Primary SMTP server port; valid range is 1 to 65535.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_status"></div>
+ <b>primary_status</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Primary SMTP Support.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-primary_username"></div>
+ <b>primary_username</b>
+ <a class="ansibleOptionLink" href="#parameter-primary_username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Primary SMTP server username; the length must be 4 to 64 characters.</div>
+                                                                <div>Must start with a letter and cannot contain &#x27;,&#x27;(comma) &#x27;:&#x27;(colon) &#x27; &#x27;(space) &#x27;;&#x27;(semicolon) &#x27;\&#x27;(backslash).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_auth"></div>
+ <b>secondary_auth</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_auth" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Secondary SMTP server authentication.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_ip"></div>
+ <b>secondary_ip</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_ip" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Secondary SMTP server IP.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_name"></div>
+ <b>secondary_name</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Secondary SMTP server name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_password"></div>
+ <b>secondary_password</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Secondary SMTP server password; the length must be 4 to 64 characters and it cannot contain &#x27; &#x27;(space).</div>
+ <div>Required when <em>secondary_auth=enable</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_port"></div>
+ <b>secondary_port</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Secondary SMTP server port (1-65535).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_status"></div>
+ <b>secondary_status</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                                                                <div>Enable or disable the secondary SMTP server.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-secondary_username"></div>
+ <b>secondary_username</b>
+ <a class="ansibleOptionLink" href="#parameter-secondary_username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Secondary SMTP server username; the length must be 4 to 64 characters.</div>
+                                                                <div>Must start with a letter and cannot contain &#x27;,&#x27;(comma) &#x27;:&#x27;(colon) &#x27; &#x27;(space) &#x27;;&#x27;(semicolon) &#x27;\&#x27;(backslash).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Smtp test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set smtp information"
+ ieisystem.inmanage.edit_smtp:
+ interface: "eth0"
+ email: "ieit@ieisystem.com"
+ primary_status: "enable"
+ primary_ip: "100.2.2.2"
+ primary_name: "test"
+ primary_auth: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set smtp information"
+ ieisystem.inmanage.edit_smtp:
+ interface: "eth0"
+ email: "ieit@ieisystem.com"
+ primary_status: "enable"
+ primary_ip: "100.2.2.2"
+ primary_name: "test"
+ primary_auth: "enable"
+ primary_username: "test"
+ primary_password: my_password
+ provider: "{{ inmanage }}"
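+
+    # Illustrative sketch (not part of the generated module examples): the
+    # secondary_* values below are hypothetical and assume they accept the
+    # same formats as their primary_* counterparts documented above.
+    - name: "Set secondary smtp information"
+      ieisystem.inmanage.edit_smtp:
+        interface: "eth0"
+        email: "ieit@ieisystem.com"
+        secondary_status: "enable"
+        secondary_ip: "100.2.2.3"
+        secondary_name: "test2"
+        secondary_auth: "disable"
+        provider: "{{ inmanage }}"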
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_snmp_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_snmp_module.rst
new file mode 100644
index 000000000..30515f12b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_snmp_module.rst
@@ -0,0 +1,540 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_snmp_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_snmp -- Set snmp
+++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_snmp`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set snmp on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auth_password"></div>
+ <b>auth_password</b>
+ <a class="ansibleOptionLink" href="#parameter-auth_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the authentication password for the V3 trap or v3get/v3set.</div>
+ <div>The password is a string of 8 to 16 alphanumeric characters.</div>
+ <div>Required when <em>auth_protocol</em> is either <code>SHA</code> or <code>MD5</code>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auth_protocol"></div>
+ <b>auth_protocol</b>
+ <a class="ansibleOptionLink" href="#parameter-auth_protocol" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>NONE</li>
+ <li>SHA</li>
+ <li>MD5</li>
+ </ul>
+ </td>
+ <td>
+ <div>Choose the authentication protocol for the V3 trap or v3get/v3set.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-community"></div>
+ <b>community</b>
+ <a class="ansibleOptionLink" href="#parameter-community" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Community of v1/v2c or v1get/v1set/v2cget/v2cset.</div>
+ <div>Only the M5 models support this feature.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-priv_password"></div>
+ <b>priv_password</b>
+ <a class="ansibleOptionLink" href="#parameter-priv_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the privacy password for the V3 trap or v3get/v3set.</div>
+ <div>The password is a string of 8 to 16 alphanumeric characters.</div>
+ <div>Required when <em>priv_protocol</em> is either <code>DES</code> or <code>AES</code>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-priv_protocol"></div>
+ <b>priv_protocol</b>
+ <a class="ansibleOptionLink" href="#parameter-priv_protocol" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>NONE</li>
+ <li>DES</li>
+ <li>AES</li>
+ </ul>
+ </td>
+ <td>
+ <div>Choose the privacy protocol for the V3 trap or v3get/v3set.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-read_community"></div>
+ <b>read_community</b>
+ <a class="ansibleOptionLink" href="#parameter-read_community" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Read-only community; the community string should be between 1 and 16 characters.</div>
+ <div>Only the M6 models support this feature.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-read_write_community"></div>
+ <b>read_write_community</b>
+ <a class="ansibleOptionLink" href="#parameter-read_write_community" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Read-write community; the community string should be between 1 and 16 characters.</div>
+ <div>Only the M6 models support this feature.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-snmp_status"></div>
+ <b>snmp_status</b>
+ <a class="ansibleOptionLink" href="#parameter-snmp_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>SNMP read/write status for the customize version.</div>
+                                                                <div>The input parameters are &#x27;v1get&#x27;, &#x27;v1set&#x27;, &#x27;v2cget&#x27;, &#x27;v2cset&#x27;, &#x27;v3get&#x27;, &#x27;v3set&#x27;, separated by commas, for example v1get, v1set, v2cget.</div>
+ <div>Only the M5 models support this feature.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-v1status"></div>
+ <b>v1status</b>
+ <a class="ansibleOptionLink" href="#parameter-v1status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>SNMP V1 enable.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-v2status"></div>
+ <b>v2status</b>
+ <a class="ansibleOptionLink" href="#parameter-v2status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>SNMP V2 enable.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-v3status"></div>
+ <b>v3status</b>
+ <a class="ansibleOptionLink" href="#parameter-v3status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>SNMP V3 enable.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-v3username"></div>
+ <b>v3username</b>
+ <a class="ansibleOptionLink" href="#parameter-v3username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set a username for the V3 trap or v3get/v3set.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-version"></div>
+ <b>version</b>
+ <a class="ansibleOptionLink" href="#parameter-version" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ <li>4</li>
+ </ul>
+ </td>
+ <td>
+ <div>SNMP trap version option, 0 - &#x27;v1&#x27;, 1 - &#x27;v2c&#x27;, 2 - &#x27;v3&#x27;, 3 - &#x27;all&#x27;, 4 - &#x27;customize&#x27;.</div>
+ <div>Only the M5 models support this feature.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Snmp test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set snmp get/set"
+ ieisystem.inmanage.edit_snmp:
+ community: "test"
+ v3username: "test"
+ provider: "{{ inmanage }}"
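+
+    # Illustrative sketch (not part of the generated module examples): enables
+    # SNMP v3 with authentication and privacy; all values are hypothetical.
+    - name: "Set snmp v3"
+      ieisystem.inmanage.edit_snmp:
+        v3status: "enable"
+        v3username: "test"
+        auth_protocol: "SHA"
+        auth_password: "12345678"
+        priv_protocol: "AES"
+        priv_password: "12345678"
+        provider: "{{ inmanage }}"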
+
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_snmp_trap_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_snmp_trap_module.rst
new file mode 100644
index 000000000..544f48a58
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_snmp_trap_module.rst
@@ -0,0 +1,598 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_snmp_trap_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_snmp_trap -- Set snmp trap
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_snmp_trap`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set snmp trap on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auth_password"></div>
+ <b>auth_password</b>
+ <a class="ansibleOptionLink" href="#parameter-auth_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the authentication password for the V3 trap.</div>
+ <div>The password is a string of 8 to 16 alphanumeric characters.</div>
+ <div>Required when <em>auth_protocol</em> is either <code>SHA</code> or <code>MD5</code>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-auth_protocol"></div>
+ <b>auth_protocol</b>
+ <a class="ansibleOptionLink" href="#parameter-auth_protocol" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>NONE</li>
+ <li>SHA</li>
+ <li>MD5</li>
+ </ul>
+ </td>
+ <td>
+ <div>Choose the authentication protocol for the V3 trap.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-community"></div>
+ <b>community</b>
+ <a class="ansibleOptionLink" href="#parameter-community" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Community of v1/v2c.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-contact"></div>
+ <b>contact</b>
+ <a class="ansibleOptionLink" href="#parameter-contact" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the contact, can be NULL.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-engine_id"></div>
+ <b>engine_id</b>
+ <a class="ansibleOptionLink" href="#parameter-engine_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Specifies an engine identifier for the V3 trap. The value should be a string of 10 to 48 hex characters, its length must be even, and it can be NULL.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-event_severity"></div>
+ <b>event_severity</b>
+ <a class="ansibleOptionLink" href="#parameter-event_severity" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>all</li>
+ <li>warning</li>
+ <li>critical</li>
+ </ul>
+ </td>
+ <td>
+ <div>Event Severity.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host_id"></div>
+ <b>host_id</b>
+ <a class="ansibleOptionLink" href="#parameter-host_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>HostName</li>
+ <li>SerialNum</li>
+ <li>AssertTag</li>
+ </ul>
+ </td>
+ <td>
+ <div>Host id.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-location"></div>
+ <b>location</b>
+ <a class="ansibleOptionLink" href="#parameter-location" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the host location, can be NULL.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-os"></div>
+ <b>os</b>
+ <a class="ansibleOptionLink" href="#parameter-os" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the host operating system, can be NULL.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-priv_password"></div>
+ <b>priv_password</b>
+ <a class="ansibleOptionLink" href="#parameter-priv_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the privacy password for the V3 trap.</div>
+ <div>The password is a string of 8 to 16 alphanumeric characters.</div>
+ <div>Required when <em>priv_protocol</em> is either <code>DES</code> or <code>AES</code>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-priv_protocol"></div>
+ <b>priv_protocol</b>
+ <a class="ansibleOptionLink" href="#parameter-priv_protocol" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>NONE</li>
+ <li>DES</li>
+ <li>AES</li>
+ </ul>
+ </td>
+ <td>
+ <div>Choose the privacy protocol for the V3 trap.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-system_id"></div>
+ <b>system_id</b>
+ <a class="ansibleOptionLink" href="#parameter-system_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the system ID, can be NULL.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-system_name"></div>
+ <b>system_name</b>
+ <a class="ansibleOptionLink" href="#parameter-system_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the system name, can be NULL.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-trap_port"></div>
+ <b>trap_port</b>
+ <a class="ansibleOptionLink" href="#parameter-trap_port" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set a port for the SNMP trap in the range of 1 to 65535.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-v3username"></div>
+ <b>v3username</b>
+ <a class="ansibleOptionLink" href="#parameter-v3username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Set the username for the V3 trap.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-version"></div>
+ <b>version</b>
+ <a class="ansibleOptionLink" href="#parameter-version" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ <li>2</li>
+ <li>3</li>
+ </ul>
+ </td>
+ <td>
+                                                                <div>SNMP trap version; 1 is v1, 2 is v2c (v2), 3 is v3, and 0 disables the SNMP trap.</div>
+                                                                <div>Only the M6 model supports the <code>0</code> setting.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Trap test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set snmp trap v2c"
+ ieisystem.inmanage.edit_snmp_trap:
+ version: 2
+ event_severity: "warning"
+ community: "test"
+ system_name: "ieisystem"
+ provider: "{{ inmanage }}"
+
+ - name: "Set snmp trap v3"
+ ieisystem.inmanage.edit_snmp_trap:
+ version: 3
+ event_severity: "all"
+ v3username: "ieisystem"
+ engine_id: "1234567890"
+ auth_protocol: "SHA"
+ auth_password: "12345678"
+ priv_protocol: "AES"
+ priv_password: "123454678"
+ trap_port: 162
+ provider: "{{ inmanage }}"
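+
+    # Illustrative sketch (not part of the generated module examples): version 0
+    # is documented as disabling the SNMP trap and is supported on M6 models only.
+    - name: "Disable snmp trap (M6 only)"
+      ieisystem.inmanage.edit_snmp_trap:
+        version: 0
+        provider: "{{ inmanage }}"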
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_threshold_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_threshold_module.rst
new file mode 100644
index 000000000..641b9f68e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_threshold_module.rst
@@ -0,0 +1,410 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_threshold_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_threshold -- Set threshold information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_threshold`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set threshold information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-lc"></div>
+ <b>lc</b>
+ <a class="ansibleOptionLink" href="#parameter-lc" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Lower critical threshold; should be an integer.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-lnc"></div>
+ <b>lnc</b>
+ <a class="ansibleOptionLink" href="#parameter-lnc" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Lower non-critical threshold; should be an integer.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-lnr"></div>
+ <b>lnr</b>
+ <a class="ansibleOptionLink" href="#parameter-lnr" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Lower non-recoverable threshold; should be an integer.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-name"></div>
+ <b>name</b>
+ <a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Sensor name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-uc"></div>
+ <b>uc</b>
+ <a class="ansibleOptionLink" href="#parameter-uc" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Upper critical threshold; should be an integer.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-unc"></div>
+ <b>unc</b>
+ <a class="ansibleOptionLink" href="#parameter-unc" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Upper non-critical threshold; should be an integer.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-unr"></div>
+ <b>unr</b>
+ <a class="ansibleOptionLink" href="#parameter-unr" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Upper non-recoverable threshold; should be an integer.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Threshold test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set threshold information"
+ ieisystem.inmanage.edit_threshold:
+ name: "GPU1_Temp"
+ uc: 94
+ unc: 92
+ provider: "{{ inmanage }}"
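+
+    # Illustrative sketch (not part of the generated module examples): sets the
+    # lower thresholds for a hypothetical inlet temperature sensor.
+    - name: "Set lower threshold information"
+      ieisystem.inmanage.edit_threshold:
+        name: "Inlet_Temp"
+        lnc: 5
+        lc: 2
+        provider: "{{ inmanage }}"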
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_uid_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_uid_module.rst
new file mode 100644
index 000000000..439f5660a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_uid_module.rst
@@ -0,0 +1,343 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_uid_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_uid -- Set UID
+++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_uid`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set UID on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-led"></div>
+ <b>led</b>
+ <a class="ansibleOptionLink" href="#parameter-led" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>on</li>
+ <li>off</li>
+ </ul>
+ </td>
+ <td>
+                                                                <div>Turn the LED on or off.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-time"></div>
+ <b>time</b>
+ <a class="ansibleOptionLink" href="#parameter-time" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+                                                                <div>Set the LED blink time (seconds).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: UID test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set uid"
+ ieisystem.inmanage.edit_uid:
+ led: "on"
+ time: 10
+ provider: "{{ inmanage }}"
+
+ - name: "Set uid"
+ ieisystem.inmanage.edit_uid:
+ led: "off"
+ provider: "{{ inmanage }}"
+
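+A quick way to inspect the documented return values (``changed``, ``message`` and ``state``, see below) is to register the result; a minimal sketch, assuming the same play and ``inmanage`` variable as above:
+
+.. code-block:: yaml+jinja
+
+    - name: "Set uid and register the result"
+      ieisystem.inmanage.edit_uid:
+        led: "on"
+        time: 10
+        provider: "{{ inmanage }}"
+      register: uid_result
+
+    - name: "Show the module output"
+      ansible.builtin.debug:
+        msg: "changed={{ uid_result.changed }}, state={{ uid_result.state }}, message={{ uid_result.message }}"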
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_virtual_media_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_virtual_media_module.rst
new file mode 100644
index 000000000..63ec1d53b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_virtual_media_module.rst
@@ -0,0 +1,511 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_virtual_media_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_virtual_media -- Set virtual media
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_virtual_media`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set virtual media on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-local_media_support"></div>
+ <b>local_media_support</b>
+ <a class="ansibleOptionLink" href="#parameter-local_media_support" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Enable</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether to enable or disable Local Media Support.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mount"></div>
+ <b>mount</b>
+ <a class="ansibleOptionLink" href="#parameter-mount" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Enable</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether to mount virtual media.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mount_type"></div>
+ <b>mount_type</b>
+ <a class="ansibleOptionLink" href="#parameter-mount_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>CD</li>
+ <li>FD</li>
+ <li>HD</li>
+ </ul>
+ </td>
+ <td>
+ <div>Virtual mount type.</div>
+ <div>The <em>FD</em> option is not supported in M6.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_domain_name"></div>
+ <b>remote_domain_name</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_domain_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Remote domain name. This field is optional.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_media_support"></div>
+ <b>remote_media_support</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_media_support" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>Enable</li>
+ <li>Disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether to enable or disable Remote Media support.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_password"></div>
+ <b>remote_password</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Remote Password.</div>
+ <div>Required when <em>remote_share_type=cifs</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_server_address"></div>
+ <b>remote_server_address</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_server_address" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Address of the server where the remote media images are stored.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_share_type"></div>
+ <b>remote_share_type</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_share_type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>nfs</li>
+ <li>cifs</li>
+ </ul>
+ </td>
+ <td>
+ <div>Share type of the remote media server, either NFS or Samba (CIFS).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_source_path"></div>
+ <b>remote_source_path</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_source_path" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Source path to the remote media images.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-remote_user_name"></div>
+ <b>remote_user_name</b>
+ <a class="ansibleOptionLink" href="#parameter-remote_user_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Remote User Name.</div>
+ <div>Required when <em>remote_share_type=cifs</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-same_settings"></div>
+ <b>same_settings</b>
+ <a class="ansibleOptionLink" href="#parameter-same_settings" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether to use the same settings as <em>CD</em>; 0 is No, 1 is Yes.</div>
+ <div>Required when <em>mount_type=0</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Media test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set local media"
+ ieisystem.inmanage.edit_virtual_media:
+ local_media_support: "Enable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set remote media"
+ ieisystem.inmanage.edit_virtual_media:
+ remote_media_support: "Enable"
+ mount_type: 'CD'
+ same_settings: 0
+ mount: "Enable"
+ remote_server_address: "100.2.28.203"
+ remote_source_path: "/data/nfs/server/"
+ remote_share_type: "nfs"
+ provider: "{{ inmanage }}"
+
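+A minimal additional sketch for mounting from a CIFS share: the parameter table notes that ``remote_user_name`` and ``remote_password`` are required when ``remote_share_type=cifs``; the address, path and credentials below are placeholders.
+
+.. code-block:: yaml+jinja
+
+    - name: "Set remote media over CIFS (placeholder values)"
+      ieisystem.inmanage.edit_virtual_media:
+        remote_media_support: "Enable"
+        mount_type: 'CD'
+        same_settings: 0
+        mount: "Enable"
+        remote_server_address: "100.2.28.203"
+        remote_source_path: "/data/cifs/server/"
+        remote_share_type: "cifs"
+        remote_user_name: "cifs_user"
+        remote_password: "cifs_password"
+        provider: "{{ inmanage }}"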
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/edit_vlan_module.rst b/ansible_collections/ieisystem/inmanage/docs/edit_vlan_module.rst
new file mode 100644
index 000000000..10f022376
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/edit_vlan_module.rst
@@ -0,0 +1,381 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.edit_vlan_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.edit_vlan -- Set VLAN information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.edit_vlan`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Set VLAN information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-interface_name"></div>
+ <b>interface_name</b>
+ <a class="ansibleOptionLink" href="#parameter-interface_name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>eth0</li>
+ <li>eth1</li>
+ <li>bond0</li>
+ </ul>
+ </td>
+ <td>
+ <div>Set interface name. Select the LAN interface to be configured.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-vlan_id"></div>
+ <b>vlan_id</b>
+ <a class="ansibleOptionLink" href="#parameter-vlan_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>The identification for VLAN configuration (2-4094).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-vlan_priority"></div>
+ <b>vlan_priority</b>
+ <a class="ansibleOptionLink" href="#parameter-vlan_priority" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>The priority for VLAN configuration (1-7).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-vlan_status"></div>
+ <b>vlan_status</b>
+ <a class="ansibleOptionLink" href="#parameter-vlan_status" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Enable or disable VLAN.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Vlan test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set vlan information"
+ ieisystem.inmanage.edit_vlan:
+ interface_name: "eth0"
+ vlan_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set vlan information"
+ ieisystem.inmanage.edit_vlan:
+ interface_name: "eth0"
+ vlan_status: "enable"
+ vlan_id: 2
+ vlan_priority: 1
+ provider: "{{ inmanage }}"
+
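+As an alternative to the ``provider`` dictionary, the top-level ``host``, ``username`` and ``password`` parameters documented above can be supplied directly on the task; a minimal sketch reusing the same variables:
+
+.. code-block:: yaml+jinja
+
+    - name: "Set VLAN information with inline connection details"
+      ieisystem.inmanage.edit_vlan:
+        interface_name: "eth0"
+        vlan_status: "enable"
+        vlan_id: 2
+        vlan_priority: 1
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"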
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/event_log_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/event_log_info_module.rst
new file mode 100644
index 000000000..e959d72aa
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/event_log_info_module.rst
@@ -0,0 +1,358 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.event_log_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.event_log_info -- Get event log information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.event_log_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get event log information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-count"></div>
+ <b>count</b>
+ <a class="ansibleOptionLink" href="#parameter-count" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Get the specified number of the most recent log entries.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-event_file"></div>
+ <b>event_file</b>
+ <a class="ansibleOptionLink" href="#parameter-event_file" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Store logs to a file.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-log_time"></div>
+ <b>log_time</b>
+ <a class="ansibleOptionLink" href="#parameter-log_time" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Get logs after the specified date. The time should be in the format YYYY-MM-DDTHH:MM+HH:MM, for example 2019-06-27T12:30+08:00.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Event log info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get event log information"
+ ieisystem.inmanage.event_log_info:
+ log_time: "2020-06-01T12:30+08:00"
+ provider: "{{ inmanage }}"
+
+ - name: "Get event log information"
+ ieisystem.inmanage.event_log_info:
+ count: 30
+ provider: "{{ inmanage }}"
+
+ - name: "Get event log information"
+ ieisystem.inmanage.event_log_info:
+ event_file: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+
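+The parameter table above notes that ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when omitted from the task; a minimal sketch relying on that fallback (the exported values are placeholders):
+
+.. code-block:: yaml+jinja
+
+    # Assumes the credentials were exported on the controller beforehand, e.g.
+    #   export ANSIBLE_NET_USERNAME=admin
+    #   export ANSIBLE_NET_PASSWORD=secret
+    - name: "Get the 30 most recent event log entries"
+      ieisystem.inmanage.event_log_info:
+        count: 30
+        host: "{{ ansible_ssh_host }}"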
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/event_log_policy_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/event_log_policy_info_module.rst
new file mode 100644
index 000000000..0581c3727
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/event_log_policy_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.event_log_policy_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.event_log_policy_info -- Get event log policy information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.event_log_policy_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get event log policy information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Event log policy test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get event log policy information"
+ ieisystem.inmanage.event_log_policy_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/fan_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/fan_info_module.rst
new file mode 100644
index 000000000..fdc24ec33
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/fan_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.fan_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.fan_info -- Get fan information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.fan_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get fan information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Fan test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get fan information"
+ ieisystem.inmanage.fan_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/fru_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/fru_info_module.rst
new file mode 100644
index 000000000..59269ccef
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/fru_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.fru_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.fru_info -- Get FRU information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.fru_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get FRU information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Fru info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get fru information"
+ ieisystem.inmanage.fru_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/fw_version_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/fw_version_info_module.rst
new file mode 100644
index 000000000..f0e8e8779
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/fw_version_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.fw_version_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.fw_version_info -- Get firmware version information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.fw_version_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get firmware version information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+  - name: Firmware version test
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    vars:
+      inmanage:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+
+    tasks:
+
+      - name: "Get firmware version information"
+        ieisystem.inmanage.fw_version_info:
+          provider: "{{ inmanage }}"
+
+
+
+
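+As the parameter table above notes, ``username`` and ``password`` fall back to
+the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables
+when they are not set in the task. The play below is a sketch of relying on
+that fallback instead of a ``provider`` dict; it assumes the ``environment``
+keyword is honoured for the local connection used here.
+
+.. code-block:: yaml+jinja
+
+
+  - name: Firmware version via environment fallback
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    environment:
+      ANSIBLE_NET_USERNAME: "{{ username }}"   # read instead of provider.username
+      ANSIBLE_NET_PASSWORD: "{{ password }}"   # read instead of provider.password
+
+    tasks:
+
+      - name: "Get firmware version information"
+        ieisystem.inmanage.fw_version_info:
+          host: "{{ ansible_ssh_host }}"
+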
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/gpu_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/gpu_info_module.rst
new file mode 100644
index 000000000..0a5eb24e4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/gpu_info_module.rst
@@ -0,0 +1,303 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.gpu_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.gpu_info -- Get GPU information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.gpu_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get GPU information on ieisystem Server.
+- Only the M6 models support this feature.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+  - name: GPU test
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    vars:
+      inmanage:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+
+    tasks:
+
+      - name: "Get gpu information"
+        ieisystem.inmanage.gpu_info:
+          provider: "{{ inmanage }}"
+
+
+
+
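+When several tasks in a play target the same BMC, repeating ``provider`` can be
+avoided with ``module_defaults``. The play below is only a sketch and reuses
+the ``inmanage`` variable from the example above.
+
+.. code-block:: yaml+jinja
+
+
+  - name: GPU test with module defaults
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    vars:
+      inmanage:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+    module_defaults:
+      ieisystem.inmanage.gpu_info:
+        provider: "{{ inmanage }}"       # applied to every gpu_info task in this play
+
+    tasks:
+
+      - name: "Get gpu information"
+        ieisystem.inmanage.gpu_info:
+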
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/hard_disk_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/hard_disk_info_module.rst
new file mode 100644
index 000000000..e49041e7a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/hard_disk_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.hard_disk_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.hard_disk_info -- Get hard disk information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.hard_disk_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get hard disk information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+  - name: Hard disk test
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    vars:
+      inmanage:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+
+    tasks:
+
+      - name: "Get hard disk information"
+        ieisystem.inmanage.hard_disk_info:
+          provider: "{{ inmanage }}"
+
+
+
+
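+The collected disk inventory can also be written out for later comparison. The
+tasks below are a sketch only; ``disk_result`` and the destination path are
+made-up names.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get hard disk information"
+    ieisystem.inmanage.hard_disk_info:
+      provider: "{{ inmanage }}"
+    register: disk_result                  # hypothetical variable name
+
+  - name: "Write the result to a per-host report file"
+    ansible.builtin.copy:
+      content: "{{ disk_result | to_nice_json }}"
+      dest: "/tmp/{{ inventory_hostname }}_disks.json"   # illustrative path
+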
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/hba_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/hba_info_module.rst
new file mode 100644
index 000000000..21e346c22
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/hba_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.hba_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.hba_info -- Get HBA information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.hba_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get HBA information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+  - name: HBA test
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    vars:
+      inmanage:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+
+    tasks:
+
+      - name: "Get hba information"
+        ieisystem.inmanage.hba_info:
+          provider: "{{ inmanage }}"
+
+
+
+
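+Because every ``*_info`` module in this collection accepts the same
+``provider`` block, related queries can share one play. The tasks below are a
+sketch that reuses the ``inmanage`` variable from the example above.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get hba information"
+    ieisystem.inmanage.hba_info:
+      provider: "{{ inmanage }}"
+
+  - name: "Get cpu information from the same BMC"
+    ieisystem.inmanage.cpu_info:
+      provider: "{{ inmanage }}"
+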
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/index.rst b/ansible_collections/ieisystem/inmanage/docs/index.rst
new file mode 100644
index 000000000..4adacce68
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/index.rst
@@ -0,0 +1,149 @@
+:orphan:
+
+.. _plugins_in_ieisystem.inmanage:
+
+Plugin Index
+============
+
+These are the plugins in the ieisystem.inmanage collection
+
+.. toctree::
+ :maxdepth: 1
+
+
+Modules
+-------
+
+* :ref:`ad_group <ansible_collections.ieisystem.inmanage.ad_group_module>` -- Manage active directory group information
+* :ref:`ad_group_info <ansible_collections.ieisystem.inmanage.ad_group_info_module>` -- Get active directory group information
+* :ref:`ad_info <ansible_collections.ieisystem.inmanage.ad_info_module>` -- Get active directory information
+* :ref:`adapter_info <ansible_collections.ieisystem.inmanage.adapter_info_module>` -- Get adapter information
+* :ref:`add_ldisk <ansible_collections.ieisystem.inmanage.add_ldisk_module>` -- Create logical disk
+* :ref:`alert_policy_info <ansible_collections.ieisystem.inmanage.alert_policy_info_module>` -- Get alert policy
+* :ref:`audit_log_info <ansible_collections.ieisystem.inmanage.audit_log_info_module>` -- Get BMC audit log information
+* :ref:`auto_capture_info <ansible_collections.ieisystem.inmanage.auto_capture_info_module>` -- Get auto capture screen information
+* :ref:`backplane_info <ansible_collections.ieisystem.inmanage.backplane_info_module>` -- Get disk backplane information
+* :ref:`backup <ansible_collections.ieisystem.inmanage.backup_module>` -- Backup server settings
+* :ref:`bios_export <ansible_collections.ieisystem.inmanage.bios_export_module>` -- Export BIOS config
+* :ref:`bios_import <ansible_collections.ieisystem.inmanage.bios_import_module>` -- Import BIOS config
+* :ref:`bios_info <ansible_collections.ieisystem.inmanage.bios_info_module>` -- Get BIOS setup
+* :ref:`bmc_info <ansible_collections.ieisystem.inmanage.bmc_info_module>` -- Get BMC information
+* :ref:`boot_image_info <ansible_collections.ieisystem.inmanage.boot_image_info_module>` -- Get bmc boot image information
+* :ref:`boot_option_info <ansible_collections.ieisystem.inmanage.boot_option_info_module>` -- Get BIOS boot options
+* :ref:`clear_audit_log <ansible_collections.ieisystem.inmanage.clear_audit_log_module>` -- Clear BMC audit log
+* :ref:`clear_event_log <ansible_collections.ieisystem.inmanage.clear_event_log_module>` -- Clear event log
+* :ref:`clear_system_log <ansible_collections.ieisystem.inmanage.clear_system_log_module>` -- Clear BMC system log
+* :ref:`collect_blackbox <ansible_collections.ieisystem.inmanage.collect_blackbox_module>` -- Collect blackbox log
+* :ref:`collect_log <ansible_collections.ieisystem.inmanage.collect_log_module>` -- Collect logs
+* :ref:`connect_media_info <ansible_collections.ieisystem.inmanage.connect_media_info_module>` -- Get remote images redirection information
+* :ref:`cpu_info <ansible_collections.ieisystem.inmanage.cpu_info_module>` -- Get CPU information
+* :ref:`del_session <ansible_collections.ieisystem.inmanage.del_session_module>` -- Delete session
+* :ref:`dns_info <ansible_collections.ieisystem.inmanage.dns_info_module>` -- Get dns information
+* :ref:`download_auto_screenshot <ansible_collections.ieisystem.inmanage.download_auto_screenshot_module>` -- Download auto screenshots
+* :ref:`download_manual_screenshot <ansible_collections.ieisystem.inmanage.download_manual_screenshot_module>` -- Download manual screenshots
+* :ref:`edit_ad <ansible_collections.ieisystem.inmanage.edit_ad_module>` -- Set active directory information
+* :ref:`edit_alert_policy <ansible_collections.ieisystem.inmanage.edit_alert_policy_module>` -- Set alert policy
+* :ref:`edit_auto_capture <ansible_collections.ieisystem.inmanage.edit_auto_capture_module>` -- Set auto capture screen
+* :ref:`edit_bios <ansible_collections.ieisystem.inmanage.edit_bios_module>` -- Set BIOS setup attributes
+* :ref:`edit_boot_image <ansible_collections.ieisystem.inmanage.edit_boot_image_module>` -- Set bmc boot image
+* :ref:`edit_boot_option <ansible_collections.ieisystem.inmanage.edit_boot_option_module>` -- Set BIOS boot options
+* :ref:`edit_connect_media <ansible_collections.ieisystem.inmanage.edit_connect_media_module>` -- Start/Stop virtual media Image
+* :ref:`edit_dns <ansible_collections.ieisystem.inmanage.edit_dns_module>` -- Set dns information
+* :ref:`edit_event_log_policy <ansible_collections.ieisystem.inmanage.edit_event_log_policy_module>` -- Set event log policy
+* :ref:`edit_fan <ansible_collections.ieisystem.inmanage.edit_fan_module>` -- Set fan information
+* :ref:`edit_fru <ansible_collections.ieisystem.inmanage.edit_fru_module>` -- Set fru settings
+* :ref:`edit_ipv4 <ansible_collections.ieisystem.inmanage.edit_ipv4_module>` -- Set ipv4 information
+* :ref:`edit_ipv6 <ansible_collections.ieisystem.inmanage.edit_ipv6_module>` -- Set ipv6 information
+* :ref:`edit_kvm <ansible_collections.ieisystem.inmanage.edit_kvm_module>` -- Set KVM
+* :ref:`edit_ldap <ansible_collections.ieisystem.inmanage.edit_ldap_module>` -- Set ldap information
+* :ref:`edit_ldisk <ansible_collections.ieisystem.inmanage.edit_ldisk_module>` -- Set logical disk
+* :ref:`edit_log_setting <ansible_collections.ieisystem.inmanage.edit_log_setting_module>` -- Set bmc system and audit log setting
+* :ref:`edit_m6_log_setting <ansible_collections.ieisystem.inmanage.edit_m6_log_setting_module>` -- Set bmc system and audit log setting
+* :ref:`edit_manual_capture <ansible_collections.ieisystem.inmanage.edit_manual_capture_module>` -- Set manual capture screen
+* :ref:`edit_media_instance <ansible_collections.ieisystem.inmanage.edit_media_instance_module>` -- Set Virtual Media Instance
+* :ref:`edit_ncsi <ansible_collections.ieisystem.inmanage.edit_ncsi_module>` -- Set ncsi information
+* :ref:`edit_network <ansible_collections.ieisystem.inmanage.edit_network_module>` -- Set network information
+* :ref:`edit_network_bond <ansible_collections.ieisystem.inmanage.edit_network_bond_module>` -- Set network bond
+* :ref:`edit_network_link <ansible_collections.ieisystem.inmanage.edit_network_link_module>` -- Set network link
+* :ref:`edit_ntp <ansible_collections.ieisystem.inmanage.edit_ntp_module>` -- Set NTP
+* :ref:`edit_pdisk <ansible_collections.ieisystem.inmanage.edit_pdisk_module>` -- Set physical disk
+* :ref:`edit_power_budget <ansible_collections.ieisystem.inmanage.edit_power_budget_module>` -- Set power budget information
+* :ref:`edit_power_restore <ansible_collections.ieisystem.inmanage.edit_power_restore_module>` -- Set power restore information
+* :ref:`edit_power_status <ansible_collections.ieisystem.inmanage.edit_power_status_module>` -- Set power status information
+* :ref:`edit_preserve_config <ansible_collections.ieisystem.inmanage.edit_preserve_config_module>` -- Set preserve config
+* :ref:`edit_psu_config <ansible_collections.ieisystem.inmanage.edit_psu_config_module>` -- Set psu config information
+* :ref:`edit_psu_peak <ansible_collections.ieisystem.inmanage.edit_psu_peak_module>` -- Set psu peak information
+* :ref:`edit_restore_factory_default <ansible_collections.ieisystem.inmanage.edit_restore_factory_default_module>` -- Set preserve config
+* :ref:`edit_service <ansible_collections.ieisystem.inmanage.edit_service_module>` -- Set service settings
+* :ref:`edit_smtp <ansible_collections.ieisystem.inmanage.edit_smtp_module>` -- Set SMTP information
+* :ref:`edit_smtp_com <ansible_collections.ieisystem.inmanage.edit_smtp_com_module>` -- Set SMTP information
+* :ref:`edit_smtp_dest <ansible_collections.ieisystem.inmanage.edit_smtp_dest_module>` -- Set SMTP information
+* :ref:`edit_snmp <ansible_collections.ieisystem.inmanage.edit_snmp_module>` -- Set snmp
+* :ref:`edit_snmp_trap <ansible_collections.ieisystem.inmanage.edit_snmp_trap_module>` -- Set snmp trap
+* :ref:`edit_threshold <ansible_collections.ieisystem.inmanage.edit_threshold_module>` -- Set threshold information
+* :ref:`edit_uid <ansible_collections.ieisystem.inmanage.edit_uid_module>` -- Set UID
+* :ref:`edit_virtual_media <ansible_collections.ieisystem.inmanage.edit_virtual_media_module>` -- Set virtual media
+* :ref:`edit_vlan <ansible_collections.ieisystem.inmanage.edit_vlan_module>` -- Set vlan information
+* :ref:`event_log_info <ansible_collections.ieisystem.inmanage.event_log_info_module>` -- Get event log information
+* :ref:`event_log_policy_info <ansible_collections.ieisystem.inmanage.event_log_policy_info_module>` -- Get event log policy information
+* :ref:`fan_info <ansible_collections.ieisystem.inmanage.fan_info_module>` -- Get fan information
+* :ref:`fru_info <ansible_collections.ieisystem.inmanage.fru_info_module>` -- Get fru information
+* :ref:`fw_version_info <ansible_collections.ieisystem.inmanage.fw_version_info_module>` -- Get firmware version information
+* :ref:`gpu_info <ansible_collections.ieisystem.inmanage.gpu_info_module>` -- Get GPU information
+* :ref:`hard_disk_info <ansible_collections.ieisystem.inmanage.hard_disk_info_module>` -- Get hard disk information
+* :ref:`hba_info <ansible_collections.ieisystem.inmanage.hba_info_module>` -- Get HBA information
+* :ref:`kvm_info <ansible_collections.ieisystem.inmanage.kvm_info_module>` -- Get KVM information
+* :ref:`ldap_group <ansible_collections.ieisystem.inmanage.ldap_group_module>` -- Manage ldap group information
+* :ref:`ldap_group_info <ansible_collections.ieisystem.inmanage.ldap_group_info_module>` -- Get ldap group information
+* :ref:`ldap_info <ansible_collections.ieisystem.inmanage.ldap_info_module>` -- Get ldap information
+* :ref:`ldisk_info <ansible_collections.ieisystem.inmanage.ldisk_info_module>` -- Get logical disks information
+* :ref:`log_setting_info <ansible_collections.ieisystem.inmanage.log_setting_info_module>` -- Get bmc log setting information
+* :ref:`media_instance_info <ansible_collections.ieisystem.inmanage.media_instance_info_module>` -- Get Virtual Media Instance information
+* :ref:`mem_info <ansible_collections.ieisystem.inmanage.mem_info_module>` -- Get memory information
+* :ref:`ncsi_info <ansible_collections.ieisystem.inmanage.ncsi_info_module>` -- Get ncsi information
+* :ref:`network_bond_info <ansible_collections.ieisystem.inmanage.network_bond_info_module>` -- Get network bond information
+* :ref:`network_info <ansible_collections.ieisystem.inmanage.network_info_module>` -- Get network information
+* :ref:`network_link_info <ansible_collections.ieisystem.inmanage.network_link_info_module>` -- Get network link information
+* :ref:`ntp_info <ansible_collections.ieisystem.inmanage.ntp_info_module>` -- Get NTP information
+* :ref:`onboard_disk_info <ansible_collections.ieisystem.inmanage.onboard_disk_info_module>` -- Get onboard disks information
+* :ref:`pcie_info <ansible_collections.ieisystem.inmanage.pcie_info_module>` -- Get PCIE information
+* :ref:`pdisk_info <ansible_collections.ieisystem.inmanage.pdisk_info_module>` -- Get physical disks information
+* :ref:`power_budget_info <ansible_collections.ieisystem.inmanage.power_budget_info_module>` -- Get power budget information
+* :ref:`power_consumption_info <ansible_collections.ieisystem.inmanage.power_consumption_info_module>` -- Get power consumption information
+* :ref:`power_restore_info <ansible_collections.ieisystem.inmanage.power_restore_info_module>` -- Get power restore information
+* :ref:`power_status_info <ansible_collections.ieisystem.inmanage.power_status_info_module>` -- Get power status information
+* :ref:`preserve_config_info <ansible_collections.ieisystem.inmanage.preserve_config_info_module>` -- Get preserve config information
+* :ref:`psu_config_info <ansible_collections.ieisystem.inmanage.psu_config_info_module>` -- Get psu config information
+* :ref:`psu_info <ansible_collections.ieisystem.inmanage.psu_info_module>` -- Get psu information
+* :ref:`psu_peak_info <ansible_collections.ieisystem.inmanage.psu_peak_info_module>` -- Get psu peak information
+* :ref:`raid_info <ansible_collections.ieisystem.inmanage.raid_info_module>` -- Get RAID/HBA card and controller information
+* :ref:`reset_bmc <ansible_collections.ieisystem.inmanage.reset_bmc_module>` -- BMC reset
+* :ref:`reset_kvm <ansible_collections.ieisystem.inmanage.reset_kvm_module>` -- KVM reset
+* :ref:`restore <ansible_collections.ieisystem.inmanage.restore_module>` -- Restore server settings
+* :ref:`self_test_info <ansible_collections.ieisystem.inmanage.self_test_info_module>` -- Get self test information
+* :ref:`sensor_info <ansible_collections.ieisystem.inmanage.sensor_info_module>` -- Get sensor information
+* :ref:`server_info <ansible_collections.ieisystem.inmanage.server_info_module>` -- Get server status information
+* :ref:`service_info <ansible_collections.ieisystem.inmanage.service_info_module>` -- Get service information
+* :ref:`session_info <ansible_collections.ieisystem.inmanage.session_info_module>` -- Get online session information
+* :ref:`smtp_info <ansible_collections.ieisystem.inmanage.smtp_info_module>` -- Get SMTP information
+* :ref:`snmp_info <ansible_collections.ieisystem.inmanage.snmp_info_module>` -- Get snmp get/set information
+* :ref:`snmp_trap_info <ansible_collections.ieisystem.inmanage.snmp_trap_info_module>` -- Get snmp trap information
+* :ref:`support_info <ansible_collections.ieisystem.inmanage.support_info_module>` -- Get support information
+* :ref:`system_log_info <ansible_collections.ieisystem.inmanage.system_log_info_module>` -- Get BMC system log information
+* :ref:`temp_info <ansible_collections.ieisystem.inmanage.temp_info_module>` -- Get temp information
+* :ref:`threshold_info <ansible_collections.ieisystem.inmanage.threshold_info_module>` -- Get threshold information
+* :ref:`uid_info <ansible_collections.ieisystem.inmanage.uid_info_module>` -- Get UID information
+* :ref:`update_cpld <ansible_collections.ieisystem.inmanage.update_cpld_module>` -- Update CPLD
+* :ref:`update_fw <ansible_collections.ieisystem.inmanage.update_fw_module>` -- Update firmware
+* :ref:`update_psu <ansible_collections.ieisystem.inmanage.update_psu_module>` -- Update PSU
+* :ref:`user <ansible_collections.ieisystem.inmanage.user_module>` -- Manage user
+* :ref:`user_group <ansible_collections.ieisystem.inmanage.user_group_module>` -- Manage user group
+* :ref:`user_group_info <ansible_collections.ieisystem.inmanage.user_group_info_module>` -- Get user group information
+* :ref:`user_info <ansible_collections.ieisystem.inmanage.user_info_module>` -- Get user information
+* :ref:`virtual_media_info <ansible_collections.ieisystem.inmanage.virtual_media_info_module>` -- Get Virtual Media information
+* :ref:`volt_info <ansible_collections.ieisystem.inmanage.volt_info_module>` -- Get volt information
+
+
+.. seealso::
+
+ List of :ref:`collections <list_of_collections>` with docs hosted here.
\ No newline at end of file
diff --git a/ansible_collections/ieisystem/inmanage/docs/kvm_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/kvm_info_module.rst
new file mode 100644
index 000000000..663b09012
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/kvm_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.kvm_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.kvm_info -- Get KVM information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.kvm_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get KVM information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+  - name: KVM test
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+    vars:
+      inmanage:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+
+    tasks:
+
+      - name: "Get KVM information"
+        ieisystem.inmanage.kvm_info:
+          provider: "{{ inmanage }}"
+
+
+
+
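+The Notes above state that ``check_mode`` is supported, so this task is
+expected to keep returning data during an ``ansible-playbook --check`` dry run.
+The snippet below forces check mode for the task only, as a sketch.
+
+.. code-block:: yaml+jinja
+
+
+  - name: "Get KVM information (check mode)"
+    ieisystem.inmanage.kvm_info:
+      provider: "{{ inmanage }}"
+    check_mode: true        # honoured because the module supports check_mode
+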
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/ldap_group_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ldap_group_info_module.rst
new file mode 100644
index 000000000..51a50ec59
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ldap_group_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ldap_group_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ldap_group_info -- Get ldap group information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ldap_group_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get ldap group information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ldap group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ldap group information"
+ ieisystem.inmanage.ldap_group_info:
+ provider: "{{ inmanage }}"
+
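+The returned ``changed``, ``message`` and ``state`` fields (documented under Return Values below) can be registered and inspected in a follow-up task. The sketch below is illustrative only; the ``ldap_group_result`` variable name is not part of the module:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get ldap group information"
+      ieisystem.inmanage.ldap_group_info:
+        provider: "{{ inmanage }}"
+      register: ldap_group_result
+
+    - name: "Show the returned message"
+      ansible.builtin.debug:
+        var: ldap_group_result.message
+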
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/ldap_group_module.rst b/ansible_collections/ieisystem/inmanage/docs/ldap_group_module.rst
new file mode 100644
index 000000000..2de6cb474
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ldap_group_module.rst
@@ -0,0 +1,431 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ldap_group_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ldap_group -- Manage LDAP group information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ldap_group`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Manage LDAP group information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-base"></div>
+ <b>base</b>
+ <a class="ansibleOptionLink" href="#parameter-base" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Search Base.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm"></div>
+ <b>kvm</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>KVM privilege.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-name"></div>
+ <b>name</b>
+ <a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Group name.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-pri"></div>
+ <b>pri</b>
+ <a class="ansibleOptionLink" href="#parameter-pri" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>administrator</li>
+ <li>user</li>
+ <li>operator</li>
+ <li>oem</li>
+ <li>none</li>
+ </ul>
+ </td>
+ <td>
+ <div>Group privilege.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>present</b>&nbsp;&larr;</div></li>
+ <li>absent</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>Whether the LDAP group should exist or not, taking action if the current state differs from the desired state.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-vm"></div>
+ <b>vm</b>
+ <a class="ansibleOptionLink" href="#parameter-vm" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+                                                    <div>Virtual media (vmedia) privilege.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ldap group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add ldap group information"
+ ieisystem.inmanage.ldap_group:
+ state: "present"
+ name: "wbs"
+ base: "cn=manager"
+ pri: "administrator"
+ kvm: "enable"
+ vm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ldap group information"
+ ieisystem.inmanage.ldap_group:
+ state: "present"
+ name: "wbs"
+ pri: "user"
+ kvm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete ldap group information"
+ ieisystem.inmanage.ldap_group:
+ state: "absent"
+ name: "wbs"
+ provider: "{{ inmanage }}"
+
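+Because the module does not support check mode, one way to confirm a change is to read the configuration back with ``ieisystem.inmanage.ldap_group_info`` after the task runs. This is a minimal sketch; the ``ldap_groups`` register name is illustrative:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Set ldap group information"
+      ieisystem.inmanage.ldap_group:
+        state: "present"
+        name: "wbs"
+        pri: "user"
+        provider: "{{ inmanage }}"
+
+    - name: "Read the ldap group configuration back"
+      ieisystem.inmanage.ldap_group_info:
+        provider: "{{ inmanage }}"
+      register: ldap_groups
+
+    - name: "Show the current ldap groups"
+      ansible.builtin.debug:
+        var: ldap_groups.message
+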
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/ldap_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ldap_info_module.rst
new file mode 100644
index 000000000..68332e011
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ldap_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ldap_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ldap_info -- Get LDAP information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ldap_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get LDAP information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ldap test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ldap information"
+ ieisystem.inmanage.ldap_info:
+ provider: "{{ inmanage }}"
+
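+The connection details can also be passed directly as the module's ``host``, ``username`` and ``password`` options instead of a ``provider`` dictionary; a minimal sketch:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get ldap information"
+      ieisystem.inmanage.ldap_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"
+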
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/ldisk_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ldisk_info_module.rst
new file mode 100644
index 000000000..a2a89647c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ldisk_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ldisk_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ldisk_info -- Get logical disks information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ldisk_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get logical disks information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Ldisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get logical disks information"
+ ieisystem.inmanage.ldisk_info:
+ provider: "{{ inmanage }}"
+
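+As noted in the parameter table, ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when they are not set in the task. The sketch below assumes those variables are visible to the process that executes the module (with ``connection: local`` that is the control node); setting them through the ``environment`` keyword is only one possible way to do that:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get logical disks information"
+      ieisystem.inmanage.ldisk_info:
+        host: "{{ ansible_ssh_host }}"
+      environment:
+        ANSIBLE_NET_USERNAME: "{{ username }}"
+        ANSIBLE_NET_PASSWORD: "{{ password }}"
+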
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/log_setting_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/log_setting_info_module.rst
new file mode 100644
index 000000000..179a1f71a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/log_setting_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.log_setting_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.log_setting_info -- Get BMC log setting information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.log_setting_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BMC log setting information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bmc log setting test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc log setting information"
+ ieisystem.inmanage.log_setting_info:
+ provider: "{{ inmanage }}"
+
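+For auditing, the collected settings can be written to a local file with core Ansible modules. The register name and file path below are only examples:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get bmc log setting information"
+      ieisystem.inmanage.log_setting_info:
+        provider: "{{ inmanage }}"
+      register: log_setting
+
+    - name: "Save the collected settings for review"
+      ansible.builtin.copy:
+        content: "{{ log_setting | to_nice_json }}"
+        dest: "/tmp/{{ inventory_hostname }}_log_setting.json"
+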
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/media_instance_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/media_instance_info_module.rst
new file mode 100644
index 000000000..40a1951f7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/media_instance_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.media_instance_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.media_instance_info -- Get Virtual Media Instance information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.media_instance_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get Virtual Media Instance information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Media instance test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get Virtual Media Instance information"
+ ieisystem.inmanage.media_instance_info:
+ provider: "{{ inmanage }}"
+
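+Because the module supports check mode, the lookup can also be forced into check mode on a single task, for example while the rest of the play makes changes. A minimal sketch; the ``media_instances`` register name is illustrative:
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get Virtual Media Instance information"
+      ieisystem.inmanage.media_instance_info:
+        provider: "{{ inmanage }}"
+      check_mode: true
+      register: media_instances
+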
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/mem_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/mem_info_module.rst
new file mode 100644
index 000000000..ac859ff27
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/mem_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.mem_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.mem_info -- Get memory information
++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.mem_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get memory information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Memory test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get memory information"
+ ieisystem.inmanage.mem_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/ncsi_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ncsi_info_module.rst
new file mode 100644
index 000000000..6519a4f7f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ncsi_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ncsi_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ncsi_info -- Get NCSI information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ncsi_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get NCSI information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: NCSI test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ncsi information"
+ ieisystem.inmanage.ncsi_info:
+ provider: "{{ inmanage }}"
+
+
+
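+As a minimal follow-up sketch, the fields listed under Return Values below can be captured with ``register`` and examined with ``ansible.builtin.debug``; the variable name ``ncsi_result`` is an arbitrary choice.
+
+.. code-block:: yaml+jinja
+
+
+      - name: "Get NCSI information and register the result"
+        ieisystem.inmanage.ncsi_info:
+          provider: "{{ inmanage }}"
+        register: ncsi_result
+
+      - name: "Show the returned message"
+        ansible.builtin.debug:
+          var: ncsi_result.message
+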
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/network_bond_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/network_bond_info_module.rst
new file mode 100644
index 000000000..548e4e282
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/network_bond_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.network_bond_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.network_bond_info -- Get network bond information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.network_bond_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get network bond information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bond test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get network bond information"
+ ieisystem.inmanage.network_bond_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/network_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/network_info_module.rst
new file mode 100644
index 000000000..4bb523cc1
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/network_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.network_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.network_info -- Get network information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.network_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get network information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Network test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get network information"
+ ieisystem.inmanage.network_info:
+ provider: "{{ inmanage }}"
+
+
+
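+As the parameter descriptions above note, ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when they are not given in the task. A minimal sketch of that fallback, assuming both variables are exported on the controller:
+
+.. code-block:: yaml+jinja
+
+
+      - name: "Get network information using credentials from the environment"
+        ieisystem.inmanage.network_info:
+          host: "{{ ansible_ssh_host }}"
+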
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/network_link_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/network_link_info_module.rst
new file mode 100644
index 000000000..0b897cc2f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/network_link_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.network_link_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.network_link_info -- Get network link information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.network_link_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get network link information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Link test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get network link information"
+ ieisystem.inmanage.network_link_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/ntp_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/ntp_info_module.rst
new file mode 100644
index 000000000..86008f37b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/ntp_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.ntp_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.ntp_info -- Get NTP information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.ntp_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get NTP information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: NTP test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ntp information"
+ ieisystem.inmanage.ntp_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/onboard_disk_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/onboard_disk_info_module.rst
new file mode 100644
index 000000000..a1836d0e3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/onboard_disk_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.onboard_disk_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.onboard_disk_info -- Get onboard disks information
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.onboard_disk_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get onboard disks information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Onboard test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get onboard disks information"
+ ieisystem.inmanage.onboard_disk_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/pcie_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/pcie_info_module.rst
new file mode 100644
index 000000000..f31d0fb32
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/pcie_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.pcie_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.pcie_info -- Get PCIE information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.pcie_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get PCIE information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: PCIE test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get PCIE information"
+ ieisystem.inmanage.pcie_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/pdisk_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/pdisk_info_module.rst
new file mode 100644
index 000000000..f3eae6f2a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/pdisk_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.pdisk_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.pdisk_info -- Get physical disks information
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.pdisk_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get physical disks information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Pdisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get physical disks information"
+ ieisystem.inmanage.pdisk_info:
+ provider: "{{ inmanage }}"
+
+
+
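+A small sketch of persisting the gathered data for later inspection; the variable name ``pdisk_result`` and the destination path are illustrative only.
+
+.. code-block:: yaml+jinja
+
+
+      - name: "Get physical disks information and register the result"
+        ieisystem.inmanage.pdisk_info:
+          provider: "{{ inmanage }}"
+        register: pdisk_result
+
+      - name: "Save the result to a local JSON file"
+        ansible.builtin.copy:
+          content: "{{ pdisk_result | to_nice_json }}"
+          dest: /tmp/pdisk_info.json
+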
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
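+The sketch below shows one way these return values might be consumed in a
+playbook; the ``pdisk_result`` variable name and the follow-up ``debug`` tasks
+are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get physical disks information"
+      ieisystem.inmanage.pdisk_info:
+        provider: "{{ inmanage }}"
+      register: pdisk_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ pdisk_result.state }} / message={{ pdisk_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: pdisk_result.changed
+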
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/power_budget_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/power_budget_info_module.rst
new file mode 100644
index 000000000..c4d467ce4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/power_budget_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.power_budget_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.power_budget_info -- Get power budget information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.power_budget_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get power budget information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Power budget test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get power budget information"
+          ieisystem.inmanage.power_budget_info:
+            provider: "{{ inmanage }}"
+
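+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get power budget information (top-level options)"
+      ieisystem.inmanage.power_budget_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"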
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
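+The sketch below shows one way these return values might be consumed in a
+playbook; the ``power_budget_result`` variable name and the follow-up ``debug``
+tasks are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get power budget information"
+      ieisystem.inmanage.power_budget_info:
+        provider: "{{ inmanage }}"
+      register: power_budget_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ power_budget_result.state }} / message={{ power_budget_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: power_budget_result.changed
+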
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/power_consumption_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/power_consumption_info_module.rst
new file mode 100644
index 000000000..8aa54b6f2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/power_consumption_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.power_consumption_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.power_consumption_info -- Get power consumption information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.power_consumption_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get power consumption information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Power consumption test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get power consumption information"
+          ieisystem.inmanage.power_consumption_info:
+            provider: "{{ inmanage }}"
+
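+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get power consumption information (top-level options)"
+      ieisystem.inmanage.power_consumption_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"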
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
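+The sketch below shows one way these return values might be consumed in a
+playbook; the ``power_consumption_result`` variable name and the follow-up
+``debug`` tasks are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get power consumption information"
+      ieisystem.inmanage.power_consumption_info:
+        provider: "{{ inmanage }}"
+      register: power_consumption_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ power_consumption_result.state }} / message={{ power_consumption_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: power_consumption_result.changed
+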
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/power_restore_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/power_restore_info_module.rst
new file mode 100644
index 000000000..93818b8b6
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/power_restore_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.power_restore_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.power_restore_info -- Get power restore information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.power_restore_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get power restore information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Power restore test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get power restore information"
+          ieisystem.inmanage.power_restore_info:
+            provider: "{{ inmanage }}"
+
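+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get power restore information (top-level options)"
+      ieisystem.inmanage.power_restore_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"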
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
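+The sketch below shows one way these return values might be consumed in a
+playbook; the ``power_restore_result`` variable name and the follow-up ``debug``
+tasks are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get power restore information"
+      ieisystem.inmanage.power_restore_info:
+        provider: "{{ inmanage }}"
+      register: power_restore_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ power_restore_result.state }} / message={{ power_restore_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: power_restore_result.changed
+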
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/power_status_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/power_status_info_module.rst
new file mode 100644
index 000000000..f8274e80b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/power_status_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.power_status_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.power_status_info -- Get power status information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.power_status_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get power status information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Power status test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get power status information"
+          ieisystem.inmanage.power_status_info:
+            provider: "{{ inmanage }}"
+
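+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get power status information (top-level options)"
+      ieisystem.inmanage.power_status_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"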
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
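+The sketch below shows one way these return values might be consumed in a
+playbook; the ``power_status_result`` variable name and the follow-up ``debug``
+tasks are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get power status information"
+      ieisystem.inmanage.power_status_info:
+        provider: "{{ inmanage }}"
+      register: power_status_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ power_status_result.state }} / message={{ power_status_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: power_status_result.changed
+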
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/preserve_config_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/preserve_config_info_module.rst
new file mode 100644
index 000000000..cb676c447
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/preserve_config_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.preserve_config_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.preserve_config_info -- Get preserve config information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.preserve_config_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get preserve config information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Preserve test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get preserve config information"
+          ieisystem.inmanage.preserve_config_info:
+            provider: "{{ inmanage }}"
+
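+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get preserve config information (top-level options)"
+      ieisystem.inmanage.preserve_config_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"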
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
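+The sketch below shows one way these return values might be consumed in a
+playbook; the ``preserve_config_result`` variable name and the follow-up
+``debug`` tasks are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get preserve config information"
+      ieisystem.inmanage.preserve_config_info:
+        provider: "{{ inmanage }}"
+      register: preserve_config_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ preserve_config_result.state }} / message={{ preserve_config_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: preserve_config_result.changed
+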
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/psu_config_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/psu_config_info_module.rst
new file mode 100644
index 000000000..39508ba61
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/psu_config_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.psu_config_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.psu_config_info -- Get psu config information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.psu_config_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get psu config information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Psu config test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get psu config information"
+          ieisystem.inmanage.psu_config_info:
+            provider: "{{ inmanage }}"
+
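+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get psu config information (top-level options)"
+      ieisystem.inmanage.psu_config_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"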
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
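+The sketch below shows one way these return values might be consumed in a
+playbook; the ``psu_config_result`` variable name and the follow-up ``debug``
+tasks are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get psu config information"
+      ieisystem.inmanage.psu_config_info:
+        provider: "{{ inmanage }}"
+      register: psu_config_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ psu_config_result.state }} / message={{ psu_config_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: psu_config_result.changed
+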
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/psu_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/psu_info_module.rst
new file mode 100644
index 000000000..08ac7cf7b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/psu_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.psu_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.psu_info -- Get psu information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.psu_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get psu information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+    - name: Psu test
+      hosts: inmanage
+      connection: local
+      gather_facts: false
+      vars:
+        inmanage:
+          host: "{{ ansible_ssh_host }}"
+          username: "{{ username }}"
+          password: "{{ password }}"
+
+      tasks:
+
+        - name: "Get psu information"
+          ieisystem.inmanage.psu_info:
+            provider: "{{ inmanage }}"
+
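+The task below is an alternative sketch that passes the connection details as
+top-level module options instead of a ``provider`` dict; it uses only the
+``host``, ``username`` and ``password`` parameters documented above, and the
+task name is purely illustrative.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: top-level options instead of the provider dict.
+    - name: "Get psu information (top-level options)"
+      ieisystem.inmanage.psu_info:
+        host: "{{ ansible_ssh_host }}"
+        username: "{{ username }}"
+        password: "{{ password }}"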
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
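+The sketch below shows one way these return values might be consumed in a
+playbook; the ``psu_result`` variable name and the follow-up ``debug`` tasks
+are illustrative assumptions, not part of the module documentation.
+
+.. code-block:: yaml+jinja
+
+    # Illustrative sketch: register the documented return values and inspect them.
+    - name: "Get psu information"
+      ieisystem.inmanage.psu_info:
+        provider: "{{ inmanage }}"
+      register: psu_result
+
+    - name: "Show the message and state returned by the module"
+      ansible.builtin.debug:
+        msg: "state={{ psu_result.state }} / message={{ psu_result.message }}"
+
+    - name: "Report when a change was made on the device"
+      ansible.builtin.debug:
+        msg: "The module reported a change."
+      when: psu_result.changed
+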
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/psu_peak_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/psu_peak_info_module.rst
new file mode 100644
index 000000000..90effa602
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/psu_peak_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.psu_peak_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.psu_peak_info -- Get psu peak information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.psu_peak_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get psu peak information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Psu peak test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get psu peak information"
+ ieisystem.inmanage.psu_peak_info:
+ provider: "{{ inmanage }}"
+
+
+
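+The ``username`` and ``password`` options fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables, so credentials can be kept out of the play. A minimal sketch, assuming both variables are exported in the shell that runs ``ansible-playbook``:
+
+.. code-block:: yaml+jinja
+
+
+  # Assumes ANSIBLE_NET_USERNAME and ANSIBLE_NET_PASSWORD are exported on
+  # the control node; only the BMC host is supplied in the task itself.
+  - name: Psu peak test without inline credentials
+    hosts: inmanage
+    connection: local
+    gather_facts: false
+
+    tasks:
+
+      - name: "Get psu peak information using environment credentials"
+        ieisystem.inmanage.psu_peak_info:
+          host: "{{ ansible_ssh_host }}"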
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/raid_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/raid_info_module.rst
new file mode 100644
index 000000000..0850f19dc
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/raid_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.raid_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.raid_info -- Get RAID/HBA card and controller information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.raid_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get RAID/HBA card and controller information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Raid test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get raid information"
+ ieisystem.inmanage.raid_info:
+ provider: "{{ inmanage }}"
+
+
+
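+Because the module always returns the ``changed``, ``message`` and ``state`` fields documented below, the result can be captured with ``register`` and inspected in later tasks. A minimal sketch (task and variable names are illustrative):
+
+.. code-block:: yaml+jinja
+
+
+  # Capture the module result and print the documented return fields.
+  - name: "Get raid information and register the result"
+    ieisystem.inmanage.raid_info:
+      provider: "{{ inmanage }}"
+    register: raid_result
+
+  - name: "Show the state and message returned by raid_info"
+    ansible.builtin.debug:
+      msg: "state={{ raid_result.state }} message={{ raid_result.message }}"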
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/reset_bmc_module.rst b/ansible_collections/ieisystem/inmanage/docs/reset_bmc_module.rst
new file mode 100644
index 000000000..47fc08838
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/reset_bmc_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.reset_bmc_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.reset_bmc -- BMC reset
++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.reset_bmc`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- BMC reset on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Reset bmc test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Reset bmc"
+ ieisystem.inmanage.reset_bmc:
+ provider: "{{ inmanage }}"
+
+
+
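+Resetting the BMC makes the management interface unreachable for a short time, so a follow-up wait can make the play more robust. A sketch using ``ansible.builtin.wait_for``; the port and timeout values are assumptions and should be adjusted for your environment:
+
+.. code-block:: yaml+jinja
+
+
+  # After the reset, wait until the BMC web interface answers again.
+  # Port 443, delay and timeout are assumptions; tune them for your setup.
+  - name: "Reset bmc"
+    ieisystem.inmanage.reset_bmc:
+      provider: "{{ inmanage }}"
+
+  - name: "Wait for the BMC to come back online"
+    ansible.builtin.wait_for:
+      host: "{{ ansible_ssh_host }}"
+      port: 443
+      delay: 30
+      timeout: 600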
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/reset_kvm_module.rst b/ansible_collections/ieisystem/inmanage/docs/reset_kvm_module.rst
new file mode 100644
index 000000000..183937cbb
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/reset_kvm_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.reset_kvm_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.reset_kvm -- KVM reset
++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.reset_kvm`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- KVM reset on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Reset kvm test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Reset kvm"
+ ieisystem.inmanage.reset_kvm:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/restore_module.rst b/ansible_collections/ieisystem/inmanage/docs/restore_module.rst
new file mode 100644
index 000000000..3b9861be4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/restore_module.rst
@@ -0,0 +1,348 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.restore_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.restore -- Restore server settings
++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.restore`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Restore server settings on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-bak_file"></div>
+ <b>bak_file</b>
+ <a class="ansibleOptionLink" href="#parameter-bak_file" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Select backup file or bak folder.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-item"></div>
+ <b>item</b>
+ <a class="ansibleOptionLink" href="#parameter-item" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>all</li>
+ <li>network</li>
+ <li>dns</li>
+ <li>service</li>
+ <li>ntp</li>
+ <li>smtp</li>
+ <li>snmptrap</li>
+ <li>ad</li>
+ <li>ldap</li>
+ <li>user</li>
+ <li>bios</li>
+ </ul>
+ </td>
+ <td>
+ <div>Select the exported item to restore.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Restore test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Restore server settings"
+ ieisystem.inmanage.restore:
+ bak_file: "/home/wbs/backfile"
+ item: "all"
+ provider: "{{ inmanage }}"
+
+
+
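+The ``item`` choices allow restoring a single settings category instead of everything; per the parameter note above, ``item`` is only honoured on M5 models. A sketch restoring only the network settings (the backup path is illustrative):
+
+.. code-block:: yaml+jinja
+
+
+  # Restore only the network settings from a previously exported backup.
+  # The bak_file path is illustrative; item is only supported on M5 models.
+  - name: "Restore network settings only"
+    ieisystem.inmanage.restore:
+      bak_file: "/home/wbs/backfile"
+      item: "network"
+      provider: "{{ inmanage }}"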
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/self_test_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/self_test_info_module.rst
new file mode 100644
index 000000000..64e5e3c09
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/self_test_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.self_test_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.self_test_info -- Get self test information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.self_test_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get self test information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Self test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get self test information"
+ ieisystem.inmanage.self_test_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/sensor_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/sensor_info_module.rst
new file mode 100644
index 000000000..2fd5d7c57
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/sensor_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.sensor_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.sensor_info -- Get sensor information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.sensor_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get sensor information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Sensor test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get sensor information"
+ ieisystem.inmanage.sensor_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/server_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/server_info_module.rst
new file mode 100644
index 000000000..94e2cf4e4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/server_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.server_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.server_info -- Get server status information
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.server_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get server status information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Server test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get server status information"
+ ieisystem.inmanage.server_info:
+ provider: "{{ inmanage }}"
+
+
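+As a usage sketch that is not part of the module's own examples, the result can
+be registered and inspected; ``server_result`` is a hypothetical variable name,
+while ``message`` is one of the return values documented further down this page.
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get server status information"
+      ieisystem.inmanage.server_info:
+        provider: "{{ inmanage }}"
+      register: server_result
+
+    - name: "Show the returned message"
+      ansible.builtin.debug:
+        var: server_result.message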
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/service_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/service_info_module.rst
new file mode 100644
index 000000000..b5e35fe6a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/service_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.service_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.service_info -- Get service information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.service_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get service information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Service info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get service information"
+ ieisystem.inmanage.service_info:
+ provider: "{{ inmanage }}"
+
+
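+The parameter table above notes that ``username`` and ``password`` fall back to
+the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables.
+A minimal sketch of that style, assuming both variables are exported on the
+Ansible controller before the playbook runs:
+
+.. code-block:: yaml+jinja
+
+
+    # Assumes ANSIBLE_NET_USERNAME and ANSIBLE_NET_PASSWORD are exported on the
+    # controller; host is passed as a documented top-level module parameter.
+    - name: "Get service information using environment credentials"
+      ieisystem.inmanage.service_info:
+        host: "{{ ansible_ssh_host }}"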
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/session_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/session_info_module.rst
new file mode 100644
index 000000000..49ba5fd64
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/session_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.session_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.session_info -- Get online session information
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.session_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get online session information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Session test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get session information"
+ ieisystem.inmanage.session_info:
+ provider: "{{ inmanage }}"
+
+
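+Since the Notes section states that check mode is supported, the following is a
+brief sketch of running this read-only query under check mode; ``session_result``
+is a hypothetical variable name, and ``check_mode`` is standard Ansible task
+syntax rather than a parameter of this module.
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get session information in check mode"
+      ieisystem.inmanage.session_info:
+        provider: "{{ inmanage }}"
+      check_mode: true
+      register: session_result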
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/smtp_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/smtp_info_module.rst
new file mode 100644
index 000000000..f6fd85f90
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/smtp_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.smtp_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.smtp_info -- Get SMTP information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.smtp_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get SMTP information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: SMTP test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get SMTP information"
+ ieisystem.inmanage.smtp_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/snmp_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/snmp_info_module.rst
new file mode 100644
index 000000000..ef58e7c77
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/snmp_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.snmp_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.snmp_info -- Get SNMP get/set information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.snmp_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get SNMP get/set information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: SNMP test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get SNMP get/set information"
+ ieisystem.inmanage.snmp_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/snmp_trap_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/snmp_trap_info_module.rst
new file mode 100644
index 000000000..ad7c284e7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/snmp_trap_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.snmp_trap_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.snmp_trap_info -- Get SNMP trap information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.snmp_trap_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get SNMP trap information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Trap test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get SNMP trap information"
+ ieisystem.inmanage.snmp_trap_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/support_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/support_info_module.rst
new file mode 100644
index 000000000..55781b9c4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/support_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.support_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.support_info -- Get support information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.support_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get the ieisystem Server support list information.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Support list test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get support information"
+ ieisystem.inmanage.support_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`; the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Indicates whether a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/system_log_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/system_log_info_module.rst
new file mode 100644
index 000000000..71a5a7690
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/system_log_info_module.rst
@@ -0,0 +1,384 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.system_log_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.system_log_info -- Get BMC system log information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.system_log_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get BMC system log information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-count"></div>
+ <b>count</b>
+ <a class="ansibleOptionLink" href="#parameter-count" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Get the specified number of most recent log entries.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-level"></div>
+ <b>level</b>
+ <a class="ansibleOptionLink" href="#parameter-level" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>alert</b>&nbsp;&larr;</div></li>
+ <li>critical</li>
+ <li>error</li>
+ <li>notice</li>
+ <li>warning</li>
+ <li>debug</li>
+ <li>emergency</li>
+ <li>info</li>
+ </ul>
+ </td>
+ <td>
+ <div>Log level.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-log_time"></div>
+ <b>log_time</b>
+ <a class="ansibleOptionLink" href="#parameter-log_time" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Get logs after the specified date and time; the format should be YYYY-MM-DDTHH:MM+HH:MM, for example 2019-06-27T12:30+08:00.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-system_file"></div>
+ <b>system_file</b>
+ <a class="ansibleOptionLink" href="#parameter-system_file" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Store logs to a file.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Bmc system log info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc system log information"
+ ieisystem.inmanage.system_log_info:
+ level: "alert"
+ log_time: "2020-06-01T12:30+08:00"
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc system log information"
+ ieisystem.inmanage.system_log_info:
+ count: 30
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc system log information"
+ ieisystem.inmanage.system_log_info:
+ system_file: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+
+
+
+
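+The parameter table above states that ``username`` and ``password`` fall back to the ``ANSIBLE_NET_USERNAME`` and ``ANSIBLE_NET_PASSWORD`` environment variables when they are omitted from the task. A minimal sketch of that fallback, assuming the variables are exported in the environment of the process that executes the module (the controller, given ``connection: local``); the values shown are illustrative:
+
+.. code-block:: yaml+jinja
+
+
+ # Assumed to be exported before the playbook runs, e.g.:
+ #   export ANSIBLE_NET_USERNAME=admin
+ #   export ANSIBLE_NET_PASSWORD='secret'
+ - name: "Get bmc system log information using environment credentials"
+ ieisystem.inmanage.system_log_info:
+ host: "{{ ansible_ssh_host }}"
+ level: "alert"
+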
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/temp_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/temp_info_module.rst
new file mode 100644
index 000000000..ef4debedf
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/temp_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.temp_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.temp_info -- Get temp information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.temp_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get temp information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Temp test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get temp information"
+ ieisystem.inmanage.temp_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/threshold_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/threshold_info_module.rst
new file mode 100644
index 000000000..eca33328e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/threshold_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.threshold_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.threshold_info -- Get threshold information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.threshold_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get threshold information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Threshold test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get threshold information"
+ ieisystem.inmanage.threshold_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/uid_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/uid_info_module.rst
new file mode 100644
index 000000000..ca26d2e71
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/uid_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.uid_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.uid_info -- Get UID information
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.uid_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get UID information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: UID test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get UID information"
+ ieisystem.inmanage.uid_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/update_cpld_module.rst b/ansible_collections/ieisystem/inmanage/docs/update_cpld_module.rst
new file mode 100644
index 000000000..fad252986
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/update_cpld_module.rst
@@ -0,0 +1,362 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.update_cpld_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.update_cpld -- Update CPLD
++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.update_cpld`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Update CPLD on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-file_url"></div>
+ <b>file_url</b>
+ <a class="ansibleOptionLink" href="#parameter-file_url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>CPLD image file path.</div>
+ <div>Required when <em>list=False</em>.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-id"></div>
+ <b>id</b>
+ <a class="ansibleOptionLink" href="#parameter-id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>CPLD id.</div>
+ <div>Required when <em>list=False</em>.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-list"></div>
+ <b>list</b>
+ <a class="ansibleOptionLink" href="#parameter-list" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>no</b>&nbsp;&larr;</div></li>
+ <li>yes</li>
+ </ul>
+ </td>
+ <td>
+ <div>Get the CPLD list.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: CPLD test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get cpld list"
+ ieisystem.inmanage.update_cpld:
+ list: True
+ provider: "{{ inmanage }}"
+
+ - name: "Update cpld"
+ update_cpld:
+ id: 1
+ file_url: "home/wbs/raw.bin"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/update_fw_module.rst b/ansible_collections/ieisystem/inmanage/docs/update_fw_module.rst
new file mode 100644
index 000000000..b6a4c8276
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/update_fw_module.rst
@@ -0,0 +1,426 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.update_fw_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.update_fw -- Update firmware
++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.update_fw`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Update firmware on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-dual_image"></div>
+ <b>dual_image</b>
+ <a class="ansibleOptionLink" href="#parameter-dual_image" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>single</li>
+ <li><div style="color: blue"><b>dual</b>&nbsp;&larr;</div></li>
+ </ul>
+ </td>
+ <td>
+ <div>Update the dual image (default) or not.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-has_me"></div>
+ <b>has_me</b>
+ <a class="ansibleOptionLink" href="#parameter-has_me" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ </ul>
+ <b>Default:</b><br/><div style="color: blue">1</div>
+ </td>
+ <td>
+ <div>Whether to update ME when updating the BIOS; only works on the Intel platform. 0 - no, 1 - yes.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mode"></div>
+ <b>mode</b>
+ <a class="ansibleOptionLink" href="#parameter-mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>Auto</b>&nbsp;&larr;</div></li>
+ <li>Manual</li>
+ </ul>
+ </td>
+ <td>
+ <div>Active mode, Manual or Auto (default).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-over_ride"></div>
+ <b>over_ride</b>
+ <a class="ansibleOptionLink" href="#parameter-over_ride" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>0</li>
+ <li>1</li>
+ </ul>
+ <b>Default:</b><br/><div style="color: blue">0</div>
+ </td>
+ <td>
+ <div>Whether to reserve (keep) existing configurations: 0 - reserve, 1 - override.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-type"></div>
+ <b>type</b>
+ <a class="ansibleOptionLink" href="#parameter-type" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>BMC</li>
+ <li>BIOS</li>
+ </ul>
+ </td>
+ <td>
+ <div>Firmware type.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-url"></div>
+ <b>url</b>
+ <a class="ansibleOptionLink" href="#parameter-url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Firmware image URL.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Update fw test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "update bios"
+ ieisystem.inmanage.update_fw:
+ url: "/home/wbs/SA5112M5_BIOS_4.1.8_Standard_20200117.bin"
+ type: "BIOS"
+ provider: "{{ inmanage }}"
+
+ - name: "update bmc"
+ ieisystem.inmanage.update_fw:
+ url: "/home/wbs/SA5112M5_BMC_4.17.7_Standard_20200430"
+ mode: "Auto"
+ type: "BMC"
+ dual_image: "dual"
+ provider: "{{ inmanage }}"
+
+
+
+
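+Neither example above exercises the configuration-retention or ME options. A hedged sketch of a BIOS update that overrides the stored configuration and skips the ME update, using only the parameters documented in the table above (the file path is illustrative):
+
+.. code-block:: yaml+jinja
+
+
+ - name: "Update BIOS, override configuration, skip ME update"
+ ieisystem.inmanage.update_fw:
+ url: "/home/wbs/SA5112M5_BIOS_4.1.8_Standard_20200117.bin"
+ type: "BIOS"
+ over_ride: 1
+ has_me: 0
+ provider: "{{ inmanage }}"
+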
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/update_psu_module.rst b/ansible_collections/ieisystem/inmanage/docs/update_psu_module.rst
new file mode 100644
index 000000000..c430b52db
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/update_psu_module.rst
@@ -0,0 +1,338 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.update_psu_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.update_psu -- Update PSU
++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.update_psu`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Update PSU on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-mode"></div>
+ <b>mode</b>
+ <a class="ansibleOptionLink" href="#parameter-mode" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>Auto</b>&nbsp;&larr;</div></li>
+ <li>Manual</li>
+ </ul>
+ </td>
+ <td>
+ <div>Server Auto Reset Option, Manual or Auto (default).</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-url"></div>
+ <b>url</b>
+ <a class="ansibleOptionLink" href="#parameter-url" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Firmware image URL.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Update psu test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "update psu"
+ ieisystem.inmanage.update_psu:
+ url: "/home/wbs/CRPS1300D2W_00.01.04_BootLoader_Pri_Sec.hpm"
+ mode: "Auto"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/user_group_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/user_group_info_module.rst
new file mode 100644
index 000000000..903769c1f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/user_group_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.user_group_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.user_group_info -- Get user group information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.user_group_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get user group information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: User group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get user group information"
+ ieisystem.inmanage.user_group_info:
+ provider: "{{ inmanage }}"
+
+
+
+
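+Because this is an info module, a playbook will usually register the result and
+consume it in a later task. The following is a minimal sketch (the variable name
+``group_result`` is illustrative; the documented return keys are ``message``,
+``state`` and ``changed``):
+
+.. code-block:: yaml+jinja
+
+
+    - name: "Get user group information"
+      ieisystem.inmanage.user_group_info:
+        provider: "{{ inmanage }}"
+      register: group_result
+
+    - name: "Show the returned message"
+      ansible.builtin.debug:
+        var: group_result.message
+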
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/user_group_module.rst b/ansible_collections/ieisystem/inmanage/docs/user_group_module.rst
new file mode 100644
index 000000000..aba0c7a87
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/user_group_module.rst
@@ -0,0 +1,532 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.user_group_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.user_group -- Manage user group
+++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.user_group`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Manage user group on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-debug"></div>
+ <b>debug</b>
+ <a class="ansibleOptionLink" href="#parameter-debug" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Debug diagnose privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-general"></div>
+ <b>general</b>
+ <a class="ansibleOptionLink" href="#parameter-general" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>General configuration privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-kvm"></div>
+ <b>kvm</b>
+ <a class="ansibleOptionLink" href="#parameter-kvm" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Remote KVM configuration privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-media"></div>
+ <b>media</b>
+ <a class="ansibleOptionLink" href="#parameter-media" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Remote media configuration privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-name"></div>
+ <b>name</b>
+ <a class="ansibleOptionLink" href="#parameter-name" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ / <span style="color: red">required</span> </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Group name.</div>
+ <div>For the M6 model, the group name must be one of OEM1, OEM2, OEM3, or OEM4.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-power"></div>
+ <b>power</b>
+ <a class="ansibleOptionLink" href="#parameter-power" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Power control privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-pri"></div>
+ <b>pri</b>
+ <a class="ansibleOptionLink" href="#parameter-pri" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>administrator</li>
+ <li>operator</li>
+ <li>user</li>
+ <li>oem</li>
+ <li>none</li>
+ </ul>
+ </td>
+ <td>
+ <div>Group privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M5 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-security"></div>
+ <b>security</b>
+ <a class="ansibleOptionLink" href="#parameter-security" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Security configuration privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-self"></div>
+ <b>self</b>
+ <a class="ansibleOptionLink" href="#parameter-self" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>Self configuration privilege.</div>
+ <div>Required when <em>state=present</em>.</div>
+ <div>Only the M6 model supports this parameter.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>present</b>&nbsp;&larr;</div></li>
+ <li>absent</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether the user group should exist or not, taking action if the state is different from what is stated.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: User group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add user group"
+ ieisystem.inmanage.user_group:
+ state: "present"
+ name: "test"
+ pri: "administrator"
+ provider: "{{ inmanage }}"
+
+ - name: "Set user group"
+ ieisystem.inmanage.user_group:
+ state: "present"
+ name: "test"
+ pri: "user"
+ provider: "{{ inmanage }}"
+
+ - name: "Set m6 user group"
+ ieisystem.inmanage.user_group:
+ state: "present"
+ name: "OEM1"
+ general: "enable"
+ kvm: "enable"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete user group"
+ ieisystem.inmanage.user_group:
+ state: "absent"
+ name: "test"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/user_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/user_info_module.rst
new file mode 100644
index 000000000..60f629130
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/user_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.user_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.user_info -- Get user information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.user_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get user information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: User test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get user information"
+ ieisystem.inmanage.user_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/user_module.rst b/ansible_collections/ieisystem/inmanage/docs/user_module.rst
new file mode 100644
index 000000000..b7bb5c831
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/user_module.rst
@@ -0,0 +1,454 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.user_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.user -- Manage user
+++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.user`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Manage user on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-access"></div>
+ <b>access</b>
+ <a class="ansibleOptionLink" href="#parameter-access" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>enable</li>
+ <li>disable</li>
+ </ul>
+ </td>
+ <td>
+ <div>User access.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-email"></div>
+ <b>email</b>
+ <a class="ansibleOptionLink" href="#parameter-email" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>User email.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-priv"></div>
+ <b>priv</b>
+ <a class="ansibleOptionLink" href="#parameter-priv" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">list</span>
+ / <span style="color: purple">elements=string</span> </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li>kvm</li>
+ <li>vmm</li>
+ <li>sol</li>
+ <li>none</li>
+ </ul>
+ </td>
+ <td>
+ <div>Other user permissions; select one or more of KVM, VMM, SOL, or None.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-role_id"></div>
+ <b>role_id</b>
+ <a class="ansibleOptionLink" href="#parameter-role_id" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>User group.</div>
+ <div>The default user groups are &#x27;Administrator&#x27;, &#x27;Operator&#x27;, and &#x27;User&#x27;.</div>
+ <div>The <code>user_group_info</code> module can be used to get all group information.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#parameter-state" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ <ul style="margin: 0; padding: 0"><b>Choices:</b>
+ <li><div style="color: blue"><b>present</b>&nbsp;&larr;</div></li>
+ <li>absent</li>
+ </ul>
+ </td>
+ <td>
+ <div>Whether the user should exist or not, taking action if the state is different from what is stated.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-uid"></div>
+ <b>uid</b>
+ <a class="ansibleOptionLink" href="#parameter-uid" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">integer</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>User ID. The range is 1 to 16.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-uname"></div>
+ <b>uname</b>
+ <a class="ansibleOptionLink" href="#parameter-uname" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>User name. Required when <em>uid</em> is None.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-upass"></div>
+ <b>upass</b>
+ <a class="ansibleOptionLink" href="#parameter-upass" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>User password.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Does not support ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: User test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add user"
+ ieisystem.inmanage.user:
+ state: "present"
+ uname: "wbs"
+ upass: "admin"
+ role_id: "Administrator"
+ priv: "kvm,sol"
+ email: "wbs@ieisystem.com"
+ provider: "{{ inmanage }}"
+
+ - name: "Set user"
+ ieisystem.inmanage.user:
+ state: "present"
+ uname: "wbs"
+ upass: "12345678"
+ role_id: "user"
+ priv: "kvm,sol"
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/virtual_media_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/virtual_media_info_module.rst
new file mode 100644
index 000000000..d56d4f3ec
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/virtual_media_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.virtual_media_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.virtual_media_info -- Get Virtual Media information
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.virtual_media_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get Virtual Media information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Media test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get Virtual Media information"
+ ieisystem.inmanage.virtual_media_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/docs/volt_info_module.rst b/ansible_collections/ieisystem/inmanage/docs/volt_info_module.rst
new file mode 100644
index 000000000..f4e3e745a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/docs/volt_info_module.rst
@@ -0,0 +1,302 @@
+.. Document meta
+
+:orphan:
+
+.. Anchors
+
+.. _ansible_collections.ieisystem.inmanage.volt_info_module:
+
+.. Anchors: short name for ansible.builtin
+
+.. Anchors: aliases
+
+
+
+.. Title
+
+ieisystem.inmanage.volt_info -- Get volt information
+++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+.. Collection note
+
+.. note::
+ This plugin is part of the `ieisystem.inmanage collection <https://galaxy.ansible.com/ieisystem/inmanage>`_.
+
+ To install it use: :code:`ansible-galaxy collection install ieisystem.inmanage`.
+
+ To use it in a playbook, specify: :code:`ieisystem.inmanage.volt_info`.
+
+.. version_added
+
+.. versionadded:: 1.0.0 of ieisystem.inmanage
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. Deprecated
+
+
+Synopsis
+--------
+
+.. Description
+
+- Get volt information on ieisystem Server.
+
+
+.. Aliases
+
+
+.. Requirements
+
+Requirements
+------------
+The below requirements are needed on the host that executes this module.
+
+- Python 3.7+
+- inManage
+
+
+.. Options
+
+Parameters
+----------
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="2">Parameter</th>
+ <th>Choices/<font color="blue">Defaults</font></th>
+ <th width="100%">Comments</th>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-provider"></div>
+ <b>provider</b>
+ <a class="ansibleOptionLink" href="#parameter-provider" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">dictionary</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>A dict object containing connection details.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/host"></div>
+ <b>host</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/host" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/password"></div>
+ <b>password</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/password" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Specifies the password to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_PASSWORD</code> will be used instead.</div>
+ </td>
+ </tr>
+ <tr>
+ <td class="elbow-placeholder"></td>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="parameter-provider/username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-provider/username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+
+ <tr>
+ <td colspan="2">
+ <div class="ansibleOptionAnchor" id="parameter-username"></div>
+ <b>username</b>
+ <a class="ansibleOptionLink" href="#parameter-username" title="Permalink to this option"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>
+ </td>
+ <td>
+ <div>Configures the username to use to authenticate the connection to the remote device. If the value is not specified in the task, the value of environment variable <code>ANSIBLE_NET_USERNAME</code> will be used instead.</div>
+ </td>
+ </tr>
+ </table>
+ <br/>
+
+.. Notes
+
+Notes
+-----
+
+.. note::
+ - Supports ``check_mode``.
+
+.. Seealso
+
+
+.. Examples
+
+Examples
+--------
+
+.. code-block:: yaml+jinja
+
+
+ - name: Volt test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get volt information"
+ ieisystem.inmanage.volt_info:
+ provider: "{{ inmanage }}"
+
+
+
+
+.. Facts
+
+
+.. Return values
+
+Return Values
+-------------
+Common return values are documented :ref:`here <common_return_values>`, the following are the fields unique to this module:
+
+.. raw:: html
+
+ <table border=0 cellpadding=0 class="documentation-table">
+ <tr>
+ <th colspan="1">Key</th>
+ <th>Returned</th>
+ <th width="100%">Description</th>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-changed"></div>
+ <b>changed</b>
+ <a class="ansibleOptionLink" href="#return-changed" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">boolean</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Check to see if a change was made on the device.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-message"></div>
+ <b>message</b>
+ <a class="ansibleOptionLink" href="#return-message" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Messages returned after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ <tr>
+ <td colspan="1">
+ <div class="ansibleOptionAnchor" id="return-state"></div>
+ <b>state</b>
+ <a class="ansibleOptionLink" href="#return-state" title="Permalink to this return value"></a>
+ <div style="font-size: small">
+ <span style="color: purple">string</span>
+ </div>
+ </td>
+ <td>always</td>
+ <td>
+ <div>Status after module execution.</div>
+ <br/>
+ </td>
+ </tr>
+ </table>
+ <br/><br/>
+
+.. Status (Presently only deprecated)
+
+
+.. Authors
+
+Authors
+~~~~~~~
+
+- WangBaoshan (@ieisystem)
+
+
+
+.. Parsing errors
+
diff --git a/ansible_collections/ieisystem/inmanage/meta/runtime.yml b/ansible_collections/ieisystem/inmanage/meta/runtime.yml
new file mode 100644
index 000000000..86ccce27b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/meta/runtime.yml
@@ -0,0 +1,2 @@
+---
+requires_ansible: '>=2.10'
diff --git a/ansible_collections/ieisystem/inmanage/plugins/doc_fragments/inmanage.py b/ansible_collections/ieisystem/inmanage/plugins/doc_fragments/inmanage.py
new file mode 100644
index 000000000..05faebd2e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/doc_fragments/inmanage.py
@@ -0,0 +1,59 @@
+# -*- coding:utf-8 -*-
+
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+
+class ModuleDocFragment(object):
+
+ # Standard files documentation fragment
+ DOCUMENTATION = r'''
+options:
+ provider:
+ description:
+ - A dict object containing connection details.
+ type: dict
+ suboptions:
+ host:
+ description:
+ - Specifies the DNS host name or address for connecting to the remote
+ device over the specified transport. The value of host is used as
+ the destination address for the transport.
+ type: str
+ username:
+ description:
+ - Configures the username to use to authenticate the connection to
+ the remote device. If the value is not specified in the task, the value of environment
+ variable C(ANSIBLE_NET_USERNAME) will be used instead.
+ type: str
+ password:
+ description:
+ - Specifies the password to use to authenticate the connection to
+ the remote device. If the value is not specified in the task, the
+ value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
+ type: str
+ host:
+ description:
+ - Specifies the DNS host name or address for connecting to the remote
+ device over the specified transport. The value of host is used as
+ the destination address for the transport.
+ type: str
+ username:
+ description:
+ - Configures the username to use to authenticate the connection to
+ the remote device. If the value is not specified in the task, the value of environment
+ variable C(ANSIBLE_NET_USERNAME) will be used instead.
+ type: str
+ password:
+ description:
+ - Specifies the password to use to authenticate the connection to
+ the remote device. If the value is not specified in the task, the
+ value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
+ type: str
+requirements:
+ - Python 3.7+
+ - inManage
+'''
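+
+# Usage note: modules in this collection reuse these shared options by listing
+#   extends_documentation_fragment:
+#     - ieisystem.inmanage.inmanage
+# in their DOCUMENTATION string (see e.g. ad_group.py below); the fragment is
+# consumed at documentation build time only and has no runtime behaviour.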
diff --git a/ansible_collections/ieisystem/inmanage/plugins/module_utils/inmanage.py b/ansible_collections/ieisystem/inmanage/plugins/module_utils/inmanage.py
new file mode 100644
index 000000000..6a7de4787
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/module_utils/inmanage.py
@@ -0,0 +1,54 @@
+# -*- coding:utf-8 -*-
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+try:
+ import inmanage
+ inmanage_temp = True
+except ImportError:
+ inmanage_temp = False
+from ansible.module_utils.basic import env_fallback
+from ansible.module_utils.six import iteritems
+
+inmanage_provider_spec = {
+ 'host': dict(type='str'),
+ 'username': dict(type='str', fallback=(env_fallback, ['ANSIBLE_NET_USERNAME'])),
+ 'password': dict(type='str', fallback=(env_fallback, ['ANSIBLE_NET_PASSWORD']), no_log=True),
+}
+inmanage_argument_spec = {
+ 'provider': dict(type='dict', options=inmanage_provider_spec),
+}
+inmanage_top_spec = {
+ 'host': dict(type='str'),
+ 'username': dict(type='str'),
+ 'password': dict(type='str', no_log=True),
+}
+inmanage_argument_spec.update(inmanage_top_spec)
+
+
+def load_params(module):
+ """load_params"""
+ provider = module.params.get('provider') or dict()
+ for key, value in iteritems(provider):
+ if key in inmanage_argument_spec:
+ if module.params.get(key) is None and value is not None:
+ module.params[key] = value
+
+
+def get_connection(module):
+ """get_connection"""
+ load_params(module)
+ # result = dict()
+ # if module.check_mode:
+ # result['changed'] = True
+ # result['state'] = 'Success'
+ # result['message'] = module.params['subcommand']
+ # else:
+ dict_param = module.params
+ if not inmanage_temp:
+ module.fail_json(msg='The inmanage Python library must be installed to use this module')
+ result = inmanage.main(dict_param)
+ return result
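
get_connection() first calls load_params(), which copies any value set inside the provider dict into the matching top-level parameter when that parameter is unset, then fails if the inmanage library is missing before delegating to inmanage.main(). A minimal sketch of the merge step, assuming a hypothetical FakeModule stand-in for AnsibleModule and an inlined key list instead of the real argument spec:

class FakeModule(object):
    """Hypothetical stand-in for AnsibleModule, only here to exercise the merge."""
    def __init__(self, params):
        self.params = params


def load_params(module):
    # Simplified copy of the module_utils helper: provider values fill unset top-level params.
    provider = module.params.get('provider') or dict()
    for key, value in provider.items():
        if key in ('host', 'username', 'password') and module.params.get(key) is None and value is not None:
            module.params[key] = value


module = FakeModule({
    'host': None,
    'username': None,
    'password': None,
    'provider': {'host': '10.0.0.10', 'username': 'admin', 'password': 'secret'},
})
load_params(module)
print(module.params['host'], module.params['username'])   # -> 10.0.0.10 admin
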
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ad_group.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ad_group.py
new file mode 100644
index 000000000..c41c001df
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ad_group.py
@@ -0,0 +1,158 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ad_group
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Manage Active Directory group information
+description:
+ - Manage Active Directory group information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ state:
+ description:
+ - Whether the Active Directory group should exist or not, taking action if the state is different from what is stated.
+ choices: ['present', 'absent']
+ default: present
+ type: str
+ name:
+ description:
+ - Enter the Role Group Name. This name identifies the role group in Active Directory.
+ type: str
+ required: true
+ domain:
+ description:
+ - Enter the Role Group Domain. This is the domain where the role group is located.
+ type: str
+ pri:
+ description:
+ - Enter the Role Group Privilege. This is the level of privilege to be assigned for this role group.
+ choices: ['administrator', 'user', 'operator', 'oem', 'none']
+ type: str
+ kvm:
+ description:
+ - This field provides KVM access for the AD-authenticated role group user.
+ choices: ['enable', 'disable']
+ type: str
+ vm:
+ description:
+ - This field provides VMedia access for the AD-authenticated role group user.
+ choices: ['enable', 'disable']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ad group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add active directory group information"
+ ieisystem.inmanage.ad_group:
+ state: "present"
+ name: "wbs"
+ domain: "test.com"
+ pri: "administrator"
+ kvm: "enable"
+ vm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set active directory group information"
+ ieisystem.inmanage.ad_group:
+ state: "present"
+ name: "wbs"
+ pri: "user"
+ kvm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete active directory group information"
+ ieisystem.inmanage.ad_group:
+ state: "absent"
+ name: "wbs"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class AD(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'editadgroup'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ state=dict(type='str', choices=['present', 'absent'], default='present'),
+ name=dict(type='str', required=True),
+ domain=dict(type='str', required=False),
+ pri=dict(type='str', required=False, choices=['administrator', 'user', 'operator', 'oem', 'none']),
+ kvm=dict(type='str', required=False, choices=['enable', 'disable']),
+ vm=dict(type='str', required=False, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ ad_obj = AD(argument_spec)
+ ad_obj.work()
+
+
+if __name__ == '__main__':
+ main()
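
Every writable module in the collection follows the same pattern seen here: set params['subcommand'], hand the merged parameters to get_connection(), and report changed=True only when the SDK returns State == 'Success'. A small sketch of that post-processing step; the result dicts below are illustrative, not real SDK output:

def to_result(sdk_result):
    # Mirror ad_group.run_command(): mark the task as changed only on Success.
    result = dict(sdk_result)
    if result.get('State') == 'Success':
        result['changed'] = True
    return result


print(to_result({'State': 'Success', 'Message': 'AD group updated'}))   # changed=True added
print(to_result({'State': 'Failure', 'Message': 'invalid group'}))      # returned unchanged
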
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ad_group_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ad_group_info.py
new file mode 100644
index 000000000..190532cb7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ad_group_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ad_group_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get Active Directory group information
+description:
+ - Get Active Directory group information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ad group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get active directory group information"
+ ieisystem.inmanage.ad_group_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class AD(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getadgroup'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ ad_obj = AD(argument_spec)
+ ad_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ad_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ad_info.py
new file mode 100644
index 000000000..1af4c105c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ad_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ad_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get Active Directory information
+description:
+ - Get Active Directory information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ad test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get active directory information"
+ ieisystem.inmanage.ad_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class AD(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getad'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ ad_obj = AD(argument_spec)
+ ad_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/adapter_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/adapter_info.py
new file mode 100644
index 000000000..ce4b0830d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/adapter_info.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: adapter_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get adapter information
+description:
+ - Get adapter information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Adapter test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get adapter information"
+ ieisystem.inmanage.adapter_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Adapter(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getnic'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ nic_result = self.results
+ if nic_result['State'] == "Success":
+ nic = nic_result['Message'][0]
+ sysadapter_len = nic.get('Maximum', 0)
+ idx = 0
+ sortedRes = dict()
+ if sysadapter_len > 0:
+ nic = nic.get('NIC', [])
+ List = []
+ while idx < sysadapter_len:
+ nic_info = nic[idx]
+ sysadapter_info = nic_info.get('Controller')
+ List.extend(sysadapter_info)
+ idx = idx + 1
+ sortedRes["State"] = "Success"
+ sortedRes["Message"] = List
+ else:
+ sortedRes["State"] = "Failure"
+ sortedRes["Message"] = "cannot get information"
+ self.module.exit_json(**sortedRes)
+ else:
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ adapter_obj = Adapter(argument_spec)
+ adapter_obj.work()
+
+
+if __name__ == '__main__':
+ main()
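
Unlike most of the *_info modules, adapter_info post-processes the getnic payload: it reads Maximum to learn how many NIC entries to walk and concatenates each entry's Controller list into a single Message list. A runnable sketch of that flattening with an invented sample payload; only the Maximum, NIC and Controller keys come from the module's code, the controller fields are made up:

def flatten_adapters(nic_result):
    """Reproduce adapter_info.show_result()'s flattening of the getnic payload (sketch)."""
    nic = nic_result['Message'][0]
    count = nic.get('Maximum', 0)
    if count <= 0:
        return {'State': 'Failure', 'Message': 'cannot get information'}
    controllers = []
    for nic_info in nic.get('NIC', [])[:count]:
        controllers.extend(nic_info.get('Controller'))
    return {'State': 'Success', 'Message': controllers}


sample = {
    'State': 'Success',
    'Message': [{
        'Maximum': 2,
        'NIC': [
            {'Controller': [{'Model': 'X722', 'Ports': 2}]},   # invented values
            {'Controller': [{'Model': 'CX-5', 'Ports': 2}]},
        ],
    }],
}
print(flatten_adapters(sample))
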
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/add_ldisk.py b/ansible_collections/ieisystem/inmanage/plugins/modules/add_ldisk.py
new file mode 100644
index 000000000..ec33ac00a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/add_ldisk.py
@@ -0,0 +1,221 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: add_ldisk
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Create logical disk
+description:
+ - Create logical disk on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ ctrl_id:
+ description:
+ - Raid controller ID.
+ - Required when controller type is LSI, PMC or MV.
+ type: int
+ level:
+ description:
+ - RAID Level, 0 - RAID0, 1 - RAID1, 5 - RAID5, 6 - RAID6, 10 - RAID10.
+ - Required when controller type is LSI or PMC.
+ choices: [0, 1, 5, 6, 10]
+ type: int
+ size:
+ description:
+ - Strip Size, 0 - 32k, 1 - 64k, 2 - 128k, 3 - 256k, 4 - 512k, 5 - 1024k.
+ - Required when controller type is LSI, PMC or MV.
+ - When the controller type is MV, size is [0, 1].
+ - When the controller type is LSI or PMC, size is [1, 2, 3, 4, 5].
+ choices: [0, 1, 2, 3, 4, 5]
+ type: int
+ access:
+ description:
+ - Access Policy, 1 - Read Write, 2 - Read Only, 3 - Blocked.
+ - Required when controller type is LSI.
+ choices: [1, 2, 3]
+ type: int
+ r:
+ description:
+ - Read Policy, 1 - Read Ahead, 2 - No Read Ahead.
+ - Required when controller type is LSI.
+ choices: [1, 2]
+ type: int
+ w:
+ description:
+ - Write Policy, 1 - Write Through, 2 - Write Back, 3 - Write caching ok if bad BBU.
+ - Required when controller type is LSI.
+ choices: [1, 2, 3]
+ type: int
+ io:
+ description:
+ - IO Policy, 1 - Direct IO, 2 - Cached IO.
+ - Required when controller type is LSI.
+ choices: [1, 2]
+ type: int
+ cache:
+ description:
+ - Drive Cache, 1 - Unchanged, 2 - Enabled, 3 - Disabled.
+ - Required when controller type is LSI.
+ choices: [1, 2, 3]
+ type: int
+ init:
+ description:
+ - Init State, 1 - No Init, 2 - Quick Init, 3 - Full Init.
+ - Required when controller type is LSI.
+ choices: [1, 2, 3]
+ type: int
+ select:
+ description:
+ - Select Size, from 1 to 100.
+ - Required when controller type is LSI.
+ type: int
+ slot:
+ description:
+ - Slot number. Enter multiple slot numbers as a list, for example 0, 1, 2.
+ - Required when controller type is LSI or PMC.
+ type: list
+ elements: int
+ accelerator:
+ description:
+ - Driver accelerator, 1 - 1h, 2 - 2h, 3 - 3h.
+ - Required when controller type is PMC.
+ choices: [1, 2, 3]
+ type: int
+ vname:
+ description:
+ - Virtual drive name.
+ - Required when controller type is PMC or MV.
+ - Required when the server model is M7.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Add ldisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add LSI ldisk"
+ ieisystem.inmanage.add_ldisk:
+ ctrl_id: 0
+ level: 1
+ size: 1
+ access: 1
+ r: 1
+ w: 1
+ io: 1
+ cache: 1
+ init: 2
+ select: 10
+ slot: 0,1
+ provider: "{{ inmanage }}"
+
+ - name: "Add PMC ldisk"
+ ieisystem.inmanage.add_ldisk:
+ ctrl_id: 0
+ level: 1
+ size: 1
+ accelerator: 1
+ slot: 0,1
+ vname: "test"
+ provider: "{{ inmanage }}"
+
+ - name: "Add MV ldisk"
+ ieisystem.inmanage.add_ldisk:
+ ctrl_id: 0
+ size: 1
+ vname: "test"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Disk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'addldisk'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ ctrl_id=dict(type='int', required=False),
+ level=dict(type='int', required=False, choices=[0, 1, 5, 6, 10]),
+ size=dict(type='int', required=False, choices=[0, 1, 2, 3, 4, 5]),
+ access=dict(type='int', required=False, choices=[1, 2, 3]),
+ r=dict(type='int', required=False, choices=[1, 2]),
+ w=dict(type='int', required=False, choices=[1, 2, 3]),
+ io=dict(type='int', required=False, choices=[1, 2]),
+ cache=dict(type='int', required=False, choices=[1, 2, 3]),
+ init=dict(type='int', required=False, choices=[1, 2, 3]),
+ select=dict(type='int', required=False),
+ slot=dict(type='list', elements='int', required=False),
+ accelerator=dict(type='int', required=False, choices=[1, 2, 3]),
+ vname=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Disk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
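
The documentation above encodes a constraint the argument spec cannot express on its own: MV controllers accept a strip size of 0 or 1, while LSI and PMC accept 1 through 5. A hypothetical pre-flight check along those lines, not part of the module; the controller type is not a module parameter and would have to be known by the playbook author:

# Hypothetical helper mirroring the documented strip-size constraint per controller type.
VALID_SIZE = {
    'MV': {0, 1},
    'LSI': {1, 2, 3, 4, 5},
    'PMC': {1, 2, 3, 4, 5},
}


def size_is_valid(controller_type, size):
    return size in VALID_SIZE.get(controller_type, set())


print(size_is_valid('MV', 1))    # True
print(size_is_valid('MV', 3))    # False
print(size_is_valid('LSI', 3))   # True
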
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/alert_policy_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/alert_policy_info.py
new file mode 100644
index 000000000..f13258cf3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/alert_policy_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: alert_policy_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get alert policy
+description:
+ - Get alert policy on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Alert test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get alert policy"
+ ieisystem.inmanage.alert_policy_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SNMP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getalertpolicy'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ snmp_obj = SNMP(argument_spec)
+ snmp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/audit_log_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/audit_log_info.py
new file mode 100644
index 000000000..0d0c57a4d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/audit_log_info.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: audit_log_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BMC audit log information
+description:
+ - Get BMC audit log information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options:
+ log_time:
+ description:
+ - Get logs after the specified date and time; the format is YYYY-MM-DDTHH:MM+HH:MM, for example 2019-06-27T12:30+08:00.
+ type: str
+ count:
+ description:
+ - Get the most recent log of a specified number.
+ type: int
+ audit_file:
+ description:
+ - Store logs to a file.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bmc audit log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc audit log information"
+ ieisystem.inmanage.audit_log_info:
+ log_time: "2020-06-01T12:30+08:00"
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc audit log information"
+ ieisystem.inmanage.audit_log_info:
+ count: 30
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc audit log information"
+ ieisystem.inmanage.audit_log_info:
+ audit_file: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class AuditLog(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getauditlog'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ log_time=dict(type='str', required=False),
+ count=dict(type='int', required=False),
+ audit_file=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = AuditLog(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
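
log_time uses an ISO-8601-like format with an explicit UTC offset (YYYY-MM-DDTHH:MM+HH:MM). On the Python 3.7+ baseline the collection declares, such a string parses directly with datetime.strptime, which a playbook author could use to build or validate the value before passing it in; a short sketch:

from datetime import datetime


def parse_log_time(value):
    """Parse the documented log_time format, e.g. 2020-06-01T12:30+08:00 (sketch)."""
    return datetime.strptime(value, "%Y-%m-%dT%H:%M%z")


stamp = parse_log_time("2020-06-01T12:30+08:00")
print(stamp.isoformat())    # 2020-06-01T12:30:00+08:00
print(stamp.utcoffset())    # 8:00:00
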
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/auto_capture_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/auto_capture_info.py
new file mode 100644
index 000000000..774617c6e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/auto_capture_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: auto_capture_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get auto capture screen information
+description:
+ - Get auto capture screen information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get auto capture screen information"
+ ieisystem.inmanage.auto_capture_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Screen(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getscreen'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ screen_obj = Screen(argument_spec)
+ screen_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/backplane_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/backplane_info.py
new file mode 100644
index 000000000..23746d036
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/backplane_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: backplane_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get disk backplane information
+description:
+ - Get disk backplane information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Disk backplane test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get backplane information"
+ ieisystem.inmanage.backplane_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Backplane(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getbackplane'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ back_obj = Backplane(argument_spec)
+ back_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/backup.py b/ansible_collections/ieisystem/inmanage/plugins/modules/backup.py
new file mode 100644
index 000000000..8f5c077d8
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/backup.py
@@ -0,0 +1,120 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: backup
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Backup server settings
+description:
+ - Backup server settings on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ bak_file:
+ description:
+ - Backup file or bak folder.
+ required: true
+ type: str
+ item:
+ description:
+ - Export item.
+ - The values for the M5 model are 'all', 'network', 'service', 'ntp', 'snmptrap', 'dns', 'smtp', 'ad', 'ldap', 'user', 'bios'.
+ - The values for the M6 model are 'all', 'network', 'service', 'ntp', 'snmptrap', 'kvm', 'ipmi', 'authentication', 'syslog'.
+ - The values for the M7 model are 'all', 'network', 'service', 'syslog', 'ncsi'.
+ choices: ['all', 'network', 'service', 'ntp', 'snmptrap', 'dns', 'smtp', 'ad', 'ldap',
+ 'user', 'bios', 'kvm', 'ipmi', 'authentication', 'syslog', 'ncsi']
+ required: true
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Backup test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Backup server settings"
+ ieisystem.inmanage.backup:
+ bak_file: "/home/wbs/"
+ item: "all"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Backup(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'backup'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ bak_file=dict(type='str', required=True),
+ item=dict(type='str', required=True, choices=['all', 'network', 'service', 'ntp', 'snmptrap', 'dns', 'smtp', 'ad',
+ 'ldap', 'user', 'bios', 'kvm', 'ipmi', 'authentication', 'syslog', 'ncsi']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ backup_obj = Backup(argument_spec)
+ backup_obj.work()
+
+
+if __name__ == '__main__':
+ main()
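
The valid item values depend on the server model, which the single choices list in the argument spec cannot capture by itself. A hypothetical lookup table mirroring the per-model lists from the description, not part of the module:

# Hypothetical helper mirroring the documented per-model export items.
ITEMS_BY_MODEL = {
    'M5': ['all', 'network', 'service', 'ntp', 'snmptrap', 'dns', 'smtp', 'ad', 'ldap', 'user', 'bios'],
    'M6': ['all', 'network', 'service', 'ntp', 'snmptrap', 'kvm', 'ipmi', 'authentication', 'syslog'],
    'M7': ['all', 'network', 'service', 'syslog', 'ncsi'],
}


def item_supported(model, item):
    return item in ITEMS_BY_MODEL.get(model, [])


print(item_supported('M5', 'bios'))   # True
print(item_supported('M7', 'bios'))   # False
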
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/bios_export.py b/ansible_collections/ieisystem/inmanage/plugins/modules/bios_export.py
new file mode 100644
index 000000000..8b783f505
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/bios_export.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: bios_export
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Export BIOS config
+description:
+ - Export BIOS config on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ file_url:
+ description:
+ - File path for the exported BIOS config; the suffix must be .json or .conf, in FILEURI format, for example "/directory/filename".
+ required: true
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bios test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Export bios config"
+ ieisystem.inmanage.bios_export:
+ file_url: "/home/wbs/bios.conf"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BIOS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'exportbioscfg'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ file_url=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ bios_obj = BIOS(argument_spec)
+ bios_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/bios_import.py b/ansible_collections/ieisystem/inmanage/plugins/modules/bios_import.py
new file mode 100644
index 000000000..938979a8e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/bios_import.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: bios_import
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Import BIOS config
+description:
+ - Import BIOS config on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ file_url:
+ description:
+ - File path of the BIOS config to import; the suffix must be .json or .conf, in FILEURI format, for example "/directory/filename".
+ required: true
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bios test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Import bios config"
+ ieisystem.inmanage.bios_import:
+ file_url: "/home/wbs/bios.conf"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BIOS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'importbioscfg'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ file_url=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ bios_obj = BIOS(argument_spec)
+ bios_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/bios_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/bios_info.py
new file mode 100644
index 000000000..ed0fc37a4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/bios_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: bios_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BIOS setup
+description:
+ - Get BIOS setup on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bios test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bios setup"
+ ieisystem.inmanage.bios_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BIOS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getbios'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ bios_obj = BIOS(argument_spec)
+ bios_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/bmc_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/bmc_info.py
new file mode 100644
index 000000000..a003a63b9
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/bmc_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: bmc_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BMC information
+description:
+ - Get BMC information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bmc info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get BMC information"
+ ieisystem.inmanage.bmc_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BMCInfo(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getbmcinfo'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ bmc_obj = BMCInfo(argument_spec)
+ bmc_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/boot_image_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/boot_image_info.py
new file mode 100644
index 000000000..fca573381
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/boot_image_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: boot_image_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BMC boot image information
+description:
+ - Get BMC boot image information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Boot image test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc boot image information"
+ ieisystem.inmanage.boot_image_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Image(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getbootimage'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ image_obj = Image(argument_spec)
+ image_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/boot_option_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/boot_option_info.py
new file mode 100644
index 000000000..c62e6daa3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/boot_option_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: boot_option_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BIOS boot options
+description:
+ - Get BIOS boot options on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Boot test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bios boot option"
+ ieisystem.inmanage.boot_option_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BIOS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsysboot'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ bios_obj = BIOS(argument_spec)
+ bios_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/clear_audit_log.py b/ansible_collections/ieisystem/inmanage/plugins/modules/clear_audit_log.py
new file mode 100644
index 000000000..fc4319b6f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/clear_audit_log.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: clear_audit_log
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Clear BMC audit log
+description:
+ - Clear BMC audit log on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Clear BMC audit log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Clear BMC audit log "
+ ieisystem.inmanage.clear_audit_log:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class AuditLog(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'clearauditlog'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = AuditLog(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/clear_event_log.py b/ansible_collections/ieisystem/inmanage/plugins/modules/clear_event_log.py
new file mode 100644
index 000000000..ffdc6f4ef
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/clear_event_log.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: clear_event_log
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Clear event log
+description:
+ - Clear event log on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Clear event log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Clear event log"
+ ieisystem.inmanage.clear_event_log:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class EventLog(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'cleareventlog'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = EventLog(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/clear_system_log.py b/ansible_collections/ieisystem/inmanage/plugins/modules/clear_system_log.py
new file mode 100644
index 000000000..03af9a950
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/clear_system_log.py
@@ -0,0 +1,115 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: clear_system_log
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Clear BMC system log
+description:
+ - Clear BMC system log on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ level:
+ description:
+ - Log level.
+ default: alert
+ choices: ['alert', 'critical', 'error', 'notice', 'warning', 'debug', 'emergency', 'info', 'all']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Clear BMC system log test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Clear BMC system log"
+ ieisystem.inmanage.clear_system_log:
+ level: "alert"
+ provider: "{{ inmanage }}"
+
+ - name: "Clear BMC system log"
+ ieisystem.inmanage.clear_system_log:
+ level: "all"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SystemLog(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'clearsystemlog'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ level=dict(type='str', default='alert', choices=['alert', 'critical', 'error', 'notice', 'warning', 'debug', 'emergency', 'info', 'all']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = SystemLog(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/collect_blackbox.py b/ansible_collections/ieisystem/inmanage/plugins/modules/collect_blackbox.py
new file mode 100644
index 000000000..2dbd30e78
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/collect_blackbox.py
@@ -0,0 +1,108 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: collect_blackbox
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Collect blackbox log
+description:
+ - Collect blackbox log on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ file_url:
+ description:
+ - File download path.
+ required: true
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Collect blackbox test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Collect blackbox log"
+ ieisystem.inmanage.collect_blackbox:
+ file_url: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Blackbox(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'collectblackbox'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ file_url=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = Blackbox(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/collect_log.py b/ansible_collections/ieisystem/inmanage/plugins/modules/collect_log.py
new file mode 100644
index 000000000..3eaac30b4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/collect_log.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: collect_log
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Collect logs
+description:
+ - Collect logs on ieisystem Server; the collection takes about 5 minutes.
+notes:
+ - Does not support C(check_mode).
+options:
+ file_url:
+ description:
+ - File download path, or a path that includes the file name, for example filepath/filename.tar.
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Collect test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Collect logs"
+ ieisystem.inmanage.collect_log:
+ file_url: "/home/wbs/test.tar"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Log(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'collect'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ file_url=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = Log(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
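The file_url option accepts either a bare download directory or a full path ending in a file name; the example above shows the latter. A minimal sketch of the directory-only form, reusing the provider variable from the playbook above (the path is illustrative):

    - name: "Collect logs to a download directory"
      ieisystem.inmanage.collect_log:
        file_url: "/home/wbs"
        provider: "{{ inmanage }}"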
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/connect_media_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/connect_media_info.py
new file mode 100644
index 000000000..94515794d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/connect_media_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: connect_media_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get remote image redirection information
+description:
+ - Get remote image redirection information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Connect media test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get remote images redirection information"
+ ieisystem.inmanage.connect_media_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Connect(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getconnectmedia'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ connect_obj = Connect(argument_spec)
+ connect_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/cpu_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/cpu_info.py
new file mode 100644
index 000000000..ff2594732
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/cpu_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: cpu_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get CPU information
+description:
+ - Get CPU information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: CPU test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get cpu information"
+ ieisystem.inmanage.cpu_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class CPU(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getcpu'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ cpu_obj = CPU(argument_spec)
+ cpu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/del_session.py b/ansible_collections/ieisystem/inmanage/plugins/modules/del_session.py
new file mode 100644
index 000000000..95dd0f806
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/del_session.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: del_session
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Delete session
+description:
+ - Delete session on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ sid:
+ description:
+ - Session ID; input "all" to delete all sessions.
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Delete session test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Delete session"
+ ieisystem.inmanage.del_session:
+ sid: "223"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Session(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'delsession'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ sid=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ session_obj = Session(argument_spec)
+ session_obj.work()
+
+
+if __name__ == '__main__':
+ main()
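Besides a single numeric session ID, the sid option documents "all" for clearing every session at once; a sketch reusing the provider pattern from the example above:

    - name: "Delete all sessions"
      ieisystem.inmanage.del_session:
        sid: "all"
        provider: "{{ inmanage }}"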
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/dns_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/dns_info.py
new file mode 100644
index 000000000..07de2989f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/dns_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: dns_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get DNS information
+description:
+ - Get DNS information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: DNS test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get dns information"
+ ieisystem.inmanage.dns_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class DNS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getdns'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ dns_obj = DNS(argument_spec)
+ dns_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/download_auto_screenshot.py b/ansible_collections/ieisystem/inmanage/plugins/modules/download_auto_screenshot.py
new file mode 100644
index 000000000..7d44166ab
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/download_auto_screenshot.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: download_auto_screenshot
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Download auto screenshots
+description:
+ - Download auto screenshots on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ file_url:
+ description:
+ - Screen capture file path.
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Download auto screenshots"
+ ieisystem.inmanage.download_auto_screenshot:
+ file_url: "/home/wbs/screen"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Screen(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'downscreen'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ file_url=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ screen_obj = Screen(argument_spec)
+ screen_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/download_manual_screenshot.py b/ansible_collections/ieisystem/inmanage/plugins/modules/download_manual_screenshot.py
new file mode 100644
index 000000000..be98cba9f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/download_manual_screenshot.py
@@ -0,0 +1,107 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: download_manual_screenshot
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Download manual screenshots
+description:
+ - Download manual screenshots on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ file_url:
+ description:
+ - Screen capture file path.
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Download manual screenshots"
+ ieisystem.inmanage.download_manual_screenshot:
+ file_url: "/home/wbs/screen"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Screen(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'downscreenmanual'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ file_url=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ screen_obj = Screen(argument_spec)
+ screen_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ad.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ad.py
new file mode 100644
index 000000000..3d6a2a23a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ad.py
@@ -0,0 +1,163 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ad
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set active directory information
+description:
+ - Set active directory information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ enable:
+ description:
+ - Active Directory Authentication Status.
+ choices: ['enable', 'disable']
+ type: str
+ ssl_enable:
+ description:
+ - Active Directory SSL Status.
+ choices: ['enable', 'disable']
+ type: str
+ name:
+ description:
+ - Secret Username. Specify the Username of an administrator of the Active Directory Server.
+ type: str
+ code:
+ description:
+ - Secret Password. Specify the Password of the administrator.
+ type: str
+ timeout:
+ description:
+ - The Time Out configuration (15-300).
+ - Only the M5 model supports this parameter.
+ type: int
+ domain:
+ description:
+ - User Domain Name. Specify the Domain Name for the user, for example MyDomain.com.
+ type: str
+ addr1:
+ description:
+ - Domain Controller Server Address1. Enter the IP address of Active Directory server.
+ type: str
+ addr2:
+ description:
+ - Domain Controller Server Address2. Enter the IP address of Active Directory server.
+ type: str
+ addr3:
+ description:
+ - Domain Controller Server Address3. Enter the IP address of Active Directory server.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ad test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set active directory information"
+ ieisystem.inmanage.edit_ad:
+ enable: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set active directory information"
+ ieisystem.inmanage.edit_ad:
+ enable: "enable"
+ name: "test"
+ code: "123456"
+ timeout: 120
+ domain: "test.com"
+ addr1: "100.2.2.2"
+ addr2: "100.2.2.3"
+ addr3: "100.2.2.4"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class AD(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setad'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ enable=dict(type='str', required=False, choices=['enable', 'disable']),
+ ssl_enable=dict(type='str', required=False, choices=['enable', 'disable']),
+ name=dict(type='str', required=False),
+ code=dict(type='str', required=False),
+ timeout=dict(type='int', required=False),
+ domain=dict(type='str', required=False),
+ addr1=dict(type='str', required=False),
+ addr2=dict(type='str', required=False),
+ addr3=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ ad_obj = AD(argument_spec)
+ ad_obj.work()
+
+
+if __name__ == '__main__':
+ main()
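The ssl_enable option is declared but not exercised in the examples above; a sketch that toggles Active Directory SSL alongside the authentication switch, with illustrative credential and address values:

    - name: "Set active directory information with SSL enabled"
      ieisystem.inmanage.edit_ad:
        enable: "enable"
        ssl_enable: "enable"
        name: "test"
        code: "123456"
        domain: "test.com"
        addr1: "100.2.2.2"
        provider: "{{ inmanage }}"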
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_alert_policy.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_alert_policy.py
new file mode 100644
index 000000000..69813479f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_alert_policy.py
@@ -0,0 +1,156 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_alert_policy
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set alert policy
+description:
+ - Set alert policy on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ id:
+ description:
+ - Alert ID. Customize the channel for sending alarms in Trap.
+ - The values for M5 models are 1, 2, 3.
+ - The values for M6 models are 1, 2, 3, 4.
+ choices: [1, 2, 3, 4]
+ required: true
+ type: int
+ status:
+ description:
+ - Alert policy status. Whether to enable the receiving end for sending messages in trap mode.
+ choices: ['enable', 'disable']
+ type: str
+ type:
+ description:
+ - Alert Type.
+ - Only the M5 model supports this parameter.
+ choices: ['snmp', 'email', 'snmpdomain']
+ type: str
+ destination:
+ description:
+ - Alert destination. The address of the server receiving trap information sent by Trap.
+ - When type is snmp, specify an IP address.
+ - When type is email, specify a username.
+ - When type is snmpdomain, specify a domain.
+ type: str
+ channel:
+ description:
+ - LAN Channel.
+ - Only the M5 model supports this parameter.
+ choices: ['shared', 'dedicated']
+ type: str
+ trap_port:
+ description:
+ - SNMP trap port (1-65535).
+ - Only the M6 model supports this parameter.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Alert policy test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set alert policy"
+ ieisystem.inmanage.edit_alert_policy:
+ id: 1
+ status: "enable"
+ type: "snmp"
+ destination: "100.2.2.2"
+ channel: "shared"
+ provider: "{{ inmanage }}"
+
+ - name: "Set alert policy"
+ ieisystem.inmanage.edit_alert_policy:
+ id: 1
+ status: "disable"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SNMP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setalertpolicy'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ id=dict(type='int', required=True, choices=[1, 2, 3, 4]),
+ status=dict(type='str', required=False, choices=['enable', 'disable']),
+ type=dict(type='str', required=False, choices=['snmp', 'email', 'snmpdomain']),
+ destination=dict(type='str', required=False),
+ channel=dict(type='str', required=False, choices=['shared', 'dedicated']),
+ trap_port=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ snmp_obj = SNMP(argument_spec)
+ snmp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
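On M6 models the trap destination port is set through trap_port rather than the M5-only type and channel options; a sketch under that assumption (the port value is illustrative):

    - name: "Set alert policy on an M6 model"
      ieisystem.inmanage.edit_alert_policy:
        id: 4
        status: "enable"
        destination: "100.2.2.2"
        trap_port: 162
        provider: "{{ inmanage }}"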
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_auto_capture.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_auto_capture.py
new file mode 100644
index 000000000..6158ff27c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_auto_capture.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_auto_capture
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set auto capture screen
+description:
+ - Set auto capture screen on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ status:
+ description:
+ - Capture status.
+ choices: ['enable', 'disable']
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set auto capture screen"
+ ieisystem.inmanage.edit_auto_capture:
+ status: "enable"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Screen(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setscreen'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ status=dict(type='str', required=True, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ screen_obj = Screen(argument_spec)
+ screen_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_bios.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_bios.py
new file mode 100644
index 000000000..0badfd760
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_bios.py
@@ -0,0 +1,134 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_bios
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set BIOS setup attributes
+description:
+ - Set BIOS setup attributes on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ list:
+ description:
+ - Show attribute name and configurable value.
+ default: False
+ type: bool
+ attribute:
+ description:
+ - BIOS setup option.
+ - Required when I(list=False) and I(file_url=None).
+ type: str
+ value:
+ description:
+ - BIOS setup option value.
+ - Required when I(list=False) and I(file_url=None).
+ type: str
+ file_url:
+ description:
+ - BIOS option file, format "/directory/filename". Mutually exclusive with I(attribute) and I(value).
+ - I(attribute) must be used together with I(value).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bios test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set bios setup"
+ ieisystem.inmanage.edit_bios:
+ attribute: "VMX"
+ value: "Disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set bios setup"
+ ieisystem.inmanage.edit_bios:
+ attribute: "VMX"
+ value: "Enable"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BIOS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setbios'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ list=dict(type='bool', required=False, default=False),
+ attribute=dict(type='str', required=False),
+ value=dict(type='str', required=False),
+ file_url=dict(type='str', required=False)
+ )
+ argument_spec.update(inmanage_argument_spec)
+ bios_obj = BIOS(argument_spec)
+ bios_obj.work()
+
+
+if __name__ == '__main__':
+ main()
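The list option only prints attribute names and their configurable values, which the set-style examples above do not show; a minimal sketch:

    - name: "List BIOS setup attributes and configurable values"
      ieisystem.inmanage.edit_bios:
        list: true
        provider: "{{ inmanage }}"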
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_image.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_image.py
new file mode 100644
index 000000000..356987a58
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_image.py
@@ -0,0 +1,111 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_boot_image
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set BMC boot image
+description:
+ - Set BMC boot image on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ image:
+ description:
+ - BMC boot image.
+ - 0-Higher firmware version; 1-Image 1; 2-Image 2; 3-Lower firmware version; 4-Latest updated firmware; 5-Not latest updated firmware.
+ choices: [0, 1, 2, 3, 4, 5]
+ type: int
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Boot image test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set bmc boot image"
+ ieisystem.inmanage.edit_boot_image:
+ image: 2
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Image(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setbootimage'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ image=dict(type='int', required=True, choices=[0, 1, 2, 3, 4, 5]),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ image_obj = Image(argument_spec)
+ image_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_option.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_option.py
new file mode 100644
index 000000000..88e2865c1
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_boot_option.py
@@ -0,0 +1,123 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_boot_option
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set BIOS boot options
+description:
+ - Set BIOS boot options on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ device:
+ description:
+ - Boot device.
+ choices: ['none', 'HDD', 'PXE', 'CD', 'BIOSSETUP']
+ type: str
+ effective:
+ description:
+ - Effective, once or continuous.
+ choices: ['Once', 'Continuous']
+ type: str
+ mode:
+ description:
+ - Boot type.
+ choices: ['Legacy', 'UEFI']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Boot test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set bios boot option"
+ ieisystem.inmanage.edit_boot_option:
+ device: "PXE"
+ effective: "Once"
+ mode: "Legacy"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class BIOS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setsysboot'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ device=dict(type='str', required=False, choices=['none', 'HDD', 'PXE', 'CD', 'BIOSSETUP']),
+ effective=dict(type='str', required=False, choices=['Once', 'Continuous']),
+ mode=dict(type='str', required=False, choices=['Legacy', 'UEFI'])
+ )
+ argument_spec.update(inmanage_argument_spec)
+ bios_obj = BIOS(argument_spec)
+ bios_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_connect_media.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_connect_media.py
new file mode 100644
index 000000000..018cab601
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_connect_media.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_connect_media
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Start/Stop virtual media Image
+description:
+ - Start/Stop virtual media Image on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ image_type:
+ description:
+ - Virtual media type.
+ - Only the M5 model supports this parameter.
+ choices: ['CD', 'FD', 'HD']
+ type: str
+ required: true
+ op_type:
+ description:
+ - Start or stop media.
+ choices: ['start', 'stop']
+ type: str
+ required: true
+ image_name:
+ description:
+ - Image name.
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Connect media test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set remote image redirection"
+ ieisystem.inmanage.edit_connect_media:
+ image_type: "CD"
+ op_type: "start"
+ image_name: "aa.iso"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Connect(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setconnectmedia'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ image_type=dict(type='str', required=True, choices=['CD', 'FD', 'HD']),
+ op_type=dict(type='str', required=True, choices=['start', 'stop']),
+ image_name=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ connect_obj = Connect(argument_spec)
+ connect_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_dns.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_dns.py
new file mode 100644
index 000000000..aa4efd8ad
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_dns.py
@@ -0,0 +1,231 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_dns
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set DNS information
+description:
+ - Set DNS information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ dns_status:
+ description:
+ - DNS status.
+ choices: ['enable', 'disable']
+ type: str
+ host_cfg:
+ description:
+ - Host Settings. Select whether the host name will be configured manually or automatically.
+ choices: ['manual', 'auto']
+ type: str
+ host_name:
+ description:
+ - Host Name.
+ - Required when I(host_cfg=manual).
+ type: str
+ domain_manual:
+ description:
+ - Domain Settings. Select whether the domain interface will be configured manually or automatically.
+ choices: ['manual', 'auto']
+ type: str
+ domain_iface:
+ description:
+ - Network Interface, input like 'eth0_v4', 'eth0_v6', 'eth1_v4', 'eth1_v6', 'bond0_v4', 'bond0_v6'.
+ - Required when I(domain_manual=auto).
+ type: str
+ domain_name:
+ description:
+ - Domain Name.
+ - Required when I(domain_manual=manual).
+ type: str
+ dns_manual:
+ description:
+ - DNS Settings. Select whether the DNS interface will be configured manually or automatically.
+ choices: ['manual', 'auto']
+ type: str
+ dns_iface:
+ description:
+ - DNS Interface, input like 'eth0', 'eth1', 'bond0'.
+ - Required when I(dns_manual=auto).
+ type: str
+ dns_priority:
+ description:
+ - IP Priority.
+ - Required when I(dns_manual=auto).
+ choices: ['4', '6']
+ type: str
+ dns_server1:
+ description:
+ - DNS Server1 IPv4 or IPv6 address.
+ - Required when I(dns_manual=manual).
+ type: str
+ dns_server2:
+ description:
+ - DNS Server2 IPv4 or IPv6 address.
+ - Required when I(dns_manual=manual).
+ type: str
+ dns_server3:
+ description:
+ - DNS Server3 IPv4 or IPv6 address.
+ - Required when I(dns_manual=manual).
+ type: str
+ register_status1:
+ description:
+ - BMC register status 1.
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ registration_method1:
+ description:
+ - Registration method 1.
+ - Only the M6 model supports this parameter.
+ - Required when I(register_status1=enable).
+ choices: ['nsupdate', 'dhcp', 'hostname']
+ type: str
+ register_status2:
+ description:
+ - BMC register status 2.
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ registration_method2:
+ description:
+ - Registration method 2.
+ - Only the M6 model supports this parameter.
+ - Required when I(register_status2=enable).
+ choices: ['nsupdate', 'dhcp', 'hostname']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: DNS test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set dns information"
+ ieisystem.inmanage.edit_dns:
+ dns_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set dns information"
+ ieisystem.inmanage.edit_dns:
+ dns_status: "enable"
+ host_cfg: "manual"
+ host_name: "123456789"
+ domain_manual: "auto"
+ domain_iface: "eth0_v4"
+ dns_manual: "manual"
+ dns_server1: "100.2.2.2"
+ dns_server2: "100.2.2.3"
+ dns_server3: "100.2.2.4"
+ provider: "{{ inmanage }}"
+
+ - name: "Set dns information"
+ ieisystem.inmanage.edit_dns:
+ dns_status: "enable"
+ host_cfg: "manual"
+ host_name: "123456789"
+ domain_manual: "manual"
+ domain_name: "test.com"
+ dns_manual: "auto"
+ dns_iface: "eth0"
+ dns_priority: "4"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class DNS(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setdns'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ dns_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ host_cfg=dict(type='str', required=False, choices=['manual', 'auto']),
+ host_name=dict(type='str', required=False),
+ domain_manual=dict(type='str', required=False, choices=['manual', 'auto']),
+ domain_iface=dict(type='str', required=False),
+ domain_name=dict(type='str', required=False),
+ dns_manual=dict(type='str', required=False, choices=['manual', 'auto']),
+ dns_iface=dict(type='str', required=False),
+ dns_priority=dict(type='str', required=False, choices=['4', '6']),
+ dns_server1=dict(type='str', required=False),
+ dns_server2=dict(type='str', required=False),
+ dns_server3=dict(type='str', required=False),
+ register_status1=dict(type='str', required=False, choices=['enable', 'disable']),
+ registration_method1=dict(type='str', required=False, choices=['nsupdate', 'dhcp', 'hostname']),
+ register_status2=dict(type='str', required=False, choices=['enable', 'disable']),
+ registration_method2=dict(type='str', required=False, choices=['nsupdate', 'dhcp', 'hostname']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ dns_obj = DNS(argument_spec)
+ dns_obj.work()
+
+
+if __name__ == '__main__':
+ main()
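The M6-only register_status and registration_method pairs are documented above but not exercised in the examples; a sketch enabling BMC registration for interface 1 via nsupdate, under the assumption that the remaining DNS settings keep their current values:

    - name: "Set dns information with BMC registration (M6 only)"
      ieisystem.inmanage.edit_dns:
        dns_status: "enable"
        register_status1: "enable"
        registration_method1: "nsupdate"
        provider: "{{ inmanage }}"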
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_event_log_policy.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_event_log_policy.py
new file mode 100644
index 000000000..f18774796
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_event_log_policy.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_event_log_policy
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set event log policy
+description:
+ - Set event log policy on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ policy:
+ description:
+ - Event log policy.
+ choices: ['Linear', 'Circular']
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Event log policy test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set event log policy"
+ ieisystem.inmanage.edit_event_log_policy:
+ policy: "Linear"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Log(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'seteventlogpolicy'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ policy=dict(type='str', required=True, choices=['Linear', 'Circular']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = Log(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_fan.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_fan.py
new file mode 100644
index 000000000..1f36d23e5
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_fan.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_fan
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set fan information
+description:
+ - Set fan information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ mode:
+ description:
+ - Control mode, Manual or Automatic. Manual must be used with I(fan_speed).
+ choices: ['Automatic', 'Manual']
+ type: str
+ id:
+ description:
+ - Fan ID; 255 applies to all fans, otherwise 0-n.
+ type: int
+ fan_speed:
+ description:
+ - Fan speed (duty ratio), in the range 1-100.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Fan test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set fan information"
+ ieisystem.inmanage.edit_fan:
+ mode: "Automatic"
+ provider: "{{ inmanage }}"
+
+ - name: "Set fan information"
+ ieisystem.inmanage.edit_fan:
+ mode: "Manual"
+ id: 1
+ fan_speed: 80
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Fan(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'fancontrol'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
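+ # Per the DOCUMENTATION above, 'Manual' mode is meant to be combined with 'id'
+ # and 'fan_speed'; the spec below does not enforce this (no required_if), so the
+ # BMC side is assumed to validate the combination.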
+ argument_spec = dict(
+ mode=dict(type='str', required=False, choices=['Automatic', 'Manual']),
+ id=dict(type='int', required=False),
+ fan_speed=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ fan_obj = Fan(argument_spec)
+ fan_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_fru.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_fru.py
new file mode 100644
index 000000000..c89d5d60f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_fru.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_fru
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set FRU settings
+description:
+ - Set FRU settings on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ attribute:
+ description:
+ - CP is Chassis Part Number, CS is Chassis Serial, PM is Product Manufacturer.
+ - PPN is Product Part Number, PS is Product Serial, PN is Product Name.
+ - PV is Product Version, PAT is Product Asset Tag, BM is Board Mfg, BPN is Board Product Name.
+ - BS is Board Serial, BP is Board Part Number.
+ choices: ['CP', 'CS', 'PM', 'PPN', 'PS', 'PN', 'PV', 'PAT', 'BM', 'BPN', 'BS', 'BP']
+ required: true
+ type: str
+ value:
+ description:
+ - Set the value of attribute.
+ required: true
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Fru test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set Fru"
+ ieisystem.inmanage.edit_fru:
+ attribute: "CP"
+ value: "test"
+ provider: "{{ inmanage }}"
+
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class UID(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setfru'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
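+ # inmanage_argument_spec (from module_utils) is assumed to contribute the shared
+ # connection options (host, username, password / provider) common to all modules
+ # in this collection.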
+ argument_spec = dict(
+ attribute=dict(type='str', required=True, choices=['CP', 'CS', 'PM', 'PPN', 'PS', 'PN', 'PV', 'PAT', 'BM', 'BPN', 'BS', 'BP']),
+ value=dict(type='str', required=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ uid_obj = UID(argument_spec)
+ uid_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv4.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv4.py
new file mode 100644
index 000000000..95d33ffce
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv4.py
@@ -0,0 +1,159 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ipv4
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set ipv4 information
+description:
+ - Set ipv4 information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ interface_name:
+ description:
+ - Set interface name. Select the LAN interface to be configured.
+ choices: ['eth0', 'eth1', 'bond0']
+ required: true
+ type: str
+ ipv4_status:
+ description:
+ - Enable or disable IPV4.
+ choices: ['enable', 'disable']
+ type: str
+ ipv4_dhcp_enable:
+ description:
+ - Select C(dhcp) to configure the IPv4 address dynamically via Dynamic Host Configuration Protocol (DHCP), or C(static) to use the static settings below.
+ choices: ['dhcp', 'static']
+ type: str
+ ipv4_address:
+ description:
+ - If DHCP is disabled, specify a static IPv4 address to be configured for the selected interface.
+ - Required when I(ipv4_dhcp_enable=static).
+ type: str
+ ipv4_subnet:
+ description:
+ - If DHCP is disabled, specify a static Subnet Mask to be configured for the selected interface.
+ - Required when I(ipv4_dhcp_enable=static).
+ type: str
+ ipv4_gateway:
+ description:
+ - If DHCP is disabled, specify a static Default Gateway to be configured for the selected interface.
+ - Required when I(ipv4_dhcp_enable=static).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ipv4 test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ipv4 information"
+ ieisystem.inmanage.edit_ipv4:
+ interface_name: "eth0"
+ ipv4_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv4 information"
+ ieisystem.inmanage.edit_ipv4:
+ interface_name: "eth0"
+ ipv4_status: "enable"
+ ipv4_dhcp_enable: "dhcp"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv4 information"
+ ieisystem.inmanage.edit_ipv4:
+ interface_name: "eth0"
+ ipv4_status: "enable"
+ ipv4_dhcp_enable: "static"
+ ipv4_address: "100.2.36.10"
+ ipv4_subnet: "255.255.255.0"
+ ipv4_gateway: "100.2.36.1"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Network(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setipv4'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ interface_name=dict(type='str', required=True, choices=['eth0', 'eth1', 'bond0']),
+ ipv4_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ ipv4_dhcp_enable=dict(type='str', required=False, choices=['dhcp', 'static']),
+ ipv4_address=dict(type='str', required=False),
+ ipv4_subnet=dict(type='str', required=False),
+ ipv4_gateway=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ net_obj = Network(argument_spec)
+ net_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv6.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv6.py
new file mode 100644
index 000000000..283aadb2d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ipv6.py
@@ -0,0 +1,166 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ipv6
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set ipv6 information
+description:
+ - Set ipv6 information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ interface_name:
+ description:
+ - Set interface name. Select the LAN interface to be configured.
+ choices: ['eth0', 'eth1', 'bond0']
+ required: true
+ type: str
+ ipv6_status:
+ description:
+ - Enable or disable IPV6.
+ choices: ['enable', 'disable']
+ type: str
+ ipv6_dhcp_enable:
+ description:
+ - Select C(dhcp) to configure the IPv6 address dynamically via Dynamic Host Configuration Protocol (DHCP), or C(static) to use the static settings below.
+ choices: ['dhcp', 'static']
+ type: str
+ ipv6_address:
+ description:
+ - If DHCP is disabled, specify a static IPv6 address to be configured for the selected interface.
+ - Required when I(ipv6_dhcp_enable=static).
+ type: str
+ ipv6_index:
+ description:
+ - IPv6 index (0-15).
+ - Required when I(ipv6_dhcp_enable=static).
+ type: int
+ ipv6_prefix:
+ description:
+ - The subnet prefix length for the IPv6 settings (0-128).
+ - Required when I(ipv6_dhcp_enable=static).
+ type: int
+ ipv6_gateway:
+ description:
+ - If DHCP is disabled, specify a static Default Gateway to be configured for the selected interface.
+ - Required when I(ipv6_dhcp_enable=static).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ipv6 test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ipv6 information"
+ ieisystem.inmanage.edit_ipv6:
+ interface_name: "eth0"
+ ipv6_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv6 information"
+ ieisystem.inmanage.edit_ipv6:
+ interface_name: "eth0"
+ ipv6_status: "enable"
+ ipv6_dhcp_enable: "dhcp"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ipv6 information"
+ ieisystem.inmanage.edit_ipv6:
+ interface_name: "eth0"
+ ipv6_status: "enable"
+ ipv6_dhcp_enable: "static"
+ ipv6_address: "::ffff:100:2:36:10"
+ ipv6_index: 12
+ ipv6_prefix: 16
+ ipv6_gateway: "::"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Network(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setipv6'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ interface_name=dict(type='str', required=True, choices=['eth0', 'eth1', 'bond0']),
+ ipv6_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ ipv6_dhcp_enable=dict(type='str', required=False, choices=['dhcp', 'static']),
+ ipv6_address=dict(type='str', required=False),
+ ipv6_index=dict(type='int', required=False),
+ ipv6_prefix=dict(type='int', required=False),
+ ipv6_gateway=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ net_obj = Network(argument_spec)
+ net_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_kvm.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_kvm.py
new file mode 100644
index 000000000..bd711d4f5
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_kvm.py
@@ -0,0 +1,191 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_kvm
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set KVM
+description:
+ - Set KVM on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ client_type:
+ description:
+ - Client Type.
+ - Only the M6 model supports this parameter.
+ choices: ['vnc', 'viewer']
+ type: str
+ kvm_encryption:
+ description:
+ - Encrypt KVM packets.
+ choices: ['enable', 'disable']
+ type: str
+ media_attach:
+ description:
+ - Two types of VM attach mode are available.
+ - Attach immediately attaches Virtual Media to the server upon bootup.
+ - Auto attaches Virtual Media to the server only when a virtual media session is started.
+ - Only the M5 model supports this parameter.
+ choices: ['attach', 'auto']
+ type: str
+ keyboard_language:
+ description:
+ - Select the Keyboard Language.
+ - AD is Auto Detect, DA is Danish, NL-BE is Dutch Belgium, NL-NL is Dutch Netherland.
+ - GB is English UK, US is English US, FI is Finnish, FR-BE is French Belgium, FR is French France.
+ - DE is German Germany, DE-CH is German Switzerland, IT is Italian, JP is Japanese.
+ - NO is Norwegian, PT is Portuguese, ES is Spanish, SV is Swedish, TR_F is Turkish F, TR_Q is Turkish Q.
+ choices: ['AD', 'DA', 'NL-BE', 'NL-NL', 'GB', 'US', 'FI', 'FR-BE', 'FR', 'DE', 'DE-CH', 'IT', 'JP', 'ON', 'PT', 'EC', 'SV', 'TR_F', 'TR_Q']
+ type: str
+ retry_count:
+ description:
+ - Number of times to be retried in case a KVM failure occurs. Retry count ranges from 1 to 20.
+ - Only the M5 model supports this parameter.
+ type: int
+ retry_time_interval:
+ description:
+ - Retry time interval (5-30), in seconds.
+ - Only the M5 model supports this parameter.
+ type: int
+ local_monitor_off:
+ description:
+ - Server Monitor OFF Feature Status.
+ choices: ['enable', 'disable']
+ type: str
+ automatic_off:
+ description:
+ - Automatically turn off the server monitor when KVM launches.
+ choices: ['enable', 'disable']
+ type: str
+ non_secure:
+ description:
+ - Enable/disable Non Secure Connection Type.
+ - Only the M6 model supports this parameter.
+ - Required when I(client_type=vnc).
+ choices: ['enable', 'disable']
+ type: str
+ ssh_vnc:
+ description:
+ - Enable/disable VNC over SSH in BMC.
+ - Only the M6 model supports this parameter.
+ - Required when I(client_type=vnc).
+ choices: ['enable', 'disable']
+ type: str
+ stunnel_vnc:
+ description:
+ - Enable/disable VNC over Stunnel in BMC.
+ - Only the M6 model supports this parameter.
+ - Required when I(client_type=vnc).
+ choices: ['enable', 'disable']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: KVM test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set KVM"
+ ieisystem.inmanage.edit_kvm:
+ kvm_encryption: "enable"
+ media_attach: "auto"
+ keyboard_language: "AD"
+ retry_count: 13
+ retry_time_interval: 10
+ local_monitor_off: "enable"
+ automatic_off: "enable"
+ provider: "{{ inmanage }}"
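+
+ # Hypothetical M6/VNC example (values are illustrative, not from the original
+ # patch); per the option docs above, the VNC flags apply when client_type=vnc.
+ - name: "Set KVM client type to VNC (M6 only)"
+ ieisystem.inmanage.edit_kvm:
+ client_type: "vnc"
+ non_secure: "disable"
+ ssh_vnc: "enable"
+ stunnel_vnc: "disable"
+ provider: "{{ inmanage }}"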
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class KVM(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setkvm'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ client_type=dict(type='str', required=False, choices=['vnc', 'viewer']),
+ kvm_encryption=dict(type='str', required=False, choices=['enable', 'disable']),
+ media_attach=dict(type='str', required=False, choices=['attach', 'auto']),
+ keyboard_language=dict(type='str', required=False,
+ choices=['AD', 'DA', 'NL-BE', 'NL-NL', 'GB', 'US', 'FI', 'FR-BE', 'FR',
+ 'DE', 'DE-CH', 'IT', 'JP', 'ON', 'PT', 'EC', 'SV', 'TR_F', 'TR_Q']),
+ retry_count=dict(type='int', required=False),
+ retry_time_interval=dict(type='int', required=False),
+ local_monitor_off=dict(type='str', required=False, choices=['enable', 'disable']),
+ automatic_off=dict(type='str', required=False, choices=['enable', 'disable']),
+ non_secure=dict(type='str', required=False, choices=['enable', 'disable']),
+ ssh_vnc=dict(type='str', required=False, choices=['enable', 'disable']),
+ stunnel_vnc=dict(type='str', required=False, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ kvm_obj = KVM(argument_spec)
+ kvm_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldap.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldap.py
new file mode 100644
index 000000000..310816555
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldap.py
@@ -0,0 +1,193 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ldap
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set ldap information
+description:
+ - Set ldap information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ enable:
+ description:
+ - LDAP/E-Directory Authentication Status.
+ choices: ['enable', 'disable']
+ type: str
+ encry:
+ description:
+ - Encryption Type.
+ choices: ['no', 'SSL', 'StartTLS']
+ type: str
+ address:
+ description:
+ - Server Address.
+ type: str
+ server_port:
+ description:
+ - Server Port. Specify the LDAP/E-Directory Port.
+ type: int
+ dn:
+ description:
+ - Bind DN. The Bind DN is used in bind operations, which authenticates the client to the server.
+ - Bind DN is a string of 4 to 64 alphanumeric characters.
+ - It must start with an alphabetical character.
+ - Special symbols like dot(.), comma(,), hyphen(-), underscore(_), equal-to(=) are allowed.
+ type: str
+ code:
+ description:
+ - Password. The Bind password is also used in the bind authentication operations between client and server.
+ - Required when I(enable=enable).
+ type: str
+ base:
+ description:
+ - Search Base.
+ - The Search Base allows the LDAP/E-Directory server to find which part of the external directory tree is to be searched.
+ - This search base may be equivalent to the organization or the group of the external directory.
+ - Search base is a string of 4 to 64 alphanumeric characters.
+ - It must start with an alphabetical character.
+ - Special symbols like dot(.), comma(,), hyphen(-), underscore(_), equal-to(=) are allowed.
+ type: str
+ attr:
+ description:
+ - Attribute of User Login.
+ - The Attribute of User Login field indicates to the LDAP/E-Directory server which attribute should be used to identify the user.
+ choices: ['cn', 'uid']
+ type: str
+ cn:
+ description:
+ - Common name type.
+ - Required when I(encry=StartTLS).
+ choices: ['ip', 'fqdn']
+ type: str
+ ca:
+ description:
+ - CA certificate file path.
+ - Required when I(encry=StartTLS).
+ type: str
+ ce:
+ description:
+ - Certificate file path.
+ - Required when I(encry=StartTLS).
+ type: str
+ pk:
+ description:
+ - Private Key file path.
+ - Required when I(encry=StartTLS).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ldap test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ldap information"
+ ieisystem.inmanage.edit_ldap:
+ enable: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ldap information"
+ ieisystem.inmanage.edit_ldap:
+ enable: "enable"
+ encry: "SSL"
+ address: "100.2.2.2"
+ server_port: 389
+ dn: "cn=manager,ou=login,dc=domain,dc=com"
+ code: "123456"
+ base: "cn=manager"
+ attr: "uid"
+ provider: "{{ inmanage }}"
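+
+ # Hypothetical StartTLS example (certificate paths are placeholders, not from the
+ # original patch); per the option docs, cn/ca/ce/pk are required when encry=StartTLS.
+ - name: "Set ldap information with StartTLS"
+ ieisystem.inmanage.edit_ldap:
+ enable: "enable"
+ encry: "StartTLS"
+ address: "100.2.2.2"
+ server_port: 389
+ dn: "cn=manager,ou=login,dc=domain,dc=com"
+ code: "123456"
+ base: "cn=manager"
+ attr: "uid"
+ cn: "ip"
+ ca: "/path/to/ca.pem"
+ ce: "/path/to/cert.pem"
+ pk: "/path/to/key.pem"
+ provider: "{{ inmanage }}"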
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LDAP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setldap'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ enable=dict(type='str', required=False, choices=['enable', 'disable']),
+ encry=dict(type='str', required=False, choices=['no', 'SSL', 'StartTLS']),
+ address=dict(type='str', required=False),
+ server_port=dict(type='int', required=False),
+ dn=dict(type='str', required=False),
+ code=dict(type='str', required=False),
+ base=dict(type='str', required=False),
+ attr=dict(type='str', required=False, choices=['cn', 'uid']),
+ cn=dict(type='str', required=False, choices=['ip', 'fqdn']),
+ ca=dict(type='str', required=False),
+ ce=dict(type='str', required=False),
+ pk=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ ldap_obj = LDAP(argument_spec)
+ ldap_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldisk.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldisk.py
new file mode 100644
index 000000000..f55695640
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ldisk.py
@@ -0,0 +1,131 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ldisk
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set logical disk
+description:
+ - Set logical disk on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ ctrl_id:
+ description:
+ - Raid controller ID.
+ type: int
+ ldisk_id:
+ description:
+ - Logical disk ID.
+ type: int
+ option:
+ description:
+ - Set operation options for a logical disk.
+ - LOC is Locate Logical Drive, STL is Stop Locate Logical Drive.
+ - FI is Fast Initialization, SFI is Slow/Full Initialization.
+ - SI is Stop Initialization, DEL is Delete Logical Drive.
+ choices: ['LOC', 'STL', 'FI', 'SFI', 'SI', 'DEL']
+ type: str
+ duration:
+ description:
+ - Duration range is 1-255, for drives under a PMC RAID controller.
+ - Required when I(option=LOC).
+ - Only the M6 model supports this parameter.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Edit ldisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit ldisk"
+ ieisystem.inmanage.edit_ldisk:
+ ctrl_id: 0
+ ldisk_id: 1
+ option: "LOC"
+ provider: "{{ inmanage }}"
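+
+ # Hypothetical locate-with-duration example (value is illustrative, not from the
+ # original patch); per the option docs, duration applies to option=LOC on M6 models.
+ - name: "Edit ldisk with locate duration (M6 only)"
+ ieisystem.inmanage.edit_ldisk:
+ ctrl_id: 0
+ ldisk_id: 1
+ option: "LOC"
+ duration: 60
+ provider: "{{ inmanage }}"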
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Disk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setldisk'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ ctrl_id=dict(type='int', required=False),
+ ldisk_id=dict(type='int', required=False),
+ option=dict(type='str', required=False, choices=['LOC', 'STL', 'FI', 'SFI', 'SI', 'DEL']),
+ duration=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Disk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_log_setting.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_log_setting.py
new file mode 100644
index 000000000..b6d3cb54d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_log_setting.py
@@ -0,0 +1,164 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_log_setting
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set bmc system and audit log setting
+description:
+ - Set bmc system and audit log setting on ieisystem Server.
+ - Only the M5 models support this feature.
+notes:
+ - Does not support C(check_mode).
+options:
+ status:
+ description:
+ - System Log Status.
+ choices: ['enable', 'disable']
+ type: str
+ type:
+ description:
+ - System log type.
+ choices: ['local', 'remote', 'both']
+ type: str
+ file_size:
+ description:
+ - File size (3-65535 bytes), set when type is local (default 30000).
+ type: int
+ audit_status:
+ description:
+ - Audit Log Status.
+ choices: ['enable', 'disable']
+ type: str
+ audit_type:
+ description:
+ - Audit log type.
+ choices: ['local', 'remote', 'both']
+ type: str
+ rotate_count:
+ description:
+ - Rotate count, set when type is local; 0 deletes old files (default), 1 backs up old files.
+ choices: [0, 1]
+ type: int
+ server_addr:
+ description:
+ - Server Address, set when type is remote.
+ type: str
+ server_port:
+ description:
+ - Server port (0-65535), set when type is remote.
+ type: int
+ protocol_type:
+ description:
+ - Protocol Type, set when type is remote.
+ choices: ['UDP', 'TCP']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Edit log setting test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit bmc system log setting"
+ ieisystem.inmanage.edit_log_setting:
+ status: "enable"
+ type: "both"
+ provider: "{{ inmanage }}"
+
+ - name: "Edit bmc audit log setting"
+ ieisystem.inmanage.edit_log_setting:
+ audit_status: "enable"
+ audit_type: "remote"
+ server_addr: "100.2.126.11"
+ server_port: 514
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LogSetting(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setbmclogsettings'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ status=dict(type='str', required=False, choices=['enable', 'disable']),
+ type=dict(type='str', required=False, choices=['local', 'remote', 'both']),
+ file_size=dict(type='int', required=False),
+ audit_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ audit_type=dict(type='str', required=False, choices=['local', 'remote', 'both']),
+ rotate_count=dict(type='int', required=False, choices=[0, 1]),
+ server_addr=dict(type='str', required=False),
+ server_port=dict(type='int', required=False),
+ protocol_type=dict(type='str', required=False, choices=['UDP', 'TCP']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = LogSetting(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_m6_log_setting.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_m6_log_setting.py
new file mode 100644
index 000000000..686c48ae1
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_m6_log_setting.py
@@ -0,0 +1,174 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_m6_log_setting
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set bmc system and audit log setting
+description:
+ - Set bmc system and audit log setting on ieisystem Server.
+ - Only the M6 models support this feature.
+notes:
+ - Does not support C(check_mode).
+options:
+ status:
+ description:
+ - System Log Status.
+ choices: ['enable', 'disable']
+ type: str
+ host_tag:
+ description:
+ - System log host tag, set when I(status=enable).
+ choices: ['HostName', 'SerialNum', 'AssertTag']
+ type: str
+ level:
+ description:
+ - Events Level, set when I(status=enable).
+ choices: ['Critical', 'Warning', 'Info']
+ type: str
+ protocol_type:
+ description:
+ - Protocol Type, set when I(status=enable).
+ choices: ['UDP', 'TCP']
+ type: str
+ server_id:
+ description:
+ - Syslog Server ID, set when I(status=enable).
+ choices: [0, 1, 2, 3]
+ type: int
+ server_addr:
+ description:
+ - Server Address, set when server_id is not none.
+ type: str
+ server_port:
+ description:
+ - Server Port, set when server_id is not none.
+ type: int
+ log_type:
+ description:
+ - Remote Log Type, set when server_id is not none.
+ choices: ['idl', 'audit', 'both']
+ type: str
+ test:
+ description:
+ - Test remote log settings, set when server_id is not none.
+ default: false
+ type: bool
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Edit log setting test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit bmc system log setting"
+ ieisystem.inmanage.edit_m6_log_setting:
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Edit bmc audit log setting"
+ ieisystem.inmanage.edit_m6_log_setting:
+ status: "enable"
+ host_tag: "HostName"
+ level: "Info"
+ protocol_type: "TCP"
+ server_id: 0
+ server_addr: "100.2.126.11"
+ server_port: 514
+ log_type: "both"
+ provider: "{{ inmanage }}"
+
+ - name: "test bmc audit log"
+ ieisystem.inmanage.edit_m6_log_setting:
+ server_id: 0
+ test: True
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LogSetting(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setbmclogcfg'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ status=dict(type='str', required=False, choices=['enable', 'disable']),
+ host_tag=dict(type='str', required=False, choices=['HostName', 'SerialNum', 'AssertTag']),
+ level=dict(type='str', required=False, choices=['Critical', 'Warning', 'Info']),
+ protocol_type=dict(type='str', required=False, choices=['UDP', 'TCP']),
+ server_id=dict(type='int', required=False, choices=[0, 1, 2, 3]),
+ server_addr=dict(type='str', required=False),
+ server_port=dict(type='int', required=False),
+ log_type=dict(type='str', required=False, choices=['idl', 'audit', 'both']),
+ test=dict(type='bool', required=False, default=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = LogSetting(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_manual_capture.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_manual_capture.py
new file mode 100644
index 000000000..c7b4c0dda
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_manual_capture.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_manual_capture
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set manual capture screen
+description:
+ - Set manual capture screen on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ type:
+ description:
+ - Manual capture type.
+ choices: ['capture', 'delete']
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Screen test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set manual capture screen"
+ ieisystem.inmanage.edit_manual_capture:
+ type: "capture"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Screen(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'screenmanual'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ type=dict(type='str', required=True, choices=['capture', 'delete']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ screen_obj = Screen(argument_spec)
+ screen_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_media_instance.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_media_instance.py
new file mode 100644
index 000000000..3b43b09f3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_media_instance.py
@@ -0,0 +1,167 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_media_instance
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set Virtual Media Instance
+description:
+ - Set Virtual Media Instance on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ num_fd:
+ description:
+ - Select the number of floppy devices supported for Virtual Media redirection.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ num_cd:
+ description:
+ - Select the number of CD/DVD devices supported for Virtual Media redirection.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ num_hd:
+ description:
+ - Select the number of hard disk devices supported for Virtual Media redirection.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ kvm_num_fd:
+ description:
+ - Select the number of Remote KVM floppy devices supported for Virtual Media redirection.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ kvm_num_cd:
+ description:
+ - Select the number of Remote KVM CD/DVD devices supported for Virtual Media redirection.
+ - The maximum supported number for HTML5 KVM is 2 and for Java KVM is 4.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ kvm_num_hd:
+ description:
+ - Select the number of Remote KVM hard disk devices supported for Virtual Media redirection.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ sd_media:
+ description:
+ - Check this option to enable SD Media support in BMC.
+ choices: ['Enable', 'Disable']
+ type: str
+ secure_channel:
+ description:
+ - Check this option to encrypt media redirection packets.
+ - Only the M5/M6 model supports this parameter.
+ choices: ['Enable', 'Disable']
+ type: str
+ power_save_mode:
+ description:
+ - Check this option to enable Power Save Mode in BMC.
+ choices: ['Enable', 'Disable']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Media instance test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set media instance"
+ ieisystem.inmanage.edit_media_instance:
+ num_fd: 1
+ num_cd: 1
+ num_hd: 1
+ kvm_num_fd: 1
+ kvm_num_cd: 1
+ kvm_num_hd: 1
+ sd_media: "Enable"
+ secure_channel: "Enable"
+ power_save_mode: "Enable"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Instance(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setmediainstance'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ num_fd=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ num_cd=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ num_hd=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ kvm_num_fd=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ kvm_num_cd=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ kvm_num_hd=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ sd_media=dict(type='str', required=False, choices=['Enable', 'Disable']),
+ secure_channel=dict(type='str', required=False, choices=['Enable', 'Disable']),
+ power_save_mode=dict(type='str', required=False, choices=['Enable', 'Disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ instance_obj = Instance(argument_spec)
+ instance_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ncsi.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ncsi.py
new file mode 100644
index 000000000..05c1177fb
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ncsi.py
@@ -0,0 +1,133 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ncsi
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set ncsi information
+description:
+ - Set ncsi information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ nic_type:
+ description:
+ - NIC type.
+ - Only the NF3280A6 and NF3180A6 models support the C(Disable) setting, and they do not support the C(PHY) setting.
+ - The M6 model only supports the C(OCP), C(OCP1), and C(PCIE) settings.
+ choices: ['PHY', 'OCP', 'OCP1', 'PCIE', 'auto', 'Disable']
+ type: str
+ mode:
+ description:
+ - NCSI mode, auto is Auto Failover, manual is Manual Switch.
+ - Only the M6 model supports the C(Disable) setting.
+ choices: ['auto', 'manual', 'Disable']
+ type: str
+ interface_name:
+ description:
+ - Interface name, for example eth0.
+ - Only the M5 model supports this parameter.
+ type: str
+ channel_number:
+ description:
+ - Channel number.
+ choices: [0, 1, 2, 3]
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: NCSI test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ncsi information"
+ ieisystem.inmanage.edit_ncsi:
+ mode: "manual"
+ nic_type: "PCIE"
+ interface_name: "eth0"
+ channel_number: 1
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class NCSI(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setncsi'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ nic_type=dict(type='str', required=False, choices=['PHY', 'OCP', 'OCP1', 'PCIE', 'auto', 'Disable']),
+ mode=dict(type='str', required=False, choices=['auto', 'manual', 'Disable']),
+ interface_name=dict(type='str', required=False),
+ channel_number=dict(type='int', required=False, choices=[0, 1, 2, 3]),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ ncsi_obj = NCSI(argument_spec)
+ ncsi_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network.py
new file mode 100644
index 000000000..ef4e00805
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network.py
@@ -0,0 +1,118 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_network
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set network information
+description:
+ - Set network information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ interface_name:
+ description:
+ - Set interface name. Select the LAN interface to be configured.
+ choices: ['eth0', 'eth1', 'bond0']
+ required: true
+ type: str
+ lan_enable:
+ description:
+ - Enable or disable this interface. If disabled, the interface can no longer be used.
+ choices: ['enable', 'disable']
+ required: true
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Network test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set network information"
+ ieisystem.inmanage.edit_network:
+ interface_name: "eth0"
+ lan_enable: "enable"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Network(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setnetwork'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ interface_name=dict(type='str', required=True, choices=['eth0', 'eth1', 'bond0']),
+ lan_enable=dict(type='str', required=True, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ net_obj = Network(argument_spec)
+ net_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_bond.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_bond.py
new file mode 100644
index 000000000..af61eac2e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_bond.py
@@ -0,0 +1,123 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_network_bond
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set network bond
+description:
+ - Set network bond on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ bond:
+ description:
+ - Network bond status. If VLAN is enabled for slave interfaces, then bonding cannot be enabled.
+ choices: ['enable', 'disable']
+ type: str
+ interface:
+ description:
+ - Interface name.
+ choices: ['shared', 'dedicated', 'both']
+ type: str
+ auto_config:
+ description:
+ - Enable this option to configure the interfaces in service configuration automatically.
+ choices: ['enable', 'disable']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bond test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set network bond"
+ ieisystem.inmanage.edit_network_bond:
+ bond: "enable"
+ interface: "dedicated"
+ auto_config: "enable"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Bond(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setnetworkbond'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ bond=dict(type='str', required=False, choices=['enable', 'disable']),
+ interface=dict(type='str', required=False, choices=['shared', 'dedicated', 'both']),
+ auto_config=dict(type='str', required=False, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ bond_obj = Bond(argument_spec)
+ bond_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_link.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_link.py
new file mode 100644
index 000000000..2bdec76b4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_network_link.py
@@ -0,0 +1,140 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_network_link
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set network link
+description:
+ - Set network link on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ interface:
+ description:
+ - Interface name.
+ choices: ['shared', 'dedicated', 'both']
+ type: str
+ required: true
+ auto_nego:
+ description:
+ - This option allows the device to perform auto-configuration.
+ - Auto-configuration achieves the best mode of operation (speed and duplex) on the link.
+ choices: ['enable', 'disable']
+ type: str
+ link_speed:
+ description:
+ - Link speed lists the supported capabilities of the network interface. It can be 10 or 100 Mbps.
+ - Required when I(auto_nego=disable).
+ choices: [10, 100]
+ type: int
+ duplex_mode:
+ description:
+ - Select any one of the following Duplex Mode.
+ - Required when I(auto_nego=disable).
+ choices: ['HALF', 'FULL']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Link test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set network link"
+ ieisystem.inmanage.edit_network_link:
+ interface: "dedicated"
+ auto_nego: "enable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set network link"
+ ieisystem.inmanage.edit_network_link:
+ interface: "dedicated"
+ auto_nego: "disable"
+ link_speed: 100
+ duplex_mode: "FULL"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Link(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setnetworklink'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ interface=dict(type='str', required=True, choices=['shared', 'dedicated', 'both']),
+ auto_nego=dict(type='str', required=False, choices=['enable', 'disable']),
+ link_speed=dict(type='int', required=False, choices=[10, 100]),
+ duplex_mode=dict(type='str', required=False, choices=['HALF', 'FULL']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ link_obj = Link(argument_spec)
+ link_obj.work()
+
+
+if __name__ == '__main__':
+ main()
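
The documentation above says link_speed and duplex_mode are required when auto_nego=disable, but the argument spec as committed does not enforce that locally. One way to express the constraint with AnsibleModule's required_if is sketched below; it is a possible hardening, not something this patch does.

    from ansible.module_utils.basic import AnsibleModule

    def build_module(argument_spec):
        # Fail fast in Ansible instead of round-tripping to the BMC with an
        # incomplete parameter set when auto-negotiation is disabled.
        return AnsibleModule(
            argument_spec=argument_spec,
            supports_check_mode=False,
            required_if=[
                ('auto_nego', 'disable', ['link_speed', 'duplex_mode']),
            ],
        )
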
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ntp.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ntp.py
new file mode 100644
index 000000000..1887d1269
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_ntp.py
@@ -0,0 +1,173 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_ntp
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set NTP
+description:
+ - Set NTP on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ auto_date:
+ description:
+ - Automatic date synchronization.
+ choices: ['enable', 'disable']
+ type: str
+ ntp_time:
+ description:
+ - NTP time(YYYYmmddHHMMSS).
+ - Only the M5 model supports this parameter.
+ type: str
+ time_zone:
+ description:
+ - UTC time zone, chosen from {-12, -11.5, -11, ..., 11, 11.5, 12}.
+ type: str
+ server1:
+ description:
+ - NTP Server1 (IPv4, IPv6, or domain name); set when I(auto_date=enable).
+ type: str
+ server2:
+ description:
+ - NTP Server2 (IPv4, IPv6, or domain name); set when I(auto_date=enable).
+ type: str
+ server3:
+ description:
+ - NTP Server3 (IPv4, IPv6, or domain name); set when I(auto_date=enable).
+ type: str
+ server4:
+ description:
+ - NTP Server4 (IPv4, IPv6, or domain name); set when I(auto_date=enable).
+ type: str
+ server5:
+ description:
+ - NTP Server5 (IPv4, IPv6, or domain name); set when I(auto_date=enable).
+ type: str
+ server6:
+ description:
+ - NTP Server6 (IPv4, IPv6, or domain name); set when I(auto_date=enable).
+ type: str
+ syn_cycle:
+ description:
+ - NTP sync cycle in minutes (5-1440).
+ type: int
+ max_variety:
+ description:
+ - NTP maximum jump time in minutes (1-60).
+ - Only the M6 model supports this parameter.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: NTP test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set ntp"
+ ieisystem.inmanage.edit_ntp:
+ auto_date: "enable"
+ server2: "time.nist.gov"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ntp"
+ ieisystem.inmanage.edit_ntp:
+ auto_date: "disable"
+ ntp_time: "20200609083600"
+ provider: "{{ inmanage }}"
+
+ - name: "set ntp"
+ ieisystem.inmanage.edit_ntp:
+ time_zone: "8"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class NTP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'settime'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ auto_date=dict(type='str', required=False, choices=['enable', 'disable']),
+ ntp_time=dict(type='str', required=False),
+ time_zone=dict(type='str', required=False),
+ server1=dict(type='str', required=False),
+ server2=dict(type='str', required=False),
+ server3=dict(type='str', required=False),
+ server4=dict(type='str', required=False),
+ server5=dict(type='str', required=False),
+ server6=dict(type='str', required=False),
+ syn_cycle=dict(type='int', required=False),
+ max_variety=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ ntp_obj = NTP(argument_spec)
+ ntp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
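
ntp_time is documented as YYYYmmddHHMMSS but is passed through without local validation. A small client-side check like the following (a hypothetical helper, not in the committed module) would reject malformed values before the request is sent.

    from datetime import datetime

    def valid_ntp_time(value):
        """Return True if value matches the documented YYYYmmddHHMMSS format."""
        try:
            datetime.strptime(value, '%Y%m%d%H%M%S')
        except (TypeError, ValueError):
            return False
        return True

    # Value taken from the EXAMPLES block above.
    assert valid_ntp_time('20200609083600')
    assert not valid_ntp_time('2020-06-09 08:36')
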
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_pdisk.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_pdisk.py
new file mode 100644
index 000000000..507bc01dd
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_pdisk.py
@@ -0,0 +1,178 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_pdisk
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set physical disk
+description:
+ - Set physical disk on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ ctrl_id:
+ description:
+ - Raid controller ID.
+ type: int
+ device_id:
+ description:
+ - Physical drive id.
+ type: int
+ option:
+ description:
+ - Set operation options for a physical disk.
+ - UG is Unconfigured Good, UB is Unconfigured Bad.
+ - OFF is offline, FAIL is Failed, RBD is Rebuild.
+ - ON is Online, JB is JBOD, ES is Drive Erase stop.
+ - EM is Drive Erase Simple, EN is Drive Erase Normal.
+ - ET is Drive Erase Through, LOC is Locate, STL is Stop Locate.
+ - HS is Hot spare.
+ - Only the M5 model supports C(HS) Settings.
+ choices: ['UG', 'UB', 'OFF', 'FAIL', 'RBD', 'ON', 'JB', 'ES', 'EM', 'EN', 'ET', 'LOC', 'STL', 'HS']
+ type: str
+ action:
+ description:
+ - Action when setting a physical drive as hot spare.
+ - Required when I(option=HS).
+ - Only the M5 model supports this parameter.
+ choices: ['remove', 'global', 'dedicate']
+ type: str
+ revertible:
+ description:
+ - IsRevertible when setting a physical drive as hot spare.
+ - Required when I(option=HS) and I(action=dedicate).
+ - Only the M5 model supports this parameter.
+ choices: ['yes', 'no']
+ type: str
+ encl:
+ description:
+ - IsEnclAffinity when setting a physical drive as hot spare.
+ - Required when I(option=HS) and I(action=dedicate).
+ - Only the M5 model supports this parameter.
+ choices: ['yes', 'no']
+ type: str
+ logical_drivers:
+ description:
+ - Logical drivers when setting a physical drive as hot spare; input multiple logical driver indexes, such as 0, 1, 2.
+ - Required when I(option=HS) and I(action=dedicate).
+ - Only the M5 model supports this parameter.
+ type: list
+ elements: int
+ duration:
+ description:
+ - Duration range is 1-255, for physical drives under a PMC RAID controller.
+ - Required when I(option=LOC).
+ - Only the M6 model supports this parameter.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Edit pdisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit pdisk"
+ ieisystem.inmanage.edit_pdisk:
+ ctrl_id: 0
+ device_id: 1
+ option: "LOC"
+ provider: "{{ inmanage }}"
+
+ - name: "M5 Edit pdisk"
+ ieisystem.inmanage.edit_pdisk:
+ ctrl_id: 0
+ device_id: 1
+ option: "HS"
+ action: "dedicate"
+ revertible: "yes"
+ encl: "yes"
+ logical_drivers: 1
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Disk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setpdisk'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ ctrl_id=dict(type='int', required=False),
+ device_id=dict(type='int', required=False),
+ option=dict(type='str', required=False, choices=['UG', 'UB', 'OFF', 'FAIL', 'RBD', 'ON', 'JB', 'ES', 'EM', 'EN', 'ET', 'LOC', 'STL', 'HS']),
+ action=dict(type='str', required=False, choices=['remove', 'global', 'dedicate']),
+ revertible=dict(type='str', required=False, choices=['yes', 'no']),
+ encl=dict(type='str', required=False, choices=['yes', 'no']),
+ logical_drivers=dict(type='list', elements='int', required=False),
+ duration=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Disk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
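
Several edit_pdisk parameters are documented as conditionally required (action when option=HS; revertible, encl and logical_drivers when action=dedicate), yet the argument spec leaves all of them optional. A required_if table could encode those notes directly; the snippet below is a sketch of a possible addition, not part of the patch.

    # Possible required_if table mirroring the "Required when ..." notes above.
    # The M6-only 'duration' requirement for option=LOC is omitted because it is
    # model-specific and cannot be expressed purely in the argument spec.
    REQUIRED_IF = [
        ('option', 'HS', ['action']),
        ('action', 'dedicate', ['revertible', 'encl', 'logical_drivers']),
    ]
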
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_budget.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_budget.py
new file mode 100644
index 000000000..e67548aa2
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_budget.py
@@ -0,0 +1,260 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_power_budget
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set power budget information
+description:
+ - Set power budget information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ range:
+ description:
+ - Range of power budget watts.
+ choices: [true, false]
+ default: false
+ type: bool
+ domain:
+ description:
+ - Domain id.
+ - Required when I(range=False).
+ choices: ['system', 'cpu']
+ type: str
+ action:
+ description:
+ - Type to action.
+ - Required when I(range=False).
+ choices: ['add', 'delete', 'open', 'close']
+ type: str
+ id:
+ description:
+ - Policy id.
+ - Required when I(range=False).
+ choices: [1, 2, 3, 4]
+ type: int
+ watts:
+ description:
+ - Power budget watts of add.
+ - Required when I(action=add).
+ type: int
+ except_action:
+ description:
+ - Exception action. 0 is do nothing, 1 is send alert, 2 is shutdown system, 3 is shutdown system and send alert.
+ - Only the M7 model supports this parameter.
+ choices: [0, 1, 2, 3]
+ type: int
+ start1:
+ description:
+ - Pause period of add, start time, from 0 to 24.
+ type: int
+ end1:
+ description:
+ - Pause period of add, end time, must be greater than start time, from 0 to 24.
+ type: int
+ week1:
+ description:
+ - Pause period of add, repetition period.
+ - The input parameters are 'Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun', separated by commas, such as Mon, Wed, Fri.
+ type: list
+ elements: str
+ start2:
+ description:
+ - Pause period of add, start time, from 0 to 24.
+ type: int
+ end2:
+ description:
+ - Pause period of add, end time, must be greater than start time, from 0 to 24.
+ type: int
+ week2:
+ description:
+ - Pause period of add, repetition period.
+ - The input parameters are 'Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun', separated by commas, such as Mon, Wed, Fri.
+ type: list
+ elements: str
+ start3:
+ description:
+ - Pause period of add, start time, from 0 to 24.
+ type: int
+ end3:
+ description:
+ - Pause period of add, end time, must be greater than start time, from 0 to 24.
+ type: int
+ week3:
+ description:
+ - Pause period of add, repetition period.
+ - The input parameters are 'Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun', separated by commas, such as Mon, Wed, Fri.
+ type: list
+ elements: str
+ start4:
+ description:
+ - Pause period of add, start time, from 0 to 24.
+ type: int
+ end4:
+ description:
+ - Pause period of add, end time, must be greater than start time, from 0 to 24.
+ type: int
+ week4:
+ description:
+ - Pause period of add, repetition period.
+ - The input parameters are 'Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun', separated by commas, such as Mon, Wed, Fri.
+ type: list
+ elements: str
+ start5:
+ description:
+ - Pause period of add, start time, from 0 to 24.
+ type: int
+ end5:
+ description:
+ - Pause period of add, end time, must be greater than start time, from 0 to 24.
+ type: int
+ week5:
+ description:
+ - Pause period of add, repetition period.
+ - The input parameters are 'Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun', separated by commas, such as Mon, Wed, Fri.
+ type: list
+ elements: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power budget test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get power budget range"
+ ieisystem.inmanage.edit_power_budget:
+ range: True
+ provider: "{{ inmanage }}"
+
+ - name: "add power budget"
+ ieisystem.inmanage.edit_power_budget:
+ action: "add"
+ id: 1
+ watts: 1500
+ start1: 2
+ end1: 5
+ week1:
+ - Mon
+ - Wed
+ - Fri
+ provider: "{{ inmanage }}"
+
+ - name: "Set power budget status to open"
+ ieisystem.inmanage.edit_power_budget:
+ action: "open"
+ id: 1
+ provider: "{{ inmanage }}"
+
+ - name: "Set power budget status to close"
+ ieisystem.inmanage.edit_power_budget:
+ action: "close"
+ id: 1
+ provider: "{{ inmanage }}"
+
+ - name: "Delete power budget"
+ ieisystem.inmanage.edit_power_budget:
+ action: "delete"
+ id: 1
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setpowerbudget'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ range=dict(type='bool', default=False, choices=[True, False]),
+ domain=dict(type='str', required=False, choices=['system', 'cpu']),
+ action=dict(type='str', required=False, choices=['add', 'delete', 'open', 'close']),
+ id=dict(type='int', required=False, choices=[1, 2, 3, 4]),
+ watts=dict(type='int', required=False),
+ except_action=dict(type='int', required=False, choices=[0, 1, 2, 3]),
+ start1=dict(type='int', required=False),
+ end1=dict(type='int', required=False),
+ week1=dict(type='list', elements='str', required=False),
+ start2=dict(type='int', required=False),
+ end2=dict(type='int', required=False),
+ week2=dict(type='list', elements='str', required=False),
+ start3=dict(type='int', required=False),
+ end3=dict(type='int', required=False),
+ week3=dict(type='list', elements='str', required=False),
+ start4=dict(type='int', required=False),
+ end4=dict(type='int', required=False),
+ week4=dict(type='list', elements='str', required=False),
+ start5=dict(type='int', required=False),
+ end5=dict(type='int', required=False),
+ week5=dict(type='list', elements='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
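
The five pause periods all share the same constraints: start and end between 0 and 24, end greater than start, and week days drawn from Mon through Sun. A local check like the one below could validate them before the request goes out; it is a hypothetical helper, and the committed module relies on the BMC to reject bad periods.

    VALID_DAYS = {'Mon', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun'}

    def pause_period_ok(start, end, week):
        """Check one start/end/week triple against the documented constraints."""
        if start is None and end is None and not week:
            return True                   # period not configured at all
        if start is None or end is None:
            return False                  # half-configured period
        if not (0 <= start < end <= 24):
            return False
        return all(day in VALID_DAYS for day in (week or []))

    # Matches the "Add power budget" example above.
    assert pause_period_ok(2, 5, ['Mon', 'Wed', 'Fri'])
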
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_restore.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_restore.py
new file mode 100644
index 000000000..e344b4686
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_restore.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_power_restore
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set power restore information
+description:
+ - Set power restore information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ option:
+ description:
+ - Set power policy option.
+ choices: ['on', 'off', 'restore']
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power restore test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set power restore information"
+ ieisystem.inmanage.edit_power_restore:
+ option: "on"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setpowerrestore'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ option=dict(type='str', required=True, choices=['on', 'off', 'restore']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_status.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_status.py
new file mode 100644
index 000000000..4aef12e72
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_power_status.py
@@ -0,0 +1,110 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_power_status
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set power status information
+description:
+ - Set power status information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ state:
+ description:
+ - Power status.
+ choices: ['On', 'ForceOff', 'ForcePowerCycle', 'ForceReset', 'GracefulShutdown']
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power status test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set power status information"
+ ieisystem.inmanage.edit_power_status:
+ state: "On"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'powercontrol'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ state=dict(type='str', required=True, choices=['On', 'ForceOff', 'ForcePowerCycle', 'ForceReset', 'GracefulShutdown']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_preserve_config.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_preserve_config.py
new file mode 100644
index 000000000..e40d561de
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_preserve_config.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_preserve_config
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set preserve config
+description:
+ - Set preserve config on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ setting:
+ description:
+ - Preserve option. all - preserve all config; none - overwrite all config; manual - manual choice.
+ choices: ['all', 'none', 'manual']
+ type: str
+ required: true
+ override:
+ description:
+ - Configuration items that need to be retained.
+ - Required when I(setting=manual).
+ choices: ['authentication', 'dcmi', 'fru', 'hostname', 'ipmi', 'kvm', 'network', 'ntp', 'pef',
+ 'sdr', 'sel', 'smtp', 'snmp', 'sol', 'ssh', 'syslog', 'user']
+ type: list
+ elements: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Preserve test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set preserve all"
+ ieisystem.inmanage.edit_preserve_config:
+ setting: "all"
+ provider: "{{ inmanage }}"
+
+ - name: "Set preserve none"
+ ieisystem.inmanage.edit_preserve_config:
+ setting: "none"
+ provider: "{{ inmanage }}"
+
+ - name: "Set preserve manual"
+ ieisystem.inmanage.edit_preserve_config:
+ setting: "manual"
+ override:
+ - fru
+ - ntp
+ - network
+ - user
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Preserve(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'preserveconfig'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ setting=dict(type='str', required=True, choices=['all', 'none', 'manual']),
+ override=dict(type='list', elements='str', required=False,
+ choices=['authentication', 'dcmi', 'fru', 'hostname', 'ipmi', 'kvm', 'network', 'ntp',
+ 'pef', 'sdr', 'sel', 'smtp', 'snmp', 'sol', 'ssh', 'syslog', 'user']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ pre_obj = Preserve(argument_spec)
+ pre_obj.work()
+
+
+if __name__ == '__main__':
+ main()
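
The override choices here are repeated verbatim in edit_restore_factory_default below, and "Required when I(setting=manual)" is only documented, not enforced. A shared constant plus a required_if entry (a sketch of a possible cleanup, not part of this change) would keep the two modules in sync.

    # Candidate shared constant for both edit_preserve_config and
    # edit_restore_factory_default (hypothetical module_utils addition).
    PRESERVE_ITEMS = ['authentication', 'dcmi', 'fru', 'hostname', 'ipmi', 'kvm',
                      'network', 'ntp', 'pef', 'sdr', 'sel', 'smtp', 'snmp',
                      'sol', 'ssh', 'syslog', 'user']

    REQUIRED_IF = [('setting', 'manual', ['override'])]
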
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_config.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_config.py
new file mode 100644
index 000000000..7e85f9293
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_config.py
@@ -0,0 +1,116 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_psu_config
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set psu config information
+description:
+ - Set psu config information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ id:
+ description:
+ - Power id.
+ type: int
+ required: true
+ switch:
+ description:
+ - Power supply mode, active, standby, or normal.
+ choices: ['active', 'standby', 'normal']
+ type: str
+ required: true
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Psu config test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set psu config information"
+ ieisystem.inmanage.edit_psu_config:
+ id: 1
+ switch: "active"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Psu(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setpsuconfig'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ id=dict(type='int', required=True),
+ switch=dict(type='str', required=True, choices=['active', 'standby', 'normal']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ psu_obj = Psu(argument_spec)
+ psu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_peak.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_peak.py
new file mode 100644
index 000000000..6bb15a6d9
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_psu_peak.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_psu_peak
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set psu peak information
+description:
+ - Set psu peak information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ status:
+ description:
+ - Power peak status.
+ choices: ['enable', 'disable']
+ type: str
+ required: true
+ time:
+ description:
+ - Maximum random time, range of values 1-600, in seconds.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Psu peak test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set psu peak information"
+ ieisystem.inmanage.edit_psu_peak:
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set psu peak information"
+ ieisystem.inmanage.edit_psu_peak:
+ status: "enable"
+ time: 10
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Psu(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setpsupeak'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ status=dict(type='str', required=True, choices=['enable', 'disable']),
+ time=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ psu_obj = Psu(argument_spec)
+ psu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_restore_factory_default.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_restore_factory_default.py
new file mode 100644
index 000000000..8f1c3d599
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_restore_factory_default.py
@@ -0,0 +1,131 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_restore_factory_default
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set restore factory default
+description:
+ - Set restore factory default configuration on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ mode:
+ description:
+ - Restore factory defaults mode.
+ choices: ['all', 'none', 'manual']
+ type: str
+ required: true
+ override:
+ description:
+ - Configuration items that need to be retained.
+ - Required when I(mode=manual).
+ choices: ['authentication', 'dcmi', 'fru', 'hostname', 'ipmi', 'kvm', 'network', 'ntp', 'pef',
+ 'sdr', 'sel', 'smtp', 'snmp', 'sol', 'ssh', 'syslog', 'user']
+ type: list
+ elements: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Restore default test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set restore default auto"
+ ieisystem.inmanage.edit_restore_factory_default:
+ mode: "all"
+ provider: "{{ inmanage }}"
+
+ - name: "Set restore default manual"
+ ieisystem.inmanage.edit_restore_factory_default:
+ mode: "manual"
+ override:
+ - fru
+ - ntp
+ - network
+ - user
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Preserver(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'restorefactorydefaults'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ mode=dict(type='str', required=True, choices=['all', 'none', 'manual']),
+ override=dict(type='list', elements='str', required=False,
+ choices=['authentication', 'dcmi', 'fru', 'hostname', 'ipmi', 'kvm', 'network', 'ntp',
+ 'pef', 'sdr', 'sel', 'smtp', 'snmp', 'sol', 'ssh', 'syslog', 'user']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ pre_obj = Preserver(argument_spec)
+ pre_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_service.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_service.py
new file mode 100644
index 000000000..0fbd55a38
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_service.py
@@ -0,0 +1,150 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_service
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set service settings
+description:
+ - Set service settings on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ service_name:
+ description:
+ - Displays the service name of the selected slot (read-only).
+ - The I(vnc) option is not supported in M5.
+ - The I(fd-media/telnet/snmp) option is not supported in M6.
+ choices: ['web', 'kvm', 'cd-media', 'fd-media', 'hd-media', 'ssh', 'telnet', 'solssh', 'snmp', 'vnc']
+ type: str
+ required: true
+ state:
+ description:
+ - Displays the current status of the service, either active or inactive state.
+ - Check this option to start the inactive service.
+ choices: ['active', 'inactive']
+ type: str
+ interface:
+ description:
+ - It shows the interface on which the service is running.
+ - The user can choose any one of the available interfaces.
+ - Only the M5 model supports this parameter.
+ choices: ['eth0', 'eth1', 'both', 'bond0']
+ type: str
+ non_secure_port:
+ description:
+ - Used to configure the non-secure port number for the service.
+ - Port value ranges from 1 to 65535.
+ type: int
+ secure_port:
+ description:
+ - Used to configure secure port number for the service.
+ - Port value ranges from 1 to 65535.
+ type: int
+ timeout:
+ description:
+ - Displays the session timeout value of the service.
+ - For the web, SSH, and Telnet services, the user can configure the session timeout value.
+ - Web timeout value ranges from 300 to 1800 seconds.
+ - SSH and Telnet timeout value ranges from 60 to 1800 seconds.
+ - Timeout value should be in multiples of 60 seconds.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Edit service test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Edit kvm"
+ ieisystem.inmanage.edit_service:
+ service_name: "kvm"
+ state: "active"
+ timeout: "1200"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Service(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setservice'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ service_name=dict(type='str', required=True, choices=['web', 'kvm', 'cd-media', 'fd-media', 'hd-media', 'ssh', 'telnet', 'solssh', 'snmp', 'vnc']),
+ state=dict(type='str', required=False, choices=['active', 'inactive']),
+ interface=dict(type='str', required=False, choices=['eth0', 'eth1', 'both', 'bond0']),
+ non_secure_port=dict(type='int', required=False),
+ secure_port=dict(type='int', required=False),
+ timeout=dict(type='int', required=False)
+ )
+ argument_spec.update(inmanage_argument_spec)
+ service_obj = Service(argument_spec)
+ service_obj.work()
+
+
+if __name__ == '__main__':
+ main()
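
The timeout documentation encodes three rules (web 300-1800 seconds, SSH/Telnet 60-1800 seconds, multiples of 60) that are not checked locally. A helper along these lines, with the ranges taken from the option description above, could surface violations before calling the BMC; it is a hypothetical addition, not part of this patch.

    TIMEOUT_RANGES = {
        'web': (300, 1800),
        'ssh': (60, 1800),
        'telnet': (60, 1800),
    }

    def timeout_ok(service_name, timeout):
        """True if timeout satisfies the documented range and 60-second granularity."""
        if service_name not in TIMEOUT_RANGES or timeout is None:
            return True                   # no documented constraint to apply
        low, high = TIMEOUT_RANGES[service_name]
        return low <= timeout <= high and timeout % 60 == 0

    assert timeout_ok('kvm', 1200)        # matches the "Edit kvm" example above
    assert not timeout_ok('web', 250)
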
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp.py
new file mode 100644
index 000000000..c59a3cc66
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp.py
@@ -0,0 +1,213 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_smtp
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set SMTP information
+description:
+ - Set SMTP information on ieisystem Server.
+ - Only the M5 models support this feature.
+notes:
+ - Does not support C(check_mode).
+options:
+ interface:
+ description:
+ - LAN Channel, eth0 is shared, eth1 is dedicated.
+ choices: ['eth0', 'eth1', 'bond0']
+ type: str
+ required: true
+ email:
+ description:
+ - Sender email.
+ type: str
+ primary_status:
+ description:
+ - Primary SMTP Support.
+ choices: ['enable', 'disable']
+ type: str
+ primary_ip:
+ description:
+ - Primary SMTP server IP.
+ type: str
+ primary_name:
+ description:
+ - Primary SMTP server name.
+ type: str
+ primary_port:
+ description:
+ - Primary SMTP server port, value ranges from 1 to 65535.
+ type: int
+ primary_auth:
+ description:
+ - Primary SMTP server authentication.
+ choices: ['enable', 'disable']
+ type: str
+ primary_username:
+ description:
+ - Primary SMTP server username, length 4 to 64 characters.
+ - Must start with a letter and cannot contain ','(comma) ':'(colon) ' '(space) ';'(semicolon) '\\'(backslash).
+ type: str
+ primary_password:
+ description:
+ - Primary SMTP server password, length 4 to 64 characters, cannot contain ' '(space).
+ - Required when I(primary_auth=enable).
+ type: str
+ secondary_status:
+ description:
+ - Secondary SMTP Support.
+ choices: ['enable', 'disable']
+ type: str
+ secondary_ip:
+ description:
+ - Secondary SMTP server IP.
+ type: str
+ secondary_name:
+ description:
+ - Secondary SMTP server name.
+ type: str
+ secondary_port:
+ description:
+ - Secondary SMTP server port, value ranges from 1 to 65535.
+ type: int
+ secondary_auth:
+ description:
+ - Secondary SMTP server authentication.
+ choices: ['enable', 'disable']
+ type: str
+ secondary_username:
+ description:
+ - Secondary SMTP server username, length 4 to 64 characters.
+ - Must start with a letter and cannot contain ','(comma) ':'(colon) ' '(space) ';'(semicolon) '\\'(backslash).
+ type: str
+ secondary_password:
+ description:
+ - Secondary SMTP server password, length 4 to 64 characters, cannot contain ' '(space).
+ - Required when I(secondary_auth=enable).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Smtp test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set smtp information"
+ ieisystem.inmanage.edit_smtp:
+ interface: "eth0"
+ email: "ieit@ieisystem.com"
+ primary_status: "enable"
+ primary_ip: "100.2.2.2"
+ primary_name: "test"
+ primary_auth: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set smtp information"
+ ieisystem.inmanage.edit_smtp:
+ interface: "eth0"
+ email: "ieit@ieisystem.com"
+ primary_status: "enable"
+ primary_ip: "100.2.2.2"
+ primary_name: "test"
+ primary_auth: "enable"
+ primary_username: "test"
+ primary_password: my_password
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SMTP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setsmtp'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ interface=dict(type='str', required=True, choices=['eth0', 'eth1', 'bond0']),
+ email=dict(type='str', required=False),
+ primary_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ primary_ip=dict(type='str', required=False),
+ primary_name=dict(type='str', required=False),
+ primary_port=dict(type='int', required=False),
+ primary_auth=dict(type='str', required=False, choices=['enable', 'disable']),
+ primary_username=dict(type='str', required=False),
+ primary_password=dict(type='str', required=False, no_log=True),
+ secondary_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ secondary_ip=dict(type='str', required=False),
+ secondary_name=dict(type='str', required=False),
+ secondary_port=dict(type='int', required=False),
+ secondary_auth=dict(type='str', required=False, choices=['enable', 'disable']),
+ secondary_username=dict(type='str', required=False),
+ secondary_password=dict(type='str', required=False, no_log=True),
+
+ )
+ argument_spec.update(inmanage_argument_spec)
+ smtp_obj = SMTP(argument_spec)
+ smtp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_com.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_com.py
new file mode 100644
index 000000000..7e2618715
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_com.py
@@ -0,0 +1,204 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_smtp_com
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set SMTP com information
+description:
+ - Set SMTP com information on ieisystem Server.
+ - Only the M6 models support this feature.
+notes:
+ - Does not support C(check_mode).
+options:
+ status:
+ description:
+ - SMTP Support.
+ choices: ['enable', 'disable']
+ required: true
+ type: str
+ server_ip:
+ description:
+ - SMTP server IP.
+ type: str
+ server_port:
+ description:
+ - SMTP server port, value ranges from 1 to 65535.
+ type: int
+ server_secure_port:
+ description:
+ - SMTP server secure port, value ranges from 1 to 65535.
+ type: int
+ email:
+ description:
+ - Sender email.
+ type: str
+ server_auth:
+ description:
+ - SMTP server authentication.
+ choices: ['enable', 'disable']
+ type: str
+ server_username:
+ description:
+ - SMTP server username, length 4 to 64 characters.
+ - Must start with a letter and cannot contain ','(comma) ':'(colon) ' '(space) ';'(semicolon) '\\'(backslash).
+ - Required when I(server_auth=enable).
+ type: str
+ server_password:
+ description:
+ - SMTP server password, length 4 to 64 characters, cannot contain ' '(space).
+ - Required when I(server_auth=enable).
+ type: str
+ ssl_tls_enable:
+ description:
+ - SMTP SSL/TLS enable.
+ - I(ssl_tls_enable) must be C(disable) when I(star_tls_enable=enable).
+ choices: ['enable', 'disable']
+ type: str
+ star_tls_enable:
+ description:
+ - SMTP STARTTLS enable.
+ - I(star_tls_enable) must be C(disable) when I(ssl_tls_enable=enable).
+ choices: ['enable', 'disable']
+ type: str
+ subject:
+ description:
+ - Email subject.
+ type: str
+ host_name:
+ description:
+ - Server name.
+ choices: ['enable', 'disable']
+ type: str
+ serial_number:
+ description:
+ - Serial number.
+ choices: ['enable', 'disable']
+ type: str
+ asset_tag:
+ description:
+ - Product asset label.
+ choices: ['enable', 'disable']
+ type: str
+ event_level:
+ description:
+ - Events above this level will be sent.
+ choices: ['Info', 'Warning', 'Critical']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Smtp com test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set smtp com information"
+ ieisystem.inmanage.edit_smtp_com:
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set smtp com information"
+ ieisystem.inmanage.edit_smtp_com:
+ status: "enable"
+ server_ip: "100.2.2.2"
+ email: "ieit@ieisystem.com"
+ server_auth: "enable"
+ server_username: "admin"
+ server_password: "1234qwer!@#$"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SMTP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setsmtpcom'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ status=dict(type='str', required=True, choices=['enable', 'disable']),
+ server_ip=dict(type='str', required=False),
+ server_port=dict(type='int', required=False),
+ server_secure_port=dict(type='int', required=False),
+ email=dict(type='str', required=False),
+ server_auth=dict(type='str', required=False, choices=['enable', 'disable']),
+ server_username=dict(type='str', required=False),
+ server_password=dict(type='str', required=False, no_log=True),
+ ssl_tls_enable=dict(type='str', required=False, choices=['enable', 'disable']),
+ star_tls_enable=dict(type='str', required=False, choices=['enable', 'disable']),
+ subject=dict(type='str', required=False),
+ host_name=dict(type='str', required=False, choices=['enable', 'disable']),
+ serial_number=dict(type='str', required=False, choices=['enable', 'disable']),
+ asset_tag=dict(type='str', required=False, choices=['enable', 'disable']),
+ event_level=dict(type='str', required=False, choices=['Info', 'Warning', 'Critical']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ smtp_obj = SMTP(argument_spec)
+ smtp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
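
ssl_tls_enable and star_tls_enable are documented as mutually exclusive when enabled, but nothing stops a playbook from sending both as 'enable'. A one-line guard such as the following (hypothetical, not in the committed module) would catch that combination locally instead of leaving it to the BMC.

    def tls_combination_ok(ssl_tls_enable, star_tls_enable):
        """The two TLS toggles may not both be 'enable' at the same time."""
        return not (ssl_tls_enable == 'enable' and star_tls_enable == 'enable')

    assert tls_combination_ok('enable', 'disable')
    assert not tls_combination_ok('enable', 'enable')
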
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_dest.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_dest.py
new file mode 100644
index 000000000..2d8d6a6d3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_smtp_dest.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_smtp_dest
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set SMTP dest information
+description:
+ - Set SMTP dest information on ieisystem Server.
+ - Only the M6 models support this feature.
+notes:
+ - Does not support C(check_mode).
+options:
+ id:
+ description:
+ - Email destination id.
+ choices: [1, 2, 3, 4]
+ type: int
+ required: true
+ status:
+ description:
+ - Email enable.
+ choices: ['enable', 'disable']
+ type: str
+ address:
+ description:
+ - Email address.
+ type: str
+ description:
+ description:
+ - Description information.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Smtp dest test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set smtp dest information"
+ ieisystem.inmanage.edit_smtp_dest:
+ id: 1
+ status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set smtp dest information"
+ ieisystem.inmanage.edit_smtp_dest:
+ id: 1
+ status: "enable"
+ address: "100.2.2.2"
+ description": "test"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SMTP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setsmtpdest'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ id=dict(type='int', required=True, choices=[1, 2, 3, 4]),
+ status=dict(type='str', required=False, choices=['enable', 'disable']),
+ address=dict(type='str', required=False),
+ description=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ smtp_obj = SMTP(argument_spec)
+ smtp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp.py
new file mode 100644
index 000000000..52ccedb2e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp.py
@@ -0,0 +1,188 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_snmp
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set SNMP
+description:
+ - Set SNMP on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ version:
+ description:
+ - SNMP version option, 0 - 'v1', 1 - 'v2c', 2 - 'v3', 3 - 'all', 4 - 'customize'.
+ - Only the M5 models support this feature.
+ choices: [0, 1, 2, 3, 4]
+ type: int
+ snmp_status:
+ description:
+ - SNMP read/write status for the customize option.
+ - Valid values are 'v1get', 'v1set', 'v2cget', 'v2cset', 'v3get', 'v3set'; multiple values can be supplied, for example v1get, v1set, v2cget.
+ - Only the M5 models support this feature.
+ type: list
+ elements: str
+ community:
+ description:
+ - Community of v1/v2c or v1get/v1set/v2cget/v2cset.
+ - Only the M5 models support this feature.
+ type: str
+ v1status:
+ description:
+ - SNMP V1 enable.
+ choices: ['enable', 'disable']
+ type: str
+ v2status:
+ description:
+ - SNMP V2 enable.
+ choices: ['enable', 'disable']
+ type: str
+ v3status:
+ description:
+ - SNMP V3 enable.
+ choices: ['enable', 'disable']
+ type: str
+ read_community:
+ description:
+ - Read-only community. The community should be between 1 and 16 characters.
+ - Only the M6 models support this feature.
+ type: str
+ read_write_community:
+ description:
+ - Read-write community. The community should be between 1 and 16 characters.
+ - Only the M6 models support this feature.
+ type: str
+ v3username:
+ description:
+ - Set a username for the V3 trap or v3get/v3set.
+ type: str
+ auth_protocol:
+ description:
+ - Choose the authentication protocol for the V3 trap or v3get/v3set.
+ choices: ['NONE', 'SHA', 'MD5']
+ type: str
+ auth_password:
+ description:
+ - Set the authentication password for the V3 trap or v3get/v3set.
+ - The password is a string of 8 to 16 alphanumeric characters.
+ - Required when I(auth_protocol) is either C(SHA) or C(MD5).
+ type: str
+ priv_protocol:
+ description:
+ - Choose the privacy protocol for the V3 trap or v3get/v3set.
+ choices: ['NONE', 'DES', 'AES']
+ type: str
+ priv_password:
+ description:
+ - Set the privacy password for the V3 trap or v3get/v3set.
+ - The password is a string of 8 to 16 alphanumeric characters.
+ - Required when I(priv_protocol) is either C(DES) or C(AES).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Snmp test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set snmp get/set"
+ ieisystem.inmanage.edit_snmp:
+ community: "test"
+ v3username: "test"
+ provider: "{{ inmanage }}"
+
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SNMP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setsnmp'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ version=dict(type='int', required=False, choices=[0, 1, 2, 3, 4]),
+ snmp_status=dict(type='list', elements='str', required=False),
+ community=dict(type='str', required=False),
+ v1status=dict(type='str', required=False, choices=['enable', 'disable']),
+ v2status=dict(type='str', required=False, choices=['enable', 'disable']),
+ v3status=dict(type='str', required=False, choices=['enable', 'disable']),
+ read_community=dict(type='str', required=False),
+ read_write_community=dict(type='str', required=False),
+ v3username=dict(type='str', required=False),
+ auth_protocol=dict(type='str', required=False, choices=['NONE', 'SHA', 'MD5']),
+ auth_password=dict(type='str', required=False, no_log=True),
+ priv_protocol=dict(type='str', required=False, choices=['NONE', 'DES', 'AES']),
+ priv_password=dict(type='str', required=False, no_log=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ snmp_obj = SNMP(argument_spec)
+ snmp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp_trap.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp_trap.py
new file mode 100644
index 000000000..a48771d34
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_snmp_trap.py
@@ -0,0 +1,217 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_snmp_trap
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set SNMP trap
+description:
+ - Set SNMP trap on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ version:
+ description:
+ - SNMP trap version; 1 is v1, 2 is v2c (v2), 3 is v3, and 0 disables the SNMP trap.
+ - Only the M6 model supports the C(0) setting.
+ choices: [0, 1, 2, 3]
+ type: int
+ event_severity:
+ description:
+ - Event Severity.
+ choices: ['all', 'warning', 'critical']
+ type: str
+ community:
+ description:
+ - Community of v1/v2c.
+ type: str
+ host_id:
+ description:
+ - Host id.
+ - Only the M6 model supports this parameter.
+ choices: ['HostName', 'SerialNum', 'AssertTag']
+ type: str
+ v3username:
+ description:
+ - Set the username for the V3 trap.
+ type: str
+ engine_id:
+ description:
+ - Specifies an engine identifier for the V3 trap. The value should be a string of 10 to 48 hex characters with an even length, and can be NULL.
+ type: str
+ auth_protocol:
+ description:
+ - Choose the authentication protocol for the V3 trap.
+ choices: ['NONE', 'SHA', 'MD5']
+ type: str
+ auth_password:
+ description:
+ - Set the authentication password for the V3 trap.
+ - The password is a string of 8 to 16 alphanumeric characters.
+ - Required when I(auth_protocol) is either C(SHA) or C(MD5).
+ type: str
+ priv_protocol:
+ description:
+ - Choose the privacy protocol for the V3 trap.
+ choices: ['NONE', 'DES', 'AES']
+ type: str
+ priv_password:
+ description:
+ - Set the privacy password for the V3 trap.
+ - The password is a string of 8 to 16 alphanumeric characters.
+ - Required when I(priv_protocol) is either C(DES) or C(AES).
+ type: str
+ system_name:
+ description:
+ - Set the system name, can be NULL.
+ - Only the M5 model supports this parameter.
+ type: str
+ system_id:
+ description:
+ - Set the system ID, can be NULL.
+ - Only the M5 model supports this parameter.
+ type: str
+ location:
+ description:
+ - Set the host location, can be NULL.
+ - Only the M5 model supports this parameter.
+ type: str
+ contact:
+ description:
+ - Set the contact, can be NULL.
+ - Only the M5 model supports this parameter.
+ type: str
+ os:
+ description:
+ - Set the host operating system, can be NULL.
+ - Only the M5 model supports this parameter.
+ type: str
+ trap_port:
+ description:
+ - Set a port for the SNMP trap in the range of 1 to 65535.
+ - Only the M5 model supports this parameter.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Trap test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set snmp trap v2c"
+ ieisystem.inmanage.edit_snmp_trap:
+ version: 2
+ event_severity: "warning"
+ community: "test"
+ system_name: "ieisystem"
+ provider: "{{ inmanage }}"
+
+ - name: "Set snmp trap v3"
+ ieisystem.inmanage.edit_snmp_trap:
+ version: 3
+ event_severity: "all"
+ v3username: "ieisystem"
+ engine_id: "1234567890"
+ auth_protocol: "SHA"
+ auth_password: "12345678"
+ priv_protocol: "AES"
+ priv_password: "123454678"
+ trap_port: 162
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SNMP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setsnmptrap'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ version=dict(type='int', required=False, choices=[0, 1, 2, 3]),
+ event_severity=dict(type='str', required=False, choices=['all', 'warning', 'critical']),
+ community=dict(type='str', required=False),
+ host_id=dict(type='str', required=False, choices=['HostName', 'SerialNum', 'AssertTag']),
+ v3username=dict(type='str', required=False),
+ engine_id=dict(type='str', required=False),
+ auth_protocol=dict(type='str', required=False, choices=['NONE', 'SHA', 'MD5']),
+ auth_password=dict(type='str', required=False, no_log=True),
+ priv_protocol=dict(type='str', required=False, choices=['NONE', 'DES', 'AES']),
+ priv_password=dict(type='str', required=False, no_log=True),
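+ # auth_password and priv_password are declared with no_log=True so Ansible masks them in output and logs.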
+ system_name=dict(type='str', required=False),
+ system_id=dict(type='str', required=False),
+ location=dict(type='str', required=False),
+ contact=dict(type='str', required=False),
+ os=dict(type='str', required=False),
+ trap_port=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ snmp_obj = SNMP(argument_spec)
+ snmp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_threshold.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_threshold.py
new file mode 100644
index 000000000..8b12b59cb
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_threshold.py
@@ -0,0 +1,141 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_threshold
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set threshold information
+description:
+ - Set threshold information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ name:
+ description:
+ - Sensor name.
+ type: str
+ required: true
+ lnr:
+ description:
+ - Lower non-recoverable threshold; should be an integer.
+ type: int
+ lc:
+ description:
+ - Lower critical threshold; should be an integer.
+ type: int
+ lnc:
+ description:
+ - Lower non-critical threshold; should be an integer.
+ type: int
+ unc:
+ description:
+ - Upper non-critical threshold; should be an integer.
+ type: int
+ uc:
+ description:
+ - Upper critical threshold; should be an integer.
+ type: int
+ unr:
+ description:
+ - Upper non-recoverable threshold; should be an integer.
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Threshold test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set threshold information"
+ ieisystem.inmanage.edit_threshold:
+ name: "GPU1_Temp"
+ uc: 94
+ unc: 92
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Threshold(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setthreshold'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
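+ # Threshold keys follow the IPMI sensor naming: lnr/lc/lnc are the lower thresholds, unc/uc/unr the upper ones.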
+ name=dict(type='str', required=True),
+ lnr=dict(type='int', required=False),
+ lc=dict(type='int', required=False),
+ lnc=dict(type='int', required=False),
+ unc=dict(type='int', required=False),
+ uc=dict(type='int', required=False),
+ unr=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ threshold_obj = Threshold(argument_spec)
+ threshold_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_uid.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_uid.py
new file mode 100644
index 000000000..8dc5a0f17
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_uid.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_uid
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set UID
+description:
+ - Set UID on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ led:
+ description:
+ - Turn the LED on or off.
+ choices: ['on', 'off']
+ type: str
+ required: true
+ time:
+ description:
+ - Set the LED blink time (seconds).
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: UID test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set uid"
+ ieisystem.inmanage.edit_uid:
+ led: "on"
+ time: 10
+ provider: "{{ inmanage }}"
+
+ - name: "Set uid"
+ ieisystem.inmanage.edit_uid:
+ led: "off"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class UID(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setuid'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ led=dict(type='str', required=True, choices=['on', 'off']),
+ time=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ uid_obj = UID(argument_spec)
+ uid_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_virtual_media.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_virtual_media.py
new file mode 100644
index 000000000..8a7a60da3
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_virtual_media.py
@@ -0,0 +1,182 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_virtual_media
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set virtual media
+description:
+ - Set virtual media on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ local_media_support:
+ description:
+ - Enable or disable Local Media Support.
+ - Only the M5 model supports this parameter.
+ choices: ['Enable', 'Disable']
+ type: str
+ remote_media_support:
+ description:
+ - Enable or disable Remote Media support.
+ choices: ['Enable', 'Disable']
+ type: str
+ mount_type:
+ description:
+ - Virtual mount type.
+ - The I(FD) option is not supported in M6.
+ choices: ['CD', 'FD', 'HD']
+ type: str
+ same_settings:
+ description:
+ - Use the same settings as I(CD); 0 is No, 1 is Yes.
+ - Required when I(mount_type=CD).
+ choices: [0, 1]
+ type: int
+ mount:
+ description:
+ - Whether to mount virtual media.
+ - Only the M5 model supports this parameter.
+ choices: ['Enable', 'Disable']
+ type: str
+ remote_server_address:
+ description:
+ - Address of the server where the remote media images are stored.
+ type: str
+ remote_source_path:
+ description:
+ - Source path to the remote media images.
+ type: str
+ remote_share_type:
+ description:
+ - Share type of the remote media server, either NFS or Samba (CIFS).
+ choices: ['nfs', 'cifs']
+ type: str
+ remote_domain_name:
+ description:
+ - Remote domain name; this field is optional.
+ type: str
+ remote_user_name:
+ description:
+ - Remote User Name.
+ - Required when I(remote_share_type=cifs).
+ type: str
+ remote_password:
+ description:
+ - Remote Password.
+ - Required when I(remote_share_type=cifs).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Media test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set local media"
+ ieisystem.inmanage.edit_virtual_media:
+ local_media_support: "Enable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set remote media"
+ ieisystem.inmanage.edit_virtual_media:
+ remote_media_support: "Enable"
+ mount_type: 'CD'
+ same_settings: 0
+ mount: "Enable"
+ remote_server_address: "100.2.28.203"
+ remote_source_path: "/data/nfs/server/"
+ remote_share_type: "nfs"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Media(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setvirtualmedia'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ local_media_support=dict(type='str', required=False, choices=['Enable', 'Disable']),
+ remote_media_support=dict(type='str', required=False, choices=['Enable', 'Disable']),
+ mount_type=dict(type='str', required=False, choices=['CD', 'FD', 'HD']),
+ same_settings=dict(type='int', required=False, choices=[0, 1]),
+ mount=dict(type='str', required=False, choices=['Enable', 'Disable']),
+ remote_server_address=dict(type='str', required=False),
+ remote_source_path=dict(type='str', required=False),
+ remote_share_type=dict(type='str', required=False, choices=['nfs', 'cifs']),
+ remote_domain_name=dict(type='str', required=False),
+ remote_user_name=dict(type='str', required=False),
+ remote_password=dict(type='str', required=False, no_log=True),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ media_obj = Media(argument_spec)
+ media_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/edit_vlan.py b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_vlan.py
new file mode 100644
index 000000000..be020d8df
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/edit_vlan.py
@@ -0,0 +1,136 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: edit_vlan
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Set vlan information
+description:
+ - Set vlan information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ interface_name:
+ description:
+ - Set interface name. Select the LAN interface to be configured.
+ choices: ['eth0', 'eth1', 'bond0']
+ required: true
+ type: str
+ vlan_status:
+ description:
+ - Enable or disable vlan.
+ choices: ['enable', 'disable']
+ type: str
+ vlan_id:
+ description:
+ - The identifier for the VLAN configuration (2-4094).
+ type: int
+ vlan_priority:
+ description:
+ - The priority for the VLAN configuration (1-7).
+ type: int
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Vlan test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Set vlan information"
+ ieisystem.inmanage.edit_vlan:
+ interface_name: "eth0"
+ vlan_status: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set vlan information"
+ ieisystem.inmanage.edit_vlan:
+ interface_name: "eth0"
+ vlan_status: "enable"
+ vlan_id: 2
+ vlan_priority: 1
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Network(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'setvlan'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ interface_name=dict(type='str', required=True, choices=['eth0', 'eth1', 'bond0']),
+ vlan_status=dict(type='str', required=False, choices=['enable', 'disable']),
+ vlan_id=dict(type='int', required=False),
+ vlan_priority=dict(type='int', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ net_obj = Network(argument_spec)
+ net_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/event_log_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/event_log_info.py
new file mode 100644
index 000000000..0d35dd002
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/event_log_info.py
@@ -0,0 +1,126 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: event_log_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get event log information
+description:
+ - Get event log information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options:
+ log_time:
+ description:
+ - Get logs after the specified date. The time should be in the format YYYY-MM-DDTHH:MM+HH:MM, for example 2019-06-27T12:30+08:00.
+ type: str
+ count:
+ description:
+ - Get the most recent log of a specified number.
+ type: int
+ event_file:
+ description:
+ - Store logs to a file.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Event log info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get event log information"
+ ieisystem.inmanage.event_log_info:
+ log_time: "2020-06-01T12:30+08:00"
+ provider: "{{ inmanage }}"
+
+ - name: "Get event log information"
+ ieisystem.inmanage.event_log_info:
+ count: 30
+ provider: "{{ inmanage }}"
+
+ - name: "Get event log information"
+ ieisystem.inmanage.event_log_info:
+ event_file: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class EventLog(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
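+ # Query-only module, so check mode is supported; no 'changed' flag is ever set.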
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'geteventlog'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ log_time=dict(type='str', required=False),
+ count=dict(type='int', required=False),
+ event_file=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = EventLog(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/event_log_policy_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/event_log_policy_info.py
new file mode 100644
index 000000000..0dca1a8fd
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/event_log_policy_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: event_log_policy_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get event log policy information
+description:
+ - Get event log policy information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Event log policy test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get event log policy information"
+ ieisystem.inmanage.event_log_policy_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Log(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'geteventlogpolicy'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = Log(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/fan_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/fan_info.py
new file mode 100644
index 000000000..fca0b7efd
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/fan_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: fan_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get fan information
+description:
+ - Get fan information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Fan test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get fan information"
+ ieisystem.inmanage.fan_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Fan(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getfan'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ fan_obj = Fan(argument_spec)
+ fan_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/fru_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/fru_info.py
new file mode 100644
index 000000000..b42441f79
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/fru_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: fru_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get fru information
+description:
+ - Get fru information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Fru info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get fru information"
+ ieisystem.inmanage.fru_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Fru(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getfru'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ fru_obj = Fru(argument_spec)
+ fru_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/fw_version_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/fw_version_info.py
new file mode 100644
index 000000000..03c71e666
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/fw_version_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: fw_version_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get firmware version information
+description:
+ - Get firmware version information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Firmware version test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get firmware version information"
+ ieisystem.inmanage.fw_version_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class FwVersion(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getfw'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ fw_obj = FwVersion(argument_spec)
+ fw_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/gpu_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/gpu_info.py
new file mode 100644
index 000000000..bfa16da19
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/gpu_info.py
@@ -0,0 +1,100 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: gpu_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get GPU information
+description:
+ - Get GPU information on ieisystem Server.
+ - Only the M6 models support this feature.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: GPU test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get gpu information"
+ ieisystem.inmanage.gpu_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class GPU(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getgpu'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ gpu_obj = GPU(argument_spec)
+ gpu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/hard_disk_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/hard_disk_info.py
new file mode 100644
index 000000000..e0c994319
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/hard_disk_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: hard_disk_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get hard disk information
+description:
+ - Get hard disk information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Hard disk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get hard disk information"
+ ieisystem.inmanage.hard_disk_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Harddisk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getharddisk'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Harddisk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/hba_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/hba_info.py
new file mode 100644
index 000000000..0fd115a60
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/hba_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: hba_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get HBA information
+description:
+ - Get HBA information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: HBA test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get hba information"
+ ieisystem.inmanage.hba_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class HBA(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'gethba'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ hba_obj = HBA(argument_spec)
+ hba_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/kvm_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/kvm_info.py
new file mode 100644
index 000000000..d101c1f40
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/kvm_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: kvm_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get KVM information
+description:
+ - Get KVM information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: KVM test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get KVM information"
+ ieisystem.inmanage.kvm_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class KVM(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getkvm'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ kvm_obj = KVM(argument_spec)
+ kvm_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group.py
new file mode 100644
index 000000000..d561a3521
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group.py
@@ -0,0 +1,158 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ldap_group
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Manage ldap group information
+description:
+ - Manage ldap group information on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ state:
+ description:
+ - Whether the ldap group should exist or not, taking action if the state is different from what is stated.
+ choices: ['present', 'absent']
+ default: present
+ type: str
+ name:
+ description:
+ - Group name.
+ type: str
+ required: true
+ base:
+ description:
+ - Search Base.
+ type: str
+ pri:
+ description:
+ - Group privilege.
+ choices: ['administrator', 'user', 'operator', 'oem', 'none']
+ type: str
+ kvm:
+ description:
+ - Kvm privilege.
+ choices: ['enable', 'disable']
+ type: str
+ vm:
+ description:
+ - Vmedia privilege.
+ choices: ['enable', 'disable']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ldap group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add ldap group information"
+ ieisystem.inmanage.ldap_group:
+ state: "present"
+ name: "wbs"
+ base: "cn=manager"
+ pri: "administrator"
+ kvm: "enable"
+ vm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Set ldap group information"
+ ieisystem.inmanage.ldap_group:
+ state: "present"
+ name: "wbs"
+ pri: "user"
+ kvm: "disable"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete ldap group information"
+ ieisystem.inmanage.ldap_group:
+ state: "absent"
+ name: "wbs"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LDAP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'editldapgroup'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ state=dict(type='str', choices=['present', 'absent'], default='present'),
+ name=dict(type='str', required=True),
+ base=dict(type='str', required=False),
+ pri=dict(type='str', required=False, choices=['administrator', 'user', 'operator', 'oem', 'none']),
+ kvm=dict(type='str', required=False, choices=['enable', 'disable']),
+ vm=dict(type='str', required=False, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ ldap_obj = LDAP(argument_spec)
+ ldap_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group_info.py
new file mode 100644
index 000000000..13d0379cf
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_group_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ldap_group_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get LDAP group information
+description:
+ - Get LDAP group information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ldap group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ldap group information"
+ ieisystem.inmanage.ldap_group_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LDAP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
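+ # Read-only query: no 'changed' key is set, so the module always reports
+ # changed=False, which is why check mode can be supported here.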
+ self.module.params['subcommand'] = 'getldapgroup'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ ldap_obj = LDAP(argument_spec)
+ ldap_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_info.py
new file mode 100644
index 000000000..9cf87bb9b
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ldap_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ldap_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get LDAP information
+description:
+ - Get LDAP information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ldap test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ldap information"
+ ieisystem.inmanage.ldap_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LDAP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getldap'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ ldap_obj = LDAP(argument_spec)
+ ldap_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ldisk_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ldisk_info.py
new file mode 100644
index 000000000..f41926543
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ldisk_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ldisk_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get logical disks information
+description:
+ - Get logical disks information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Ldisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get logical disks information"
+ ieisystem.inmanage.ldisk_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Disk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getldisk'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Disk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/log_setting_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/log_setting_info.py
new file mode 100644
index 000000000..fce21d462
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/log_setting_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: log_setting_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BMC log setting information
+description:
+ - Get BMC log setting information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bmc log setting test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc log setting information"
+ ieisystem.inmanage.log_setting_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class LogSetting(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getbmclogsettings'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ log_setting_obj = LogSetting(argument_spec)
+ log_setting_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/media_instance_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/media_instance_info.py
new file mode 100644
index 000000000..d106f2603
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/media_instance_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: media_instance_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get Virtual Media Instance information
+description:
+ - Get Virtual Media Instance information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Media instance test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get Virtual Media Instance information"
+ ieisystem.inmanage.media_instance_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Instance(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getmediainstance'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ instance_obj = Instance(argument_spec)
+ instance_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/mem_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/mem_info.py
new file mode 100644
index 000000000..d58ad90f7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/mem_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: mem_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get memory information
+description:
+ - Get memory information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Memory test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get memory information"
+ ieisystem.inmanage.mem_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Memory(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getmemory'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ mem_obj = Memory(argument_spec)
+ mem_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ncsi_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ncsi_info.py
new file mode 100644
index 000000000..4636a2236
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ncsi_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ncsi_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get NCSI information
+description:
+ - Get NCSI information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: NCSI test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ncsi information"
+ ieisystem.inmanage.ncsi_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class NCSI(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getncsi'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ ncsi_obj = NCSI(argument_spec)
+ ncsi_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/network_bond_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/network_bond_info.py
new file mode 100644
index 000000000..96c48143c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/network_bond_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: network_bond_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get network bond information
+description:
+ - Get network bond information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bond test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get network bond information"
+ ieisystem.inmanage.network_bond_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Bond(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getnetworkbond'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ bond_obj = Bond(argument_spec)
+ bond_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/network_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/network_info.py
new file mode 100644
index 000000000..55feae870
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/network_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: network_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get network information
+description:
+ - Get network information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Network test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get network information"
+ ieisystem.inmanage.network_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Network(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getnetwork'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ net_obj = Network(argument_spec)
+ net_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/network_link_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/network_link_info.py
new file mode 100644
index 000000000..01e11f2f7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/network_link_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: network_link_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get network link information
+description:
+ - Get network link information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Link test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get network link information"
+ ieisystem.inmanage.network_link_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Link(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getnetworklink'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ link_obj = Link(argument_spec)
+ link_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/ntp_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/ntp_info.py
new file mode 100644
index 000000000..782012f04
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/ntp_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: ntp_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get NTP information
+description:
+ - Get NTP information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: NTP test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get ntp information"
+ ieisystem.inmanage.ntp_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class NTP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
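+ # NTP settings are returned by the generic 'gettime' query rather than a
+ # dedicated 'getntp' subcommand.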
+ self.module.params['subcommand'] = 'gettime'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ ntp_obj = NTP(argument_spec)
+ ntp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/onboard_disk_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/onboard_disk_info.py
new file mode 100644
index 000000000..f8ecb570d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/onboard_disk_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: onboard_disk_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get onboard disks information
+description:
+ - Get onboard disks information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Onboard test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get onboard disks information"
+ ieisystem.inmanage.onboard_disk_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Disk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'gethdddisk'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Disk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/pcie_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/pcie_info.py
new file mode 100644
index 000000000..933ccdef0
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/pcie_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: pcie_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get PCIE information
+description:
+ - Get PCIE information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: PCIE test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get PCIE information"
+ ieisystem.inmanage.pcie_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class PCIE(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpcie'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ pcie_obj = PCIE(argument_spec)
+ pcie_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/pdisk_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/pdisk_info.py
new file mode 100644
index 000000000..dbea5b681
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/pdisk_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: pdisk_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get physical disks information
+description:
+ - Get physical disks information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Pdisk test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get physical disks information"
+ ieisystem.inmanage.pdisk_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Disk(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpdisk'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ disk_obj = Disk(argument_spec)
+ disk_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/power_budget_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/power_budget_info.py
new file mode 100644
index 000000000..56fd5298d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/power_budget_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: power_budget_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get power budget information
+description:
+ - Get power budget information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power budget test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get power budget information"
+ ieisystem.inmanage.power_budget_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpowerbudget'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/power_consumption_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/power_consumption_info.py
new file mode 100644
index 000000000..86ce5a301
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/power_consumption_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: power_consumption_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get power consumption information
+description:
+ - Get power consumption information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power consumption test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get power consumption information"
+ ieisystem.inmanage.power_consumption_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpowerconsumption'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/power_restore_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/power_restore_info.py
new file mode 100644
index 000000000..d5c810aa7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/power_restore_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: power_restore_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get power restore information
+description:
+ - Get power restore information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power restore test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get power restore information"
+ ieisystem.inmanage.power_restore_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpowerrestore'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/power_status_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/power_status_info.py
new file mode 100644
index 000000000..498887b66
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/power_status_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: power_status_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get power status information
+description:
+ - Get power status information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Power status test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get power status information"
+ ieisystem.inmanage.power_status_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Power(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpowerstatus'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ power_obj = Power(argument_spec)
+ power_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/preserve_config_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/preserve_config_info.py
new file mode 100644
index 000000000..c7c4ba32d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/preserve_config_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: preserve_config_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get preserve config information
+description:
+ - Get preserve config information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Preserve test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get preserve config information"
+ ieisystem.inmanage.preserve_config_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Preserver(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpreserveconfig'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ pre_obj = Preserver(argument_spec)
+ pre_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/psu_config_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/psu_config_info.py
new file mode 100644
index 000000000..c0e783dbf
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/psu_config_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: psu_config_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get PSU config information
+description:
+ - Get PSU config information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Psu config test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get psu config information"
+ ieisystem.inmanage.psu_config_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Psu(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpsuconfig'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ psu_obj = Psu(argument_spec)
+ psu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/psu_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/psu_info.py
new file mode 100644
index 000000000..ec7b42fe4
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/psu_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: psu_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get PSU information
+description:
+ - Get PSU information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Psu test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get psu information"
+ ieisystem.inmanage.psu_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Psu(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpsu'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ psu_obj = Psu(argument_spec)
+ psu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/psu_peak_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/psu_peak_info.py
new file mode 100644
index 000000000..d83408821
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/psu_peak_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: psu_peak_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get PSU peak information
+description:
+ - Get PSU peak information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Psu peak test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get psu peak information"
+ ieisystem.inmanage.psu_peak_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Psu(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getpsupeak'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ psu_obj = Psu(argument_spec)
+ psu_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/raid_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/raid_info.py
new file mode 100644
index 000000000..d1db9d36c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/raid_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: raid_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get RAID/HBA card and controller information
+description:
+ - Get RAID/HBA card and controller information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Raid test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get raid information"
+ ieisystem.inmanage.raid_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Raid(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getraid'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ raid_obj = Raid(argument_spec)
+ raid_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/reset_bmc.py b/ansible_collections/ieisystem/inmanage/plugins/modules/reset_bmc.py
new file mode 100644
index 000000000..efb00f91a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/reset_bmc.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: reset_bmc
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: BMC reset
+description:
+ - BMC reset on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Reset bmc test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Reset bmc"
+ ieisystem.inmanage.reset_bmc:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Reset(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'resetbmc'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ reset_obj = Reset(argument_spec)
+ reset_obj.work()
+
+
+if __name__ == '__main__':
+ main()
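reset_bmc returns as soon as the BMC accepts the reset, while the controller itself takes a while to come back. A hedged follow-up sketch that waits for the BMC web service to respond again; the port, delay, and timeout values are assumptions rather than values taken from the module:

- name: "Reset bmc"
  ieisystem.inmanage.reset_bmc:
    provider: "{{ inmanage }}"

- name: "Wait for the BMC web interface to come back"
  ansible.builtin.wait_for:
    host: "{{ ansible_ssh_host }}"
    port: 443      # assumed HTTPS port of the BMC web service
    delay: 30      # assumed settle time before the first probe
    timeout: 600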
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/reset_kvm.py b/ansible_collections/ieisystem/inmanage/plugins/modules/reset_kvm.py
new file mode 100644
index 000000000..13ad89159
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/reset_kvm.py
@@ -0,0 +1,101 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: reset_kvm
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: KVM reset
+description:
+ - KVM reset on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Reset kvm test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Reset kvm"
+ ieisystem.inmanage.reset_kvm:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Reset(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'resetkvm'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ reset_obj = Reset(argument_spec)
+ reset_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/restore.py b/ansible_collections/ieisystem/inmanage/plugins/modules/restore.py
new file mode 100644
index 000000000..e72f49fd8
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/restore.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: restore
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Restore server settings
+description:
+ - Restore server settings on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ bak_file:
+ description:
+ - Select the backup file or backup folder.
+ required: true
+ type: str
+ item:
+ description:
+ - Select the item to restore.
+ - Only the M5 model supports this parameter.
+ choices: ['all', 'network', 'dns', 'service', 'ntp', 'smtp', 'snmptrap', 'ad', 'ldap', 'user', 'bios']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Restore test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Restore server settings"
+ ieisystem.inmanage.restore:
+ bak_file: "/home/wbs/backfile"
+ item: "all"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Restore(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'restore'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ bak_file=dict(type='str', required=True),
+ item=dict(type='str', required=False, choices=['all', 'network', 'dns', 'service', 'ntp', 'smtp', 'snmptrap', 'ad', 'ldap', 'user', 'bios']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ restore_obj = Restore(argument_spec)
+ restore_obj.work()
+
+
+if __name__ == '__main__':
+ main()
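Because item takes a single category, restoring just one group of settings means picking one of the documented choices. An illustrative sketch that reuses the backup path from the EXAMPLES block and restores only the user settings (the item option applies to M5 models only):

- name: "Restore only the user settings"
  ieisystem.inmanage.restore:
    bak_file: "/home/wbs/backfile"
    item: "user"
    provider: "{{ inmanage }}"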
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/self_test_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/self_test_info.py
new file mode 100644
index 000000000..11d8cb102
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/self_test_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: self_test_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get self test information
+description:
+ - Get self test information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Self test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get self test information"
+ ieisystem.inmanage.self_test_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Test(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getselftest'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ test_obj = Test(argument_spec)
+ test_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/sensor_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/sensor_info.py
new file mode 100644
index 000000000..9376a33b9
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/sensor_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: sensor_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get sensor information
+description:
+ - Get sensor information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Sensor test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get sensor information"
+ ieisystem.inmanage.sensor_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Sensor(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsensor'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ sensor_obj = Sensor(argument_spec)
+ sensor_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/server_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/server_info.py
new file mode 100644
index 000000000..e163ee141
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/server_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: server_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get server status information
+description:
+ - Get server status information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Server test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get server status information"
+ ieisystem.inmanage.server_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class ServerStatus(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getserver'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ server_obj = ServerStatus(argument_spec)
+ server_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/service_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/service_info.py
new file mode 100644
index 000000000..6e4a6c349
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/service_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: service_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get service information
+description:
+ - Get service information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Service info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get service information"
+ ieisystem.inmanage.service_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Service(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getservice'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ service_obj = Service(argument_spec)
+ service_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/session_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/session_info.py
new file mode 100644
index 000000000..baf013980
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/session_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: session_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get online session information
+description:
+ - Get online session information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Session test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get session information"
+ ieisystem.inmanage.session_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Session(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsessions'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ session_obj = Session(argument_spec)
+ session_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/smtp_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/smtp_info.py
new file mode 100644
index 000000000..912eb344a
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/smtp_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: smtp_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get SMTP information
+description:
+ - Get SMTP information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Smtp test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get smtp information"
+ ieisystem.inmanage.smtp_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SMTP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsmtp'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ smtp_obj = SMTP(argument_spec)
+ smtp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/snmp_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/snmp_info.py
new file mode 100644
index 000000000..b337ea324
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/snmp_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: snmp_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get SNMP get/set information
+description:
+ - Get SNMP get/set information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Snmp test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get snmp get/set information"
+ ieisystem.inmanage.snmp_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SNMP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsnmp'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ snmp_obj = SNMP(argument_spec)
+ snmp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/snmp_trap_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/snmp_trap_info.py
new file mode 100644
index 000000000..51f1d1cd7
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/snmp_trap_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: snmp_trap_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get SNMP trap information
+description:
+ - Get SNMP trap information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Trap test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get snmp trap information"
+ ieisystem.inmanage.snmp_trap_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SNMP(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsnmptrap'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ snmp_obj = SNMP(argument_spec)
+ snmp_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/support_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/support_info.py
new file mode 100644
index 000000000..468f452dc
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/support_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: support_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get support information
+description:
+ - Get the ieisystem Server support list information.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Support list test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get support information"
+ ieisystem.inmanage.support_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Support(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'support_model'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ support_obj = Support(argument_spec)
+ support_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/system_log_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/system_log_info.py
new file mode 100644
index 000000000..af8775002
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/system_log_info.py
@@ -0,0 +1,134 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: system_log_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get BMC system log information
+description:
+ - Get BMC system log information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options:
+ level:
+ description:
+ - Log level.
+ default: alert
+ choices: ['alert', 'critical', 'error', 'notice', 'warning', 'debug', 'emergency', 'info']
+ type: str
+ log_time:
+ description:
+ - Get logs after the specified date. The time format is YYYY-MM-DDTHH:MM+HH:MM, for example 2019-06-27T12:30+08:00.
+ type: str
+ count:
+ description:
+ - Get the specified number of most recent log entries.
+ type: int
+ system_file:
+ description:
+ - Store logs to a file.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Bmc system log info test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get bmc system log information"
+ ieisystem.inmanage.system_log_info:
+ level: "alert"
+ log_time: "2020-06-01T12:30+08:00"
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc system log information"
+ ieisystem.inmanage.system_log_info:
+ count: 30
+ provider: "{{ inmanage }}"
+
+ - name: "Get bmc system log information"
+ ieisystem.inmanage.system_log_info:
+ system_file: "/home/wbs/wbs.log"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class SystemLog(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getsystemlog'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ level=dict(type='str', default='alert', choices=['alert', 'critical', 'error', 'notice', 'warning', 'debug', 'emergency', 'info']),
+ log_time=dict(type='str', required=False),
+ count=dict(type='int', required=False),
+ system_file=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ log_obj = SystemLog(argument_spec)
+ log_obj.work()
+
+
+if __name__ == '__main__':
+ main()
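level, log_time, count and system_file are documented as independent filters; whether they can all be combined in one call is not stated, so this sketch sticks to level plus count and registers the result for later inspection (the registered variable name is arbitrary):

- name: "Get the 50 most recent error-level BMC log entries"
  ieisystem.inmanage.system_log_info:
    level: "error"
    count: 50
    provider: "{{ inmanage }}"
  register: bmc_log_result

- name: "Show the returned log data"
  ansible.builtin.debug:
    var: bmc_log_result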
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/temp_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/temp_info.py
new file mode 100644
index 000000000..e8cb1864c
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/temp_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: temp_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get temperature information
+description:
+ - Get temperature information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Temp test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get temp information"
+ ieisystem.inmanage.temp_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Sensor(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'gettemp'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ sensor_obj = Sensor(argument_spec)
+ sensor_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/threshold_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/threshold_info.py
new file mode 100644
index 000000000..d1729f5e0
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/threshold_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: threshold_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get threshold information
+description:
+ - Get threshold information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Threshold test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get threshold information"
+ ieisystem.inmanage.threshold_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Threshold(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getthreshold'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ threshold_obj = Threshold(argument_spec)
+ threshold_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/uid_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/uid_info.py
new file mode 100644
index 000000000..5c66a739f
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/uid_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: uid_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get UID information
+description:
+ - Get UID information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: UID test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get UID information"
+ ieisystem.inmanage.uid_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class UID(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getuid'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ uid_obj = UID(argument_spec)
+ uid_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/update_cpld.py b/ansible_collections/ieisystem/inmanage/plugins/modules/update_cpld.py
new file mode 100644
index 000000000..1823c2e57
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/update_cpld.py
@@ -0,0 +1,130 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: update_cpld
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Update CPLD
+description:
+ - Update CPLD on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ list:
+ description:
+ - Get the CPLD list.
+ - Only the M5 model supports this parameter.
+ choices: [True, False]
+ default: False
+ type: bool
+ id:
+ description:
+ - CPLD id.
+ - Required when I(list=False).
+ - Only the M5 model supports this parameter.
+ type: int
+ file_url:
+ description:
+ - CPLD image file path.
+ - Required when I(list=False).
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: CPLD test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get cpld list"
+ ieisystem.inmanage.update_cpld:
+ list: True
+ provider: "{{ inmanage }}"
+
+ - name: "Update cpld"
+ ieisystem.inmanage.update_cpld:
+ id: 1
+ file_url: "home/wbs/raw.bin"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class CPLD(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'updatecpld'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ list=dict(type='bool', default=False, choices=[True, False]),
+ id=dict(type='int', required=False),
+ file_url=dict(type='str', required=False),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ cpld_obj = CPLD(argument_spec)
+ cpld_obj.work()
+
+
+if __name__ == '__main__':
+ main()
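Since id is required when list=False, the natural M5 workflow is to list first, pick the id, then update. A sketch of that two-step flow; the layout of the returned CPLD list is not documented here, so it is only printed rather than parsed, and the id and image path are the illustrative values from the EXAMPLES block:

- name: "Get cpld list"
  ieisystem.inmanage.update_cpld:
    list: True
    provider: "{{ inmanage }}"
  register: cpld_list_result

- name: "Inspect the list to find the target CPLD id"
  ansible.builtin.debug:
    var: cpld_list_result

- name: "Update the chosen CPLD"
  ieisystem.inmanage.update_cpld:
    id: 1
    file_url: "/home/wbs/raw.bin"
    provider: "{{ inmanage }}"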
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/update_fw.py b/ansible_collections/ieisystem/inmanage/plugins/modules/update_fw.py
new file mode 100644
index 000000000..4e2ccb224
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/update_fw.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: update_fw
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Update firmware
+description:
+ - Update firmware on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ url:
+ description:
+ - Firmware image url.
+ required: true
+ type: str
+ mode:
+ description:
+ - Active mode, Manual or Auto (default).
+ default: Auto
+ choices: ['Auto', 'Manual']
+ type: str
+ type:
+ description:
+ - Firmware type.
+ choices: ['BMC', 'BIOS']
+ type: str
+ over_ride:
+ description:
+ - Whether to reserve configurations, 0-reserve, 1-override.
+ default: 0
+ choices: [0, 1]
+ type: int
+ has_me:
+ description:
+ - Whether to update the ME when updating the BIOS; only works on Intel platforms, 0-no, 1-yes.
+ - Only the M5 model supports this parameter.
+ default: 1
+ choices: [0, 1]
+ type: int
+ dual_image:
+ description:
+ - Update dual image (default) or not.
+ - Only the M5 model supports this parameter.
+ default: dual
+ choices: ['single', 'dual']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Update fw test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "update bios"
+ ieisystem.inmanage.update_fw:
+ url: "/home/wbs/SA5112M5_BIOS_4.1.8_Standard_20200117.bin"
+ type: "BIOS"
+ provider: "{{ inmanage }}"
+
+ - name: "update bmc"
+ ieisystem.inmanage.update_fw:
+ url: "/home/wbs/SA5112M5_BMC_4.17.7_Standard_20200430"
+ mode: "Auto"
+ type: "BMC"
+ dual_image: "dual"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Update(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'fwupdate'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ url=dict(type='str', required=True),
+ mode=dict(type='str', default='Auto', choices=['Auto', 'Manual']),
+ over_ride=dict(type='int', default=0, choices=[0, 1]),
+ type=dict(type='str', required=False, choices=['BMC', 'BIOS']),
+ has_me=dict(type='int', default=1, choices=[0, 1]),
+ dual_image=dict(type='str', default='dual', choices=['single', 'dual']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ update_obj = Update(argument_spec)
+ update_obj.work()
+
+
+if __name__ == '__main__':
+ main()
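The EXAMPLES block leaves over_ride and has_me at their defaults. A hedged variant that updates the BIOS while explicitly reserving the current configuration and skipping the ME update; the image path is reused from the examples and the parameter combination is illustrative, not a vendor recommendation:

- name: "Update BIOS, reserve configuration, skip ME update"
  ieisystem.inmanage.update_fw:
    url: "/home/wbs/SA5112M5_BIOS_4.1.8_Standard_20200117.bin"
    type: "BIOS"
    mode: "Manual"
    over_ride: 0   # 0 reserves the existing configuration
    has_me: 0      # 0 skips the ME update (Intel platforms, M5 only)
    provider: "{{ inmanage }}"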
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/update_psu.py b/ansible_collections/ieisystem/inmanage/plugins/modules/update_psu.py
new file mode 100644
index 000000000..f2e38ee45
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/update_psu.py
@@ -0,0 +1,117 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: update_psu
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Update PSU
+description:
+ - Update PSU on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ url:
+ description:
+ - Firmware image url.
+ required: true
+ type: str
+ mode:
+ description:
+ - Server auto reset option, Manual or Auto (default).
+ default: Auto
+ choices: ['Auto', 'Manual']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Update psu test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "update psu"
+ ieisystem.inmanage.update_psu:
+ url: "/home/wbs/CRPS1300D2W_00.01.04_BootLoader_Pri_Sec.hpm"
+ mode: "Auto"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Update(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'updatepsu'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ url=dict(type='str', required=True),
+ mode=dict(type='str', default='Auto', choices=['Auto', 'Manual']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ update_obj = Update(argument_spec)
+ update_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/user.py b/ansible_collections/ieisystem/inmanage/plugins/modules/user.py
new file mode 100644
index 000000000..59dd84b26
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/user.py
@@ -0,0 +1,165 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: user
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Manage user
+description:
+ - Manage user on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ state:
+ description:
+ - Whether the user should exist or not, taking action if the state is different from what is stated.
+ choices: ['present', 'absent']
+ default: present
+ type: str
+ uid:
+ description:
+ - User id. The range is 1 to 16.
+ type: int
+ uname:
+ description:
+ - User name. Required when I(uid) is None.
+ type: str
+ upass:
+ description:
+ - User password.
+ type: str
+ role_id:
+ description:
+ - User group.
+ - The default user groups are 'Administrator', 'Operator', and 'User'.
+ - Use the C(user_group_info) module to get all group information.
+ type: str
+ access:
+ description:
+ - User access.
+ choices: ['enable', 'disable']
+ type: str
+ priv:
+ description:
+ - Other user permissions; select one or more of kvm, vmm, sol, or none.
+ choices: ['kvm', 'vmm', 'sol', 'none']
+ type: list
+ elements: str
+ email:
+ description:
+ - User email.
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: User test
+ hosts: inmanage
+ no_log: true
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add user"
+ ieisystem.inmanage.user:
+ state: "present"
+ uname: "wbs"
+ upass: "admin"
+ role_id: "Administrator"
+ priv: "kvm,sol"
+ email: "wbs@ieisystem.com"
+ provider: "{{ inmanage }}"
+
+ - name: "Set user"
+ ieisystem.inmanage.user:
+ state: "present"
+ uname: "wbs"
+ upass: "12345678"
+ role_id: "user"
+ priv: "kvm,sol"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class User(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'edituser'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ state=dict(type='str', choices=['present', 'absent'], default='present'),
+ uid=dict(type='int', required=False),
+ uname=dict(type='str', required=False),
+ upass=dict(type='str', required=False, no_log=True),
+ role_id=dict(type='str', required=False),
+ access=dict(type='str', required=False, choices=['enable', 'disable']),
+ priv=dict(type='list', elements='str', required=False, choices=['kvm', 'vmm', 'sol', 'none']),
+ email=dict(type='str', required=False)
+ )
+ argument_spec.update(inmanage_argument_spec)
+ user_obj = User(argument_spec)
+ user_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/user_group.py b/ansible_collections/ieisystem/inmanage/plugins/modules/user_group.py
new file mode 100644
index 000000000..c858960b6
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/user_group.py
@@ -0,0 +1,204 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: user_group
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Manage user group
+description:
+ - Manage user group on ieisystem Server.
+notes:
+ - Does not support C(check_mode).
+options:
+ state:
+ description:
+ - Whether the user group should exist or not, taking action if the state is different from what is stated.
+ choices: ['present', 'absent']
+ default: present
+ type: str
+ name:
+ description:
+ - Group name.
+ - For the M6 model, the group name must be one of OEM1, OEM2, OEM3, OEM4.
+ required: true
+ type: str
+ pri:
+ description:
+ - Group privilege.
+ - Required when I(state=present).
+ - Only the M5 model supports this parameter.
+ choices: ['administrator', 'operator', 'user', 'oem', 'none']
+ type: str
+ general:
+ description:
+ - General configuration privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ power:
+ description:
+ - Power control privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ media:
+ description:
+ - Remote media configuration privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ kvm:
+ description:
+ - Remote KVM configuration privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ security:
+ description:
+ - Security configuration privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ debug:
+ description:
+ - Debug diagnose privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+ self:
+ description:
+ - Self configuration privilege.
+ - Required when I(state=present).
+ - Only the M6 model supports this parameter.
+ choices: ['enable', 'disable']
+ type: str
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: User group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Add user group"
+ ieisystem.inmanage.user_group:
+ state: "present"
+ name: "test"
+ pri: "administrator"
+ provider: "{{ inmanage }}"
+
+ - name: "Set user group"
+ ieisystem.inmanage.user_group:
+ state: "present"
+ name: "test"
+ pri: "user"
+ provider: "{{ inmanage }}"
+
+ - name: "Set m6 user group"
+ ieisystem.inmanage.user_group:
+ state: "present"
+ name: "OEM1"
+ general: "enable"
+ kvm: "enable"
+ provider: "{{ inmanage }}"
+
+ - name: "Delete user group"
+ ieisystem.inmanage.user_group:
+ state: "absent"
+ name: "test"
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+from ansible.module_utils.basic import AnsibleModule
+
+
+class UserGroup(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=False)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'editusergroup'
+ self.results = get_connection(self.module)
+ if self.results['State'] == 'Success':
+ self.results['changed'] = True
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict(
+ state=dict(type='str', choices=['present', 'absent'], default='present'),
+ name=dict(type='str', required=True),
+ pri=dict(type='str', required=False, choices=['administrator', 'operator', 'user', 'oem', 'none']),
+ general=dict(type='str', required=False, choices=['enable', 'disable']),
+ power=dict(type='str', required=False, choices=['enable', 'disable']),
+ media=dict(type='str', required=False, choices=['enable', 'disable']),
+ kvm=dict(type='str', required=False, choices=['enable', 'disable']),
+ security=dict(type='str', required=False, choices=['enable', 'disable']),
+ debug=dict(type='str', required=False, choices=['enable', 'disable']),
+ self=dict(type='str', required=False, choices=['enable', 'disable']),
+ )
+ argument_spec.update(inmanage_argument_spec)
+ usergroup_obj = UserGroup(argument_spec)
+ usergroup_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/user_group_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/user_group_info.py
new file mode 100644
index 000000000..057c74cfe
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/user_group_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: user_group_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get user group information
+description:
+ - Get user group information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: User group test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get user group information"
+ ieisystem.inmanage.user_group_info:
+ provider: "{{ inmanage }}"
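+
+ # Illustrative example (an editorial addition, not part of the original module
+ # examples): register the result so the returned group information can be
+ # inspected or reused, for instance as the role_id input to the user module.
+ - name: "Get user group information and keep the result"
+ ieisystem.inmanage.user_group_info:
+ provider: "{{ inmanage }}"
+ register: group_result
+
+ - name: "Show user group information"
+ ansible.builtin.debug:
+ var: group_result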
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class UserGroup(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getusergroup'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ usergroup_obj = UserGroup(argument_spec)
+ usergroup_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/user_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/user_info.py
new file mode 100644
index 000000000..df548dc88
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/user_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: user_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get user information
+description:
+ - Get user information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: User test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get user information"
+ ieisystem.inmanage.user_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class User(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getuser'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ user_obj = User(argument_spec)
+ user_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/virtual_media_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/virtual_media_info.py
new file mode 100644
index 000000000..ede6f062d
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/virtual_media_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: virtual_media_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get Virtual Media information
+description:
+ - Get Virtual Media information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Media test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get Virtual Media information"
+ ieisystem.inmanage.virtual_media_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Media(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getvirtualmedia'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ media_obj = Media(argument_spec)
+ media_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/plugins/modules/volt_info.py b/ansible_collections/ieisystem/inmanage/plugins/modules/volt_info.py
new file mode 100644
index 000000000..c833aa827
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/plugins/modules/volt_info.py
@@ -0,0 +1,99 @@
+#!/usr/bin/python
+# -*- coding:utf-8 -*-
+
+# Copyright(C) 2023 IEIT Inc. All Rights Reserved.
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+DOCUMENTATION = '''
+---
+module: volt_info
+version_added: "1.0.0"
+author:
+ - WangBaoshan (@ieisystem)
+short_description: Get volt information
+description:
+ - Get volt information on ieisystem Server.
+notes:
+ - Supports C(check_mode).
+options: {}
+extends_documentation_fragment:
+ - ieisystem.inmanage.inmanage
+'''
+
+EXAMPLES = '''
+- name: Volt test
+ hosts: inmanage
+ connection: local
+ gather_facts: false
+ vars:
+ inmanage:
+ host: "{{ ansible_ssh_host }}"
+ username: "{{ username }}"
+ password: "{{ password }}"
+
+ tasks:
+
+ - name: "Get volt information"
+ ieisystem.inmanage.volt_info:
+ provider: "{{ inmanage }}"
+'''
+
+RETURN = '''
+message:
+ description: Messages returned after module execution.
+ returned: always
+ type: str
+state:
+ description: Status after module execution.
+ returned: always
+ type: str
+changed:
+ description: Check to see if a change was made on the device.
+ returned: always
+ type: bool
+'''
+
+from ansible.module_utils.basic import AnsibleModule
+from ansible_collections.ieisystem.inmanage.plugins.module_utils.inmanage import (inmanage_argument_spec, get_connection)
+
+
+class Sensor(object):
+ def __init__(self, argument_spec):
+ self.spec = argument_spec
+ self.module = None
+ self.init_module()
+ self.results = dict()
+
+ def init_module(self):
+ """Init module object"""
+
+ self.module = AnsibleModule(
+ argument_spec=self.spec, supports_check_mode=True)
+
+ def run_command(self):
+ self.module.params['subcommand'] = 'getvolt'
+ self.results = get_connection(self.module)
+
+ def show_result(self):
+ """Show result"""
+ self.module.exit_json(**self.results)
+
+ def work(self):
+ """Worker"""
+ self.run_command()
+ self.show_result()
+
+
+def main():
+ argument_spec = dict()
+ argument_spec.update(inmanage_argument_spec)
+ sensor_obj = Sensor(argument_spec)
+ sensor_obj.work()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ansible_collections/ieisystem/inmanage/requirements.txt b/ansible_collections/ieisystem/inmanage/requirements.txt
new file mode 100644
index 000000000..81b70119e
--- /dev/null
+++ b/ansible_collections/ieisystem/inmanage/requirements.txt
@@ -0,0 +1 @@
+inManage==1.0.0 \ No newline at end of file
diff --git a/ansible_collections/inspur/ispim/CHANGELOG.rst b/ansible_collections/inspur/ispim/CHANGELOG.rst
index 4a5628834..fa1910bf0 100644
--- a/ansible_collections/inspur/ispim/CHANGELOG.rst
+++ b/ansible_collections/inspur/ispim/CHANGELOG.rst
@@ -5,6 +5,14 @@ Inspur.Ispim Release Notes
.. contents:: Topics
+v2.2.3
+======
+
+Bugfixes
+--------
+
+- Change the ansible version in meta/runtime.yml to 2.15.0 (https://github.com/ispim/inspur.ispim/pull/37).
+
v2.2.2
======
diff --git a/ansible_collections/inspur/ispim/FILES.json b/ansible_collections/inspur/ispim/FILES.json
index 2bc19535d..af8ca85a3 100644
--- a/ansible_collections/inspur/ispim/FILES.json
+++ b/ansible_collections/inspur/ispim/FILES.json
@@ -970,7 +970,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d365367ae9434742bca426eee9c07bb98c2879e1a12455869d2e17bbf4448304",
+ "chksum_sha256": "dba5c07af6167cb78bb80c586611346196d3ba9bf728dbb1373a0dd32ce9c7c0",
"format": 1
},
{
@@ -1915,14 +1915,14 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb70a110be0417beb579fcf78f9ec1dc600d0f24bb934bcde8b7224afefa0358",
+ "chksum_sha256": "25ca9690429dabc556de31df0e4415b1ee2c455103d716e83043bcf0f20a9e91",
"format": 1
},
{
"name": "changelogs/.plugin-cache.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "be9b2ed7bd93a18b6fb794fb56a5c7a838b50be73f6e1576120edfc93a0d6512",
+ "chksum_sha256": "b4028bf04a70c382bd09bd1fb91e4a5d61aaac0e30a84b6846fff2f0f3a694d3",
"format": 1
},
{
@@ -1971,7 +1971,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d3e8acd6e4e1f4a717de66ae790c01bf7c799bfe7d7f8b4644dead575744ad4",
+ "chksum_sha256": "4d6595ae87f7916ecdd5fe9fbc6a36ea9c98980537d609f73742ab376f14c370",
"format": 1
},
{
@@ -1992,7 +1992,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d097f759cd6be86c5764be9dedaf6a045cee9a616717d64cc2c6bda342ac859f",
+ "chksum_sha256": "0fb2f13dea29f7c0a337c5c8a31c7ac70ee03a9419e8e29153c8bc2e2b4d5385",
"format": 1
}
],
diff --git a/ansible_collections/inspur/ispim/MANIFEST.json b/ansible_collections/inspur/ispim/MANIFEST.json
index ba2b9362f..1cf4c2191 100644
--- a/ansible_collections/inspur/ispim/MANIFEST.json
+++ b/ansible_collections/inspur/ispim/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "inspur",
"name": "ispim",
- "version": "2.2.2",
+ "version": "2.2.3",
"authors": [
"Baoshan Wang <wangbaoshan@inspur.com>"
],
@@ -29,7 +29,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8512bf3f7cc02f00ab9edd6f6fd4e313c414b697cf1fa2b8d6465c46b48131a0",
+ "chksum_sha256": "55f7d6745a2974d75b938207b0a447b3ac5ef2d7c50edb9764aa89b8b2df5533",
"format": 1
},
"format": 1
diff --git a/ansible_collections/inspur/ispim/README.md b/ansible_collections/inspur/ispim/README.md
index a596693de..27b4e89c6 100644
--- a/ansible_collections/inspur/ispim/README.md
+++ b/ansible_collections/inspur/ispim/README.md
@@ -1,90 +1,90 @@
-# inspur sm Collection
-
-This repo contains the `inspur.ispim` Ansible Collection. Inspur server supports ansible management device,Basic management of the server based on restful interface.
-
-## Tested with Ansible
-
-Tested with the current Ansible 2.15 releases and the current development version of Ansible. Ansible versions before 2.10.0 are not supported.
-
-## External requirements
-
-Circumstance instruction:
-Ansible module is suitable for ansible version 2.15
-
-Main steps:
-
-* Install Ansible 2.15
-* Install inspursmsdk: pip install inspursmsdk
-<!--- A step-by-step reproduction of the problem is helpful if there is no related issue -->
-Thes modules require the following to be installed on the control node:
-
-* Python 3.7 or later
-* [Ansible](http://www.ansible.com) 2.10 or later
-* [Inspur support] [inspursmsdk](https://github.com/ISIB-Group/inspursmsdk) 1.5.0 or later
-
-## Included content
-
-Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/inspur/ispim)
-
-## Using this collection
-
-Before using the General community collection, you need to install the collection with the `ansible-galaxy` CLI:
-
- ansible-galaxy collection install inspur.ispim
-
-You can also include it in a `requirements.yml` file and install it via `ansible-galaxy collection install -r requirements.yml` using the format:
-
-```yaml
-collections:
-- name: inspur.ispim
-```
-
-See [Ansible Using collections](https://isib-group.github.io/inspur.ispim-docs/index.html) for more details.
-
-## Contributing to this collection
-
-If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATH`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
-
-You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible inspur.ispim Guide](https://ispim.github.io/inspur.ispim-docs/index.html).
-
-### Running tests
-
-See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
-
-### Communication
-
-wangbaoshan@inspur.com
-
-
-### Publishing New Version
-
-Basic instructions without release branches:
-
-1. Create `changelogs/fragments/<version>.yml` with `release_summary:` section (which must be a string, not a list).
-2. Run `antsibull-changelog release --collection-flatmap yes`
-3. Make sure `CHANGELOG.rst` and `changelogs/changelog.yaml` are added to git, and the deleted fragments have been removed.
-4. Tag the commit with `<version>`. Push changes and tag to the main repository.
-
-## Release notes
-
-See the [changelog](https://github.com/ispim/inspur.ispim/blob/main/CHANGELOG.rst).
-
-## Roadmap
-
-See [this issue](https://github.com/ispim/inspur.ispim/issues/2) for information on releasing, versioning and deprecation.
-
-In general, we plan to release a major version every year, and minor versions every three months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
-
-
-## More information
-
-- [Ansible Collection overview](https://github.com/ansible-collections/overview)
-- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
-- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
-- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
-
-## Licensing
-
-GNU General Public License v3.0 or later.
-
-See [COPYING](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text.
+# inspur.ispim Collection
+
+This repo contains the `inspur.ispim` Ansible Collection. It lets Ansible manage Inspur servers, providing basic management of the server through a RESTful interface.
+
+## Tested with Ansible
+
+Tested with the current Ansible 2.15.0 release and the current development version of Ansible. Ansible versions before 2.10.0 are not supported.
+
+## External requirements
+
+Usage notes:
+These modules are intended for use with Ansible 2.15.0.
+
+Main steps:
+
+* Install Ansible 2.15.0
+* Install inspursmsdk: pip install inspursmsdk
+<!--- A step-by-step reproduction of the problem is helpful if there is no related issue -->
+These modules require the following to be installed on the control node:
+
+* Python 3.7 or later
+* [Ansible](http://www.ansible.com) 2.15.0 or later
+* [Inspur support] [inspursmsdk](https://github.com/ISIB-Group/inspursmsdk) 1.5.0 or later
+
+## Included content
+
+Please check the included content on the [Ansible Galaxy page for this collection](https://galaxy.ansible.com/inspur/ispim)
+
+## Using this collection
+
+Before using this collection, you need to install it with the `ansible-galaxy` CLI:
+
+ ansible-galaxy collection install inspur.ispim
+
+You can also include it in a `requirements.yml` file and install it via `ansible-galaxy collection install -r requirements.yml` using the format:
+
+```yaml
+collections:
+- name: inspur.ispim
+```
+
+See [Ansible Using collections](https://isib-group.github.io/inspur.ispim-docs/index.html) for more details.
+
+## Contributing to this collection
+
+If you want to develop new content for this collection or improve what is already here, the easiest way to work on the collection is to clone it into one of the configured [`COLLECTIONS_PATH`](https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths), and work on it there.
+
+You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible inspur.ispim Guide](https://ispim.github.io/inspur.ispim-docs/index.html).
+
+### Running tests
+
+See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#testing-collections).
+
+### Communication
+
+wangbaoshan@inspur.com
+
+
+### Publishing New Version
+
+Basic instructions without release branches:
+
+1. Create `changelogs/fragments/<version>.yml` with a `release_summary:` section (which must be a string, not a list); see the example after this list.
+2. Run `antsibull-changelog release --collection-flatmap yes`
+3. Make sure `CHANGELOG.rst` and `changelogs/changelog.yaml` are added to git, and the deleted fragments have been removed.
+4. Tag the commit with `<version>`. Push changes and tag to the main repository.
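+
+A minimal fragment for step 1 might look like the following (the version number in the file name is only an illustration):
+
+```yaml
+# changelogs/fragments/2.2.4.yml (hypothetical version)
+release_summary: |
+  Short description of the release, written as a single string (not a list).
+```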
+
+## Release notes
+
+See the [changelog](https://github.com/ispim/inspur.ispim/blob/main/CHANGELOG.rst).
+
+## Roadmap
+
+See [this issue](https://github.com/ispim/inspur.ispim/issues/2) for information on releasing, versioning and deprecation.
+
+In general, we plan to release a major version every year, and minor versions every three months. Major versions can contain breaking changes, while minor versions only contain new features and bugfixes.
+
+
+## More information
+
+- [Ansible Collection overview](https://github.com/ansible-collections/overview)
+- [Ansible User guide](https://docs.ansible.com/ansible/latest/user_guide/index.html)
+- [Ansible Developer guide](https://docs.ansible.com/ansible/latest/dev_guide/index.html)
+- [Ansible Community code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html)
+
+## Licensing
+
+GNU General Public License v3.0 or later.
+
+See [COPYING](https://www.gnu.org/licenses/gpl-3.0.txt) to see the full text.
diff --git a/ansible_collections/inspur/ispim/changelogs/.plugin-cache.yaml b/ansible_collections/inspur/ispim/changelogs/.plugin-cache.yaml
index 8280c23b8..1a883f7e7 100644
--- a/ansible_collections/inspur/ispim/changelogs/.plugin-cache.yaml
+++ b/ansible_collections/inspur/ispim/changelogs/.plugin-cache.yaml
@@ -652,4 +652,4 @@ plugins:
shell: {}
strategy: {}
vars: {}
-version: 2.2.2
+version: 2.2.3
diff --git a/ansible_collections/inspur/ispim/changelogs/changelog.yaml b/ansible_collections/inspur/ispim/changelogs/changelog.yaml
index 3134f2326..d99e44707 100644
--- a/ansible_collections/inspur/ispim/changelogs/changelog.yaml
+++ b/ansible_collections/inspur/ispim/changelogs/changelog.yaml
@@ -499,3 +499,10 @@ releases:
fragments:
- 35-delete_venv.yml
release_date: '2024-06-06'
+ 2.2.3:
+ changes:
+ bugfixes:
+ - Change the ansible version in meta/runtime.yml to 2.15.0 (https://github.com/ispim/inspur.ispim/pull/37).
+ fragments:
+ - 37-edit_ansible_version.yml
+ release_date: '2024-06-14'
diff --git a/ansible_collections/inspur/ispim/meta/runtime.yml b/ansible_collections/inspur/ispim/meta/runtime.yml
index 1c7d1bb7c..5f8d47e7b 100644
--- a/ansible_collections/inspur/ispim/meta/runtime.yml
+++ b/ansible_collections/inspur/ispim/meta/runtime.yml
@@ -1,2 +1,2 @@
----
-requires_ansible: '>=2.15'
+---
+requires_ansible: '>=2.15.0'
diff --git a/ansible_collections/kubernetes/core/.ansible-lint-ignore b/ansible_collections/kubernetes/core/.ansible-lint-ignore
new file mode 100644
index 000000000..b089a5a25
--- /dev/null
+++ b/ansible_collections/kubernetes/core/.ansible-lint-ignore
@@ -0,0 +1,2 @@
+# no-changed-when is not required for examples
+plugins/connection/kubectl.py no-changed-when
diff --git a/ansible_collections/kubernetes/core/.github/workflows/integration-tests-kubevirt.yaml b/ansible_collections/kubernetes/core/.github/workflows/integration-tests-kubevirt.yaml
new file mode 100644
index 000000000..aa6a1d548
--- /dev/null
+++ b/ansible_collections/kubernetes/core/.github/workflows/integration-tests-kubevirt.yaml
@@ -0,0 +1,127 @@
+name: Integration tests Kubevirt
+on:
+ pull_request:
+ types:
+ - opened
+ - reopened
+ - synchronize
+ branches:
+ - main
+ - stable-*
+
+jobs:
+ splitter:
+ continue-on-error: true
+ env:
+ kubernetes: "./kubernetes"
+ kubevirt: "./kubevirt"
+ py_version: 3.9
+ runs-on: ubuntu-latest
+ outputs:
+ test_targets: ${{ steps.splitter.outputs.kubevirt_targets }}
+ steps:
+ - name: Checkout kubernetes.core repository
+ uses: actions/checkout@v3
+ with:
+ path: ${{ env.kubernetes }}
+ ref: ${{ github.event.pull_request.head.sha }}
+
+ - name: Checkout the kubevirt.core collection
+ uses: actions/checkout@v3
+ with:
+ repository: kubevirt/kubevirt.core
+ path: ${{ env.kubevirt }}
+
+ - name: "Set up Python ${{ env.py_version }}"
+ uses: actions/setup-python@v4
+ with:
+ python-version: "${{ env.py_version }}"
+
+ - name: List targets from kubevirt.core collection
+ id: splitter
+ run: python ${{ env.kubernetes }}/tools/kubevirt_list_targets.py ${{ env.kubevirt }}
+ shell: bash
+
+ integration:
+ if: ${{ needs.splitter.outputs.test_targets != '' }}
+ name: "integration-kubevirt-${{ matrix.test-target }}"
+ runs-on: ubuntu-latest
+ continue-on-error: true
+ needs:
+ - splitter
+ env:
+ kubernetes: "./kubernetes"
+ kubevirt: "./kubevirt"
+ ansible_version: milestone
+ python_version: 3.12
+ strategy:
+ fail-fast: false
+ matrix:
+ test-target: ${{ fromJson(needs.splitter.outputs.test_targets) }}
+ steps:
+ - name: Checkout kubernetes.core repository
+ uses: actions/checkout@v4
+ with:
+ path: ${{ env.kubernetes }}
+ ref: ${{ github.event.pull_request.head.sha }}
+
+ - name: Checkout kubevirt.core repository
+ uses: actions/checkout@v4
+ with:
+ repository: kubevirt/kubevirt.core
+ path: ${{ env.kubevirt }}
+ ref: main
+
+ # Install ansible
+ - name: Install ansible-core (${{ env.ansible_version }})
+ run: >-
+ python3 -m pip install
+ https://github.com/ansible/ansible/archive/${{ env.ansible_version }}.tar.gz
+ --disable-pip-version-check
+ shell: bash
+
+ - name: Build and install kubevirt.core collection
+ id: install-kubevirt
+ uses: ansible-network/github_actions/.github/actions/build_install_collection@main
+ with:
+ install_python_dependencies: true
+ source_path: ${{ env.kubevirt }}
+
+ - name: Build and install kubernetes.core collection
+ id: install-kubernetes
+ uses: ansible-network/github_actions/.github/actions/build_install_collection@main
+ with:
+ install_python_dependencies: true
+ source_path: ${{ env.kubernetes }}
+
+ - name: Install kind / kubectl
+ uses: helm/kind-action@v1.9.0
+ with:
+ version: v0.22.0
+ install_only: true
+
+ - name: Deploy kubevirt
+ run: >-
+ ${{ env.kubevirt }}/hack/e2e-setup.sh \
+ -v \
+ --configure-inotify-limits \
+ --configure-secondary-network \
+ --deploy-kubevirt \
+ --deploy-kubevirt-cdi \
+ --deploy-kubevirt-common-instancetypes \
+ --deploy-cnao \
+ --create-cluster \
+ --create-nad
+ env:
+ KIND: kind
+ KUBECTL: kubectl
+
+ - name: Run integration tests
+ uses: ansible-network/github_actions/.github/actions/ansible_test_integration@main
+ with:
+ collection_path: ${{ steps.install-kubevirt.outputs.collection_path }}
+ python_version: ${{ env.python_version }}
+ ansible_version: ${{ env.ansible_version }}
+ ansible_test_targets: ${{ matrix.test-target }}
+ env:
+ ANSIBLE_COLLECTIONS_PATHS: /home/runner/collections
diff --git a/ansible_collections/kubernetes/core/CHANGELOG.rst b/ansible_collections/kubernetes/core/CHANGELOG.rst
index f93c131fc..102ddcf74 100644
--- a/ansible_collections/kubernetes/core/CHANGELOG.rst
+++ b/ansible_collections/kubernetes/core/CHANGELOG.rst
@@ -4,6 +4,19 @@ Kubernetes Collection Release Notes
.. contents:: Topics
+v3.2.0
+======
+
+Release Summary
+---------------
+This release comes with documentation updates.
+
+Minor Changes
+-------------
+
+- inventory/k8s.py - Defer removal of k8s inventory plugin to version 6.0.0 (https://github.com/ansible-collections/kubernetes.core/pull/734).
+- connection/kubectl.py - Added an example of using the kubectl connection plugin to the documentation (https://github.com/ansible-collections/kubernetes.core/pull/741).
+
v3.1.0
======
diff --git a/ansible_collections/kubernetes/core/FILES.json b/ansible_collections/kubernetes/core/FILES.json
index 194bc9a29..498543b12 100644
--- a/ansible_collections/kubernetes/core/FILES.json
+++ b/ansible_collections/kubernetes/core/FILES.json
@@ -36,6 +36,13 @@
"format": 1
},
{
+ "name": ".github/workflows/integration-tests-kubevirt.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2dba23cfa19cfab6102648b679ad3d7129cb3658898110c304fbf39de2283788",
+ "format": 1
+ },
+ {
"name": ".github/workflows/integration-tests.yaml",
"ftype": "file",
"chksum_type": "sha256",
@@ -85,10 +92,31 @@
"format": 1
},
{
+ "name": "changelogs/fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments/20240530-ansible-core-support-update.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "388f7f5ade9a2116bb1c3c83b188c842df1569faceca463687e067129ec26417",
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments/inventory-update_removal_date.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "696751970b01668168ca555359871e3b5c00ed6ebde834e318aea1b1c1c3be9b",
+ "format": 1
+ },
+ {
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a9dceb8816711232f8abe7c4507257d879f61468e572b7010c3f8f1f9f9a8bf2",
+ "chksum_sha256": "753b928c2066b441de64128abee4a313b60c8f94343b347abd1c443576a545b8",
"format": 1
},
{
@@ -263,7 +291,7 @@
"name": "docs/kubernetes.core.k8s_inventory.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d970818f092674dec18fdbbe88cce8c212ee6764a2d9682ccd16e82e6696023",
+ "chksum_sha256": "12710bd3a5b8fad92cb40b5932426e8224e0dee466a325ef37a566ac29e19784",
"format": 1
},
{
@@ -326,7 +354,7 @@
"name": "docs/kubernetes.core.kubectl_connection.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9286d253650a63d78908c1525a78cca5adacdbbddbd41fc713302d3683724a9a",
+ "chksum_sha256": "f35da72b89833bb15d9791f4f229950749e16673febbc8ebca937abe50398095",
"format": 1
},
{
@@ -347,7 +375,7 @@
"name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7ea3ff6f907ecc449cbc9f2a327838184e129685588d9b9f4636c6448b35a7c8",
+ "chksum_sha256": "7a23c7787636e2cf2587c32356cd2ce1af9a84098d569673a28cf3b8b595f073",
"format": 1
},
{
@@ -487,7 +515,7 @@
"name": "plugins/connection/kubectl.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08e05362470ad358e2f8336bf2d47bb91e841d4fb070e807b2e675b7a9ffeab2",
+ "chksum_sha256": "f612353a7be29507e8b54dcea222a95341596655e708b3934a092945578f1310",
"format": 1
},
{
@@ -592,7 +620,7 @@
"name": "plugins/inventory/k8s.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "27d1adb4227f2c07c9bd79d11909df1a7f577ad698340d5c9e956cf1b9e47293",
+ "chksum_sha256": "92b6c7ec15ffdd62f3f3f0f46284b03513ebc497408a5fac81aa3b4914b5bccc",
"format": 1
},
{
@@ -4796,6 +4824,20 @@
"format": 1
},
{
+ "name": "tools",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tools/kubevirt_list_targets.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "631f7ccaf6367b109574f3de3c344cf12120cf8689c59313ef8663a271b8a8c5",
+ "format": 1
+ },
+ {
"name": ".ansible-lint",
"ftype": "file",
"chksum_type": "sha256",
@@ -4803,6 +4845,13 @@
"format": 1
},
{
+ "name": ".ansible-lint-ignore",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4ac019f513db56ce83fc05ad85a0d5956642ce92c85936f5266dcc901dcb1025",
+ "format": 1
+ },
+ {
"name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
@@ -4820,7 +4869,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab78cf4fb4f75175d5ddea647bf1e4a77c5ec5d0f204abb7ff7e06c08dca5c55",
+ "chksum_sha256": "9a446511a9384939b2dd0f404021372d07919164596ad7369289929939176ad9",
"format": 1
},
{
@@ -4841,7 +4890,7 @@
"name": "Makefile",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72fce95837f17ca1375a1f2130a5b695e500f1ac1cb87942d51dd1a3ed99343b",
+ "chksum_sha256": "2a41ba68ffbb65f00cd8dce3df79f195cf78791159dc954f9b83b02805c9f89e",
"format": 1
},
{
@@ -4855,7 +4904,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "690f7280c8733a5b793e7491c6234caaffae167e6a45637c416658fdd84fa0ec",
+ "chksum_sha256": "7a68d6074e036d0659f3f4dddaefc45c6c82a6f659c53ca99e89ca5838b6ad32",
"format": 1
},
{
diff --git a/ansible_collections/kubernetes/core/MANIFEST.json b/ansible_collections/kubernetes/core/MANIFEST.json
index e4e7f12e9..6a42026e4 100644
--- a/ansible_collections/kubernetes/core/MANIFEST.json
+++ b/ansible_collections/kubernetes/core/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "kubernetes",
"name": "core",
- "version": "3.1.0",
+ "version": "3.2.0",
"authors": [
"chouseknecht (https://github.com/chouseknecht)",
"geerlingguy (https://www.jeffgeerling.com/)",
@@ -36,7 +36,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "497a265e1b9a279b5abb4fcb6acc3ad94db0ee0519969df4bf816c4a90b7b8cb",
+ "chksum_sha256": "d379854b52548bdd5dd202eb587c0b005e861f6e10f8d0120d34cfb61e789d06",
"format": 1
},
"format": 1
diff --git a/ansible_collections/kubernetes/core/Makefile b/ansible_collections/kubernetes/core/Makefile
index b76f3af50..23826b12c 100644
--- a/ansible_collections/kubernetes/core/Makefile
+++ b/ansible_collections/kubernetes/core/Makefile
@@ -1,5 +1,5 @@
# Also needs to be updated in galaxy.yml
-VERSION = 3.0.0
+VERSION = 3.2.0
TEST_ARGS ?= ""
PYTHON_VERSION ?= `python -c 'import platform; print(".".join(platform.python_version_tuple()[0:2]))'`
diff --git a/ansible_collections/kubernetes/core/README.md b/ansible_collections/kubernetes/core/README.md
index 67993a06e..f5a8d604d 100644
--- a/ansible_collections/kubernetes/core/README.md
+++ b/ansible_collections/kubernetes/core/README.md
@@ -93,7 +93,7 @@ You can also include it in a `requirements.yml` file and install it via `ansible
---
collections:
- name: kubernetes.core
- version: 3.0.0
+ version: 3.2.0
```
### Installing the Kubernetes Python Library
diff --git a/ansible_collections/kubernetes/core/changelogs/changelog.yaml b/ansible_collections/kubernetes/core/changelogs/changelog.yaml
index a6a689887..84adc471a 100644
--- a/ansible_collections/kubernetes/core/changelogs/changelog.yaml
+++ b/ansible_collections/kubernetes/core/changelogs/changelog.yaml
@@ -854,3 +854,14 @@ releases:
- 652-fix-json-patch-action.yml
- 654-helm-expand-user.yml
release_date: '2024-05-16'
+ 3.2.0:
+ changes:
+ minor_changes:
+ - inventory/k8s.py - Defer removal of k8s inventory plugin to version 6.0.0 (https://github.com/ansible-collections/kubernetes.core/pull/734).
+ - connection/kubectl.py - Added an example of using the kubectl connection plugin to the documentation (https://github.com/ansible-collections/kubernetes.core/pull/741).
+ release_summary: This release comes with documentation updates.
+ fragments:
+ - 20240530-defer-removal-and-ansible-core-support-update.yaml
+ - 20240601-doc-example-of-using-kubectl.yaml
+ - 3.2.0.yml
+ release_date: '2024-06-14'
diff --git a/ansible_collections/kubernetes/core/changelogs/fragments/20240530-ansible-core-support-update.yaml b/ansible_collections/kubernetes/core/changelogs/fragments/20240530-ansible-core-support-update.yaml
new file mode 100644
index 000000000..2efafc042
--- /dev/null
+++ b/ansible_collections/kubernetes/core/changelogs/fragments/20240530-ansible-core-support-update.yaml
@@ -0,0 +1,3 @@
+---
+minor_changes:
+ - inventory/k8s.py - Defer removal of k8s inventory plugin to version 6.0.0 (https://github.com/ansible-collections/kubernetes.core/pull/734).
diff --git a/ansible_collections/kubernetes/core/changelogs/fragments/inventory-update_removal_date.yml b/ansible_collections/kubernetes/core/changelogs/fragments/inventory-update_removal_date.yml
new file mode 100644
index 000000000..3ef687d5e
--- /dev/null
+++ b/ansible_collections/kubernetes/core/changelogs/fragments/inventory-update_removal_date.yml
@@ -0,0 +1,2 @@
+minor_changes:
+ - inventory/k8s.py - Defer removal of k8s inventory plugin to version 5.0 (https://github.com/ansible-collections/kubernetes.core/pull/723).
diff --git a/ansible_collections/kubernetes/core/docs/kubernetes.core.k8s_inventory.rst b/ansible_collections/kubernetes/core/docs/kubernetes.core.k8s_inventory.rst
index 7a8cfd62a..727027408 100644
--- a/ansible_collections/kubernetes/core/docs/kubernetes.core.k8s_inventory.rst
+++ b/ansible_collections/kubernetes/core/docs/kubernetes.core.k8s_inventory.rst
@@ -17,7 +17,7 @@ DEPRECATED
----------
:Removed in collection release after
:Why: As discussed in https://github.com/ansible-collections/kubernetes.core/issues/31, we decided to
-remove the k8s inventory plugin in release 4.0.0.
+remove the k8s inventory plugin in release 6.0.0.
:Alternative: Use :ref:`kubernetes.core.k8s_info <kubernetes.core.k8s_info_module>` and :ref:`ansible.builtin.add_host <ansible.builtin.add_host_module>` instead.
@@ -357,7 +357,7 @@ Status
------
-- This inventory will be removed in version 4.0.0. *[deprecated]*
+- This inventory will be removed in version 6.0.0. *[deprecated]*
- For more information see `DEPRECATED`_.
diff --git a/ansible_collections/kubernetes/core/docs/kubernetes.core.kubectl_connection.rst b/ansible_collections/kubernetes/core/docs/kubernetes.core.kubectl_connection.rst
index 48db35992..97e60f9cd 100644
--- a/ansible_collections/kubernetes/core/docs/kubernetes.core.kubectl_connection.rst
+++ b/ansible_collections/kubernetes/core/docs/kubernetes.core.kubectl_connection.rst
@@ -365,6 +365,82 @@ Parameters
+Examples
+--------
+
+.. code-block:: yaml
+
+ - name: Run a command in a pod using local kubectl with kubeconfig file ~/.kube/config
+ hosts: localhost
+ gather_facts: no
+ vars:
+ ansible_connection: kubernetes.core.kubectl
+ ansible_kubectl_namespace: my-namespace
+ ansible_kubectl_pod: my-pod
+ ansible_kubectl_container: my-container
+ tasks:
+ # be aware that the command is executed as the user that started the container
+ # and requires python to be installed in the image
+ - name: Run a command in a pod
+ ansible.builtin.command: echo "Hello, World!"
+
+ - name: Run a command in a pod using local kubectl with inventory variables
+ # Example inventory:
+ # k8s:
+ # hosts:
+ # foo.example.com:
+ # ansible_connection: kubernetes.core.kubectl
+ # ansible_kubectl_kubeconfig: /root/.kube/foo.example.com.config
+ # ansible_kubectl_pod: my-foo-pod
+ # ansible_kubectl_container: my-foo-container
+ # ansible_kubectl_namespace: my-foo-namespace
+ # bar.example.com:
+ # ansible_connection: kubernetes.core.kubectl
+ # ansible_kubectl_kubeconfig: /root/.kube/bar.example.com.config
+ # ansible_kubectl_pod: my-bar-pod
+ # ansible_kubectl_container: my-bar-container
+ # ansible_kubectl_namespace: my-bar-namespace
+ hosts: k8s
+ gather_facts: no
+ tasks:
+ # be aware that the command is executed as the user that started the container
+ # and requires python to be installed in the image
+ - name: Run a command in a pod
+ ansible.builtin.command: echo "Hello, World!"
+
+ - name: Run a command in a pod using dynamic inventory
+ hosts: localhost
+ gather_facts: no
+ vars:
+ kubeconfig: /root/.kube/config
+ namespace: my-namespace
+ my_app: my-app
+ tasks:
+ - name: Get My App pod info based on label
+ kubernetes.core.k8s_info:
+ kubeconfig: "{{ kubeconfig }}"
+ namespace: "{{ namespace }}"
+ kind: Pod
+ label_selectors: app.kubernetes.io/name = "{{ my_app }}"
+ register: my_app_pod
+
+ - name: Get My App pod name
+ ansible.builtin.set_fact:
+ my_app_pod_name: "{{ my_app_pod.resources[0].metadata.name }}"
+
+ - name: Add My App pod to inventory
+ ansible.builtin.add_host:
+ name: "{{ my_app_pod_name }}"
+ ansible_connection: kubernetes.core.kubectl
+ ansible_kubectl_kubeconfig: "{{ kubeconfig }}"
+ ansible_kubectl_pod: "{{ my_app_pod_name }}"
+ ansible_kubectl_namespace: "{{ namespace }}"
+
+ - name: Run a command in My App pod
+ # be aware that the command is executed as the user that started the container
+ # and requires python to be installed in the image
+ ansible.builtin.command: echo "Hello, World!"
+ delegate_to: "{{ my_app_pod_name }}"
diff --git a/ansible_collections/kubernetes/core/meta/runtime.yml b/ansible_collections/kubernetes/core/meta/runtime.yml
index 99134a2c3..d9cb09984 100644
--- a/ansible_collections/kubernetes/core/meta/runtime.yml
+++ b/ansible_collections/kubernetes/core/meta/runtime.yml
@@ -22,10 +22,10 @@ plugin_routing:
redirect: community.okd.openshift
k8s:
deprecation:
- removal_version: 4.0.0
+ removal_version: 6.0.0
warning_text: >-
The k8s inventory plugin has been deprecated and
- will be removed in release 4.0.0.
+ will be removed in release 6.0.0.
modules:
k8s_auth:
redirect: community.okd.k8s_auth
diff --git a/ansible_collections/kubernetes/core/plugins/connection/kubectl.py b/ansible_collections/kubernetes/core/plugins/connection/kubectl.py
index 2a5e1b988..47953845b 100644
--- a/ansible_collections/kubernetes/core/plugins/connection/kubectl.py
+++ b/ansible_collections/kubernetes/core/plugins/connection/kubectl.py
@@ -181,6 +181,81 @@ DOCUMENTATION = r"""
aliases: [ kubectl_verify_ssl ]
"""
+EXAMPLES = r"""
+
+- name: Run a command in a pod using local kubectl with kubeconfig file ~/.kube/config
+ hosts: localhost
+ gather_facts: no
+ vars:
+ ansible_connection: kubernetes.core.kubectl
+ ansible_kubectl_namespace: my-namespace
+ ansible_kubectl_pod: my-pod
+ ansible_kubectl_container: my-container
+ tasks:
+ # be aware that the command is executed as the user that started the container
+ # and requires python to be installed in the image
+ - name: Run a command in a pod
+ ansible.builtin.command: echo "Hello, World!"
+
+- name: Run a command in a pod using local kubectl with inventory variables
+ # Example inventory:
+ # k8s:
+ # hosts:
+ # foo.example.com:
+ # ansible_connection: kubernetes.core.kubectl
+ # ansible_kubectl_kubeconfig: /root/.kube/foo.example.com.config
+ # ansible_kubectl_pod: my-foo-pod
+ # ansible_kubectl_container: my-foo-container
+ # ansible_kubectl_namespace: my-foo-namespace
+ # bar.example.com:
+ # ansible_connection: kubernetes.core.kubectl
+ # ansible_kubectl_kubeconfig: /root/.kube/bar.example.com.config
+ # ansible_kubectl_pod: my-bar-pod
+ # ansible_kubectl_container: my-bar-container
+ # ansible_kubectl_namespace: my-bar-namespace
+ hosts: k8s
+ gather_facts: no
+ tasks:
+ # be aware that the command is executed as the user that started the container
+ # and requires python to be installed in the image
+ - name: Run a command in a pod
+ ansible.builtin.command: echo "Hello, World!"
+
+- name: Run a command in a pod using dynamic inventory
+ hosts: localhost
+ gather_facts: no
+ vars:
+ kubeconfig: /root/.kube/config
+ namespace: my-namespace
+ my_app: my-app
+ tasks:
+ - name: Get My App pod info based on label
+ kubernetes.core.k8s_info:
+ kubeconfig: "{{ kubeconfig }}"
+ namespace: "{{ namespace }}"
+ kind: Pod
+ label_selectors: app.kubernetes.io/name = "{{ my_app }}"
+ register: my_app_pod
+
+ - name: Get My App pod name
+ ansible.builtin.set_fact:
+ my_app_pod_name: "{{ my_app_pod.resources[0].metadata.name }}"
+
+ - name: Add My App pod to inventory
+ ansible.builtin.add_host:
+ name: "{{ my_app_pod_name }}"
+ ansible_connection: kubernetes.core.kubectl
+ ansible_kubectl_kubeconfig: "{{ kubeconfig }}"
+ ansible_kubectl_pod: "{{ my_app_pod_name }}"
+ ansible_kubectl_namespace: "{{ namespace }}"
+
+ - name: Run a command in My App pod
+ # be aware that the command is executed as the user that started the container
+ # and requires python to be installed in the image
+ ansible.builtin.command: echo "Hello, World!"
+ delegate_to: "{{ my_app_pod_name }}"
+"""
+
import json
import os
import os.path
diff --git a/ansible_collections/kubernetes/core/plugins/inventory/k8s.py b/ansible_collections/kubernetes/core/plugins/inventory/k8s.py
index 1a6c5bfce..37ef90a15 100644
--- a/ansible_collections/kubernetes/core/plugins/inventory/k8s.py
+++ b/ansible_collections/kubernetes/core/plugins/inventory/k8s.py
@@ -20,10 +20,10 @@ DOCUMENTATION = """
- Uses k8s.(yml|yaml) YAML configuration file to set parameter values.
deprecated:
- removed_in: 4.0.0
+ removed_in: 6.0.0
why: |
As discussed in U(https://github.com/ansible-collections/kubernetes.core/issues/31), we decided to
- remove the k8s inventory plugin in release 4.0.0.
+ remove the k8s inventory plugin in release 6.0.0.
alternative: "Use M(kubernetes.core.k8s_info) and M(ansible.builtin.add_host) instead."
options:
@@ -164,8 +164,8 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
super(InventoryModule, self).parse(inventory, loader, path)
self.display.deprecated(
- "The 'k8s' inventory plugin has been deprecated and will be removed in release 4.0.0",
- version="4.0.0",
+ "The 'k8s' inventory plugin has been deprecated and will be removed in release 6.0.0",
+ version="6.0.0",
collection_name="kubernetes.core",
)
cache_key = self._get_cache_prefix(path)
diff --git a/ansible_collections/kubernetes/core/tools/kubevirt_list_targets.py b/ansible_collections/kubernetes/core/tools/kubevirt_list_targets.py
new file mode 100644
index 000000000..0692cc348
--- /dev/null
+++ b/ansible_collections/kubernetes/core/tools/kubevirt_list_targets.py
@@ -0,0 +1,22 @@
+import os
+import sys
+from pathlib import PosixPath
+
+
+def main():
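+ # Expects one CLI argument: the path to a kubevirt.core checkout (the CI
+ # workflow above passes ${{ env.kubevirt }}). Collects the integration test
+ # targets under tests/integration/targets/, skipping any target whose
+ # "aliases" file marks it unsupported/disabled/unstable/hidden, then appends
+ # "kubevirt_targets=<list of target names>" to the file named by
+ # $GITHUB_OUTPUT so the splitter job can expose the list as a step output.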
+
+ src = sys.argv[1]
+ path = PosixPath(src) / PosixPath("tests/integration/targets/")
+
+ def _is_disable(path):
+ flags = ("unsupported", "disabled", "unstable", "hidden")
+ aliases_path = path / PosixPath("aliases")
+ return (aliases_path.exists() and any((d.startswith(flags) for d in aliases_path.read_text().split("\n"))))
+
+ targets = [i.stem for i in path.glob("*") if i.is_dir() and not _is_disable(i)]
+ with open(os.environ.get("GITHUB_OUTPUT"), "a", encoding="utf-8") as fw:
+ fw.write(f"kubevirt_targets={targets}\n")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/ansible_collections/lowlydba/sqlserver/.github/FUNDING.yml b/ansible_collections/lowlydba/sqlserver/.github/FUNDING.yml
index c235de882..0c6b880f8 100644
--- a/ansible_collections/lowlydba/sqlserver/.github/FUNDING.yml
+++ b/ansible_collections/lowlydba/sqlserver/.github/FUNDING.yml
@@ -10,4 +10,5 @@ liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
+buy_me_a_coffee: johnmcc
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
diff --git a/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test-windows.yml b/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test-windows.yml
index d764d6014..a75026a86 100644
--- a/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test-windows.yml
+++ b/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test-windows.yml
@@ -104,7 +104,7 @@ jobs:
shell: cmd
run: echo 127.0.0.1 sqlserver >> "%WinDir%\System32\Drivers\etc\hosts"
- - uses: Vampire/setup-wsl@v3.0.0
+ - uses: Vampire/setup-wsl@v3.1.1
with:
distribution: Ubuntu-22.04
update: "true"
@@ -190,6 +190,6 @@ jobs:
ansible-test coverage xml -v --requirements
# See the reports at https://codecov.io/gh/lowlydba/lowlydba.sqlserver
- - uses: codecov/codecov-action@v4.1.0
+ - uses: codecov/codecov-action@v4.4.1
with:
fail_ci_if_error: false
diff --git a/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test.yml b/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test.yml
index 242c7a641..667ab3b23 100644
--- a/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test.yml
+++ b/ansible_collections/lowlydba/sqlserver/.github/workflows/ansible-test.yml
@@ -68,7 +68,7 @@ jobs:
path: ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
- name: Run confidence tests
- uses: ansible-community/ansible-test-gh-action@v1.14.1
+ uses: ansible-community/ansible-test-gh-action@v1.15.0
with:
ansible-core-version: ${{ matrix.ansible }}
testing-type: sanity
@@ -79,7 +79,7 @@ jobs:
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
# See the reports at https://codecov.io/gh/lowlydba/lowlydba.sqlserver
- - uses: codecov/codecov-action@v4.1.0
+ - uses: codecov/codecov-action@v4.4.1
with:
fail_ci_if_error: false
@@ -128,7 +128,7 @@ jobs:
# Prevent dbatools v2+ issues with newer docker image for 2.13
- name: Run integration tests - v2.13 workaround
if: ${{ matrix.ansible == 'stable-2.13' }}
- uses: ansible-community/ansible-test-gh-action@v1.14.1
+ uses: ansible-community/ansible-test-gh-action@v1.15.0
with:
docker-image: 'quay.io/ansible/default-test-container:6.13.0'
ansible-core-version: ${{ matrix.ansible }}
@@ -139,7 +139,7 @@ jobs:
- name: Run integration tests
if: ${{ matrix.ansible != 'stable-2.13'}}
- uses: ansible-community/ansible-test-gh-action@v1.14.1
+ uses: ansible-community/ansible-test-gh-action@v1.15.0
with:
ansible-core-version: ${{ matrix.ansible }}
#target-python-version: ${{ matrix.python }}
@@ -153,6 +153,6 @@ jobs:
working-directory: ./ansible_collections/${{env.NAMESPACE}}/${{env.COLLECTION_NAME}}
# See the reports at https://codecov.io/gh/lowlydba/lowlydba.sqlserver
- - uses: codecov/codecov-action@v4.1.0
+ - uses: codecov/codecov-action@v4.4.1
with:
fail_ci_if_error: false
diff --git a/ansible_collections/lowlydba/sqlserver/.github/workflows/release.yml b/ansible_collections/lowlydba/sqlserver/.github/workflows/release.yml
index 201875bc0..9b40638f7 100644
--- a/ansible_collections/lowlydba/sqlserver/.github/workflows/release.yml
+++ b/ansible_collections/lowlydba/sqlserver/.github/workflows/release.yml
@@ -69,7 +69,7 @@ jobs:
- name: Create Release
id: create_release
- uses: softprops/action-gh-release@v2.0.0
+ uses: softprops/action-gh-release@v2.0.5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
diff --git a/ansible_collections/lowlydba/sqlserver/CHANGELOG.rst b/ansible_collections/lowlydba/sqlserver/CHANGELOG.rst
index a9c7adc65..502fe9b87 100644
--- a/ansible_collections/lowlydba/sqlserver/CHANGELOG.rst
+++ b/ansible_collections/lowlydba/sqlserver/CHANGELOG.rst
@@ -4,6 +4,18 @@ lowlydba.sqlserver Release Notes
.. contents:: Topics
+v2.3.3
+======
+
+Release Summary
+---------------
+
+Minor bugfix for ag listener input types. Thanks @daarrn for the contribution!
+
+Bugfixes
+--------
+
+- fixed the expected type of the ip_address, subnet_ip, and subnet_mask parameters to be lists instead of strings (lowlydba.sqlserver.ag_listener)
v2.3.2
======
diff --git a/ansible_collections/lowlydba/sqlserver/FILES.json b/ansible_collections/lowlydba/sqlserver/FILES.json
index 35b6495cd..bc4348e0e 100644
--- a/ansible_collections/lowlydba/sqlserver/FILES.json
+++ b/ansible_collections/lowlydba/sqlserver/FILES.json
@@ -8,1238 +8,1210 @@
"format": 1
},
{
- "name": "CHANGELOG.rst",
+ "name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aacf6b751586ed3ad211a33884e4382cbaed1ceeb5e17017e7a2e0a0d90dedd0",
+ "chksum_sha256": "0b860fe11d857950cb932384d69d836b52fa4aab697af697b2f74ccfe792160e",
"format": 1
},
{
- "name": "CONTRIBUTING.md",
+ "name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eec219dbc09446ed3e9938c8f66378621a0548b056426df33e3ea32e26bb4dc8",
+ "chksum_sha256": "d0caf1f45022e639237984c58caf2fac6306199c63cc91efc137cbc9d1a2fd8d",
"format": 1
},
{
- "name": "LICENSE",
+ "name": "CONTRIBUTING.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
+ "chksum_sha256": "eec219dbc09446ed3e9938c8f66378621a0548b056426df33e3ea32e26bb4dc8",
"format": 1
},
{
- "name": ".vscode",
+ "name": "docs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".vscode/settings.json",
+ "name": "docs/.gitkeep",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "129f69fd05d6feacc20ea9f2edf00332ce20639fa3c35f4f4b0a42dea2835bac",
+ "chksum_sha256": "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
"format": 1
},
{
- "name": ".vscode/extensions.json",
+ "name": "ADOPTERS.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f72b83e1aa1301adb18ce0fe71ce6613d3cfb148f881b3e39c55359d41d3277f",
+ "chksum_sha256": "fe2c96f3cddb707c41ef291696c57c8766303500e26218f725657bd39cedd601",
"format": 1
},
{
- "name": "tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "MAINTAINERS",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "29b1b04e645e0c11e59c48cfe7bac3dbe16f787a9d6114847d9a5c3b5990e731",
"format": 1
},
{
- "name": "tests/integration",
+ "name": "changelogs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/inventory.winrm.ci",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d1a9b5a526214d256d89f06ac3180e7d73b63b7cd199a536888405e21f8768de",
- "format": 1
- },
- {
- "name": "tests/integration/inventory",
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a28c1f749c45ff439aeeb826635429412f65c284d7d52ca7ff51b9dde495551",
+ "chksum_sha256": "1e054fb68c58140c282fbaf0150eee1bf80ad925196dddd74e848e91b7315a1e",
"format": 1
},
{
- "name": "tests/integration/integration_config.sample.yml",
+ "name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25e1d1343d93da99f01452b27af5f1b290b938b228ff934b0e84e6acd36916ec",
- "format": 1
- },
- {
- "name": "tests/integration/targets",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "783a5302d38f3cdb11964a8575d1e7c7cea5d71f0f65623209ea287b7809bb3a",
"format": 1
},
{
- "name": "tests/integration/targets/win_restore",
+ "name": "changelogs/fragments",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_restore/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "changelogs/fragments/.keep",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/win_restore/tasks/main.yml",
+ "name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25cec2155a81c7e38f39a8124b49ae2daf98d279ca19a1d2fe4e2dc16d406c9c",
+ "chksum_sha256": "3972dc9744f6499f0f9b2dbf76696f2ae7ad8af9b23dde66d6af86c9dfb36986",
"format": 1
},
{
- "name": "tests/integration/targets/win_restore/meta",
+ "name": ".vscode",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_restore/meta/main.yml",
+ "name": ".vscode/settings.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "129f69fd05d6feacc20ea9f2edf00332ce20639fa3c35f4f4b0a42dea2835bac",
"format": 1
},
{
- "name": "tests/integration/targets/win_restore/aliases",
+ "name": ".vscode/extensions.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
+ "chksum_sha256": "f72b83e1aa1301adb18ce0fe71ce6613d3cfb148f881b3e39c55359d41d3277f",
"format": 1
},
{
- "name": "tests/integration/targets/sp_configure",
+ "name": ".github",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/sp_configure/tasks",
+ "name": ".github/ISSUE_TEMPLATE",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/sp_configure/tasks/main.yml",
+ "name": ".github/ISSUE_TEMPLATE/feature_request.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c17643d6398d807c079ed122d9afc3f7d121e4720ac460693e3a6dc1bf9eb355",
- "format": 1
- },
- {
- "name": "tests/integration/targets/sp_configure/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "cd5b39aee355194f532b686c62a783c0657d8f7810586a757775769b291461a2",
"format": 1
},
{
- "name": "tests/integration/targets/sp_configure/meta/main.yml",
+ "name": ".github/ISSUE_TEMPLATE/bug_report.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "8d3db528d29f10782cc5b08a68999c7144efbeaa6724817d6c72a69f4c745a31",
"format": 1
},
{
- "name": "tests/integration/targets/sp_configure/aliases",
+ "name": ".github/FUNDING.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_sa",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "2ece2535f1f5f81451ae7b0ce620fd881f1f0ce3e30681e670e12bbd02340f68",
"format": 1
},
{
- "name": "tests/integration/targets/win_sa/meta",
+ "name": ".github/workflows",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_sa/meta/main.yml",
+ "name": ".github/workflows/release.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8a1f159fe12e4baf75982b00a0fff57296bd8b19e469913458531f0af2307d1",
+ "chksum_sha256": "1faf379e8e1bbbc73ae3f326ff5ab9accb78f6efc81001b68c9bc8544610784d",
"format": 1
},
{
- "name": "tests/integration/targets/win_sa/aliases",
+ "name": ".github/workflows/ansible-test.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e5a273fb8fb0ecaa330c5beed437ab0d77fc765042c1baa4b97034f3d63de58",
+ "chksum_sha256": "eb2a0dacdef28ba312f6611c7f5b58daec2e7174b124ff29732bd1bf6e0dab1e",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_category",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": ".github/workflows/docs-push.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9529da03eea03a258ab7ec8613310e62ad06fdbadd293d575070797d716443e9",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_category/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": ".github/workflows/docs-pr.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "84a88873a148150b9b42877fa1c42697c05f631e39e618160bd99bf0f3652abd",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_category/meta/main.yml",
+ "name": ".github/workflows/ansible-test-windows.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "637bd5e4ab78efba6ef44acb927e9d2188743d9d62d8ef4577ba8ca596dbd7f6",
+ "chksum_sha256": "663d5816219ecbdccf5d3363e29a3465057416bf046f3dbaddc7931408c86678",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_category/aliases",
+ "name": ".github/dependabot.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4a1e70429ec84b35ba19e1696b30a39c4baeed6712c12ad93fd7ff62c42649a",
+ "chksum_sha256": "a35bab1dd3e838ca5730aae35e4b127a052f8e03d5ed5437549e95f8424e621c",
"format": 1
},
{
- "name": "tests/integration/targets/traceflag",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": ".github/codecov.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a42ee6fd570c21e6f7828ebb71585d819fd3fa126b3d98c39ea79565ce1a0ed6",
"format": 1
},
{
- "name": "tests/integration/targets/traceflag/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": ".github/PULL_REQUEST_TEMPLATE.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4ea1170a9065ecc1b0effb0e52f06fe42ef9936e86dfddb21c43efdd1c929b7b",
"format": 1
},
{
- "name": "tests/integration/targets/traceflag/tasks/main.yml",
+ "name": ".github/in-solidarity.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50cc511e187bab0f47387422459e654018bb6d5b8c408c49fe074cc92466560d",
+ "chksum_sha256": "43f54d1f6c1496d0a02ce3144f73d479f2e37ea3d2bb9e54ba08763ddfd230a2",
"format": 1
},
{
- "name": "tests/integration/targets/traceflag/meta",
+ "name": "meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/traceflag/meta/main.yml",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "94d6b49778d101cca4b6cdfc026e86ba673af5cb029165c3dd170f3123b62286",
"format": 1
},
{
- "name": "tests/integration/targets/traceflag/aliases",
+ "name": "CODE_OF_CONDUCT.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "14ec928234a7ed52bf8b458d31e4862335111e477e4dbe7fb543686c24115140",
"format": 1
},
{
- "name": "tests/integration/targets/dba_multitool",
+ "name": "plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dba_multitool/tasks",
+ "name": "plugins/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dba_multitool/tasks/main.yml",
+ "name": "plugins/module_utils/_SqlServerUtils.psm1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c2dc342256920c670d472b94856269834490da88bd511ce501d7158dd080ecd",
+ "chksum_sha256": "4fdfc528ea4a54118f616738c9630066e47e0d010c2b60cbecd851a5b5547c3f",
"format": 1
},
{
- "name": "tests/integration/targets/dba_multitool/meta",
+ "name": "plugins/modules",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dba_multitool/meta/main.yml",
+ "name": "plugins/modules/agent_job_step.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "56da01e0ba38203752b396a705ef7b385109c803de9787456ab9446353ef88db",
"format": 1
},
{
- "name": "tests/integration/targets/dba_multitool/aliases",
+ "name": "plugins/modules/dba_multitool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_tcp_port",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "0236dc7ccb91ecf1e0fdb832a40a8f82ed90ad888d7e02f41b7a2848ddc48bef",
"format": 1
},
{
- "name": "tests/integration/targets/win_tcp_port/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/agent_job_category.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "22167936fa56c583e687b1feaa79d42d3d6fe99832c3fba0f818e8e212820f45",
"format": 1
},
{
- "name": "tests/integration/targets/win_tcp_port/tasks/main.yml",
+ "name": "plugins/modules/rg_workload_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d692ce8436946b398e6cb3ff85f4c0e6a500468fb623d144566b25db71a60af5",
+ "chksum_sha256": "90cae13aa924431ceddce76eb94f49576b4e8396227a141a2d81c9fbf5f9dd34",
"format": 1
},
{
- "name": "tests/integration/targets/win_tcp_port/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/sp_configure.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9c7d49cb933f7d1128be15e359e2ac4f7d8fd48facb741d7c5866ac4028b16c5",
"format": 1
},
{
- "name": "tests/integration/targets/win_tcp_port/meta/main.yml",
+ "name": "plugins/modules/ag_listener.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "588853961387ab549d29b1ac11585d28a9679be39395e6ab102f636a06d40565",
"format": 1
},
{
- "name": "tests/integration/targets/win_tcp_port/aliases",
+ "name": "plugins/modules/first_responder_kit.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
+ "chksum_sha256": "1e478006b282e7dfc5ebbdf5045f6a81b632cbe2c70819aca11de40dc4de3364",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ag_replica.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3d7f122d89145949eaeabf78c2e4747ba9158611d9dd28efe19a6c23b61a18cf",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/agent_job_schedule.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "195f583bfb2e1b068f3022bd0a152fde686581266154973f43f150d10adbe247",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job/tasks/main.yml",
+ "name": "plugins/modules/maintenance_solution.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31c7e5478ab71d9b16cad90042519bd6cef26068f73dab84bf1b12fe6c6b1dcf",
+ "chksum_sha256": "126a8b7ea648ab2bc6e36db65a9b5ca85ca1c2224ad21445c176ea700b5a00ad",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/availability_group.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8b3b47b92e9a558181210ab01de6d6637d3b39d61d647630926ac424a08303ef",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job/meta/main.yml",
+ "name": "plugins/modules/user.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "f3c923f60de426d3b12e67709d0edcec86a396163a9911af82ad937c5561a7bc",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job/aliases",
+ "name": "plugins/modules/rg_resource_pool.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "a18afaa50ed7d7183a0a9458b8b4a62130d92670f5d68197c8272c498eef876e",
"format": 1
},
{
- "name": "tests/integration/targets/win_first_responder_kit",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/traceflag.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d08e57033e0da3f63f1c024446f02abdff41400dd0607a400f234468e05260ff",
"format": 1
},
{
- "name": "tests/integration/targets/win_first_responder_kit/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/agent_job.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9476d4b38459e10be25b64c96bf7b2b9bb4f49045d15cd36f959737eec194d34",
"format": 1
},
{
- "name": "tests/integration/targets/win_first_responder_kit/meta/main.yml",
+ "name": "plugins/modules/dba_multitool.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "880bb341d99af44927b4791c9df077901daca5219411d812b36303e6b7c6d95c",
+ "chksum_sha256": "5e13bd1112019e1852969d1b39b8579abea06a8d4a7742feb4664c637865b9f5",
"format": 1
},
{
- "name": "tests/integration/targets/win_first_responder_kit/aliases",
+ "name": "plugins/modules/hadr.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb9346a60cae33ea572746b34b03e115c6d4f06b6c1f39bf1fa4eb12ee42e6bb",
+ "chksum_sha256": "ae6aa285169947e898e685a1125017bcc755fcbb578dd5f1654da4631576ad5b",
"format": 1
},
{
- "name": "tests/integration/targets/win_maintenance_solution",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/login.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8810d99eb07f72104830e9d0741fa93c915e0ab238f932dbbea90882030330b7",
"format": 1
},
{
- "name": "tests/integration/targets/win_maintenance_solution/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/ag_listener.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5e55fe33fa9f8377d5c371a8296217b9edac3b8ce2f7fe0ae644f54ed9f2e563",
"format": 1
},
{
- "name": "tests/integration/targets/win_maintenance_solution/meta/main.yml",
+ "name": "plugins/modules/login.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "51ae5e4c8a06b47364e63ec20fc30138f7d30ef5c0fda58701a248cdaad0481c",
+ "chksum_sha256": "196042ab05e84d23b632c7a2d3ca39b6eb4e96f366e3e1dff8119441f4f7b7c2",
"format": 1
},
{
- "name": "tests/integration/targets/win_maintenance_solution/aliases",
+ "name": "plugins/modules/sp_whoisactive.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d262d00ae91b2b827898ae863725e5579f68d3390fb255cc591560b4f616367",
+ "chksum_sha256": "26025dd39dc4bea36f72bcb0dd4a279245adde8e6be4792cc40ef90d02ade8e7",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_category",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/sa.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "950e4b60caa84225b0153cf08fe8b87ca97a27f155fc1d4a1d95c5d677ea0622",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_category/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/hadr.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b2017667c8e572a1582cb076b3d543f4d7eba1ed6d66a7c0249d48b4f793868a",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_category/tasks/main.yml",
+ "name": "plugins/modules/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0544826a56fa9285258e41f18f989bad46a9a333c4508b6ff52991cb820ef6dd",
+ "chksum_sha256": "99b2b1074ac11298c64b987414934b55e1e95a482bcc3185285662482dbf55b7",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_category/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/restore.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c5544cbbca85e97df2d855e33c64ef045d49a441410a6cc3b0a9328ab991485d",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_category/meta/main.yml",
+ "name": "plugins/modules/spn.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "33913bdf6543c2653d6d96585168786d3ba6f95d19f967706acaff18b3f16c2b",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_category/aliases",
+ "name": "plugins/modules/agent_job_schedule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "ec152ec9b41bdeabfb3289c73b4c19955b7b1d849420e93513bd10640e2824c4",
"format": 1
},
{
- "name": "tests/integration/targets/sp_whoisactive",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/credential.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "364bcb3116174488c1a1c51ada8eac915e9893434dd85577ea49d49c781a8e83",
"format": 1
},
{
- "name": "tests/integration/targets/sp_whoisactive/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/sp_configure.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2715043d68e455a59bd7ff7028c769c23d662ae04b937128aec461a39c544bf5",
"format": 1
},
{
- "name": "tests/integration/targets/sp_whoisactive/tasks/main.yml",
+ "name": "plugins/modules/install_script.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6ea486055f1c09c041e9b353582799bb37ed536cb8e7e6bf89920a1d79cc361f",
+ "chksum_sha256": "6932f5d090a894e3bef5e29f612bd1545aae360d91f8ffc85c7352a5da1548db",
"format": 1
},
{
- "name": "tests/integration/targets/sp_whoisactive/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/database.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eaedf444462b956cba62cf1742f07353950ccab565377191fcde24241cdb5690",
"format": 1
},
{
- "name": "tests/integration/targets/sp_whoisactive/meta/main.yml",
+ "name": "plugins/modules/nonquery.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "e098278136eec42260b9354b6dca6816f4968c546f12e259cd2e62d2f4a72e82",
"format": 1
},
{
- "name": "tests/integration/targets/sp_whoisactive/aliases",
+ "name": "plugins/modules/memory.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "de05b5612d9e692126afe9a25841d3dcbcfef1079cb45585d49564cd3bac2fc6",
"format": 1
},
{
- "name": "tests/integration/targets/user",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/tcp_port.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fcb5a1326cf73886733d6772f7623bd7e792563155c8d72a0d59312998215d91",
"format": 1
},
{
- "name": "tests/integration/targets/user/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/restore.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0f357dea6f735aae072bfee1d35335140a2a4cf3ac617ee68347cde8f7991bad",
"format": 1
},
{
- "name": "tests/integration/targets/user/tasks/main.yml",
+ "name": "plugins/modules/agent_job_step.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "772de87469867e5ba4167ca395dd6a50004daeddff4cf8a6ca345cf97ca12077",
+ "chksum_sha256": "3ab30f960d4e24bcb1a2dcb22d5a57183eba5c22811f6bc2d7222791103660b1",
"format": 1
},
{
- "name": "tests/integration/targets/user/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/availability_group.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "46d7f93e5951ea8096b4632ffc8b2bed5391dcb28a7b1c2ed66d9dfc5eb3ec3a",
"format": 1
},
{
- "name": "tests/integration/targets/user/meta/main.yml",
+ "name": "plugins/modules/resource_governor.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "27d1f38306c23bfea82dc745425cf6cef0099156054bee4a497d0687866ade97",
"format": 1
},
{
- "name": "tests/integration/targets/user/aliases",
+ "name": "plugins/modules/maintenance_solution.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "b2b43694b89c22938156682c8c6f1b3a30c3a0b67f7f50e37efcd17e1eb1cde5",
"format": 1
},
{
- "name": "tests/integration/targets/win_hadr",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/traceflag.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "278f6beedbef5c73eaf3b98d04dc1adda8a0921aa48a138f339c8d25747cedb9",
"format": 1
},
{
- "name": "tests/integration/targets/win_hadr/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/backup.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c31187c932814a34e5f46576e9ec68237969390de4377e3baa35598940f2fe36",
"format": 1
},
{
- "name": "tests/integration/targets/win_hadr/tasks/main.yml",
+ "name": "plugins/modules/memory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5341d244291343a5efba167280a123556be0ce4480c8cab2282fc3e2c288609",
+ "chksum_sha256": "1c3e840a66bb01f5b809b7c9347594cfed73286cb589ca710eb3802f990fe9c8",
"format": 1
},
{
- "name": "tests/integration/targets/win_hadr/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/instance_info.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "afe3b3ee537cfe249f987e961eb1860ad5c01f617caf756a2d6c204357e25d11",
"format": 1
},
{
- "name": "tests/integration/targets/win_hadr/meta/main.yml",
+ "name": "plugins/modules/first_responder_kit.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "bf16bc535cfa590e884e99e3c406c602aca4c1322b9a9c86679d1980f18e5e1a",
"format": 1
},
{
- "name": "tests/integration/targets/win_hadr/aliases",
+ "name": "plugins/modules/database.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
+ "chksum_sha256": "943ace13f44fc4e582ed5bf7c6064b4c90680c2580169782d3c7c61845816b8a",
"format": 1
},
{
- "name": "tests/integration/targets/win_memory",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/agent_job_category.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eda9101228bdf9db6dfce794c3d7cb51b2913f43cbfd6d2045179a74dad4f52e",
"format": 1
},
{
- "name": "tests/integration/targets/win_memory/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/sp_whoisactive.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6d8e9698a0715ede939c5ec6b1bb786c99f63221d3b2c2f91f8b6fafd488a9a5",
"format": 1
},
{
- "name": "tests/integration/targets/win_memory/meta/main.yml",
+ "name": "plugins/modules/resource_governor.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c32e78042afc05d5738763ed9a9281f6e6a5e03231c2124087400143409ec6d6",
+ "chksum_sha256": "a30e5d126462c9fc8b882307851d623267308c7c6fb4eaac51ce51cff6fa415d",
"format": 1
},
{
- "name": "tests/integration/targets/win_memory/aliases",
+ "name": "plugins/modules/install_script.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29e8c25ab424e16e434d902ced6f408da8a4f1f9363126caa03092f62bcb43aa",
+ "chksum_sha256": "0e23cd11873aa61dcfbe61ab841e54e31d07d97bc8c5b44c7623c097c90b5621",
"format": 1
},
{
- "name": "tests/integration/targets/win_instance_info",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/backup.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ddffc896a151a1c0a9d1798d1f7796d49aae2886f5aeb9c07ca72378d257a298",
"format": 1
},
{
- "name": "tests/integration/targets/win_instance_info/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/instance_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c6d60a465bcc5ab23e0ab7688479f2c54ca80c4d15f9a7a6adc0211f8aabd4fd",
"format": 1
},
{
- "name": "tests/integration/targets/win_instance_info/meta/main.yml",
+ "name": "plugins/modules/ag_replica.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce5de48e8e8a42b74d629f088dc4e10650fe45b811ef394b852884e11ac3d20b",
+ "chksum_sha256": "5868def510947485eea9e2f495ed80cfb407db89c6d9f42c829a45eeb00a9ad1",
"format": 1
},
{
- "name": "tests/integration/targets/win_instance_info/aliases",
+ "name": "plugins/modules/agent_job.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96874064bd5afc3782b688417ee53a256f2aeee2b3f7be9db26ef14760752d33",
+ "chksum_sha256": "19ab20059fbd77950f856cb7be75049136673d2c793f91900be18b194716b97a",
"format": 1
},
{
- "name": "tests/integration/targets/win_availability_group",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/spn.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f3eb83933723cbc2f88238a60cdb539b97576e9feb5ca0543da2b561e2e60b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_availability_group/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/sa.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "984966ae91f444089b89a510f47de10b80d3417bd2def60af3bc91a5ba5979d1",
"format": 1
},
{
- "name": "tests/integration/targets/win_availability_group/tasks/main.yml",
+ "name": "plugins/modules/rg_workload_group.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8f66aa5fd04f4ae573bafb0e037877232be65d063c0569b6a7fa9020081507c3",
+ "chksum_sha256": "9faacfc40d32dbcc2eae198f15784613d7b20721045f9bc551be139019fe2027",
"format": 1
},
{
- "name": "tests/integration/targets/win_availability_group/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/rg_resource_pool.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8bda00accf8650882d9527b1537db36a68d9eafa1f2345f844d7ac59ac1d003c",
"format": 1
},
{
- "name": "tests/integration/targets/win_availability_group/meta/main.yml",
+ "name": "plugins/modules/tcp_port.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "82354c3a07f163d733ba25a8e43c185ae890681e1104df0224fdbfa68da26c8f",
"format": 1
},
{
- "name": "tests/integration/targets/win_availability_group/aliases",
+ "name": "plugins/modules/nonquery.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
+ "chksum_sha256": "28566664f6a3f97f72301bc34fa22af1421ead1ca572d651f241e960acc2be29",
"format": 1
},
{
- "name": "tests/integration/targets/rg_resource_pool",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/credential.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "16a49e24a042e89e609a22adc1316559025f60bf24fb456c0acda09e611b6ec5",
"format": 1
},
{
- "name": "tests/integration/targets/rg_resource_pool/tasks",
+ "name": "plugins/doc_fragments",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/rg_resource_pool/tasks/main.yml",
+ "name": "plugins/doc_fragments/sql_credentials.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "669c18b75e00a7e66b58ecbb24511bc42825b97529a6e3ad085ef35ff944ffaa",
- "format": 1
- },
- {
- "name": "tests/integration/targets/rg_resource_pool/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "8319c0f26d235fb9dd33444fe7cfe963324207e64f77423098fb0a6c071be791",
"format": 1
},
{
- "name": "tests/integration/targets/rg_resource_pool/meta/main.yml",
+ "name": "plugins/doc_fragments/state.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "ae0b5a340638525f1d65822b592ccba03c72dd937e8f1f60f41539f80a1af2f7",
"format": 1
},
{
- "name": "tests/integration/targets/rg_resource_pool/aliases",
+ "name": "plugins/doc_fragments/attributes.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "cdffff9cfa27f07533b806e63d11b9a752144eb5f2d7d8c8837e7a4efa4da4c7",
"format": 1
},
{
- "name": "tests/integration/targets/install_script",
+ "name": "tests",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/install_script/tasks",
+ "name": "tests/integration",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/install_script/tasks/main.yml",
+ "name": "tests/integration/.ansible-lint",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "85e9885a3bb1a9621c0ac183c3c831efe453bbb5eddbb4d6351a620164b33672",
+ "chksum_sha256": "982c0b7fe8683e818a27cf59b6a6892d91c277703c81e2db35994f63ef90e7fc",
"format": 1
},
{
- "name": "tests/integration/targets/install_script/meta",
+ "name": "tests/integration/targets",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/install_script/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "name": "tests/integration/targets/win_traceflag",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/install_script/aliases",
+ "name": "tests/integration/targets/win_traceflag/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "b60d4d845847bbd2c2a976e26c6d39e4943a1466503642ee370b84b634b07ed5",
"format": 1
},
{
- "name": "tests/integration/targets/install_script/files",
+ "name": "tests/integration/targets/win_traceflag/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/install_script/files/1-select-choice.sql",
+ "name": "tests/integration/targets/win_traceflag/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fde2acd9de812005948b5935c0c335889828e14ee58dbcc58a205096cdb2266f",
- "format": 1
- },
- {
- "name": "tests/integration/targets/instance_info",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "7ff30f930e0f6004866fed58759768dfdda5f2354cea27c69d3a9636e7dcef02",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/tasks",
+ "name": "tests/integration/targets/win_login",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/tasks/main.yml",
+ "name": "tests/integration/targets/win_login/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "22e5f3e538dfd862aebe6429f2d06293499bcd83a7b8da0f6afbee5f8033366b",
+ "chksum_sha256": "71529f18c9a617354c7ae2d0942307ee460e46dcf304073e6b83f0d9e5e3261a",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/meta",
+ "name": "tests/integration/targets/win_login/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
- "format": 1
- },
- {
- "name": "tests/integration/targets/instance_info/aliases",
+ "name": "tests/integration/targets/win_login/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "18dbd3b87064b3336556de28cdb4a48564c66a2ae9123c90cce8bdb1860e97d9",
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_configure",
+ "name": "tests/integration/targets/win_availability_group",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_configure/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_availability_group/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_configure/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4c58bedc5bf4354f6a8e46bd6e292a388f347a5babcc36a61cebd68081a3d1c7",
+ "name": "tests/integration/targets/win_availability_group/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_configure/aliases",
+ "name": "tests/integration/targets/win_availability_group/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f9f102cc9b61991c72e62216c363357b58ba6ef812f7838b194d76b9a37ee6dd",
+ "chksum_sha256": "8f66aa5fd04f4ae573bafb0e037877232be65d063c0569b6a7fa9020081507c3",
"format": 1
},
{
- "name": "tests/integration/targets/win_nonquery",
+ "name": "tests/integration/targets/win_availability_group/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_nonquery/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_availability_group/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_nonquery/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0dae571f5e423080bf9bdaac127360812ea64380e435c29f03142c56aadb4484",
+ "name": "tests/integration/targets/win_user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_nonquery/aliases",
+ "name": "tests/integration/targets/win_user/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb1fa7982c88abde5d789d5b541bc8016f99025f732bef1cca7ba4ca457b6f7b",
+ "chksum_sha256": "4d4819a1314f1ea6a500be90e727a92fb9b964511644c7162c869e351037c2f2",
"format": 1
},
{
- "name": "tests/integration/targets/win_login",
+ "name": "tests/integration/targets/win_user/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_login/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_user/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "805393b91b743ad8f62db1f7301c2c9c85cd78b64633986e456aa3add70d5419",
"format": 1
},
{
- "name": "tests/integration/targets/win_login/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "18dbd3b87064b3336556de28cdb4a48564c66a2ae9123c90cce8bdb1860e97d9",
+ "name": "tests/integration/targets/win_dba_multitool",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_login/aliases",
+ "name": "tests/integration/targets/win_dba_multitool/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71529f18c9a617354c7ae2d0942307ee460e46dcf304073e6b83f0d9e5e3261a",
+ "chksum_sha256": "65187564e8be2b9da7c1a7bc5e86687aa032caca1a4b8517e9cfe2b50ebc1965",
"format": 1
},
{
- "name": "tests/integration/targets/win_spn",
+ "name": "tests/integration/targets/win_dba_multitool/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_spn/tasks",
+ "name": "tests/integration/targets/win_dba_multitool/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f119d90651290bfdbd959b50880d02d480f375e19cb06989384d3b018e3b95dc",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_backup",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_spn/tasks/main.yml",
+ "name": "tests/integration/targets/win_backup/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25fe644417d284f14218215aed1e5e3ae8f920c32cdba3b349e8574f10c41f7c",
+ "chksum_sha256": "f4894d7d372026d355a62fe57e4c54da2b6588eb2df68b2bcfb3d6620525da09",
"format": 1
},
{
- "name": "tests/integration/targets/win_spn/meta",
+ "name": "tests/integration/targets/win_backup/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_spn/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_spn/aliases",
+ "name": "tests/integration/targets/win_backup/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
+ "chksum_sha256": "9e6816dbbf21d3dba22fc22b8da0f0cc4cef6ac1dd82aefaab45a38c0fe3ecee",
"format": 1
},
{
- "name": "tests/integration/targets/resource_governor",
+ "name": "tests/integration/targets/win_first_responder_kit",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/resource_governor/tasks",
+ "name": "tests/integration/targets/win_first_responder_kit/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eb9346a60cae33ea572746b34b03e115c6d4f06b6c1f39bf1fa4eb12ee42e6bb",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_first_responder_kit/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/resource_governor/tasks/main.yml",
+ "name": "tests/integration/targets/win_first_responder_kit/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b0e5d378ac5c85985d61002fb7ac7f22f8f37326bb10d5fd09c3553beea37731",
+ "chksum_sha256": "880bb341d99af44927b4791c9df077901daca5219411d812b36303e6b7c6d95c",
"format": 1
},
{
- "name": "tests/integration/targets/resource_governor/meta",
+ "name": "tests/integration/targets/instance_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/resource_governor/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
- "format": 1
- },
- {
- "name": "tests/integration/targets/resource_governor/aliases",
+ "name": "tests/integration/targets/instance_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/win_backup",
+ "name": "tests/integration/targets/instance_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_backup/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/instance_info/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "22e5f3e538dfd862aebe6429f2d06293499bcd83a7b8da0f6afbee5f8033366b",
"format": 1
},
{
- "name": "tests/integration/targets/win_backup/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9e6816dbbf21d3dba22fc22b8da0f0cc4cef6ac1dd82aefaab45a38c0fe3ecee",
+ "name": "tests/integration/targets/instance_info/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_backup/aliases",
+ "name": "tests/integration/targets/instance_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f4894d7d372026d355a62fe57e4c54da2b6588eb2df68b2bcfb3d6620525da09",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/maintenance_solution",
+ "name": "tests/integration/targets/nonquery",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/maintenance_solution/tasks",
+ "name": "tests/integration/targets/nonquery/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/nonquery/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/maintenance_solution/tasks/main.yml",
+ "name": "tests/integration/targets/nonquery/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d6841548d89da6e201186a5bd9ba2f370aff1ed3818fc9d5609a26a7405bd088",
+ "chksum_sha256": "851bf7899fb14d2c976f6d2b1df869b108ab5f4440e2c57af920e06f7c59f07f",
"format": 1
},
{
- "name": "tests/integration/targets/maintenance_solution/meta",
+ "name": "tests/integration/targets/nonquery/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/maintenance_solution/meta/main.yml",
+ "name": "tests/integration/targets/nonquery/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/maintenance_solution/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_user",
+ "name": "tests/integration/targets/win_credential",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_user/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_credential/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "db8628e2c4d331f6b0a0263ae6c01220b719d3aa44e806849ba93acff90b3e53",
"format": 1
},
{
- "name": "tests/integration/targets/win_user/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "805393b91b743ad8f62db1f7301c2c9c85cd78b64633986e456aa3add70d5419",
+ "name": "tests/integration/targets/win_credential/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_user/aliases",
+ "name": "tests/integration/targets/win_credential/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d4819a1314f1ea6a500be90e727a92fb9b964511644c7162c869e351037c2f2",
+ "chksum_sha256": "c4531eb647b3263f0f9611fea8ff1c0b2ce30c06437ffe41963e146f94a023a4",
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_replica",
+ "name": "tests/integration/targets/first_responder_kit",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_replica/tasks",
+ "name": "tests/integration/targets/first_responder_kit/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/first_responder_kit/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_replica/tasks/main.yml",
+ "name": "tests/integration/targets/first_responder_kit/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "257be045f263df194afe43af6d42e581e5acce5c0bb2fbd11eecd8569d2c43d1",
+ "chksum_sha256": "f5ff819f29604f94044eecbb7448dfe33b47718cff9cd972a3e149bb81516aad",
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_replica/meta",
+ "name": "tests/integration/targets/first_responder_kit/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_replica/meta/main.yml",
+ "name": "tests/integration/targets/first_responder_kit/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_replica/aliases",
+ "name": "tests/integration/targets/win_resource_governor",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_resource_governor/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
+ "chksum_sha256": "8f9e2da9d3feca4c1b60f565a53f4dc7e62f3a7eef4444e82b29d46d28c0bed3",
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_workload_group",
+ "name": "tests/integration/targets/win_resource_governor/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_workload_group/meta",
+ "name": "tests/integration/targets/win_resource_governor/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "da6570816fbce650c248033a1d6442e212b4d61baa00d6fbc449ae2e9b00777a",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_agent_job_step",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_workload_group/meta/main.yml",
+ "name": "tests/integration/targets/win_agent_job_step/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ace519beeabaa6fff00bf4230f11205f2eb88ef957a29ff4991e717ab3cd8ed2",
+ "chksum_sha256": "47352893485e47a64c32139b049f01c0aa68e79591c0595922e0acffa2665523",
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_workload_group/aliases",
+ "name": "tests/integration/targets/win_agent_job_step/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_agent_job_step/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30128d716d50ee4404c8f9a1e5f921f3a830a18171470a5de586eac0799cfe5c",
+ "chksum_sha256": "bd6596491fb29701a783f70e0cc7e04bcb54c3af0306ff39166d7ffd01e57143",
"format": 1
},
{
- "name": "tests/integration/targets/database",
+ "name": "tests/integration/targets/rg_resource_pool",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/database/tasks",
+ "name": "tests/integration/targets/rg_resource_pool/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/rg_resource_pool/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/database/tasks/main.yml",
+ "name": "tests/integration/targets/rg_resource_pool/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "096c592c99e22c947444c3cefe3007bdc7b6370b7c814b0c6cc0e8bd1b6f77c5",
+ "chksum_sha256": "669c18b75e00a7e66b58ecbb24511bc42825b97529a6e3ad085ef35ff944ffaa",
"format": 1
},
{
- "name": "tests/integration/targets/database/meta",
+ "name": "tests/integration/targets/rg_resource_pool/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/database/meta/main.yml",
+ "name": "tests/integration/targets/rg_resource_pool/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/database/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
"name": "tests/integration/targets/sa",
"ftype": "dir",
"chksum_type": null,
@@ -1247,6 +1219,13 @@
"format": 1
},
{
+ "name": "tests/integration/targets/sa/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/sa/tasks",
"ftype": "dir",
"chksum_type": null,
@@ -1275,269 +1254,297 @@
"format": 1
},
{
- "name": "tests/integration/targets/sa/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_credential",
+ "name": "tests/integration/targets/win_ag_listener",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_credential/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_ag_listener/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
"format": 1
},
{
- "name": "tests/integration/targets/win_credential/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c4531eb647b3263f0f9611fea8ff1c0b2ce30c06437ffe41963e146f94a023a4",
+ "name": "tests/integration/targets/win_ag_listener/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_credential/aliases",
+ "name": "tests/integration/targets/win_ag_listener/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "db8628e2c4d331f6b0a0263ae6c01220b719d3aa44e806849ba93acff90b3e53",
+ "chksum_sha256": "379aa139a694b28a3c158f23c41b631fe828d4e64451a46c26b95347068d310c",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_step",
+ "name": "tests/integration/targets/win_ag_listener/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_step/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_ag_listener/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_step/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "bd6596491fb29701a783f70e0cc7e04bcb54c3af0306ff39166d7ffd01e57143",
+ "name": "tests/integration/targets/win_agent_job_schedule",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_step/aliases",
+ "name": "tests/integration/targets/win_agent_job_schedule/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "47352893485e47a64c32139b049f01c0aa68e79591c0595922e0acffa2665523",
+ "chksum_sha256": "a5361cb2dc2de454e750c31de04aed891a17b82ae1d09ed36de2ce628e4f3542",
"format": 1
},
{
- "name": "tests/integration/targets/win_dba_multitool",
+ "name": "tests/integration/targets/win_agent_job_schedule/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_dba_multitool/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_agent_job_schedule/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "471a1229e4fd010cbc5f53e8f58a3816415bb9f7f3ff1b6a50d697c54ea52eeb",
"format": 1
},
{
- "name": "tests/integration/targets/win_dba_multitool/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f119d90651290bfdbd959b50880d02d480f375e19cb06989384d3b018e3b95dc",
+ "name": "tests/integration/targets/win_sa",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_dba_multitool/aliases",
+ "name": "tests/integration/targets/win_sa/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65187564e8be2b9da7c1a7bc5e86687aa032caca1a4b8517e9cfe2b50ebc1965",
+ "chksum_sha256": "3e5a273fb8fb0ecaa330c5beed437ab0d77fc765042c1baa4b97034f3d63de58",
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_whoisactive",
+ "name": "tests/integration/targets/win_sa/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_whoisactive/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_sa/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f8a1f159fe12e4baf75982b00a0fff57296bd8b19e469913458531f0af2307d1",
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_whoisactive/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6d70bd8678381b8aab8b6ce600856a34b6c6f3307744f0340bc8ebaa3f846fb4",
+ "name": "tests/integration/targets/user",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_sp_whoisactive/aliases",
+ "name": "tests/integration/targets/user/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e25bc398118036b0ab0185bdf43aeb2f586af8d25ba057222e7794aab2cf3b2",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/credential",
+ "name": "tests/integration/targets/user/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential/tasks",
+ "name": "tests/integration/targets/user/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "772de87469867e5ba4167ca395dd6a50004daeddff4cf8a6ca345cf97ca12077",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/user/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential/tasks/main.yml",
+ "name": "tests/integration/targets/user/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "502c966d269ba16b8f50711aa3413b89361ca26241b95ea42bfadbb543078190",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/credential/meta",
+ "name": "tests/integration/targets/win_maintenance_solution",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/credential/meta/main.yml",
+ "name": "tests/integration/targets/win_maintenance_solution/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "7d262d00ae91b2b827898ae863725e5579f68d3390fb255cc591560b4f616367",
"format": 1
},
{
- "name": "tests/integration/targets/credential/aliases",
+ "name": "tests/integration/targets/win_maintenance_solution/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_maintenance_solution/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "51ae5e4c8a06b47364e63ec20fc30138f7d30ef5c0fda58701a248cdaad0481c",
"format": 1
},
{
- "name": "tests/integration/targets/rg_workload_group",
+ "name": "tests/integration/targets/win_ag_replica",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/rg_workload_group/tasks",
+ "name": "tests/integration/targets/win_ag_replica/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_ag_replica/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/rg_workload_group/tasks/main.yml",
+ "name": "tests/integration/targets/win_ag_replica/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "20da00d72375b1ce6f9644d75d1c7d8066bd92e88c919abfd0e6813363a19b92",
+ "chksum_sha256": "257be045f263df194afe43af6d42e581e5acce5c0bb2fbd11eecd8569d2c43d1",
"format": 1
},
{
- "name": "tests/integration/targets/rg_workload_group/meta",
+ "name": "tests/integration/targets/win_ag_replica/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/rg_workload_group/meta/main.yml",
+ "name": "tests/integration/targets/win_ag_replica/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/rg_workload_group/aliases",
+ "name": "tests/integration/targets/setup_win_sqlserver",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/setup_win_sqlserver/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "105c5f0785da8d117d54b796fedefa4ddbd7ec65b52058539abe49f327d06db8",
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_resource_pool",
+ "name": "tests/integration/targets/setup_win_sqlserver/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_resource_pool/meta",
+ "name": "tests/integration/targets/setup_win_sqlserver/vars/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f773fabf54dffb1da94c9a66e897675dd8831d87d1ff525edeaa581ac0963c7e",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/setup_win_sqlserver/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_resource_pool/meta/main.yml",
+ "name": "tests/integration/targets/setup_win_sqlserver/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cfce66e3f2dc81d8b04d033c770f490560112a14e8de1c28c7b93e93757dfda8",
+ "chksum_sha256": "456a9ff2086b679cc9bca6d71da070006c67a8e121374985517c200734c42e97",
"format": 1
},
{
- "name": "tests/integration/targets/win_rg_resource_pool/aliases",
+ "name": "tests/integration/targets/setup_win_sqlserver/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/setup_win_sqlserver/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f514a090cace343c41b298fa180e6a993fc535cf9fd0498424e1db2f097261a4",
+ "chksum_sha256": "0717e3c911ac3a45ebb713a535b723d4ebc229a24910a45349c2e8f9ce88cae8",
"format": 1
},
{
- "name": "tests/integration/targets/win_resource_governor",
+ "name": "tests/integration/targets/setup_sqlserver_test_plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_resource_governor/meta",
+ "name": "tests/integration/targets/setup_sqlserver_test_plugins/shell_plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_resource_governor/meta/main.yml",
+ "name": "tests/integration/targets/setup_sqlserver_test_plugins/shell_plugins/pwsh.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da6570816fbce650c248033a1d6442e212b4d61baa00d6fbc449ae2e9b00777a",
+ "chksum_sha256": "799d270e2028fccdc49b72338e0b2da17ed561e4f7e8deba52504ff6f4a12a56",
"format": 1
},
{
- "name": "tests/integration/targets/win_resource_governor/aliases",
+ "name": "tests/integration/targets/setup_sqlserver_test_plugins/README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8f9e2da9d3feca4c1b60f565a53f4dc7e62f3a7eef4444e82b29d46d28c0bed3",
- "format": 1
- },
- {
- "name": "tests/integration/targets/setup_sqlserver_test_plugins",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "6ee78c7c3661d0afd27fd340bbad2f302cf790e96b56911fd5b67761eab941f7",
"format": 1
},
{
@@ -1555,13 +1562,6 @@
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver_test_plugins/README.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6ee78c7c3661d0afd27fd340bbad2f302cf790e96b56911fd5b67761eab941f7",
- "format": 1
- },
- {
"name": "tests/integration/targets/setup_sqlserver_test_plugins/tasks",
"ftype": "dir",
"chksum_type": null,
@@ -1597,353 +1597,416 @@
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver_test_plugins/shell_plugins",
+ "name": "tests/integration/targets/setup_sqlserver_test_plugins/connection_plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver_test_plugins/shell_plugins/pwsh.py",
+ "name": "tests/integration/targets/setup_sqlserver_test_plugins/connection_plugins/local_pwsh.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "799d270e2028fccdc49b72338e0b2da17ed561e4f7e8deba52504ff6f4a12a56",
+ "chksum_sha256": "d05e0a673b2b8e159c1ff114b18bd7176958446b079591cbdb05304c002bee4e",
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver_test_plugins/connection_plugins",
+ "name": "tests/integration/targets/install_script",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver_test_plugins/connection_plugins/local_pwsh.py",
+ "name": "tests/integration/targets/install_script/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d05e0a673b2b8e159c1ff114b18bd7176958446b079591cbdb05304c002bee4e",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/memory",
+ "name": "tests/integration/targets/install_script/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/memory/tasks",
+ "name": "tests/integration/targets/install_script/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "85e9885a3bb1a9621c0ac183c3c831efe453bbb5eddbb4d6351a620164b33672",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/install_script/files",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/memory/tasks/main.yml",
+ "name": "tests/integration/targets/install_script/files/1-select-choice.sql",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "571c3ec667aa8c42235cf6a6d91e9ce5bcc8785653474f6a1d7bdd6166256705",
+ "chksum_sha256": "fde2acd9de812005948b5935c0c335889828e14ee58dbcc58a205096cdb2266f",
"format": 1
},
{
- "name": "tests/integration/targets/memory/meta",
+ "name": "tests/integration/targets/install_script/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/memory/meta/main.yml",
+ "name": "tests/integration/targets/install_script/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/memory/aliases",
+ "name": "tests/integration/targets/win_rg_resource_pool",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_rg_resource_pool/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "f514a090cace343c41b298fa180e6a993fc535cf9fd0498424e1db2f097261a4",
"format": 1
},
{
- "name": "tests/integration/targets/login",
+ "name": "tests/integration/targets/win_rg_resource_pool/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/login/tasks",
+ "name": "tests/integration/targets/win_rg_resource_pool/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cfce66e3f2dc81d8b04d033c770f490560112a14e8de1c28c7b93e93757dfda8",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_hadr",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/login/tasks/main.yml",
+ "name": "tests/integration/targets/win_hadr/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1439a3129965bb135085f89336b4fc48a1894bfd4f38f4bf8e68f164997fbc19",
+ "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
"format": 1
},
{
- "name": "tests/integration/targets/login/meta",
+ "name": "tests/integration/targets/win_hadr/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/login/meta/main.yml",
+ "name": "tests/integration/targets/win_hadr/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "e5341d244291343a5efba167280a123556be0ce4480c8cab2282fc3e2c288609",
"format": 1
},
{
- "name": "tests/integration/targets/login/aliases",
+ "name": "tests/integration/targets/win_hadr/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_hadr/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_listener",
+ "name": "tests/integration/targets/agent_job_schedule",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_listener/tasks",
+ "name": "tests/integration/targets/agent_job_schedule/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/agent_job_schedule/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_listener/tasks/main.yml",
+ "name": "tests/integration/targets/agent_job_schedule/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23fed7e50228fa01b7b19994f71440ad28b0684f89fd08f7906062fe77602421",
+ "chksum_sha256": "2080e6f637f79d857797559778d18399d2d2c9adbc90992082851af7c76c5cd9",
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_listener/meta",
+ "name": "tests/integration/targets/agent_job_schedule/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_listener/meta/main.yml",
+ "name": "tests/integration/targets/agent_job_schedule/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_ag_listener/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
- "format": 1
- },
- {
- "name": "tests/integration/targets/first_responder_kit",
+ "name": "tests/integration/targets/setup_sqlserver",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/first_responder_kit/tasks",
+ "name": "tests/integration/targets/setup_sqlserver/vars",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/first_responder_kit/tasks/main.yml",
+ "name": "tests/integration/targets/setup_sqlserver/vars/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f5ff819f29604f94044eecbb7448dfe33b47718cff9cd972a3e149bb81516aad",
+ "chksum_sha256": "79635f192493c32b61ca004b05e8445d822906b84321725bb94fee4eae0c69cd",
"format": 1
},
{
- "name": "tests/integration/targets/first_responder_kit/meta",
+ "name": "tests/integration/targets/setup_sqlserver/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/first_responder_kit/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
- "format": 1
- },
- {
- "name": "tests/integration/targets/first_responder_kit/aliases",
+ "name": "tests/integration/targets/setup_sqlserver/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "chksum_sha256": "bd3b6c1bcc76af05949e4b6195b6674a7b5ff725b8e11e89cc09b2fa7828d815",
"format": 1
},
{
- "name": "tests/integration/targets/win_traceflag",
+ "name": "tests/integration/targets/setup_sqlserver/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_traceflag/meta",
+ "name": "tests/integration/targets/setup_sqlserver/defaults/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "454653b07873ad71128411eba4df7f2b86352fd4b884e667e045cab2b83ff4aa",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/agent_job_category",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_traceflag/meta/main.yml",
+ "name": "tests/integration/targets/agent_job_category/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7ff30f930e0f6004866fed58759768dfdda5f2354cea27c69d3a9636e7dcef02",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/win_traceflag/aliases",
+ "name": "tests/integration/targets/agent_job_category/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/agent_job_category/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b60d4d845847bbd2c2a976e26c6d39e4943a1466503642ee370b84b634b07ed5",
+ "chksum_sha256": "0544826a56fa9285258e41f18f989bad46a9a333c4508b6ff52991cb820ef6dd",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_schedule",
+ "name": "tests/integration/targets/agent_job_category/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_schedule/tasks",
+ "name": "tests/integration/targets/agent_job_category/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_rg_workload_group",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_schedule/tasks/main.yml",
+ "name": "tests/integration/targets/win_rg_workload_group/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2080e6f637f79d857797559778d18399d2d2c9adbc90992082851af7c76c5cd9",
+ "chksum_sha256": "30128d716d50ee4404c8f9a1e5f921f3a830a18171470a5de586eac0799cfe5c",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_schedule/meta",
+ "name": "tests/integration/targets/win_rg_workload_group/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_schedule/meta/main.yml",
+ "name": "tests/integration/targets/win_rg_workload_group/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
+ "chksum_sha256": "ace519beeabaa6fff00bf4230f11205f2eb88ef957a29ff4991e717ab3cd8ed2",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_schedule/aliases",
+ "name": "tests/integration/targets/rg_workload_group",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/rg_workload_group/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/win_database",
+ "name": "tests/integration/targets/rg_workload_group/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_database/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/rg_workload_group/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "20da00d72375b1ce6f9644d75d1c7d8066bd92e88c919abfd0e6813363a19b92",
"format": 1
},
{
- "name": "tests/integration/targets/win_database/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c90756dd3934d30739814f2f25f6bed9190409f7134a21b6547594842dedeade",
+ "name": "tests/integration/targets/rg_workload_group/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_database/aliases",
+ "name": "tests/integration/targets/rg_workload_group/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "291c6f9c2e41aa8c04d6b8feb47807d97cdf9f02b487eb63f4507fbac044fae1",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver",
+ "name": "tests/integration/targets/win_spn",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/vars",
+ "name": "tests/integration/targets/win_spn/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_spn/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/vars/main.yml",
+ "name": "tests/integration/targets/win_spn/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f773fabf54dffb1da94c9a66e897675dd8831d87d1ff525edeaa581ac0963c7e",
+ "chksum_sha256": "25fe644417d284f14218215aed1e5e3ae8f920c32cdba3b349e8574f10c41f7c",
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/tasks",
+ "name": "tests/integration/targets/win_spn/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/tasks/main.yml",
+ "name": "tests/integration/targets/win_spn/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "456a9ff2086b679cc9bca6d71da070006c67a8e121374985517c200734c42e97",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/meta",
+ "name": "tests/integration/targets/win_agent_job_category",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/meta/main.yml",
+ "name": "tests/integration/targets/win_agent_job_category/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0717e3c911ac3a45ebb713a535b723d4ebc229a24910a45349c2e8f9ce88cae8",
+ "chksum_sha256": "b4a1e70429ec84b35ba19e1696b30a39c4baeed6712c12ad93fd7ff62c42649a",
"format": 1
},
{
- "name": "tests/integration/targets/setup_win_sqlserver/aliases",
+ "name": "tests/integration/targets/win_agent_job_category/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/win_agent_job_category/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "105c5f0785da8d117d54b796fedefa4ddbd7ec65b52058539abe49f327d06db8",
+ "chksum_sha256": "637bd5e4ab78efba6ef44acb927e9d2188743d9d62d8ef4577ba8ca596dbd7f6",
"format": 1
},
{
@@ -1954,6 +2017,13 @@
"format": 1
},
{
+ "name": "tests/integration/targets/backup/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/backup/tasks",
"ftype": "dir",
"chksum_type": null,
@@ -1982,934 +2052,864 @@
"format": 1
},
{
- "name": "tests/integration/targets/backup/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_agent_job",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_agent_job/meta",
+ "name": "tests/integration/targets/maintenance_solution",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "92d7d87955e27c2d9511ab6e1d62ecf60b9d0ed81895b1a5692c3cc620a29780",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_agent_job/aliases",
+ "name": "tests/integration/targets/maintenance_solution/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80886616ffe77f72ad8654dd26e3c88268215eceda9fd2fcee6eb98288aadae1",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_schedule",
+ "name": "tests/integration/targets/maintenance_solution/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_schedule/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/maintenance_solution/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d6841548d89da6e201186a5bd9ba2f370aff1ed3818fc9d5609a26a7405bd088",
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_schedule/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "471a1229e4fd010cbc5f53e8f58a3816415bb9f7f3ff1b6a50d697c54ea52eeb",
+ "name": "tests/integration/targets/maintenance_solution/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_agent_job_schedule/aliases",
+ "name": "tests/integration/targets/maintenance_solution/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5361cb2dc2de454e750c31de04aed891a17b82ae1d09ed36de2ce628e4f3542",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script",
+ "name": "tests/integration/targets/agent_job_step",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script/tasks",
+ "name": "tests/integration/targets/agent_job_step/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/agent_job_step/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script/tasks/main.yml",
+ "name": "tests/integration/targets/agent_job_step/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f6dc29e973e9e7c8c7396cda456a513614d795455331165968588cbfc622726e",
+ "chksum_sha256": "6adff290d0afbfbf88a488083531374c89338499ae35daea7d73599e83f88e16",
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script/meta",
+ "name": "tests/integration/targets/agent_job_step/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script/meta/main.yml",
+ "name": "tests/integration/targets/agent_job_step/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
- "format": 1
- },
- {
- "name": "tests/integration/targets/win_install_script/files",
+ "name": "tests/integration/targets/win_instance_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/win_install_script/files/1-select-choice.sql",
+ "name": "tests/integration/targets/win_instance_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fde2acd9de812005948b5935c0c335889828e14ee58dbcc58a205096cdb2266f",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nonquery",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "96874064bd5afc3782b688417ee53a256f2aeee2b3f7be9db26ef14760752d33",
"format": 1
},
{
- "name": "tests/integration/targets/nonquery/tasks",
+ "name": "tests/integration/targets/win_instance_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nonquery/tasks/main.yml",
+ "name": "tests/integration/targets/win_instance_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "851bf7899fb14d2c976f6d2b1df869b108ab5f4440e2c57af920e06f7c59f07f",
+ "chksum_sha256": "ce5de48e8e8a42b74d629f088dc4e10650fe45b811ef394b852884e11ac3d20b",
"format": 1
},
{
- "name": "tests/integration/targets/nonquery/meta",
+ "name": "tests/integration/targets/resource_governor",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/nonquery/meta/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
- "format": 1
- },
- {
- "name": "tests/integration/targets/nonquery/aliases",
+ "name": "tests/integration/targets/resource_governor/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/setup_sqlserver/vars",
+ "name": "tests/integration/targets/resource_governor/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver/vars/main.yml",
+ "name": "tests/integration/targets/resource_governor/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "79635f192493c32b61ca004b05e8445d822906b84321725bb94fee4eae0c69cd",
+ "chksum_sha256": "b0e5d378ac5c85985d61002fb7ac7f22f8f37326bb10d5fd09c3553beea37731",
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver/defaults",
+ "name": "tests/integration/targets/resource_governor/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver/defaults/main.yml",
+ "name": "tests/integration/targets/resource_governor/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "454653b07873ad71128411eba4df7f2b86352fd4b884e667e045cab2b83ff4aa",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver/tasks",
+ "name": "tests/integration/targets/memory",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/setup_sqlserver/tasks/main.yml",
+ "name": "tests/integration/targets/memory/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd3b6c1bcc76af05949e4b6195b6674a7b5ff725b8e11e89cc09b2fa7828d815",
- "format": 1
- },
- {
- "name": "tests/integration/targets/agent_job_step",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_step/tasks",
+ "name": "tests/integration/targets/memory/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_step/tasks/main.yml",
+ "name": "tests/integration/targets/memory/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6adff290d0afbfbf88a488083531374c89338499ae35daea7d73599e83f88e16",
+ "chksum_sha256": "571c3ec667aa8c42235cf6a6d91e9ce5bcc8785653474f6a1d7bdd6166256705",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_step/meta",
+ "name": "tests/integration/targets/memory/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_step/meta/main.yml",
+ "name": "tests/integration/targets/memory/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "tests/integration/targets/agent_job_step/aliases",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
- "format": 1
- },
- {
- "name": "tests/integration/.ansible-lint",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "982c0b7fe8683e818a27cf59b6a6892d91c277703c81e2db35994f63ef90e7fc",
+ "name": "tests/integration/targets/win_agent_job",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "README.md",
+ "name": "tests/integration/targets/win_agent_job/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b860fe11d857950cb932384d69d836b52fa4aab697af697b2f74ccfe792160e",
+ "chksum_sha256": "80886616ffe77f72ad8654dd26e3c88268215eceda9fd2fcee6eb98288aadae1",
"format": 1
},
{
- "name": "docs",
+ "name": "tests/integration/targets/win_agent_job/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/.gitkeep",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b",
- "format": 1
- },
- {
- "name": "ADOPTERS.md",
+ "name": "tests/integration/targets/win_agent_job/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe2c96f3cddb707c41ef291696c57c8766303500e26218f725657bd39cedd601",
+ "chksum_sha256": "92d7d87955e27c2d9511ab6e1d62ecf60b9d0ed81895b1a5692c3cc620a29780",
"format": 1
},
{
- "name": "changelogs",
+ "name": "tests/integration/targets/win_memory",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
+ "name": "tests/integration/targets/win_memory/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6397d8c6b1b5bf3d2ff546e6aaebfef0bfd3165fe29514018937c098c6b4d890",
+ "chksum_sha256": "29e8c25ab424e16e434d902ced6f408da8a4f1f9363126caa03092f62bcb43aa",
"format": 1
},
{
- "name": "changelogs/fragments",
+ "name": "tests/integration/targets/win_memory/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments/.keep",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
- "format": 1
- },
- {
- "name": "changelogs/config.yaml",
+ "name": "tests/integration/targets/win_memory/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e054fb68c58140c282fbaf0150eee1bf80ad925196dddd74e848e91b7315a1e",
+ "chksum_sha256": "c32e78042afc05d5738763ed9a9281f6e6a5e03231c2124087400143409ec6d6",
"format": 1
},
{
- "name": "plugins",
+ "name": "tests/integration/targets/dba_multitool",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils",
+ "name": "tests/integration/targets/dba_multitool/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/dba_multitool/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/_SqlServerUtils.psm1",
+ "name": "tests/integration/targets/dba_multitool/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fdfc528ea4a54118f616738c9630066e47e0d010c2b60cbecd851a5b5547c3f",
+ "chksum_sha256": "9c2dc342256920c670d472b94856269834490da88bd511ce501d7158dd080ecd",
"format": 1
},
{
- "name": "plugins/modules",
+ "name": "tests/integration/targets/dba_multitool/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/dba_multitool.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5e13bd1112019e1852969d1b39b8579abea06a8d4a7742feb4664c637865b9f5",
- "format": 1
- },
- {
- "name": "plugins/modules/user.ps1",
+ "name": "tests/integration/targets/dba_multitool/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3c923f60de426d3b12e67709d0edcec86a396163a9911af82ad937c5561a7bc",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/first_responder_kit.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1e478006b282e7dfc5ebbdf5045f6a81b632cbe2c70819aca11de40dc4de3364",
+ "name": "tests/integration/targets/login",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/sp_configure.ps1",
+ "name": "tests/integration/targets/login/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9c7d49cb933f7d1128be15e359e2ac4f7d8fd48facb741d7c5866ac4028b16c5",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "plugins/modules/backup.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ddffc896a151a1c0a9d1798d1f7796d49aae2886f5aeb9c07ca72378d257a298",
+ "name": "tests/integration/targets/login/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/agent_job_schedule.ps1",
+ "name": "tests/integration/targets/login/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "195f583bfb2e1b068f3022bd0a152fde686581266154973f43f150d10adbe247",
+ "chksum_sha256": "1439a3129965bb135085f89336b4fc48a1894bfd4f38f4bf8e68f164997fbc19",
"format": 1
},
{
- "name": "plugins/modules/ag_replica.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5868def510947485eea9e2f495ed80cfb407db89c6d9f42c829a45eeb00a9ad1",
+ "name": "tests/integration/targets/login/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/install_script.ps1",
+ "name": "tests/integration/targets/login/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e23cd11873aa61dcfbe61ab841e54e31d07d97bc8c5b44c7623c097c90b5621",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/hadr.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b2017667c8e572a1582cb076b3d543f4d7eba1ed6d66a7c0249d48b4f793868a",
+ "name": "tests/integration/targets/win_database",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/resource_governor.ps1",
+ "name": "tests/integration/targets/win_database/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a30e5d126462c9fc8b882307851d623267308c7c6fb4eaac51ce51cff6fa415d",
+ "chksum_sha256": "291c6f9c2e41aa8c04d6b8feb47807d97cdf9f02b487eb63f4507fbac044fae1",
"format": 1
},
{
- "name": "plugins/modules/credential.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "16a49e24a042e89e609a22adc1316559025f60bf24fb456c0acda09e611b6ec5",
+ "name": "tests/integration/targets/win_database/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/traceflag.ps1",
+ "name": "tests/integration/targets/win_database/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "278f6beedbef5c73eaf3b98d04dc1adda8a0921aa48a138f339c8d25747cedb9",
+ "chksum_sha256": "c90756dd3934d30739814f2f25f6bed9190409f7134a21b6547594842dedeade",
"format": 1
},
{
- "name": "plugins/modules/availability_group.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8b3b47b92e9a558181210ab01de6d6637d3b39d61d647630926ac424a08303ef",
+ "name": "tests/integration/targets/win_restore",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/nonquery.py",
+ "name": "tests/integration/targets/win_restore/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "28566664f6a3f97f72301bc34fa22af1421ead1ca572d651f241e960acc2be29",
+ "chksum_sha256": "b522894638086953bde369a5f6c194a90c32338fefab24021b399659246ee20b",
"format": 1
},
{
- "name": "plugins/modules/memory.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "de05b5612d9e692126afe9a25841d3dcbcfef1079cb45585d49564cd3bac2fc6",
+ "name": "tests/integration/targets/win_restore/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/spn.ps1",
+ "name": "tests/integration/targets/win_restore/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3eb83933723cbc2f88238a60cdb539b97576e9feb5ca0543da2b561e2e60b83",
+ "chksum_sha256": "25cec2155a81c7e38f39a8124b49ae2daf98d279ca19a1d2fe4e2dc16d406c9c",
"format": 1
},
{
- "name": "plugins/modules/dba_multitool.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0236dc7ccb91ecf1e0fdb832a40a8f82ed90ad888d7e02f41b7a2848ddc48bef",
+ "name": "tests/integration/targets/win_restore/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/sa.py",
+ "name": "tests/integration/targets/win_restore/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "984966ae91f444089b89a510f47de10b80d3417bd2def60af3bc91a5ba5979d1",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/user.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "99b2b1074ac11298c64b987414934b55e1e95a482bcc3185285662482dbf55b7",
+ "name": "tests/integration/targets/win_sp_configure",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/maintenance_solution.ps1",
+ "name": "tests/integration/targets/win_sp_configure/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b2b43694b89c22938156682c8c6f1b3a30c3a0b67f7f50e37efcd17e1eb1cde5",
+ "chksum_sha256": "f9f102cc9b61991c72e62216c363357b58ba6ef812f7838b194d76b9a37ee6dd",
"format": 1
},
{
- "name": "plugins/modules/nonquery.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e098278136eec42260b9354b6dca6816f4968c546f12e259cd2e62d2f4a72e82",
+ "name": "tests/integration/targets/win_sp_configure/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/agent_job.ps1",
+ "name": "tests/integration/targets/win_sp_configure/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19ab20059fbd77950f856cb7be75049136673d2c793f91900be18b194716b97a",
+ "chksum_sha256": "4c58bedc5bf4354f6a8e46bd6e292a388f347a5babcc36a61cebd68081a3d1c7",
"format": 1
},
{
- "name": "plugins/modules/traceflag.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d08e57033e0da3f63f1c024446f02abdff41400dd0607a400f234468e05260ff",
+ "name": "tests/integration/targets/win_install_script",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ag_listener.ps1",
+ "name": "tests/integration/targets/win_install_script/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5fb3935cf9a93b88546cbc762ce8825df079013d679af682626a9a8de5d003e4",
+ "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
"format": 1
},
{
- "name": "plugins/modules/database.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "eaedf444462b956cba62cf1742f07353950ccab565377191fcde24241cdb5690",
+ "name": "tests/integration/targets/win_install_script/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/sp_whoisactive.py",
+ "name": "tests/integration/targets/win_install_script/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26025dd39dc4bea36f72bcb0dd4a279245adde8e6be4792cc40ef90d02ade8e7",
+ "chksum_sha256": "f6dc29e973e9e7c8c7396cda456a513614d795455331165968588cbfc622726e",
"format": 1
},
{
- "name": "plugins/modules/hadr.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ae6aa285169947e898e685a1125017bcc755fcbb578dd5f1654da4631576ad5b",
+ "name": "tests/integration/targets/win_install_script/files",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/restore.py",
+ "name": "tests/integration/targets/win_install_script/files/1-select-choice.sql",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f357dea6f735aae072bfee1d35335140a2a4cf3ac617ee68347cde8f7991bad",
+ "chksum_sha256": "fde2acd9de812005948b5935c0c335889828e14ee58dbcc58a205096cdb2266f",
"format": 1
},
{
- "name": "plugins/modules/spn.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "57a8e8f95263b83394aaf4342759ffccbfa20315cf8d208aecee5b54f46d7a72",
+ "name": "tests/integration/targets/win_install_script/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/tcp_port.py",
+ "name": "tests/integration/targets/win_install_script/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fcb5a1326cf73886733d6772f7623bd7e792563155c8d72a0d59312998215d91",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/agent_job_step.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "56da01e0ba38203752b396a705ef7b385109c803de9787456ab9446353ef88db",
+ "name": "tests/integration/targets/database",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/ag_replica.py",
+ "name": "tests/integration/targets/database/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3d7f122d89145949eaeabf78c2e4747ba9158611d9dd28efe19a6c23b61a18cf",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "plugins/modules/ag_listener.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c91b831040e354aff0581367dcd1778b898ca424a92abf35786e9835e96c1018",
+ "name": "tests/integration/targets/database/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/sa.ps1",
+ "name": "tests/integration/targets/database/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "950e4b60caa84225b0153cf08fe8b87ca97a27f155fc1d4a1d95c5d677ea0622",
+ "chksum_sha256": "096c592c99e22c947444c3cefe3007bdc7b6370b7c814b0c6cc0e8bd1b6f77c5",
"format": 1
},
{
- "name": "plugins/modules/instance_info.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c6d60a465bcc5ab23e0ab7688479f2c54ca80c4d15f9a7a6adc0211f8aabd4fd",
+ "name": "tests/integration/targets/database/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/instance_info.ps1",
+ "name": "tests/integration/targets/database/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "afe3b3ee537cfe249f987e961eb1860ad5c01f617caf756a2d6c204357e25d11",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/sp_configure.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2715043d68e455a59bd7ff7028c769c23d662ae04b937128aec461a39c544bf5",
+ "name": "tests/integration/targets/credential",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/rg_resource_pool.ps1",
+ "name": "tests/integration/targets/credential/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8bda00accf8650882d9527b1537db36a68d9eafa1f2345f844d7ac59ac1d003c",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "plugins/modules/rg_resource_pool.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "a18afaa50ed7d7183a0a9458b8b4a62130d92670f5d68197c8272c498eef876e",
+ "name": "tests/integration/targets/credential/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/sp_whoisactive.ps1",
+ "name": "tests/integration/targets/credential/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d8e9698a0715ede939c5ec6b1bb786c99f63221d3b2c2f91f8b6fafd488a9a5",
+ "chksum_sha256": "502c966d269ba16b8f50711aa3413b89361ca26241b95ea42bfadbb543078190",
"format": 1
},
{
- "name": "plugins/modules/agent_job_step.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3ab30f960d4e24bcb1a2dcb22d5a57183eba5c22811f6bc2d7222791103660b1",
+ "name": "tests/integration/targets/credential/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/maintenance_solution.py",
+ "name": "tests/integration/targets/credential/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "126a8b7ea648ab2bc6e36db65a9b5ca85ca1c2224ad21445c176ea700b5a00ad",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/availability_group.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "46d7f93e5951ea8096b4632ffc8b2bed5391dcb28a7b1c2ed66d9dfc5eb3ec3a",
+ "name": "tests/integration/targets/sp_configure",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/login.py",
+ "name": "tests/integration/targets/sp_configure/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8810d99eb07f72104830e9d0741fa93c915e0ab238f932dbbea90882030330b7",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "plugins/modules/agent_job.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "9476d4b38459e10be25b64c96bf7b2b9bb4f49045d15cd36f959737eec194d34",
+ "name": "tests/integration/targets/sp_configure/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/database.py",
+ "name": "tests/integration/targets/sp_configure/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "943ace13f44fc4e582ed5bf7c6064b4c90680c2580169782d3c7c61845816b8a",
+ "chksum_sha256": "c17643d6398d807c079ed122d9afc3f7d121e4720ac460693e3a6dc1bf9eb355",
"format": 1
},
{
- "name": "plugins/modules/agent_job_schedule.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ec152ec9b41bdeabfb3289c73b4c19955b7b1d849420e93513bd10640e2824c4",
+ "name": "tests/integration/targets/sp_configure/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/backup.ps1",
+ "name": "tests/integration/targets/sp_configure/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c31187c932814a34e5f46576e9ec68237969390de4377e3baa35598940f2fe36",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/install_script.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6932f5d090a894e3bef5e29f612bd1545aae360d91f8ffc85c7352a5da1548db",
+ "name": "tests/integration/targets/agent_job",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/resource_governor.py",
+ "name": "tests/integration/targets/agent_job/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "27d1f38306c23bfea82dc745425cf6cef0099156054bee4a497d0687866ade97",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "plugins/modules/memory.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1c3e840a66bb01f5b809b7c9347594cfed73286cb589ca710eb3802f990fe9c8",
+ "name": "tests/integration/targets/agent_job/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/rg_workload_group.ps1",
+ "name": "tests/integration/targets/agent_job/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9faacfc40d32dbcc2eae198f15784613d7b20721045f9bc551be139019fe2027",
+ "chksum_sha256": "31c7e5478ab71d9b16cad90042519bd6cef26068f73dab84bf1b12fe6c6b1dcf",
"format": 1
},
{
- "name": "plugins/modules/agent_job_category.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "22167936fa56c583e687b1feaa79d42d3d6fe99832c3fba0f818e8e212820f45",
+ "name": "tests/integration/targets/agent_job/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/login.ps1",
+ "name": "tests/integration/targets/agent_job/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "196042ab05e84d23b632c7a2d3ca39b6eb4e96f366e3e1dff8119441f4f7b7c2",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/modules/credential.ps1",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "364bcb3116174488c1a1c51ada8eac915e9893434dd85577ea49d49c781a8e83",
+ "name": "tests/integration/targets/sp_whoisactive",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/restore.ps1",
+ "name": "tests/integration/targets/sp_whoisactive/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5544cbbca85e97df2d855e33c64ef045d49a441410a6cc3b0a9328ab991485d",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": "plugins/modules/agent_job_category.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "eda9101228bdf9db6dfce794c3d7cb51b2913f43cbfd6d2045179a74dad4f52e",
+ "name": "tests/integration/targets/sp_whoisactive/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/tcp_port.ps1",
+ "name": "tests/integration/targets/sp_whoisactive/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "82354c3a07f163d733ba25a8e43c185ae890681e1104df0224fdbfa68da26c8f",
+ "chksum_sha256": "6ea486055f1c09c041e9b353582799bb37ed536cb8e7e6bf89920a1d79cc361f",
"format": 1
},
{
- "name": "plugins/modules/rg_workload_group.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "90cae13aa924431ceddce76eb94f49576b4e8396227a141a2d81c9fbf5f9dd34",
+ "name": "tests/integration/targets/sp_whoisactive/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/first_responder_kit.py",
+ "name": "tests/integration/targets/sp_whoisactive/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf16bc535cfa590e884e99e3c406c602aca4c1322b9a9c86679d1980f18e5e1a",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": "plugins/doc_fragments",
+ "name": "tests/integration/targets/win_tcp_port",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/doc_fragments/attributes.py",
+ "name": "tests/integration/targets/win_tcp_port/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cdffff9cfa27f07533b806e63d11b9a752144eb5f2d7d8c8837e7a4efa4da4c7",
+ "chksum_sha256": "823c89dbff7ce470f0ab6e7e3c41e7fa02b7b38fd4a1d5bcda2ecbcb98a4dbd7",
"format": 1
},
{
- "name": "plugins/doc_fragments/state.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ae0b5a340638525f1d65822b592ccba03c72dd937e8f1f60f41539f80a1af2f7",
+ "name": "tests/integration/targets/win_tcp_port/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/doc_fragments/sql_credentials.py",
+ "name": "tests/integration/targets/win_tcp_port/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8319c0f26d235fb9dd33444fe7cfe963324207e64f77423098fb0a6c071be791",
+ "chksum_sha256": "d692ce8436946b398e6cb3ff85f4c0e6a500468fb623d144566b25db71a60af5",
"format": 1
},
{
- "name": ".github",
+ "name": "tests/integration/targets/win_tcp_port/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/win_tcp_port/meta/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/bug_report.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8d3db528d29f10782cc5b08a68999c7144efbeaa6724817d6c72a69f4c745a31",
+ "name": "tests/integration/targets/win_sp_whoisactive",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/feature_request.md",
+ "name": "tests/integration/targets/win_sp_whoisactive/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd5b39aee355194f532b686c62a783c0657d8f7810586a757775769b291461a2",
+ "chksum_sha256": "0e25bc398118036b0ab0185bdf43aeb2f586af8d25ba057222e7794aab2cf3b2",
"format": 1
},
{
- "name": ".github/workflows",
+ "name": "tests/integration/targets/win_sp_whoisactive/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows/ansible-test-windows.yml",
+ "name": "tests/integration/targets/win_sp_whoisactive/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9f1e8d195386bbda824f1b666b84c1ecfe75c3f7e83ae325a715b5189e733e3e",
+ "chksum_sha256": "6d70bd8678381b8aab8b6ce600856a34b6c6f3307744f0340bc8ebaa3f846fb4",
"format": 1
},
{
- "name": ".github/workflows/docs-pr.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "84a88873a148150b9b42877fa1c42697c05f631e39e618160bd99bf0f3652abd",
+ "name": "tests/integration/targets/traceflag",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows/release.yml",
+ "name": "tests/integration/targets/traceflag/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7da1e4ae99e0dee4bebf9b7b709a90bfda092ff8c3a421f21da5f39d1251d36",
+ "chksum_sha256": "79821e9e38c38952ccfdbe45b988082364c67d537d5753dfbb0a5f8716c48ab9",
"format": 1
},
{
- "name": ".github/workflows/ansible-test.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "104a40d59c18f8ca86b4cc4fe122e801b95c634bf3a0af9087734154893b63e3",
+ "name": "tests/integration/targets/traceflag/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/workflows/docs-push.yml",
+ "name": "tests/integration/targets/traceflag/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9529da03eea03a258ab7ec8613310e62ad06fdbadd293d575070797d716443e9",
+ "chksum_sha256": "50cc511e187bab0f47387422459e654018bb6d5b8c408c49fe074cc92466560d",
"format": 1
},
{
- "name": ".github/PULL_REQUEST_TEMPLATE.md",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "4ea1170a9065ecc1b0effb0e52f06fe42ef9936e86dfddb21c43efdd1c929b7b",
+ "name": "tests/integration/targets/traceflag/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/codecov.yml",
+ "name": "tests/integration/targets/traceflag/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a42ee6fd570c21e6f7828ebb71585d819fd3fa126b3d98c39ea79565ce1a0ed6",
+ "chksum_sha256": "a170d576026b40f2e8d93ced0dcaff0108bd61e8f9d62bc3bee3bb14dba13b83",
"format": 1
},
{
- "name": ".github/FUNDING.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1a09efd96d758af083de6b9468b6508672806abb882fd9fc16a1a23a67ab3acc",
+ "name": "tests/integration/targets/win_nonquery",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": ".github/dependabot.yml",
+ "name": "tests/integration/targets/win_nonquery/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a35bab1dd3e838ca5730aae35e4b127a052f8e03d5ed5437549e95f8424e621c",
+ "chksum_sha256": "eb1fa7982c88abde5d789d5b541bc8016f99025f732bef1cca7ba4ca457b6f7b",
"format": 1
},
{
- "name": ".github/in-solidarity.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "43f54d1f6c1496d0a02ce3144f73d479f2e37ea3d2bb9e54ba08763ddfd230a2",
+ "name": "tests/integration/targets/win_nonquery/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "CODE_OF_CONDUCT.md",
+ "name": "tests/integration/targets/win_nonquery/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "14ec928234a7ed52bf8b458d31e4862335111e477e4dbe7fb543686c24115140",
+ "chksum_sha256": "0dae571f5e423080bf9bdaac127360812ea64380e435c29f03142c56aadb4484",
"format": 1
},
{
- "name": "meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/inventory.winrm.ci",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d1a9b5a526214d256d89f06ac3180e7d73b63b7cd199a536888405e21f8768de",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "tests/integration/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94d6b49778d101cca4b6cdfc026e86ba673af5cb029165c3dd170f3123b62286",
+ "chksum_sha256": "0a28c1f749c45ff439aeeb826635429412f65c284d7d52ca7ff51b9dde495551",
"format": 1
},
{
- "name": "MAINTAINERS",
+ "name": "tests/integration/integration_config.sample.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29b1b04e645e0c11e59c48cfe7bac3dbe16f787a9d6114847d9a5c3b5990e731",
+ "chksum_sha256": "25e1d1343d93da99f01452b27af5f1b290b938b228ff934b0e84e6acd36916ec",
"format": 1
}
],
diff --git a/ansible_collections/lowlydba/sqlserver/MANIFEST.json b/ansible_collections/lowlydba/sqlserver/MANIFEST.json
index e4601206f..e22249075 100644
--- a/ansible_collections/lowlydba/sqlserver/MANIFEST.json
+++ b/ansible_collections/lowlydba/sqlserver/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "lowlydba",
"name": "sqlserver",
- "version": "2.3.2",
+ "version": "2.3.3",
"authors": [
"John McCall (github.com/lowlydba)"
],
@@ -25,7 +25,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c715ba466fb55aea6acf9408f6c7f1286005dda30e44d773afa17e8a32e5ab03",
+ "chksum_sha256": "c498d858d1e34d6e7e3aeb9c5348092b09a08a5c088316e8ca1c72b1d991b6eb",
"format": 1
},
"format": 1
diff --git a/ansible_collections/lowlydba/sqlserver/changelogs/changelog.yaml b/ansible_collections/lowlydba/sqlserver/changelogs/changelog.yaml
index 57c71a1e5..a8a45ded6 100644
--- a/ansible_collections/lowlydba/sqlserver/changelogs/changelog.yaml
+++ b/ansible_collections/lowlydba/sqlserver/changelogs/changelog.yaml
@@ -494,3 +494,14 @@ releases:
- 2-3-2-release-summary.yml
- 229-docs-update.yml
release_date: '2024-03-09'
+ 2.3.3:
+ changes:
+ bugfixes:
+ - fixed the expected type of the ip_address, subnet_ip, and subnet_mask parameters
+ to be lists instead of strings (lowlydba.sqlserver.ag_listener)
+ release_summary: Minor bugfix for ag listener input types, thanks @daarrn for
+ the contribution!
+ fragments:
+ - 2-3-3-release-summary.yml
+ - 245-ag_listener-ip_address-fix.yml
+ release_date: '2024-06-06'
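
With this change, the listener address options accept YAML lists rather than comma-separated strings. A minimal task sketch illustrating the new form (the instance, availability group, and address values are assumed for illustration and mirror the updated module examples):

.. code-block:: yaml

    - name: Create AG listener with two IP addresses
      lowlydba.sqlserver.ag_listener:
        sql_instance_primary: sql-01.myco.io
        ag_name: AG_MyDatabase
        listener_name: aglMyDatabase
        ip_address:
          - 10.0.20.20
          - 10.1.77.77
        subnet_mask:
          - 255.255.255.0
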
diff --git a/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.ps1 b/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.ps1
index e20238ed4..21cc05538 100644
--- a/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.ps1
+++ b/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.ps1
@@ -15,9 +15,9 @@ $spec = @{
options = @{
ag_name = @{type = 'str'; required = $true }
listener_name = @{type = 'str'; required = $true }
- ip_address = @{type = 'str'; required = $false }
- subnet_ip = @{type = 'str'; required = $false }
- subnet_mask = @{type = 'str'; required = $false; default = '255.255.255.0' }
+ ip_address = @{type = 'list'; elements = 'str'; required = $false }
+ subnet_ip = @{type = 'list'; elements = 'str'; required = $false }
+ subnet_mask = @{type = 'list'; elements = 'str'; required = $false; default = '255.255.255.0' }
port = @{type = 'int'; required = $false; default = 1433 }
dhcp = @{type = 'bool'; required = $false; default = $false }
state = @{type = "str"; required = $false; default = "present"; choices = @("present", "absent") }
diff --git a/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.py b/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.py
index 12f0ec635..e835678ae 100644
--- a/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.py
+++ b/ansible_collections/lowlydba/sqlserver/plugins/modules/ag_listener.py
@@ -25,17 +25,20 @@ options:
ip_address:
description:
- IP address(es) of the listener. Comma separated if multiple.
- type: str
+ type: list
+ elements: str
required: false
subnet_ip:
description:
- Subnet IP address(es) of the listener. Comma separated if multiple.
- type: str
+ type: list
+ elements: str
required: false
subnet_mask:
description:
- Sets the subnet IP mask(s) of the availability group listener. Comma separated if multiple.
- type: str
+ type: list
+ elements: str
required: false
default: 255.255.255.0
port:
@@ -71,9 +74,13 @@ EXAMPLES = r'''
sql_instance_primary: sql-01.myco.io
ag_name: AG_MyDatabase
listener_name: aglMyDatabase
- ip_address: 10.0.20.20,10.1.77.77
- subnet_ip: 255.255.252.0
- subnet_mask: 255.255.255.0
+ ip_address:
+ - 10.0.20.20
+ - 10.1.77.77
+ subnet_ip:
+ - 255.255.252.0
+ subnet_mask:
+ - 255.255.255.0
'''
RETURN = r'''
diff --git a/ansible_collections/lowlydba/sqlserver/plugins/modules/spn.py b/ansible_collections/lowlydba/sqlserver/plugins/modules/spn.py
index 0fabb9346..d08d825b1 100644
--- a/ansible_collections/lowlydba/sqlserver/plugins/modules/spn.py
+++ b/ansible_collections/lowlydba/sqlserver/plugins/modules/spn.py
@@ -52,9 +52,13 @@ EXAMPLES = r'''
sql_instance_primary: sql-01.myco.io
ag_name: AG_MyDatabase
listener_name: aglMyDatabase
- ip_address: 10.0.20.20,10.1.77.77
- subnet_ip: 255.255.252.0
- subnet_mask: 255.255.255.0
+ ip_address:
+ - 10.0.20.20
+ - 10.1.77.77
+ subnet_ip:
+ - 255.255.252.0
+ subnet_mask:
+ - 255.255.255.0
- name: Add SPN for new AG listener on port 1433
lowlydba.sqlserver.spn:
diff --git a/ansible_collections/lowlydba/sqlserver/tests/integration/targets/win_ag_listener/tasks/main.yml b/ansible_collections/lowlydba/sqlserver/tests/integration/targets/win_ag_listener/tasks/main.yml
index b4142a487..68604997a 100644
--- a/ansible_collections/lowlydba/sqlserver/tests/integration/targets/win_ag_listener/tasks/main.yml
+++ b/ansible_collections/lowlydba/sqlserver/tests/integration/targets/win_ag_listener/tasks/main.yml
@@ -25,8 +25,10 @@
ag_name: "{{ ag_name }}"
listener_name: "{{ listener_name }}"
port: "{{ port }}"
- ip_address: "192.168.6.9"
- subnet_mask: "255.255.255.0"
+ ip_address:
+ - "192.168.6.9"
+ subnet_mask:
+ - "255.255.255.0"
tags: ["ag_listener"]
block:
- name: Enable hadr
diff --git a/ansible_collections/microsoft/ad/.ansible-lint b/ansible_collections/microsoft/ad/.ansible-lint
index cad3b8e14..d1340f58c 100644
--- a/ansible_collections/microsoft/ad/.ansible-lint
+++ b/ansible_collections/microsoft/ad/.ansible-lint
@@ -5,6 +5,8 @@ exclude_paths:
- changelogs/changelog.yaml
# Incorrect error around supported ansible versions in this file
- meta/runtime.yml
+ # Examples contain duplicate keys due to how we document inventory plugins
+ - plugins/inventory/ldap.py
- tests/integration/
# We skip a rule that has to be skipped
- tests/sanity/ignore-*.txt
diff --git a/ansible_collections/microsoft/ad/.azure-pipelines/azure-pipelines.yml b/ansible_collections/microsoft/ad/.azure-pipelines/azure-pipelines.yml
index 5db3a5531..7de2c9fd6 100644
--- a/ansible_collections/microsoft/ad/.azure-pipelines/azure-pipelines.yml
+++ b/ansible_collections/microsoft/ad/.azure-pipelines/azure-pipelines.yml
@@ -76,6 +76,22 @@ stages:
test: sanity
- name: Units
test: units
+ - name: Lint
+ test: lint
+ - stage: Ansible_2_17
+ displayName: Ansible 2.17
+ dependsOn:
+ - Dependencies
+ jobs:
+ - template: templates/matrix.yml
+ parameters:
+ nameFormat: "{0}"
+ testFormat: "2.17/{0}"
+ targets:
+ - name: Sanity
+ test: sanity
+ - name: Units
+ test: units
- stage: Ansible_2_16
displayName: Ansible 2.16
dependsOn:
@@ -90,8 +106,6 @@ stages:
test: sanity
- name: Units
test: units
- - name: Lint
- test: lint
- stage: Ansible_2_15
displayName: Ansible 2.15
dependsOn:
@@ -139,6 +153,7 @@ stages:
condition: succeededOrFailed()
dependsOn:
- Ansible_devel
+ - Ansible_2_17
- Ansible_2_16
- Ansible_2_15
- Ansible_2_14
diff --git a/ansible_collections/microsoft/ad/.gitignore b/ansible_collections/microsoft/ad/.gitignore
index f77a1cf67..cf70d5ad4 100644
--- a/ansible_collections/microsoft/ad/.gitignore
+++ b/ansible_collections/microsoft/ad/.gitignore
@@ -393,4 +393,5 @@ changelogs/.plugin-cache.yaml
tests/integration/inventory*
tests/integration/targets/domain_controller/.vagrant
tests/integration/targets/membership/.vagrant
-tests/output/ \ No newline at end of file
+tests/output/
+.vagrant/ \ No newline at end of file
diff --git a/ansible_collections/microsoft/ad/CHANGELOG.rst b/ansible_collections/microsoft/ad/CHANGELOG.rst
index 53c63d173..f778721e7 100644
--- a/ansible_collections/microsoft/ad/CHANGELOG.rst
+++ b/ansible_collections/microsoft/ad/CHANGELOG.rst
@@ -4,6 +4,32 @@ Ansible Microsoft Active Directory Release Notes
.. contents:: Topics
+v1.6.0
+======
+
+Release Summary
+---------------
+
+Release summary for v1.6.0
+
+Minor Changes
+-------------
+
+- microsoft.ad AD modules - Added ``domain_credentials`` as a common module option that can be used to specify credentials for specific AD servers.
+- microsoft.ad AD modules - Added ``lookup_failure_action`` on all modules that can specify a list of distinguishedName values to control what should happen if the lookup fails.
+- microsoft.ad.computer - Added the ability to lookup a distinguishedName on a specific domain server for ``delegates`` and ``managed_by``.
+- microsoft.ad.group - Added the ability to lookup a distinguishedName on a specific domain server for ``managed_by`` and ``members``.
+- microsoft.ad.ou - Added the ability to lookup a distinguishedName on a specific domain server for ``managed_by``.
+- microsoft.ad.user - Added the ability to lookup a distinguishedName on a specific domain server for ``delegates``.
+- microsoft.ad.user - Rename the option ``groups.missing_action`` to ``groups.lookup_failure_action`` to make the option more consistent with other modules. The ``missing_action`` option is still supported as an alias.
+- microsoft.ad.user - Support group member lookup on an alternative server using the DN lookup syntax. This syntax uses a dictionary where ``name`` defines the group to look up and ``server`` defines the server to look up the group on.
+
+Bugfixes
+--------
+
+- microsoft.ad.membership - Fix hostname check to work with hostnames longer than 15 characters long - https://github.com/ansible-collections/microsoft.ad/issues/113
+- microsoft.ad.user - Fix issue when creating a new user account with ``account_locked: false`` - https://github.com/ansible-collections/microsoft.ad/issues/108
+
v1.5.0
======
diff --git a/ansible_collections/microsoft/ad/FILES.json b/ansible_collections/microsoft/ad/FILES.json
index 0dd2d37a2..b5d8ba03c 100644
--- a/ansible_collections/microsoft/ad/FILES.json
+++ b/ansible_collections/microsoft/ad/FILES.json
@@ -102,7 +102,7 @@
"name": ".azure-pipelines/azure-pipelines.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd6f3baf66ceaa437b9b6a25f26a4169acb7c08fbacc02b0e7550825ec6d1ec4",
+ "chksum_sha256": "2f25e97b196dbaa78b87a262b6631129769fefac4c58797343e0314b73a348ed",
"format": 1
},
{
@@ -158,7 +158,7 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a0d5c07afae82f7e211f5f8bf93c2d0a8ffc3cc594bd93f3bc5efc855546cff",
+ "chksum_sha256": "3751f05d4ce0f68f57798e044ecdb504aa8092d5c4a17a0863613b15fabfb26a",
"format": 1
},
{
@@ -190,17 +190,24 @@
"format": 1
},
{
+ "name": "docs/docsite/rst/guide_ad_module_authentication.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a8108762af89062af8a311efe6239224a8878ef7d99136f91dfdc3b3d52388c9",
+ "format": 1
+ },
+ {
"name": "docs/docsite/rst/guide_attributes.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a42f374f72fec7ecdb4cb7a7d110cede6a29fe62263e3dc48e078b2f1803d9e5",
+ "chksum_sha256": "f50e3d4da85f4b2647f9b374bb94b52873b880b247dfd6ef0ddaf4abfb5981bf",
"format": 1
},
{
"name": "docs/docsite/rst/guide_ldap_connection.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "53537bac987f50ddddd23033632dd28e77022b88f4dcb00a0c3b93ed7c07b925",
+ "chksum_sha256": "35ed448478aef43642299f37cdc9507e5e2bfdec24f97bad939968af4a341f3f",
"format": 1
},
{
@@ -221,14 +228,14 @@
"name": "docs/docsite/rst/guide_migration.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fe1668d6d2b28633ee9e78394327f846f028e7448901df8040633c5900be9cd4",
+ "chksum_sha256": "af1bcca081b9a0dabcc474ae259dda01030100db31f36ecc03472cb506a2c765",
"format": 1
},
{
"name": "docs/docsite/extra-docs.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ba33d70d90d838dee9bb1644d9cabbe184450979613b031507dc1fb11c2bdd8",
+ "chksum_sha256": "9c692e6876d22d1b26c352dd983733861c141ab8afc73b12e7dcd18f0da14753",
"format": 1
},
{
@@ -284,14 +291,21 @@
"name": "plugins/action/domain.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e989c9f1f4ecd67419de47b000a05fced5165a4f741d3fdf1bd5bfb9bae7d81",
+ "chksum_sha256": "71d3694b8188e411c19904a7cb93193619ab8a5beb5d1e2c7ae8b6942bd0d2f0",
+ "format": 1
+ },
+ {
+ "name": "plugins/action/domain_child.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "937a4f5e16bab1aa76d3a315964617c462b592bb4886ab9ec6ed5b075e42f9bf",
"format": 1
},
{
"name": "plugins/action/domain_controller.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e989c9f1f4ecd67419de47b000a05fced5165a4f741d3fdf1bd5bfb9bae7d81",
+ "chksum_sha256": "71d3694b8188e411c19904a7cb93193619ab8a5beb5d1e2c7ae8b6942bd0d2f0",
"format": 1
},
{
@@ -312,14 +326,14 @@
"name": "plugins/doc_fragments/ad_object.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6296c1c278de0d5fd4af66393e7e074e9f57b73788888b74856f28a46b1852c8",
+ "chksum_sha256": "e4f6f708c9e7e3d14b16fb6e83cc95ed148bc2b4f74fb28efe6dfd6436c54b9c",
"format": 1
},
{
"name": "plugins/doc_fragments/ldap_connection.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70c3cef5f7c2a102ab915450e274484939a520a640b82f6d43d9f1ff47b3af6b",
+ "chksum_sha256": "52fd76fcc19d62ee4957866122364df5bf4ae2de161d11f8ecc1470dc6b0f65f",
"format": 1
},
{
@@ -389,7 +403,7 @@
"name": "plugins/inventory/ldap.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e09952907a3409d3e3730362b2d62d3321b13096f9206ddd10b5f9d0413dc17f",
+ "chksum_sha256": "d7b35e107d4015836c84dca36dcba415b186ae82aff2d2385a35b3d008777c35",
"format": 1
},
{
@@ -403,7 +417,7 @@
"name": "plugins/module_utils/_ADObject.psm1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a8e86078dfa4a138807bf4b9d5a7820d017ece515d6dce44800e273f7079260e",
+ "chksum_sha256": "d1aadbe160df42b99a752b2a3d853f67ec3a46683a0bbf8a912958d67e113515",
"format": 1
},
{
@@ -417,14 +431,14 @@
"name": "plugins/modules/computer.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54ab2fb411ef15a43c2304f1c9b5f720b727d07c0148db65ff07e9a1a812b57a",
+ "chksum_sha256": "253e81afabef739244cfa13c560fc41cf5ca0b574ff84baa88424bddbb912a63",
"format": 1
},
{
"name": "plugins/modules/computer.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8072a01ef9ffe97d05a2249c867fd0623cceb2514b436fb32e6bc74fea7c4019",
+ "chksum_sha256": "20de182a119807c7a46e10a987d04e36861a6387e5e7f26f36db8f789304691b",
"format": 1
},
{
@@ -445,7 +459,21 @@
"name": "plugins/modules/domain.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "711592202ac4384d826a4a246fde4f168699e9fdc6bd52a5a54a3e482fc6ee8f",
+ "chksum_sha256": "5acc5cfb5b0cbd6f9c9a4c0542b314c12ed9ed88b09338362dc22eba699db0f6",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/domain_child.ps1",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1d99c67f6a6f99eeda9b5e37d9562977a02fd432f91ca89e096978c1a16110a3",
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/domain_child.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "082d40fd1106a83dd5db68aff92f84666d20c58c242157cb85b9b41d924ff32c",
"format": 1
},
{
@@ -459,28 +487,28 @@
"name": "plugins/modules/domain_controller.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d1779d52492dc68599a205be7518a84388b6f83322caf23daf4708989cb62f7",
+ "chksum_sha256": "d8e0041bc48b4df5d0f9e734c51e4eacf3b9ccff92e7156cc50915c0c1117ce7",
"format": 1
},
{
"name": "plugins/modules/group.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff138faeab60823fe1deb45c9a40b44691f47710ac074fa62a3f7370536307ac",
+ "chksum_sha256": "81a5a4dba28d1ba50f1142083d6519426c1d1e55faa93b1bc87184b8db1067af",
"format": 1
},
{
"name": "plugins/modules/group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ccb8945f218faace36c9fa29bc0180a0e43f35a8ac01015b133e460db82db21c",
+ "chksum_sha256": "95e412dc48e5ec27e4c9dccdbd96a0c3288603db71ff59792e799dc5099c7daf",
"format": 1
},
{
"name": "plugins/modules/membership.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acff85cf60f65e36759593d6dc32dd9cdaa8e781bad4ffdce3673fa5a7c3442f",
+ "chksum_sha256": "4c2cd2bc5d4d2650c89f34873c35461718a6e0efec43ca4e2b25eedd96c7a065",
"format": 1
},
{
@@ -501,7 +529,7 @@
"name": "plugins/modules/object.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4384e7040a815bea61ad11a19a0b65015f418228e980062347a19531012934b7",
+ "chksum_sha256": "b7bf0f56555bccb55b64db5caa83c0d2a76f87c6dfec6e68de71aee1c1a0a5b2",
"format": 1
},
{
@@ -515,7 +543,7 @@
"name": "plugins/modules/object_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "403a343f8c36ec12fb991e570aa99b5e58a74329f6e346618d8de557915445ea",
+ "chksum_sha256": "bb8ea5d7aef8c718e29b3a050922fe3385a622b3a5ef3a0056ba42dcda13274d",
"format": 1
},
{
@@ -536,28 +564,28 @@
"name": "plugins/modules/ou.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "890945f581fbfb1f7a2a9f1d8e9960ec70ca19e731f8fa90e5b91dc52b484e24",
+ "chksum_sha256": "d93529b6c11871d249638d6a62609468e9d90a880a5b27a6d86534bb0b023c78",
"format": 1
},
{
"name": "plugins/modules/ou.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96b3ce4a4ade31daa6aae80829dab807f513ce0af6728fd3d954866be516b266",
+ "chksum_sha256": "67c53de4b1ae422cee3551bdb8cb8567194a445181ae138a1af7119d77b37f1e",
"format": 1
},
{
"name": "plugins/modules/user.ps1",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4c651c30ba1f15b85b6f76f7c2048f6493a1298b8a958a39e5623e16d0f505e",
+ "chksum_sha256": "dfd4b1375677810def9eeaee738c3318893841d2d022e4fc719533ba669c8cdf",
"format": 1
},
{
"name": "plugins/modules/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd159582daa2ed5b7aeeda556a7218529ef60965d6e6fb657addc9b587dbc62a",
+ "chksum_sha256": "234b6dfe32edbd092299a66e4364a5f66eddd65558fc1c32cdf98449f367bccc",
"format": 1
},
{
@@ -634,7 +662,7 @@
"name": "plugins/plugin_utils/_module_with_reboot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86be9d66a96d18ba18bd5b65ba042f4b68fcb60283b7fecd6243deac572c4f50",
+ "chksum_sha256": "1f1c8132c7f0e4283266e4e6ecd9fe881d594193cf5825be9c9f79846aaa8128",
"format": 1
},
{
@@ -704,7 +732,7 @@
"name": "tests/integration/targets/computer/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9a0987642dce4687f66cba03796f9710be7a2048d54353a274e2edf2d13245ae",
+ "chksum_sha256": "065fc98a925cdae2cc4a8fdcab71938870d1f1974e9ed8faa6907dd0f165fa1e",
"format": 1
},
{
@@ -792,6 +820,90 @@
"format": 1
},
{
+ "name": "tests/integration/targets/domain_child",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/tasks/cross_domain.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0af4c699a160d0bc1d0e396c7100bfa961f45ca3284a36ccf63cedb91b57233e",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/tasks/main_child.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "862600153d9842632f2da4401a6ff560a5c23e7eab57be6d76d2ebf651a3b199",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/tasks/main_tree.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3cba47e4ad9306a68ed63e3cedf1ca61394afb700853c9dabb692f9ba2877667",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6319e7a40152ef8b3b0ce6bf4e48c32d497f84ada01b6e61d1cdce8ceb244900",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/Vagrantfile",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "748db6763788ff2048f1de10720d9210f58457ee62e8bc7f0815e06a45b2b6a6",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dde1e703301f0d11990651509d20f81e97e080514a7f551493082b247a106901",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/ansible.cfg",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8598f22926ef839805b1c23e17f04b86a0ea1186216682882a441f0601de044f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/inventory.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cd824c275c5ad8883ac164a54ae44505992a634a327f858c9872776985d9b3e8",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/setup.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d717283be6257eeef3ab6a5290cb5711f43d038d175b42b0736a7549d2ec5b5",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/domain_child/test.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4eb27e1346561a4a60faf00ea384690f7a4b9b33016b5f262334285d7624825b",
+ "format": 1
+ },
+ {
"name": "tests/integration/targets/domain_controller",
"ftype": "dir",
"chksum_type": null,
@@ -900,7 +1012,7 @@
"name": "tests/integration/targets/group/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "508aa71de34afe93bc3d68f856e5b66fc43decbc9b879d346d0be60fc7de6a30",
+ "chksum_sha256": "e9fc5dec76e1536092745fe19c8e749aa199ed2a610cc75e75923623f95ae195",
"format": 1
},
{
@@ -1040,7 +1152,7 @@
"name": "tests/integration/targets/membership/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed9037a9950eccef2189e91c0aab66ab2edf5f128c8cc941c150129b1b8f4fca",
+ "chksum_sha256": "89806b126c17a724fd54f8904176692e19f09826d89b2a3254b0568197df728a",
"format": 1
},
{
@@ -1299,7 +1411,7 @@
"name": "tests/integration/targets/ou/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7af0a8c25fd6ca592956f33ad115cdc1641ca5346ac85a103b8accf1cb5d40ee",
+ "chksum_sha256": "9c9792a59fefce74504300df3339e3152b5d3acd1bf945b4417a6890f655a836",
"format": 1
},
{
@@ -1390,7 +1502,7 @@
"name": "tests/integration/targets/user/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e74639819afa56c4a630f75600156f3a411b58bb615566f33b905f84baee967",
+ "chksum_sha256": "9649f15ad69b87815255393bb4c35d46b3fc87b3c6be657badf39f0679e7a136",
"format": 1
},
{
@@ -1418,28 +1530,35 @@
"name": "tests/sanity/ignore-2.14.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "1bae2dbdeb7e39b94941d0e9377ae91979b226f2670133fdfc27c7dbe7bf0b05",
"format": 1
},
{
"name": "tests/sanity/ignore-2.15.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "1bae2dbdeb7e39b94941d0e9377ae91979b226f2670133fdfc27c7dbe7bf0b05",
"format": 1
},
{
"name": "tests/sanity/ignore-2.16.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "1bae2dbdeb7e39b94941d0e9377ae91979b226f2670133fdfc27c7dbe7bf0b05",
"format": 1
},
{
"name": "tests/sanity/ignore-2.17.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "1bae2dbdeb7e39b94941d0e9377ae91979b226f2670133fdfc27c7dbe7bf0b05",
+ "format": 1
+ },
+ {
+ "name": "tests/sanity/ignore-2.18.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1bae2dbdeb7e39b94941d0e9377ae91979b226f2670133fdfc27c7dbe7bf0b05",
"format": 1
},
{
@@ -1572,7 +1691,7 @@
"name": "tests/utils/shippable/lint.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f945142c1487de0996b9bc46bc18c82d5c0b6c9470f0f48b87634c2e4b0eabf5",
+ "chksum_sha256": "c7bc94e43d0337251fa9f22f3ec4516604e109f7cd7fa35c7f1fa2a90f5c2ccc",
"format": 1
},
{
@@ -1614,21 +1733,21 @@
"name": ".ansible-lint",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33235ea099dc7bd2061e0271afcef60424799834d3023d14652b6296e3f133c2",
+ "chksum_sha256": "2f41de2acdeff3b92cff1f64992fd443ee1fa63a5e21163e8ced110bec121a75",
"format": 1
},
{
"name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0039fe591d9f12f0fb24c7a9a50745d96627f3a8a4cb73d536801dcc220e9b3b",
+ "chksum_sha256": "3df1a3a721c47e43ad0bca664e780fa956cfb209ff897cb3c75250790f896f0a",
"format": 1
},
{
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fba73adcf8b95de1f2c33cc77b6f8216cb15754dbe34756a8f386f77d9c6db75",
+ "chksum_sha256": "3403e71cab7325c3401111b3de5e8ad8b6eb329f6e4348a6df141dc476c31a0b",
"format": 1
},
{
diff --git a/ansible_collections/microsoft/ad/MANIFEST.json b/ansible_collections/microsoft/ad/MANIFEST.json
index 57ce22ed2..5eb59e544 100644
--- a/ansible_collections/microsoft/ad/MANIFEST.json
+++ b/ansible_collections/microsoft/ad/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "microsoft",
"name": "ad",
- "version": "1.5.0",
+ "version": "1.6.0",
"authors": [
"Jordan Borean @jborean93",
"Matt Davis @nitzmahone"
@@ -25,7 +25,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4da03dbf6e40118c90aad5d75b02840c99ac9169e450044d11d902a2e3a5f97f",
+ "chksum_sha256": "cbfa3113cd723bbb0b394c37aaed248fd54ab8b280f3dc12ae481283bb35dc3f",
"format": 1
},
"format": 1
diff --git a/ansible_collections/microsoft/ad/changelogs/changelog.yaml b/ansible_collections/microsoft/ad/changelogs/changelog.yaml
index f9d1dc51f..f2a6c6512 100644
--- a/ansible_collections/microsoft/ad/changelogs/changelog.yaml
+++ b/ansible_collections/microsoft/ad/changelogs/changelog.yaml
@@ -153,3 +153,39 @@ releases:
name: parse_dn
namespace: null
release_date: '2024-03-20'
+ 1.6.0:
+ changes:
+ bugfixes:
+ - microsoft.ad.membership - Fix hostname check to work with hostnames longer
+ than 15 characters long - https://github.com/ansible-collections/microsoft.ad/issues/113
+ - 'microsoft.ad.user - Fix issue when creating a new user account with ``account_locked:
+ false`` - https://github.com/ansible-collections/microsoft.ad/issues/108'
+ minor_changes:
+ - microsoft.ad AD modules - Added ``domain_credentials`` as a common module
+ option that can be used to specify credentials for specific AD servers.
+ - microsoft.ad AD modules - Added ``lookup_failure_action`` on all modules that
+ can specify a list of distinguishedName values to control what should happen
+ if the lookup fails.
+ - microsoft.ad.computer - Added the ability to lookup a distinguishedName on
+ a specific domain server for ``delegates`` and ``managed_by``.
+ - microsoft.ad.group - Added the ability to lookup a distinguishedName on a
+ specific domain server for ``managed_by`` and ``members``.
+ - microsoft.ad.ou - Added the ability to lookup a distinguishedName on a specific
+ domain server for ``managed_by``.
+ - microsoft.ad.user - Added the ability to lookup a distinguishedName on a specific
+ domain server for ``delegates``.
+ - microsoft.ad.user - Rename the option ``groups.missing_action`` to ``groups.lookup_failure_action``
+ to make the option more consistent with other modules. The ``missing_action``
+ option is still supported as an alias.
+ - microsoft.ad.user - Support group member lookup on an alternative server using
+ the DN lookup syntax. This syntax uses a dictionary where ``name`` defines
+ the group to look up and ``server`` defines the server to look up the group
+ on.
+ release_summary: Release summary for v1.6.0
+ fragments:
+ - lookup-dn.yml
+ - membership-long-name.yml
+ - release-1.6.0.yml
+ - user-account-locked.yml
+ - user-groups.yml
+ release_date: '2024-06-10'
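
The renamed ``groups.lookup_failure_action`` option follows the same ``fail``/``ignore``/``warn`` choices as the other lookup options. A minimal sketch, with the group names assumed for illustration:

.. code-block:: yaml

    - name: Add user to groups, ignoring groups that cannot be found
      microsoft.ad.user:
        name: MyUser
        state: present
        groups:
          lookup_failure_action: ignore
          add:
            - Domain Admins
            - Possibly Missing Group
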
diff --git a/ansible_collections/microsoft/ad/docs/docsite/extra-docs.yml b/ansible_collections/microsoft/ad/docs/docsite/extra-docs.yml
index 6a548ed38..a3b4f8ed0 100644
--- a/ansible_collections/microsoft/ad/docs/docsite/extra-docs.yml
+++ b/ansible_collections/microsoft/ad/docs/docsite/extra-docs.yml
@@ -6,6 +6,7 @@
sections:
- title: Scenario Guides
toctree:
+ - guide_ad_module_authentication
- guide_attributes
- guide_ldap_connection
- guide_ldap_inventory
diff --git a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ad_module_authentication.rst b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ad_module_authentication.rst
new file mode 100644
index 000000000..632c19515
--- /dev/null
+++ b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ad_module_authentication.rst
@@ -0,0 +1,120 @@
+.. _ansible_collections.microsoft.ad.docsite.guide_ad_module_authentication:
+
+****************************
+AD Authentication in Modules
+****************************
+
+A key requirement of the modules used inside this collection is being able to authenticate a user to the domain controller when managing a resource. This guide will cover the different options available for this scenario.
+
+.. note::
+ This guide covers authentication to a domain controller when using a module on a Windows host. See :ref:`LDAP Authentication <ansible_collections.microsoft.ad.docsite.guide_ldap_connection.authentication>` for information on how authentication is done when using plugins running on Linux.
+
+.. contents::
+ :local:
+ :depth: 1
+
+.. _ansible_collections.microsoft.ad.docsite.guide_ad_module_authentication.implicit_auth:
+
+Implicit Authentication
+=======================
+
+The first and simplest option is to use the connection user's existing credentials during authentication. This avoids having to specify a username and password in the module's parameters, but it does require that the connection method used by Ansible supports credential delegation, for example CredSSP authentication with the ``winrm`` and ``psrp`` connection plugins, or Kerberos delegation. Other authentication options, like NTLM, do not support credential delegation and will not work with implicit authentication.
+
+The only way to test whether implicit authentication is available is to run the module and see if it works. If it does not work, the error will most likely contain the message ``Failed to contact the AD server``.
+
+.. _ansible_collections.microsoft.ad.docsite.guide_ad_module_authentication.become:
+
+Become
+======
+
+If implicit authentication is not available, the module can be run with ``become`` that specifies the username and password to use for authentication.
+
+.. code-block:: yaml
+
+ - name: Use become with connection credentials
+ microsoft.ad.user:
+ name: MyUser
+ state: present
+ become: true
+ become_method: runas
+ become_flags: logon_type=new_credentials logon_flags=netcredentials_only
+ vars:
+ ansible_become_user: '{{ ansible_user }}'
+ ansible_become_pass: '{{ ansible_password }}'
+
+The ``runas`` become method is used on Windows, and the ``become_flags`` specify that the credentials should only be used for network authentication. The ``ansible_become_user`` and ``ansible_become_pass`` variables specify the username and password to use for authentication. It is important that both of these variables are set to a valid username and password, or the authentication will fail.
+
+It is also possible to use the ``SYSTEM`` account for become. This will have the module use the AD computer account for that host when authenticating with the target DC rather than an explicit username and password. The AD computer account must still have the required rights to perform the operation requested.
+
+.. code-block:: yaml
+
+ - name: Use machine account for authentication
+ microsoft.ad.user:
+ name: MyUser
+ state: present
+ become: true
+ become_method: runas
+ become_user: SYSTEM
+
+.. _ansible_collections.microsoft.ad.docsite.guide_ad_module_authentication.explicit_creds:
+
+Explicit Credentials
+====================
+
+The final option is to specify the username and password as module options. This can be done in two ways: with the ``domain_username`` and ``domain_password`` options, or with the ``domain_credentials`` option. An example of both methods is shown below.
+
+.. code-block:: yaml
+
+ - name: Use domain_username and domain_password
+ microsoft.ad.user:
+ name: MyUser
+ state: present
+ domain_username: '{{ ansible_user }}'
+ domain_password: '{{ ansible_password }}'
+
+ - name: Use domain_credentials
+ microsoft.ad.user:
+ name: MyUser
+ state: present
+ domain_credentials:
+ - username: '{{ ansible_user }}'
+ password: '{{ ansible_password }}'
+
+.. note::
+ The ``domain_credentials`` option was added in version 1.6.0 of this collection.
+
+A ``domain_credentials`` entry without the ``name`` key, as in the above example, provides the credentials used for authentication with the default domain controller, just like ``domain_username`` and ``domain_password``. Using both options together is not supported and will result in an error.
+
+The ``domain_credentials`` option can also be used to specify server specific credentials. For example when attempting to lookup the identity of an AD object:
+
+.. code-block:: yaml
+
+ - name: Set member with lookup on different server
+ microsoft.ad.group:
+ name: MyGroup
+ state: present
+ members:
+ add:
+ - GroupOnDefaultDC
+ - name: GroupOnDefaultDC2
+ - name: GroupOnOtherDC
+ server: OtherDC
+ - name: GroupOnThirdDC
+ server: ThirdDC
+ domain_credentials:
+ - username: UserForDefaultDC
+ password: PasswordForDefaultDC
+ - name: OtherDC
+ username: UserForOtherDC
+ password: PasswordForOtherDC
+
+In the case above there are three members being added to the group:
+
+* ``GroupOnDefaultDC`` - Will be looked up on the default domain controller using ``UserForDefaultDC`` and ``PasswordForDefaultDC``
+* ``GroupOnDefaultDC2`` - Same as the above just specified as a dictionary
+* ``GroupOnOtherDC`` - Will be looked up on ``OtherDC`` using ``UserForOtherDC`` and ``PasswordForOtherDC``
+* ``GroupOnThirdDC`` - Will be looked up on ``ThirdDC`` using the implicit user authentication context
+
+The value for ``server`` must correspond to a ``name`` entry in ``domain_credentials``. If the server is not specified in ``domain_credentials``, the module will default to using the ``domain_username/domain_password`` or implicit user authentication.
+
+.. note::
+ The default (no ``name`` key) entry in ``domain_credentials`` is only used for lookups without an explicit server set. The ``domain_username`` and ``domain_password`` credential will be used for all connections unless there is an explicit server entry in ``domain_credentials``.
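
For the implicit authentication scenario described at the start of this guide, the connection itself must support credential delegation. One possible inventory sketch, assuming CredSSP is available for the ``psrp`` connection (the host and variable names are illustrative):

.. code-block:: yaml

    windows:
      hosts:
        dc01.domain.test:
      vars:
        ansible_connection: psrp
        ansible_psrp_auth: credssp
        ansible_user: user@DOMAIN.TEST
        ansible_password: '{{ vault_domain_password }}'
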
diff --git a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_attributes.rst b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_attributes.rst
index ee53dce64..7ed192ca2 100644
--- a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_attributes.rst
+++ b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_attributes.rst
@@ -310,3 +310,77 @@ SDDL strings can be quite complex so building them manually is ill-advised. It i
$dn = 'CN=ObjectName,DC=domain,DC=test'
$obj = Get-ADObject -Identity $dn -Properties nTSecurityDescriptor
$obj.nTSecurityDescriptor.GetSecurityDescriptorSddlForm('All')
+
+.. _ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes:
+
+DN Lookup Attributes
+====================
+
+Some attributes in Active Directory are stored as a Distinguished Name (``DN``) value that references another AD object. Some modules expose a way to look up the DN using a more human-friendly value, such as ``managed_by``. These option values must either be a string or a dictionary with the key ``name`` and optional key ``server``. The string value or the value of ``name`` is the identity to look up, while ``server`` is the domain server to look up the identity on. The lookup identity value can be specified as a ``distinguishedName``, ``objectGUID``, ``objectSid``, ``sAMAccountName``, or ``userPrincipalName``. Below is an example of how to look up a DN by ``sAMAccountName`` using a string value or the dictionary form:
+
+.. code-block:: yaml
+
+ - name: Find managed_by using string value
+ microsoft.ad.group:
+ name: My Group
+ scope: global
+ managed_by: Domain Admins
+
+ - name: Find managed_by using dictionary value with a server
+ microsoft.ad.group:
+ name: My Group
+ scope: global
+ managed_by:
+ name: Domain Admins
+ server: OtherDC
+
+There are also module options that can set a list of DN values for an attribute. The list values for these options are the same as the single value attributes where each DN lookup is set as a string or a dictionary with the ``name`` and optional ``server`` key.
+
+.. code-block:: yaml
+
+ - name: Specify a list of DNs to set
+ microsoft.ad.computer:
+ identity: TheComputer
+ delegates:
+ set:
+ - FileShare
+ - name: ServerA
+ server: OtherDC
+
+For list attributes with the ``add/remove/set`` subkey options, the ``lookup_failure_action`` option can also be set to ``fail`` (default), ``ignore``, or ``warn``. The ``fail`` option will fail the task if any of the lookups fail, ``ignore`` will ignore any invalid lookups, and ``warn`` will emit a warning but still continue on a lookup failure.
+
+.. code-block:: yaml
+
+ - name: Specify a list of DNs to set - ignoring lookup failures
+ microsoft.ad.computer:
+ identity: TheComputer
+ delegates:
+ lookup_failure_action: ignore
+ set:
+ - FileShare
+ - MissingUser
+
+When a ``server`` key is provided, the lookup will be done using the server value specified. It is possible to also provide explicit credentials just for that server using the ``domain_credentials`` option.
+
+.. code-block:: yaml
+
+ - name: Set member with lookup on different server
+ microsoft.ad.group:
+ name: MyGroup
+ state: present
+ members:
+ add:
+ - GroupOnDefaultDC
+ - name: GroupOnDefaultDC2
+ - name: GroupOnOtherDC
+ server: OtherDC
+ domain_credentials:
+ - username: UserForDefaultDC
+ password: PasswordForDefaultDC
+ - name: OtherDC
+ username: UserForOtherDC
+ password: PasswordForOtherDC
+
+In the above, the lookup of ``GroupOnOtherDC`` will be done against ``OtherDC`` using the username ``UserForOtherDC``.
+
+The documentation for each module option identifies whether the option supports this lookup behaviour or whether a DN value must be provided explicitly.
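
The same pattern applies to other list attributes, for example group membership. A sketch using ``microsoft.ad.group`` with ``lookup_failure_action: warn`` and a mix of string and dictionary members (the group, member, and server names are assumed):

.. code-block:: yaml

    - name: Set group members, warning on any failed lookups
      microsoft.ad.group:
        name: MyGroup
        state: present
        members:
          lookup_failure_action: warn
          add:
            - ExistingUser
            - name: UserOnOtherDC
              server: OtherDC
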
diff --git a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ldap_connection.rst b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ldap_connection.rst
index 60755f00c..ed0b290fa 100644
--- a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ldap_connection.rst
+++ b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_ldap_connection.rst
@@ -7,7 +7,7 @@ LDAP Connection guide
This guide covers information about communicating with an LDAP server, like Microsoft Active Directory, from the Ansible host. Unlike Windows hosts, there are no builtin mechanisms to communicate and authenticate with an LDAP server, so the plugins that run on the Ansible host require some extra configuration to get working.
.. note::
- This guide covers LDAP communication from the Ansible host. This does not apply to the modules that run on the remote Windows hosts.
+ This guide covers LDAP communication from the Ansible host. This does not apply to the modules that run on the remote Windows hosts. See :ref:`AD Authentication in Modules <ansible_collections.microsoft.ad.docsite.guide_ad_module_authentication>` for information on how modules authentication can be configured.
.. contents::
:local:
diff --git a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_migration.rst b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_migration.rst
index c0b01ca5f..d3e3c2d26 100644
--- a/ansible_collections/microsoft/ad/docs/docsite/rst/guide_migration.rst
+++ b/ansible_collections/microsoft/ad/docs/docsite/rst/guide_migration.rst
@@ -129,6 +129,30 @@ Migrated to :ref:`microsoft.ad.group <ansible_collections.microsoft.ad.group_mod
The functionality of this module has been merged with ``microsoft.ad.group``. Use the ``members`` option to ``add``, ``remove``, or ``set`` to add, remove, or set group members respectively.
+One change is that ``win_domain_group_membership`` could specify the server to look up the member on using the ``SERVER\member-name`` format. This member format is not supported in ``microsoft.ad.group``, but since v1.6.0 of this collection the same can be achieved by using a dictionary as the member value. For example:
+
+.. code-block:: yaml
+
+ - name: Add a domain user/group from another Domain in the multi-domain forest to a domain group
+ community.windows.win_domain_group_membership:
+ name: GroupinDomainAAA
+ domain_server: DomainAAA.cloud
+ members:
+ - DomainBBB.cloud\UserInDomainBBB
+ state: present
+
+ - name: Add a domain user/group from another Domain in the multi-domain forest to a domain group
+ microsoft.ad.group:
+ name: GroupinDomainAAA
+ domain_server: DomainAAA.cloud
+ members:
+ add:
+ - name: UserInDomainBBB
+ server: DomainBBB.cloud
+ state: present
+
+See :ref:`DN Lookup Attributes <ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes>` for more information.
+
.. _ansible_collections.microsoft.ad.docsite.guide_migration.migrated_modules.win_domain_object_info:
Module ``win_domain_object_info``
diff --git a/ansible_collections/microsoft/ad/plugins/action/domain.py b/ansible_collections/microsoft/ad/plugins/action/domain.py
index 36cdb26e5..803f94d5d 100644
--- a/ansible_collections/microsoft/ad/plugins/action/domain.py
+++ b/ansible_collections/microsoft/ad/plugins/action/domain.py
@@ -1,34 +1,8 @@
# Copyright (c) 2022 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-import typing as t
+from ..plugin_utils._module_with_reboot import DomainPromotionWithReboot
-from ..plugin_utils._module_with_reboot import ActionModuleWithReboot
-
-class ActionModule(ActionModuleWithReboot):
- def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
- super().__init__(*args, **kwargs)
- self._ran_once = False
-
- def _ad_should_rerun(self, result: t.Dict[str, t.Any]) -> bool:
- ran_once = self._ran_once
- self._ran_once = True
-
- if ran_once or not result.get("_do_action_reboot", False):
- return False
-
- if self._task.check_mode:
- # Assume that on a rerun it will not have failed and that it
- # ran successfull.
- result["failed"] = False
- result.pop("msg", None)
- return False
-
- else:
- return True
-
- def _ad_process_result(self, result: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
- result.pop("_do_action_reboot", None)
-
- return result
+class ActionModule(DomainPromotionWithReboot):
+ ...
diff --git a/ansible_collections/microsoft/ad/plugins/action/domain_child.py b/ansible_collections/microsoft/ad/plugins/action/domain_child.py
new file mode 100644
index 000000000..ecc566c5a
--- /dev/null
+++ b/ansible_collections/microsoft/ad/plugins/action/domain_child.py
@@ -0,0 +1,8 @@
+# Copyright (c) 2024 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from ..plugin_utils._module_with_reboot import DomainPromotionWithReboot
+
+
+class ActionModule(DomainPromotionWithReboot):
+ ...
diff --git a/ansible_collections/microsoft/ad/plugins/action/domain_controller.py b/ansible_collections/microsoft/ad/plugins/action/domain_controller.py
index 36cdb26e5..803f94d5d 100644
--- a/ansible_collections/microsoft/ad/plugins/action/domain_controller.py
+++ b/ansible_collections/microsoft/ad/plugins/action/domain_controller.py
@@ -1,34 +1,8 @@
# Copyright (c) 2022 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-import typing as t
+from ..plugin_utils._module_with_reboot import DomainPromotionWithReboot
-from ..plugin_utils._module_with_reboot import ActionModuleWithReboot
-
-class ActionModule(ActionModuleWithReboot):
- def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
- super().__init__(*args, **kwargs)
- self._ran_once = False
-
- def _ad_should_rerun(self, result: t.Dict[str, t.Any]) -> bool:
- ran_once = self._ran_once
- self._ran_once = True
-
- if ran_once or not result.get("_do_action_reboot", False):
- return False
-
- if self._task.check_mode:
- # Assume that on a rerun it will not have failed and that it
- # ran successfull.
- result["failed"] = False
- result.pop("msg", None)
- return False
-
- else:
- return True
-
- def _ad_process_result(self, result: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
- result.pop("_do_action_reboot", None)
-
- return result
+class ActionModule(DomainPromotionWithReboot):
+ ...
diff --git a/ansible_collections/microsoft/ad/plugins/doc_fragments/ad_object.py b/ansible_collections/microsoft/ad/plugins/doc_fragments/ad_object.py
index 3231e2341..5042e1208 100644
--- a/ansible_collections/microsoft/ad/plugins/doc_fragments/ad_object.py
+++ b/ansible_collections/microsoft/ad/plugins/doc_fragments/ad_object.py
@@ -76,9 +76,48 @@ options:
- The display name of the AD object to set.
- This is the value of the C(displayName) LDAP attribute.
type: str
+ domain_credentials:
+ description:
+ - Specifies the credentials that should be used when using the server
+ specified by I(name).
+ - To specify credentials for the default domain server, use an entry
+ without the I(name) key or use the I(domain_username) and
+ I(domain_password) option.
+ - This can be set under the R(play's module defaults,module_defaults_groups)
+ under the C(group/microsoft.ad.domain) group.
+ - See R(AD authentication in modules,ansible_collections.microsoft.ad.docsite.guide_ad_module_authentication)
+ for more information.
+ default: []
+ type: list
+ elements: dict
+ suboptions:
+ name:
+ description:
+ - The name of the server these credentials are for.
+ - This value should correspond to the value used in other options that
+ specify a custom server to use, for example an option that references
+ an AD identity located on a different AD server.
+ - This key can be omitted in one entry to specify the default
+ credentials to use when a server is not specified instead of using
+ I(domain_username) and I(domain_password).
+ type: str
+ username:
+ description:
+ - The username to use when connecting to the server specified by
+ I(name).
+ type: str
+ required: true
+ password:
+ description:
+ - The password to use when connecting to the server specified by
+ I(name).
+ type: str
+ required: true
domain_password:
description:
- The password for I(domain_username).
+ - The I(domain_credentials) sub entry without a I(name) key can also be
+ used to specify the credentials for the default domain authentication.
- This can be set under the R(play's module defaults,module_defaults_groups)
under the C(group/microsoft.ad.domain) group.
type: str
@@ -87,6 +126,9 @@ options:
- Specified the Active Directory Domain Services instance to connect to.
- Can be in the form of an FQDN or NetBIOS name.
- If not specified then the value is based on the default domain of the computer running PowerShell.
+ - Custom credentials can be specified under a I(domain_credentials) entry
+ without a I(name) key or through I(domain_username) and
+ I(domain_password).
- This can be set under the R(play's module defaults,module_defaults_groups)
under the C(group/microsoft.ad.domain) group.
type: str
@@ -96,6 +138,8 @@ options:
- If this is not set then the user that is used for authentication will be the connection user.
- Ansible will be unable to use the connection user unless auth is Kerberos with credential delegation or CredSSP,
or become is used on the task.
+ - The I(domain_credentials) sub entry without a I(name) key can also be
+ used to specify the credentials for the default domain authentication.
- This can be set under the R(play's module defaults,module_defaults_groups)
under the C(group/microsoft.ad.domain) group.
type: str
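
Because ``domain_credentials`` is part of the shared AD object options, it can also be set once for a play through module defaults rather than repeated on every task. A sketch under that assumption (the credential variables are illustrative):

.. code-block:: yaml

    - hosts: windows
      module_defaults:
        group/microsoft.ad.domain:
          domain_credentials:
            - username: '{{ ansible_user }}'
              password: '{{ ansible_password }}'
      tasks:
        - name: Create user using the play-level credentials
          microsoft.ad.user:
            name: MyUser
            state: present
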
diff --git a/ansible_collections/microsoft/ad/plugins/doc_fragments/ldap_connection.py b/ansible_collections/microsoft/ad/plugins/doc_fragments/ldap_connection.py
index 327c1ba76..79fcbf4d4 100644
--- a/ansible_collections/microsoft/ad/plugins/doc_fragments/ldap_connection.py
+++ b/ansible_collections/microsoft/ad/plugins/doc_fragments/ldap_connection.py
@@ -197,4 +197,5 @@ requirements:
- pyspnego >= 0.8.0
- pyspnego[kerberos] - For Kerberos and server lookup support
- sansldap
+- dpapi-ng - For LAPS decryption support
"""
diff --git a/ansible_collections/microsoft/ad/plugins/inventory/ldap.py b/ansible_collections/microsoft/ad/plugins/inventory/ldap.py
index 0a329daff..ce77a5efb 100644
--- a/ansible_collections/microsoft/ad/plugins/inventory/ldap.py
+++ b/ansible_collections/microsoft/ad/plugins/inventory/ldap.py
@@ -134,6 +134,7 @@ ca_cert: /home/user/certs/ldap.pem
username: '{{ lookup("ansible.builtin.env", "LDAP_USERNAME") }}'
password: '{{ lookup("ansible.builtin.env", "LDAP_PASSWORD") }}'
+
##############################################
# Search Options #
# #
@@ -220,9 +221,9 @@ groups:
# Adds the host to a group site_{{ location }} with the default group of
# site_unknown if the location isn't defined
keyed_groups:
-- key: location | default(omit)
- prefix: site
- default_value: unknown
+ - key: location | default(omit)
+ prefix: site
+ default_value: unknown
"""
import base64
diff --git a/ansible_collections/microsoft/ad/plugins/module_utils/_ADObject.psm1 b/ansible_collections/microsoft/ad/plugins/module_utils/_ADObject.psm1
index e51c974cb..70868c3e8 100644
--- a/ansible_collections/microsoft/ad/plugins/module_utils/_ADObject.psm1
+++ b/ansible_collections/microsoft/ad/plugins/module_utils/_ADObject.psm1
@@ -486,6 +486,7 @@ Function Compare-AnsibleADIdempotentList {
}
[PSCustomObject]@{
+ # $null is explicit here as the AD modules use it to unset a value
Value = if ($value.Count) { $value.ToArray() } else { $null }
# Also returned if the API doesn't support explicitly setting 1 value
ToAdd = $toAdd.ToArray()
@@ -494,6 +495,160 @@ Function Compare-AnsibleADIdempotentList {
}
}
+Function ConvertTo-AnsibleADDistinguishedName {
+ <#
+ .SYNOPSIS
+ Converts the input list into DistinguishedNames for later comparison.
+
+ .PARAMETER InputObject
+ The identity parameter. This can either be a string or a hashtable.
+ If a hashtable it should contain the name and optional server key to
+ identify the object to search for and set a specific server to search on.
+
+ .PARAMETER Module
+ The AnsibleModule object associated with the current module execution.
+
+ .PARAMETER Context
+ The context behind this conversion to add to the error message if there
+ is one.
+
+ .PARAMETER Server
+ The default server to search. The Identity server key will override this
+ value if present.
+
+ .PARAMETER Credential
+ The credential to search with. This is ignored if the Identity server key
+ is present.
+
+ .PARAMETER FailureAction
+ The action to take if the lookup fails. Fail will cause the module to
+ exit with an error, ignore will ignore the error, and warn will emit a
+ warning on failure.
+ #>
+ [OutputType([string])]
+ [CmdletBinding()]
+ param (
+ [Parameter(Mandatory, ValueFromPipeline)]
+ [object[]]
+ $InputObject,
+
+ [Parameter(Mandatory)]
+ [object]
+ $Module,
+
+ [Parameter(Mandatory)]
+ [string]
+ $Context,
+
+ [string]
+ $Server,
+
+ [PSCredential]
+ $Credential,
+
+ [ValidateSet('Fail', 'Ignore', 'Warn')]
+ [string]
+ $FailureAction = 'Fail'
+ )
+
+ begin {
+ $allowedKeys = [string[]]@('name', 'server')
+ $results = [System.Collections.Generic.List[string]]@()
+ $getErrors = [System.Collections.Generic.List[string]]@()
+ $invalidIdentities = [System.Collections.Generic.List[string]]@()
+ }
+
+ process {
+ foreach ($obj in $InputObject) {
+ $getParams = @{}
+ if ($Server) {
+ $getParams.Server = $Server
+ }
+ if ($Credential) {
+ $getParams.Credential = $Credential
+ }
+
+ if ($obj -is [System.Collections.IDictionary]) {
+ # When using a hashtable, the name and server key can be used
+ # to specify the identity and server to use. If no server is
+ # set then it defaults to the default server (if provided) and
+ # its credentials.
+ $existingKeys = [string[]]$obj.Keys
+
+ if ('name' -notin $existingKeys) {
+ $getErrors.Add("Identity entry does not contain the required name key")
+ continue
+ }
+ $name = [string]$obj.name
+
+ [string[]]$extraKeys = [System.Linq.Enumerable]::Except($existingKeys, $allowedKeys)
+ if ($extraKeys) {
+ $extraKeys = $extraKeys | Sort-Object
+ $getErrors.Add("Identity entry for '$name' contains extra keys: '$($extraKeys -join "', '")'")
+ continue
+ }
+ $getParams.Identity = $name
+
+ if ($obj.server) {
+ # If a custom server is specified we use that and the
+ # credential (if any) associated with that server.
+ $getParams.Server = $obj.server
+
+ if ($Module.ServerCredentials.ContainsKey($obj.server)) {
+ $getParams.Credential = $Module.ServerCredentials[$obj.server]
+ }
+ elseif (-not $Module.DefaultCredentialSet) {
+ $null = $getParams.Remove('Credential')
+ }
+ }
+ }
+ else {
+ # Treat the value as just the identity as a string.
+ $getParams.Identity = [string]$obj
+ }
+
+ if (-not $getParams.Identity) {
+ continue
+ }
+
+ $adDN = Get-AnsibleADObject @getParams |
+ Select-Object -ExpandProperty DistinguishedName
+ if ($adDN) {
+ $results.Add($adDN)
+ }
+ else {
+ $invalidIdentities.Add($getParams.Identity)
+ }
+ }
+ }
+
+ end {
+ # This is a weird workaround as FailJson calls exit, which means the
+ # caller won't capture the output, leaving junk data in the output. By
+ # only outputting the results if no errors occurred we can avoid that
+ # problem.
+ $errorPrefix = "Failed to find the AD object DNs for $Context"
+ if ($getErrors) {
+ $msg = "$errorPrefix. $($getErrors -join '. ')."
+ $Module.FailJson($msg)
+ }
+
+ if ($invalidIdentities) {
+ if ($FailureAction -ne 'Ignore') {
+ $identityString = "'$($invalidIdentities -join "', '")'"
+ if ($FailureAction -eq 'Fail') {
+ $Module.FailJson("$errorPrefix. Invalid identities: $identityString")
+ }
+ else {
+ $module.Warn("$errorPrefix. Ignoring invalid identities: $identityString")
+ }
+ }
+ }
+
+ $results
+ }
+}
+
Function Get-AnsibleADObject {
<#
.SYNOPSIS
@@ -612,9 +767,17 @@ Function Invoke-AnsibleADObject {
Attribute - The ldap attribute name to compare against
CaseInsensitive - The values are case insensitive (defaults to $false)
StateRequired - Set to 'present' or 'absent' if this needs to be defined for either state
+ DNLookup - Whether each value needs to be looked up to get the DN
+ IsRawAttribute - Whether the attribute is a raw LDAP attribute name and not a parameter name
New - Called when the option is to be set on the New-AD* cmdlet splat
Set - Called when the option is to be set on the Set-AD* cmdlet splat
+ The 'type' key in 'Option' should be a valid Ansible.Basic type or
+ 'add_remove_set'. When 'add_remove_set' is used the option type becomes a
+ dict with add/remove/set sub options, each set to the Option value
+ specified. This can be combined with DNLookup to make each value a raw
+ entry whose DN is looked up from the string or dict specified.
+
If Attribute is set then requested value will be compared with the
attribute specified. The current attribute value is added to the before
diff state for the option it is on. If New is not specified then the
@@ -632,6 +795,10 @@ Function Invoke-AnsibleADObject {
It is up to the scriptblock to set the required splat parameters or call
whatever function is needed.
+ The DNLookup key is used to indicate that the add/remove/set values can
+ either be a string or a dictionary containing the name and optional server
+ keys used to look up the object DN value.
+
Both New and Set must set the $Module.Diff.after results accordingly and/or
mark $Module.Result.changed if it is making a change outside of adjusting
the splat hashtable passed in.
@@ -709,6 +876,25 @@ Function Invoke-AnsibleADObject {
}
}
}
+ domain_credentials = @{
+ default = @()
+ type = 'list'
+ elements = 'dict'
+ options = @{
+ name = @{
+ type = 'str'
+ }
+ username = @{
+ required = $true
+ type = 'str'
+ }
+ password = @{
+ no_log = $true
+ required = $true
+ type = 'str'
+ }
+ }
+ }
domain_password = @{
no_log = $true
type = 'str'
@@ -775,7 +961,44 @@ Function Invoke-AnsibleADObject {
$stateRequiredIf[$propInfo.StateRequired] += $ansibleOption
}
- $spec.options[$ansibleOption] = $propInfo.Option
+ $option = $propInfo.Option
+ if ($option.type -eq 'add_remove_set') {
+ $option.type = 'dict'
+
+ $optionElement = $option.Clone()
+ $optionElement.type = 'list'
+
+ $option = @{
+ type = 'dict'
+ options = @{}
+ }
+
+ if ($propInfo.DNLookup) {
+ $optionElement.elements = 'raw'
+ $option.options.lookup_failure_action = @{
+ choices = @('fail', 'ignore', 'warn')
+ default = 'fail'
+ type = 'str'
+ }
+ }
+ elseif (-not $optionElement.ContainsKey('elements')) {
+ $optionElement.elements = 'str'
+ }
+
+ if ($optionElement.ContainsKey('aliases')) {
+ $option.aliases = $optionElement.aliases
+ $null = $optionElement.Remove('aliases')
+ }
+
+ $option.options.add = $optionElement
+ $option.options.remove = $optionElement
+ $option.options.set = $optionElement
+ }
+ elseif ($propInfo.DNLookup) {
+ $option.type = 'raw'
+ }
+
+ $spec.options[$ansibleOption] = $option
if ($propInfo.Attribute) {
$propInfo.Attribute
@@ -798,15 +1021,39 @@ Function Invoke-AnsibleADObject {
$module.Result.object_guid = $null
$adParams = @{}
+ $serverCredentials = @{}
+ foreach ($domainCred in $module.Params.domain_credentials) {
+ $cred = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList @(
+ $domainCred.username,
+ (ConvertTo-SecureString -AsPlainText -Force -String $domainCred.password)
+ )
+
+ if ($domainCred.name) {
+ $serverCredentials[$domainCred.name] = $cred
+ }
+ elseif ($adParams.Credential) {
+ $module.FailJson("Cannot specify default domain_credentials with domain_username and domain_password")
+ }
+ else {
+ $adParams.Credential = $cred
+ }
+ }
+ $module | Add-Member -MemberType NoteProperty -Name ServerCredentials -Value $serverCredentials
+
if ($module.Params.domain_server) {
$adParams.Server = $module.Params.domain_server
}
if ($module.Params.domain_username) {
+ if ($adParams.Credential) {
+ $msg = "Cannot specify domain_username/domain_password and domain_credentials with an entry that has no name."
+ $module.FailJson($msg)
+ }
$adParams.Credential = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList @(
$module.Params.domain_username,
(ConvertTo-SecureString -AsPlainText -Force -String $module.Params.domain_password)
)
+ $module | Add-Member -MemberType NoteProperty -Name DefaultCredentialSet -Value $true
}
$defaultObjectPath = & $DefaultPath $module $adParams
@@ -922,8 +1169,7 @@ Function Invoke-AnsibleADObject {
$objectPath = $null
if ($module.Params.path -and $module.Params.path -ne $defaultPathSentinel) {
- $objectPath = $path
- $newParams.Path = $module.Params.path
+ $newParams.Path = $objectPath = $module.Params.path
}
else {
$objectPath = $defaultObjectPath
@@ -953,11 +1199,45 @@ Function Invoke-AnsibleADObject {
$null = & $propInfo.New $module $adParams $newParams
}
elseif ($propInfo.Attribute) {
- if ($propValue -is [System.Collections.IDictionary]) {
- $propValue = @($propValue['add']; $propValue['set']) | Select-Object -Unique
+ # If a dictionary (add/set/remove) and is not a DNLookup single value
+ if ($propValue -is [System.Collections.IDictionary] -and $propInfo.Option.type -ne 'raw') {
+ $propValue = if ($propInfo.DNLookup) {
+ foreach ($actionKvp in $propValue.GetEnumerator()) {
+ if ($null -eq $actionKvp.Value -or $actionKvp.Key -in @('lookup_failure_action', 'remove')) {
+ continue
+ }
+
+ $convertParams = @{
+ Module = $module
+ Context = "$($propInfo.Name).$($actionKvp.Key)"
+ FailureAction = $propValue.lookup_failure_action
+ }
+ $actionKvp.Value | ConvertTo-AnsibleADDistinguishedName @adParams @convertParams
+ }
+ }
+ else {
+ $propValue['add']
+ $propValue['set']
+ }
+
+ $propValue = $propValue | Select-Object -Unique
+ }
+ elseif ($propInfo.DNLookup) {
+ $propValue = $propValue | ConvertTo-AnsibleADDistinguishedName @adParams -Module $module -Context $propInfo.Name
}
- $newParams[$propInfo.Attribute] = $propValue
+ if ($propInfo.IsRawAttribute) {
+ if (-not $newParams.ContainsKey('OtherAttributes')) {
+ $newParams.OtherAttributes = @{}
+ }
+
+ # The AD cmdlets don't like explicitly casted arrays, use
+ # ForEach-Object to get back a vanilla object[] to set.
+ $newParams.OtherAttributes[$propInfo.Attribute] = $propValue | ForEach-Object { "$_" }
+ }
+ else {
+ $newParams[$propInfo.Attribute] = $propValue
+ }
if ($propInfo.Option.no_log) {
$propValue = 'VALUE_SPECIFIED_IN_NO_LOG_PARAMETER'
@@ -1043,17 +1323,36 @@ Function Invoke-AnsibleADObject {
$compareParams = @{
Existing = $actualValue
- CaseInsensitive = $propInfo.CaseInsensitive
+ CaseInsensitive = $propInfo.DNLookup -or $propInfo.CaseInsensitive
}
- if ($propValue -is [System.Collections.IDictionary]) {
- $compareParams.Add = $propValue['add']
- $compareParams.Remove = $propValue['remove']
- $compareParams.Set = $propValue['set']
+ # If a dictionary (add/set/remove) and is not a DNLookup single value
+ if ($propValue -is [System.Collections.IDictionary] -and $propInfo.Option.type -ne 'raw') {
+ if ($propInfo.DNLookup) {
+ foreach ($actionKvp in $propValue.GetEnumerator()) {
+ if ($null -eq $actionKvp.Value -or $actionKvp.Key -eq 'lookup_failure_action') { continue }
+
+ $convertParams = @{
+ Module = $module
+ Context = "$($propInfo.Name).$($actionKvp.Key)"
+ FailureAction = $propValue.lookup_failure_action
+ }
+ $dns = $actionKvp.Value | ConvertTo-AnsibleADDistinguishedName @adParams @convertParams
+ $compareParams[$actionKvp.Key] = @($dns)
+ }
+ }
+ else {
+ $compareParams.Add = $propValue['add']
+ $compareParams.Remove = $propValue['remove']
+ $compareParams.Set = $propValue['set']
+ }
}
elseif ([string]::IsNullOrWhiteSpace($propValue)) {
$compareParams.Set = @()
}
+ elseif ($propInfo.DNLookup) {
+ $compareParams.Set = @($propValue | ConvertTo-AnsibleADDistinguishedName @adParams -Module $module -Context $propInfo.Name)
+ }
else {
$compareParams.Set = @($propValue)
}
@@ -1061,7 +1360,23 @@ Function Invoke-AnsibleADObject {
$res = Compare-AnsibleADIdempotentList @compareParams
$newValue = $res.Value
if ($res.Changed) {
- $setParams[$propInfo.Attribute] = $newValue
+ if ($propInfo.IsRawAttribute) {
+ if ($newValue) {
+ if (-not $setParams.ContainsKey('Replace')) {
+ $setParams['Replace'] = @{}
+ }
+ $setParams['Replace'][$propInfo.Attribute] = $newValue
+ }
+ else {
+ if (-not $setParams.ContainsKey('Clear')) {
+ $setParams['Clear'] = [System.Collections.Generic.List[string]]@()
+ }
+ $setParams['Clear'].Add($propInfo.Attribute)
+ }
+ }
+ else {
+ $setParams[$propInfo.Attribute] = $newValue
+ }
}
$noLog = $propInfo.Option.no_log
@@ -1169,6 +1484,7 @@ Function Invoke-AnsibleADObject {
$exportMembers = @{
Function = @(
"Compare-AnsibleADIdempotentList"
+ "ConvertTo-AnsibleADDistinguishedName"
"Get-AnsibleADObject"
"Invoke-AnsibleADObject"
)
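To make the add_remove_set/DNLookup machinery above more concrete, this hypothetical sketch shows the shape of an option generated from it as seen from a playbook (using the delegates option of microsoft.ad.computer changed later in this diff): each add/remove/set element may be an identity string or a name/server dictionary, and lookup_failure_action controls what happens when a lookup fails:

- name: Illustrative add_remove_set value with DN lookups (sketch only)
  microsoft.ad.computer:
    identity: TheComputer
    delegates:
      lookup_failure_action: warn
      add:
        # Plain identity string, looked up on the default server
        - CN=FileShare,OU=Computers,DC=domain,DC=test
        # Dictionary form, looked up on a specific server
        - name: OtherServer$
          server: dc01.other.test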
diff --git a/ansible_collections/microsoft/ad/plugins/modules/computer.ps1 b/ansible_collections/microsoft/ad/plugins/modules/computer.ps1
index b97bb1062..9010c103d 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/computer.ps1
+++ b/ansible_collections/microsoft/ad/plugins/modules/computer.ps1
@@ -12,15 +12,10 @@ $setParams = @{
Name = 'delegates'
Option = @{
aliases = 'principals_allowed_to_delegate'
- type = 'dict'
- options = @{
- add = @{ type = 'list'; elements = 'str' }
- remove = @{ type = 'list'; elements = 'str' }
- set = @{ type = 'list'; elements = 'str' }
- }
+ type = 'add_remove_set'
}
Attribute = 'PrincipalsAllowedToDelegateToAccount'
- CaseInsensitive = $true
+ DNLookup = $true
}
[PSCustomObject]@{
Name = 'dns_hostname'
@@ -35,24 +30,8 @@ $setParams = @{
[PSCustomObject]@{
Name = 'kerberos_encryption_types'
Option = @{
- type = 'dict'
- options = @{
- add = @{
- choices = 'aes128', 'aes256', 'des', 'rc4'
- type = 'list'
- elements = 'str'
- }
- remove = @{
- choices = 'aes128', 'aes256', 'des', 'rc4'
- type = 'list'
- elements = 'str'
- }
- set = @{
- choices = 'aes128', 'aes256', 'des', 'rc4'
- type = 'list'
- elements = 'str'
- }
- }
+ type = 'add_remove_set'
+ choices = 'aes128', 'aes256', 'des', 'rc4'
}
Attribute = 'KerberosEncryptionType'
CaseInsensitive = $true
@@ -107,8 +86,9 @@ $setParams = @{
}
[PSCustomObject]@{
Name = 'managed_by'
- Option = @{ type = 'str' }
+ Option = @{ type = 'raw' }
Attribute = 'ManagedBy'
+ DNLookup = $true
}
[PSCustomObject]@{
Name = 'sam_account_name'
@@ -119,45 +99,11 @@ $setParams = @{
Name = 'spn'
Option = @{
aliases = 'spns'
- type = 'dict'
- options = @{
- add = @{ type = 'list'; elements = 'str' }
- remove = @{ type = 'list'; elements = 'str' }
- set = @{ type = 'list'; elements = 'str' }
- }
- }
- Attribute = 'ServicePrincipalNames'
- New = {
- param($Module, $ADParams, $NewParams)
-
- $spns = @(
- $Module.Params.spn.add
- $Module.Params.spn.set
- ) | Select-Object -Unique
-
- $NewParams.ServicePrincipalNames = $spns
- $Module.Diff.after.spn = $spns
- }
- Set = {
- param($Module, $ADParams, $SetParams, $ADObject)
-
- $desired = $Module.Params.spn
- $compareParams = @{
- Existing = $ADObject.ServicePrincipalNames
- CaseInsensitive = $true
- }
- $res = Compare-AnsibleADIdempotentList @compareParams @desired
- if ($res.Changed) {
- $SetParams.ServicePrincipalNames = @{}
- if ($res.ToAdd) {
- $SetParams.ServicePrincipalNames.Add = $res.ToAdd
- }
- if ($res.ToRemove) {
- $SetParams.ServicePrincipalNames.Remove = $res.ToRemove
- }
- }
- $module.Diff.after.kerberos_encryption_types = @($res.Value | Sort-Object)
+ type = 'add_remove_set'
}
+ Attribute = 'servicePrincipalName'
+ CaseInsensitive = $true
+ IsRawAttribute = $true
}
[PSCustomObject]@{
Name = 'trusted_for_delegation'
diff --git a/ansible_collections/microsoft/ad/plugins/modules/computer.py b/ansible_collections/microsoft/ad/plugins/modules/computer.py
index ab336d6b4..cf160256a 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/computer.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/computer.py
@@ -15,14 +15,19 @@ options:
description:
- The principal objects that the current AD object can trust for
delegation to either add, remove or set.
- - The values for each sub option must be specified as a distinguished name
- C(CN=shenetworks,CN=Users,DC=ansible,DC=test)
+ - Each subkey value is a list of values in the form of a
+ C(distinguishedName), C(objectGUID), C(objectSid), C(sAMAccountName),
+ or C(userPrincipalName) string or a dictionary with the I(name) and
+ optional I(server) key.
- This is the value set on the C(msDS-AllowedToActOnBehalfOfOtherIdentity)
LDAP attribute.
- This is a highly sensitive attribute as it allows the principals
specified to impersonate any account when authenticating with the AD
computer object being managed.
- To clear all principals, use I(set) with an empty list.
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information on how DN lookups work.
- See R(Setting list option values,ansible_collections.microsoft.ad.docsite.guide_list_values)
for more information on how to add/remove/set list options.
aliases:
@@ -31,29 +36,35 @@ options:
suboptions:
add:
description:
- - The AD objects by their C(DistinguishedName) to add as a principal
- allowed to delegate.
+ - Adds the principals specified as principals allowed to delegate to.
- Any existing principals not specified by I(add) will be untouched
unless specified by I(remove) or not in I(set).
type: list
- elements: str
+ elements: raw
+ lookup_failure_action:
+ description:
+ - Control the action to take when the lookup fails to find the DN.
+ - C(fail) will cause the task to fail.
+ - C(ignore) will ignore the value and continue.
+ - C(warn) will ignore the value and display a warning.
+ choices: ['fail', 'ignore', 'warn']
+ default: fail
+ type: str
remove:
description:
- - The AD objects by their C(DistinguishedName) to remove as a principal
- allowed to delegate.
+ - Removes the principals specified as principals allowed to delegate to.
- Any existing principals not specified by I(remove) will be untouched
unless I(set) is defined.
type: list
- elements: str
+ elements: raw
set:
description:
- - The AD objects by their C(DistinguishedName) to set as the only
- principals allowed to delegate.
+ - Sets the principals specified as principals allowed to delegate to.
- This will remove any existing principals if not specified in this
list.
- Specify an empty list to remove all principals allowed to delegate.
type: list
- elements: str
+ elements: raw
dns_hostname:
description:
- Specifies the fully qualified domain name (FQDN) of the computer.
@@ -124,9 +135,13 @@ options:
description:
- The user or group that manages the object.
- The value can be in the form of a C(distinguishedName), C(objectGUID),
- C(objectSid), or sAMAccountName).
+ C(objectSid), C(sAMAccountName), or C(userPrincipalName) string or a
+ dictionary with the I(name) and optional I(server) key.
- This is the value set on the C(managedBy) LDAP attribute.
- type: str
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information on how DN lookups work.
+ type: raw
sam_account_name:
description:
- The C(sAMAccountName) value to set for the group.
@@ -220,7 +235,7 @@ EXAMPLES = r"""
dns_hostname: one_linux_server.my_org.local
path: OU=servers,DC=my_org,DC=local
description: Example of linux server
- enabled: yes
+ enabled: true
state: present
- name: Remove linux computer from Active Directory using a windows machine
@@ -233,26 +248,26 @@ EXAMPLES = r"""
identity: TheComputer
spn:
add:
- - HOST/TheComputer
- - HOST/TheComputer.domain.test
- - HOST/TheComputer.domain.test:1234
+ - HOST/TheComputer
+ - HOST/TheComputer.domain.test
+ - HOST/TheComputer.domain.test:1234
- name: Remove SPNs on the computer
microsoft.ad.computer:
identity: TheComputer
spn:
remove:
- - HOST/TheComputer
- - HOST/TheComputer.domain.test
- - HOST/TheComputer.domain.test:1234
+ - HOST/TheComputer
+ - HOST/TheComputer.domain.test
+ - HOST/TheComputer.domain.test:1234
- name: Set the principals the computer trusts for delegation from
microsoft.ad.computer:
identity: TheComputer
delegates:
set:
- - CN=FileShare,OU=Computers,DC=domain,DC=test
- - CN=DC,OU=Domain Controllers,DC=domain,DC=test
+ - CN=FileShare,OU=Computers,DC=domain,DC=test
+ - OtherServer$ # Lookup by sAMAccountName
"""
RETURN = r"""
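As with the microsoft.ad.ou example later in this change set, the managed_by option above now also accepts a dictionary; a small hedged sketch with made-up server and identity names:

- name: Set managedBy on a computer using an identity from another DC (illustrative)
  microsoft.ad.computer:
    identity: TheComputer
    managed_by:
      name: manager-user
      server: dc01.other.test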
diff --git a/ansible_collections/microsoft/ad/plugins/modules/domain.py b/ansible_collections/microsoft/ad/plugins/modules/domain.py
index 15578f7fd..0d9359242 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/domain.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/domain.py
@@ -99,6 +99,7 @@ attributes:
bypass_host_loop:
support: none
seealso:
+- module: microsoft.ad.domain_child
- module: microsoft.ad.domain_controller
- module: microsoft.ad.group
- module: microsoft.ad.membership
diff --git a/ansible_collections/microsoft/ad/plugins/modules/domain_child.ps1 b/ansible_collections/microsoft/ad/plugins/modules/domain_child.ps1
new file mode 100644
index 000000000..85fe3053d
--- /dev/null
+++ b/ansible_collections/microsoft/ad/plugins/modules/domain_child.ps1
@@ -0,0 +1,242 @@
+#!powershell
+
+# Copyright (c) 2024 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+#AnsibleRequires -CSharpUtil Ansible.Basic
+
+$spec = @{
+ options = @{
+ create_dns_delegation = @{
+ type = 'bool'
+ }
+ database_path = @{
+ type = 'path'
+ }
+ dns_domain_name = @{
+ type = 'str'
+ }
+ domain_admin_password = @{
+ type = 'str'
+ required = $true
+ no_log = $true
+ }
+ domain_admin_user = @{
+ type = 'str'
+ required = $true
+ }
+ domain_mode = @{
+ type = 'str'
+ }
+ domain_type = @{
+ choices = 'child', 'tree'
+ default = 'child'
+ type = 'str'
+ }
+ install_dns = @{
+ type = 'bool'
+ }
+ log_path = @{
+ type = 'path'
+ }
+ parent_domain_name = @{
+ type = 'str'
+ }
+ reboot = @{
+ default = $false
+ type = 'bool'
+ }
+ safe_mode_password = @{
+ type = 'str'
+ required = $true
+ no_log = $true
+ }
+ site_name = @{
+ type = 'str'
+ }
+ sysvol_path = @{
+ type = 'path'
+ }
+ }
+ required_if = @(
+ , @('domain_type', 'tree', @('parent_domain_name'))
+ )
+ required_together = @(
+ , @("domain_admin_user", "domain_admin_password")
+ )
+ supports_check_mode = $true
+}
+$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
+
+$module.Result.reboot_required = $false
+$module.Result._do_action_reboot = $false # Used by action plugin
+
+$createDnsDelegation = $module.Params.create_dns_delegation
+$databasePath = $module.Params.database_path
+$dnsDomainName = $module.Params.dns_domain_name
+$domainMode = $module.Params.domain_mode
+$domainType = $module.Params.domain_type
+$installDns = $module.Params.install_dns
+$logPath = $module.Params.log_path
+$parentDomainName = $module.Params.parent_domain_name
+$safeModePassword = $module.Params.safe_mode_password
+$siteName = $module.Params.site_name
+$sysvolPath = $module.Params.sysvol_path
+
+$domainCredential = New-Object -TypeName System.Management.Automation.PSCredential -ArgumentList @(
+ $module.Params.domain_admin_user,
+ (ConvertTo-SecureString -AsPlainText -Force -String $module.Params.domain_admin_password)
+)
+
+if ($domainType -eq 'child' -and $parentDomainName) {
+ $module.FailJson("parent_domain_name must not be set when domain_type=child")
+}
+
+$requiredFeatures = @("AD-Domain-Services", "RSAT-ADDS")
+$features = Get-WindowsFeature -Name $requiredFeatures
+$unavailableFeatures = Compare-Object -ReferenceObject $requiredFeatures -DifferenceObject $features.Name -PassThru
+
+if ($unavailableFeatures) {
+ $module.FailJson("The following features required for a domain child are unavailable: $($unavailableFeatures -join ',')")
+}
+
+$missingFeatures = $features | Where-Object InstallState -NE Installed
+if ($missingFeatures) {
+ $res = Install-WindowsFeature -Name $missingFeatures -WhatIf:$module.CheckMode
+ $module.Result.changed = $true
+ $module.Result.reboot_required = [bool]$res.RestartNeeded
+
+ # When in check mode and the prereq was "installed" we need to exit early as
+ # the AD cmdlets weren't really installed
+ if ($module.CheckMode) {
+ $module.ExitJson()
+ }
+}
+
+# Check that we got a valid domain_mode
+$validDomainModes = [Enum]::GetNames((Get-Command -Name Install-ADDSDomain).Parameters.DomainMode.ParameterType)
+if (($null -ne $domainMode) -and -not ($domainMode -in $validDomainModes)) {
+ $validModes = $validDomainModes -join ", "
+ $module.FailJson("The parameter 'domain_mode' does not accept '$domainMode', please use one of: $validModes")
+}
+
+$systemRole = Get-CimInstance -ClassName Win32_ComputerSystem -Property Domain, DomainRole
+if ($systemRole.DomainRole -in @(4, 5)) {
+ if ($systemRole.Domain -ne $dnsDomainName) {
+ $module.FailJson("Host is already a domain controller in another domain $($systemRole.Domain)")
+ }
+ $module.ExitJson()
+}
+
+$installParams = @{
+ Confirm = $false
+ Credential = $domainCredential
+ Force = $true
+ NoRebootOnCompletion = $true
+ SafeModeAdministratorPassword = (ConvertTo-SecureString $safeModePassword -AsPlainText -Force)
+ SkipPreChecks = $true
+ WhatIf = $module.CheckMode
+}
+
+if ($domainType -eq 'child') {
+ $newDomainName, $parentDomainName = $dnsDomainName.Split([char[]]".", 2)
+ $installParams.DomainType = 'ChildDomain'
+ $installParams.NewDomainName = $newDomainName
+ $installParams.ParentDomainName = $parentDomainName
+}
+else {
+ $installParams.DomainType = 'TreeDomain'
+ $installParams.NewDomainName = $dnsDomainName
+ $installParams.ParentDomainName = $parentDomainName
+}
+
+if ($null -ne $createDnsDelegation) {
+ $installParams.CreateDnsDelegation = $createDnsDelegation
+}
+if ($databasePath) {
+ $installParams.DatabasePath = $databasePath
+}
+if ($domainMode) {
+ $installParams.DomainMode = $domainMode
+}
+if ($null -ne $installDns) {
+ $installParams.InstallDns = $installDns
+}
+if ($logPath) {
+ $installParams.LogPath = $logPath
+}
+if ($siteName) {
+ $installParams.SiteName = $siteName
+}
+if ($sysvolPath) {
+ $installParams.SysvolPath = $sysvolPath
+}
+
+try {
+ $null = Install-ADDSDomain @installParams
+}
+catch [Microsoft.DirectoryServices.Deployment.DCPromoExecutionException] {
+ # ExitCode 15 == 'Role change is in progress or this computer needs to be restarted.'
+ # DCPromo exit codes details can be found at
+ # https://docs.microsoft.com/en-us/windows-server/identity/ad-ds/deploy/troubleshooting-domain-controller-deployment
+ if ($_.Exception.ExitCode -in @(15, 19)) {
+ $module.Result.reboot_required = $true
+ $module.Result._do_action_reboot = $true
+ }
+
+ $module.FailJson("Failed to install ADDSDomain, DCPromo exited with $($_.Exception.ExitCode)", $_)
+}
+finally {
+ # The Netlogon service is set to auto start but is not started. This is
+ # required for Ansible to connect back to the host and reboot in a
+ # later task. Even if this fails Ansible can still connect but only
+ # with ansible_winrm_transport=basic so we just display a warning if
+ # this fails.
+ if (-not $module.CheckMode) {
+ try {
+ Start-Service -Name Netlogon
+ }
+ catch {
+ $msg = -join @(
+ "Failed to start the Netlogon service after promoting the host, "
+ "Ansible may be unable to connect until the host is manually rebooted: $($_.Exception.Message)"
+ )
+ $module.Warn($msg)
+ }
+ }
+}
+
+$module.Result.changed = $true
+$module.Result.reboot_required = $true
+
+if ($module.Result.reboot_required -and $module.Params.reboot -and -not $module.CheckMode) {
+ # Promoting or demoting puts the server in a very funky state and it may
+ # not be possible for Ansible to connect back until a reboot is done. If
+ # the user requested the action plugin to perform the reboot then start it
+ # here and get the action plugin to continue where this left off.
+
+ $lastBootTime = (Get-CimInstance -ClassName Win32_OperatingSystem -Property LastBootUpTime).LastBootUpTime.ToFileTime()
+ $module.Result._previous_boot_time = $lastBootTime
+
+ $shutdownRegPath = 'HKLM:\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Winlogon\AutoLogonChecked'
+ Remove-Item -LiteralPath $shutdownRegPath -Force -ErrorAction SilentlyContinue
+
+ $comment = 'Reboot initiated by Ansible'
+ $stdout = $null
+ $stderr = . { shutdown.exe /r /t 10 /c $comment | Set-Variable stdout } 2>&1 | ForEach-Object ToString
+ if ($LASTEXITCODE -eq 1190) {
+ # A reboot was already scheduled, abort it and try again
+ shutdown.exe /a
+ $stdout = $null
+ $stderr = . { shutdown.exe /r /t 10 /c $comment | Set-Variable stdout } 2>&1 | ForEach-Object ToString
+ }
+
+ if ($LASTEXITCODE) {
+ $module.Result.rc = $LASTEXITCODE
+ $module.Result.stdout = $stdout
+ $module.Result.stderr = $stderr
+ $module.FailJson("Failed to initiate reboot, see rc, stdout, stderr for more information")
+ }
+}
+
+$module.ExitJson()
diff --git a/ansible_collections/microsoft/ad/plugins/modules/domain_child.yml b/ansible_collections/microsoft/ad/plugins/modules/domain_child.yml
new file mode 100644
index 000000000..0f3308098
--- /dev/null
+++ b/ansible_collections/microsoft/ad/plugins/modules/domain_child.yml
@@ -0,0 +1,184 @@
+# Copyright (c) 2024 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+DOCUMENTATION:
+ module: domain_child
+ short_description: Manage domain children in an existing Active Directory forest
+ description:
+ - Ensure that a Windows Server host is configured as a domain controller as
+ a new domain in an existing forest.
+ - This module may require subsequent use of the
+ M(ansible.windows.win_reboot) action if changes are made.
+ - This module will only check if the domain specified by I(dns_domain_name)
+ exists or not. If the domain already exists under the same name, no other
+ options, other than the domain name, will be checked during the run.
+ options:
+ create_dns_delegation:
+ description:
+ - Whether to create a DNS delegation that references the new DNS
+ server that was installed.
+ - Valid for Active Directory-integrated DNS only.
+ - The default is computed automatically based on the environment.
+ type: bool
+ database_path:
+ description:
+ - The path to a directory on a fixed disk of the Windows host where the
+ domain database will be created.
+ - If not set then the default path is C(%SYSTEMROOT%\NTDS).
+ type: path
+ dns_domain_name:
+ description:
+ - The full DNS name of the domain to create.
+ - When I(domain_type=child), the parent DNS domain name is derived
+ from this value.
+ type: str
+ domain_admin_password:
+ description:
+ - Password for the specified I(domain_admin_user).
+ type: str
+ required: true
+ domain_admin_user:
+ description:
+ - Username of a domain admin for the parent domain.
+ type: str
+ required: true
+ domain_mode:
+ description:
+ - Specifies the domain functional level of child/tree.
+ - The domain functional level cannot be lower than the forest
+ functional level, but it can be higher.
+ - The default is automatically computed and set.
+ - Current known modes are C(Win2003), C(Win2008), C(Win2008R2),
+ C(Win2012), C(Win2012R2), or C(WinThreshold).
+ type: str
+ domain_type:
+ description:
+ - Specifies the type of domain to create.
+ - Set to C(child) to create a child of an existing domain as specified
+ by I(dns_domain_name).
+ - Set to C(tree) to create a new domain tree in an existing forest as
+ specified by I(parent_domain_name). The I(dns_domain_name) must be
+ the full domain name of the new domain tree to create.
+ choices:
+ - child
+ - tree
+ default: child
+ type: str
+ install_dns:
+ description:
+ - Whether to install the DNS service when creating the domain
+ controller.
+ - If not specified then the C(-InstallDns) option is not supplied to
+ the C(Install-ADDSDomain) command, see
+ L(Install-ADDSDomain,https://learn.microsoft.com/en-us/powershell/module/addsdeployment/install-addsdomain#-installdns)
+ for more information.
+ type: bool
+ log_path:
+ description:
+ - Specifies the fully qualified, non-UNC path to a directory on a fixed
+ disk of the local computer that will contain the domain log files.
+ type: path
+ parent_domain_name:
+ description:
+ - The fully qualified domain name of an existing parent domain to
+ create a new domain tree in.
+ - This can only be set when I(domain_type=tree).
+ type: str
+ reboot:
+ description:
+ - If C(true), this will reboot the host if a reboot was required to
+ create the domain.
+ - If C(false), this will not reboot the host if a reboot was required
+ and instead sets the I(reboot_required) return value to C(true).
+ - Multiple reboots may occur if the host required a reboot before the
+ domain promotion.
+ - This cannot be used with async mode.
+ type: bool
+ default: false
+ safe_mode_password:
+ description:
+ - Safe mode password for the domain controller.
+ required: true
+ type: str
+ site_name:
+ description:
+ - Specifies the name of an existing site where you can place the new
+ domain controller.
+ type: str
+ sysvol_path:
+ description:
+ - The path to a directory on a fixed disk of the Windows host where the
+ Sysvol folder will be created.
+ - If not set then the default path is C(%SYSTEMROOT%\SYSVOL).
+ type: path
+ notes:
+ - It is highly recommended to set I(reboot=true) to have Ansible manage the
+ host reboot phase as the actions done by this module puts the host in a
+ state where it may not be possible for Ansible to reconnect in a
+ subsequent task without a reboot.
+ - This module must be run on a Windows target host.
+ extends_documentation_fragment:
+ - ansible.builtin.action_common_attributes
+ - ansible.builtin.action_common_attributes.flow
+ attributes:
+ check_mode:
+ support: full
+ diff_mode:
+ support: none
+ platform:
+ platforms:
+ - windows
+ action:
+ support: full
+ async:
+ support: partial
+ details: Supported for all scenarios except with I(reboot=True).
+ bypass_host_loop:
+ support: none
+ seealso:
+ - module: microsoft.ad.domain
+ - module: microsoft.ad.domain_controller
+ author:
+ - Jordan Borean (@jborean93)
+
+EXAMPLES: |
+ - name: Create a child domain foo.example.com with parent example.com
+ microsoft.ad.domain_child:
+ dns_domain_name: foo.example.com
+ domain_admin_user: testguy@example.com
+ domain_admin_password: password123!
+ safe_mode_password: password123!
+ reboot: true
+
+ - name: Create a domain tree foo.example.com with parent bar.example.com
+ microsoft.ad.domain_child:
+ dns_domain_name: foo.example.com
+ parent_domain_name: bar.example.com
+ domain_type: tree
+ domain_admin_user: testguy@bar.example.com
+ domain_admin_password: password123!
+ safe_mode_password: password123!
+ reboot: true
+
+ # This scenario is not recommended, use reboot: true when possible
+ - name: Promote server with custom paths with manual reboot task
+ microsoft.ad.domain_child:
+ dns_domain_name: foo.ansible.vagrant
+ domain_admin_user: testguy@ansible.vagrant
+ domain_admin_password: password123!
+ safe_mode_password: password123!
+ sysvol_path: D:\SYSVOL
+ database_path: D:\NTDS
+ log_path: D:\NTDS
+ register: dc_promotion
+
+ - name: Reboot after promotion
+ ansible.windows.win_reboot:
+ when: dc_promotion.reboot_required
+
+RETURN:
+ reboot_required:
+ description: True if changes were made that require a reboot.
+ returned: always
+ type: bool
+ sample: true
diff --git a/ansible_collections/microsoft/ad/plugins/modules/domain_controller.py b/ansible_collections/microsoft/ad/plugins/modules/domain_controller.py
index df4641741..69971243b 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/domain_controller.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/domain_controller.py
@@ -114,6 +114,7 @@ attributes:
seealso:
- module: microsoft.ad.computer
- module: microsoft.ad.domain
+- module: microsoft.ad.domain_child
- module: microsoft.ad.group
- module: microsoft.ad.membership
- module: microsoft.ad.user
@@ -150,7 +151,7 @@ EXAMPLES = r"""
domain_admin_password: password123!
safe_mode_password: password123!
state: domain_controller
- read_only: yes
+ read_only: true
site_name: London
reboot: true
@@ -168,7 +169,7 @@ EXAMPLES = r"""
register: dc_promotion
- name: Reboot after promotion
- microsoft.ad.win_reboot:
+ ansible.windows.win_reboot:
when: dc_promotion.reboot_required
"""
diff --git a/ansible_collections/microsoft/ad/plugins/modules/group.ps1 b/ansible_collections/microsoft/ad/plugins/modules/group.ps1
index bbb3aa8d7..ed4a52164 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/group.ps1
+++ b/ansible_collections/microsoft/ad/plugins/modules/group.ps1
@@ -26,141 +26,14 @@ $setParams = @{
Name = 'managed_by'
Option = @{ type = 'str' }
Attribute = 'ManagedBy'
+ DNLookup = $true
}
[PSCustomObject]@{
Name = 'members'
- Option = @{
- type = 'dict'
- options = @{
- add = @{
- type = 'list'
- elements = 'str'
- }
- remove = @{
- type = 'list'
- elements = 'str'
- }
- set = @{
- type = 'list'
- elements = 'str'
- }
- }
- }
+ Option = @{ type = 'add_remove_set' }
Attribute = 'member'
- New = {
- param($Module, $ADParams, $NewParams)
-
- $newMembers = @(
- foreach ($actionKvp in $Module.Params.members.GetEnumerator()) {
- if ($null -eq $actionKvp.Value -or $actionKvp.Key -eq 'remove') { continue }
-
- $invalidMembers = [System.Collections.Generic.List[string]]@()
-
- foreach ($m in $actionKvp.Value) {
- $obj = Get-AnsibleADObject -Identity $m @ADParams |
- Select-Object -ExpandProperty DistinguishedName
- if ($obj) {
- $obj
- }
- else {
- $invalidMembers.Add($m)
- }
- }
-
- if ($invalidMembers) {
- $module.FailJson("Failed to find the following ad objects for group members: '$($invalidMembers -join "', '")'")
- }
- }
- )
-
- if ($newMembers) {
- if (-not $NewParams.ContainsKey('OtherAttributes')) {
- $NewParams.OtherAttributes = @{}
- }
- # The AD cmdlets don't like explicitly casted arrays, use
- # ForEach-Object to get back a vanilla object[] to set.
- $NewParams.OtherAttributes.member = $newMembers | ForEach-Object { "$_" }
- }
- $Module.Diff.after.members = @($newMembers | Sort-Object)
- }
- Set = {
- param($Module, $ADParams, $SetParams, $ADObject)
-
- [string[]]$existingMembers = $ADObject.member
-
- $desiredState = @{}
- foreach ($actionKvp in $Module.Params.members.GetEnumerator()) {
- if ($null -eq $actionKvp.Value) { continue }
-
- $invalidMembers = [System.Collections.Generic.List[string]]@()
-
- $dns = foreach ($m in $actionKvp.Value) {
- $obj = Get-AnsibleADObject -Identity $m @ADParams |
- Select-Object -ExpandProperty DistinguishedName
- if ($obj) {
- $obj
- }
- else {
- $invalidMembers.Add($m)
- }
- }
-
- if ($invalidMembers) {
- $module.FailJson("Failed to find the following ad objects for group members: '$($invalidMembers -join "', '")'")
- }
-
- $desiredState[$actionKvp.Key] = @($dns)
- }
-
- $ignoreCase = [System.StringComparer]::OrdinalIgnoreCase
- [string[]]$diffAfter = @()
- if ($desiredState.ContainsKey('set')) {
- [string[]]$desiredMembers = $desiredState.set
- $diffAfter = $desiredMembers
-
- $toAdd = [string[]][System.Linq.Enumerable]::Except($desiredMembers, $existingMembers, $ignoreCase)
- $toRemove = [string[]][System.Linq.Enumerable]::Except($existingMembers, $desiredMembers, $ignoreCase)
-
- if ($toAdd -or $toRemove) {
- if (-not $SetParams.ContainsKey('Replace')) {
- $SetParams.Replace = @{}
- }
- $SetParams.Replace.member = $desiredMembers
- }
- }
- else {
- [string[]]$toAdd = @()
- [string[]]$toRemove = @()
- $diffAfter = $existingMembers
-
- if ($desiredState.ContainsKey('add') -and $desiredState.add) {
- [string[]]$desiredMembers = $desiredState.add
- $toAdd = [string[]][System.Linq.Enumerable]::Except($desiredMembers, $existingMembers, $ignoreCase)
- $diffAfter = [System.Linq.Enumerable]::Union($desiredMembers, $diffAfter, $ignoreCase)
- }
- if ($desiredState.ContainsKey('remove') -and $desiredState.remove) {
-
- [string[]]$desiredMembers = $desiredState.remove
- $toRemove = [string[]][System.Linq.Enumerable]::Intersect($desiredMembers, $existingMembers, $ignoreCase)
- $diffAfter = [System.Linq.Enumerable]::Except($diffAfter, $desiredMembers, $ignoreCase)
- }
-
- if ($toAdd) {
- if (-not $SetParams.ContainsKey('Add')) {
- $SetParams.Add = @{}
- }
- $SetParams.Add.member = $toAdd
- }
- if ($toRemove) {
- if (-not $SetParams.ContainsKey('Remove')) {
- $SetParams.Remove = @{}
- }
- $SetParams.Remove.member = $toRemove
- }
- }
-
- $Module.Diff.after.members = ($diffAfter | Sort-Object)
- }
+ DNLookup = $true
+ IsRawAttribute = $true
}
[PSCustomObject]@{
Name = 'sam_account_name'
diff --git a/ansible_collections/microsoft/ad/plugins/modules/group.py b/ansible_collections/microsoft/ad/plugins/modules/group.py
index 9fb28e819..df2c70440 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/group.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/group.py
@@ -32,19 +32,29 @@ options:
description:
- The user or group that manages the group.
- The value can be in the form of a C(distinguishedName), C(objectGUID),
- C(objectSid), or C(sAMAccountName).
+ C(objectSid), C(sAMAccountName), or C(userPrincipalName) string or a
+ dictionary with the I(name) and optional I(server) key.
- This is the value set on the C(managedBy) LDAP attribute.
- type: str
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information on how DN lookups work.
+ type: raw
members:
description:
- The members of the group to set.
- The value is a dictionary that contains 3 keys, I(add), I(remove), and
I(set).
- - Each subkey is set to a list of AD principal objects to add, remove or
- set as the members of this AD group respectively. A principal can be in
- the form of a C(distinguishedName), C(objectGUID), C(objectSid), or
- C(sAMAccountName).
- - The module will fail if it cannot find any of the members referenced.
+ - Each subkey value is a list of values in the form of a
+ C(distinguishedName), C(objectGUID), C(objectSid), C(sAMAccountName),
+ or C(userPrincipalName) string or a dictionary with the I(name) and
+ optional I(server) key.
+ - The value for each subkey can either be specified as a string or a
+ dictionary with the I(name) and optional I(server) key. The I(name) is
+ the identity to look up and I(server) is an optional key to override which
+ AD server to look up the identity on.
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information.
type: dict
suboptions:
add:
@@ -52,13 +62,22 @@ options:
- Adds the principals specified as members of the group, keeping the
existing membership if they are not specified.
type: list
- elements: str
+ elements: raw
+ lookup_failure_action:
+ description:
+ - Control the action to take when the lookup fails to find the DN.
+ - C(fail) will cause the task to fail.
+ - C(ignore) will ignore the value and continue.
+ - C(warn) will ignore the value and display a warning.
+ choices: ['fail', 'ignore', 'warn']
+ default: fail
+ type: str
remove:
description:
- Removes the principals specified as members of the group, keeping the
existing membership if they are not specified.
type: list
- elements: str
+ elements: raw
set:
description:
- Sets only the principals specified as members of the group.
@@ -66,7 +85,7 @@ options:
if not specified in this list.
- Set this to an empty list to remove all members from a group.
type: list
- elements: str
+ elements: raw
sam_account_name:
description:
- The C(sAMAccountName) value to set for the group.
@@ -179,8 +198,8 @@ EXAMPLES = r"""
scope: domainlocal
members:
add:
- - Domain Admins
- - Domain Users
+ - Domain Admins
+ - Domain Users
- name: Remove members from the group, preserving existing membership
microsoft.ad.group:
@@ -188,8 +207,8 @@ EXAMPLES = r"""
scope: domainlocal
members:
remove:
- - Domain Admins
- - Domain Users
+ - Domain Admins
+ - Domain Users
- name: Replace entire membership of group
microsoft.ad.group:
@@ -197,8 +216,14 @@ EXAMPLES = r"""
scope: domainlocal
members:
set:
- - Domain Admins
- - Domain Users
+ - Domain Admins
+ - Domain Users
+ - name: UserInOtherDomain
+ server: OtherDomain
+ domain_credentials:
+ - name: OtherDomain
+ username: OtherDomainUser
+ password: '{{ other_domain_password }}'
"""
RETURN = r"""
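The lookup_failure_action suboption documented above is not shown in the examples; a brief hedged sketch with placeholder group and member names, where unresolved members only raise a warning instead of failing the task:

- name: Add members, warning on unresolvable identities (illustrative)
  microsoft.ad.group:
    name: Cow
    scope: domainlocal
    members:
      lookup_failure_action: warn
      add:
        - Domain Users
        - possibly-missing-user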
diff --git a/ansible_collections/microsoft/ad/plugins/modules/membership.ps1 b/ansible_collections/microsoft/ad/plugins/modules/membership.ps1
index d2be34e9f..963733a97 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/membership.ps1
+++ b/ansible_collections/microsoft/ad/plugins/modules/membership.ps1
@@ -143,7 +143,7 @@ Function Get-CurrentState {
}
[PSCustomObject]@{
- HostName = $env:COMPUTERNAME
+ HostName = [System.Net.Dns]::GetHostName()
PartOfDomain = $cs.PartOfDomain
DnsDomainName = $domainName
WorkgroupName = $cs.Workgroup
diff --git a/ansible_collections/microsoft/ad/plugins/modules/object.py b/ansible_collections/microsoft/ad/plugins/modules/object.py
index c6396619a..6b305afa2 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/object.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/object.py
@@ -128,8 +128,8 @@ EXAMPLES = r"""
attributes:
add:
extensionName:
- - value 1
- - value 2
+ - value 1
+ - value 2
type: container
state: present
@@ -139,8 +139,8 @@ EXAMPLES = r"""
attributes:
remove:
extensionName:
- - value 1
- - value 3
+ - value 1
+ - value 3
type: container
state: present
"""
diff --git a/ansible_collections/microsoft/ad/plugins/modules/object_info.py b/ansible_collections/microsoft/ad/plugins/modules/object_info.py
index 0cdcf06a7..88460979b 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/object_info.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/object_info.py
@@ -130,13 +130,13 @@ EXAMPLES = r"""
microsoft.ad.object_info:
filter: ObjectClass -eq 'user' -and objectCategory -eq 'Person'
properties:
- - objectSid
+ - objectSid
- name: Get the SID for all user accounts as a LDAP filter
microsoft.ad.object_info:
ldap_filter: (&(objectClass=user)(objectCategory=Person))
properties:
- - objectSid
+ - objectSid
- name: Search all computer accounts in a specific path that were added after February 1st
microsoft.ad.object_info:
diff --git a/ansible_collections/microsoft/ad/plugins/modules/ou.ps1 b/ansible_collections/microsoft/ad/plugins/modules/ou.ps1
index 6af68b5ae..909b13cd9 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/ou.ps1
+++ b/ansible_collections/microsoft/ad/plugins/modules/ou.ps1
@@ -22,6 +22,7 @@ $setParams = @{
Name = 'managed_by'
Option = @{ type = 'str' }
Attribute = 'ManagedBy'
+ DNLookup = $true
}
[PSCustomObject]@{
Name = 'postal_code'
diff --git a/ansible_collections/microsoft/ad/plugins/modules/ou.py b/ansible_collections/microsoft/ad/plugins/modules/ou.py
index 5d1d60503..1e31cc890 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/ou.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/ou.py
@@ -26,9 +26,13 @@ options:
description:
- The user or group that manages the object.
- The value can be in the form of a C(distinguishedName), C(objectGUID),
- C(objectSid), or sAMAccountName).
+ C(objectSid), C(sAMAccountName), or C(userPrincipalName) string or a
+ dictionary with the I(name) and optional I(server) key.
- This is the value set on the C(managedBy) LDAP attribute.
- type: str
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information on how DN lookups work.
+ type: raw
postal_code:
description:
- Configures the user's postal code / zip code.
@@ -116,6 +120,13 @@ EXAMPLES = r"""
attributes:
set:
comment: A comment for the OU
+
+- name: Set managedBy using an identity from another DC
+ microsoft.ad.ou:
+ name: MyOU
+ managed_by:
+ name: manager-user
+ server: OtherDC
"""
RETURN = r"""
diff --git a/ansible_collections/microsoft/ad/plugins/modules/user.ps1 b/ansible_collections/microsoft/ad/plugins/modules/user.ps1
index 267c77627..8eef49635 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/user.ps1
+++ b/ansible_collections/microsoft/ad/plugins/modules/user.ps1
@@ -66,6 +66,9 @@ $setParams = @{
type = 'bool'
}
Attribute = 'LockedOut'
+ # We cannot lock out a user ourselves, and creating a user that is
+ # unlocked requires no action.
+ New = {}
Set = {
param($Module, $ADParams, $SetParams, $ADObject)
@@ -100,15 +103,10 @@ $setParams = @{
Name = 'delegates'
Option = @{
aliases = 'principals_allowed_to_delegate'
- type = 'dict'
- options = @{
- add = @{ type = 'list'; elements = 'str' }
- remove = @{ type = 'list'; elements = 'str' }
- set = @{ type = 'list'; elements = 'str' }
- }
+ type = 'add_remove_set'
}
Attribute = 'PrincipalsAllowedToDelegateToAccount'
- CaseInsensitive = $true
+ DNLookup = $true
}
[PSCustomObject]@{
@@ -134,10 +132,11 @@ $setParams = @{
Option = @{
type = 'dict'
options = @{
- add = @{ type = 'list'; elements = 'str' }
- remove = @{ type = 'list'; elements = 'str' }
- set = @{ type = 'list'; elements = 'str' }
- missing_behaviour = @{
+ add = @{ type = 'list'; elements = 'raw' }
+ remove = @{ type = 'list'; elements = 'raw' }
+ set = @{ type = 'list'; elements = 'raw' }
+ lookup_failure_action = @{
+ aliases = @('missing_behaviour')
choices = 'fail', 'ignore', 'warn'
default = 'fail'
type = 'str'
@@ -367,27 +366,19 @@ $setParams = @{
return
}
- $groupMissingBehaviour = $Module.Params.groups.missing_behaviour
- $lookupGroup = {
- try {
- (Get-ADGroup -Identity $args[0] @ADParams).DistinguishedName
- }
- catch {
- if ($groupMissingBehaviour -eq "fail") {
- $module.FailJson("Failed to locate group $($args[0]): $($_.Exception.Message)", $_)
- }
- elseif ($groupMissingBehaviour -eq "warn") {
- $module.Warn("Failed to locate group $($args[0]) but continuing on: $($_.Exception.Message)")
- }
- }
- }
-
[string[]]$existingGroups = @(
# In check mode the ADObject won't be given
if ($ADObject) {
try {
- Get-ADPrincipalGroupMembership -Identity $ADObject.ObjectGUID @ADParams -ErrorAction Stop |
- Select-Object -ExpandProperty DistinguishedName
+ # Get-ADPrincipalGroupMembership doesn't work well with
+ # cross-domain membership. It also returns the primary group,
+ # so this code reflects that by using Get-ADUser instead.
+ $userMembership = Get-ADUser -Identity $ADObject.ObjectGUID @ADParams -Properties @(
+ 'MemberOf',
+ 'PrimaryGroup'
+ ) -ErrorAction Stop
+ $userMembership.memberOf
+ $userMembership.PrimaryGroup
}
catch {
$module.Warn("Failed to enumerate user groups but continuing on: $($_.Exception.Message)")
@@ -403,14 +394,42 @@ $setParams = @{
CaseInsensitive = $true
Existing = $existingGroups
}
- 'add', 'remove', 'set' | ForEach-Object -Process {
- if ($null -ne $Module.Params.groups[$_]) {
- $compareParams[$_] = @(
- foreach ($group in $Module.Params.groups[$_]) {
- & $lookupGroup $group
+ $dnServerParams = @{}
+ foreach ($actionKvp in $Module.Params.groups.GetEnumerator()) {
+ if ($null -eq $actionKvp.Value -or $actionKvp.Key -in @('lookup_failure_action', 'missing_behaviour')) {
+ continue
+ }
+
+ $convertParams = @{
+ Module = $Module
+ Context = "groups.$($actionKvp.Key)"
+ FailureAction = $Module.Params.groups.lookup_failure_action
+ }
+ $dns = foreach ($lookupId in $actionKvp.Value) {
+ $dn = $lookupId | ConvertTo-AnsibleADDistinguishedName @ADParams @convertParams
+ if (-not $dn) {
+ continue # Warning was written
+ }
+
+ # As membership is done on the group server, we need to store the
+ # correct server and credentials that were used for the lookup.
+ if ($lookupId -is [System.Collections.IDictionary] -and $lookupId.server) {
+ $dnServerParams[$dn] = @{
+ Server = $lookupId.server
+ }
+
+ if ($Module.ServerCredentials.ContainsKey($lookupId.server)) {
+ $dnServerParams[$dn].Credential = $Module.ServerCredentials[$lookupId.server]
}
- )
+ }
+ else {
+ $dnServerParams[$dn] = $ADParams
+ }
+
+ $dn
}
+
+ $compareParams[$actionKvp.Key] = @($dns)
}
$res = Compare-AnsibleADIdempotentList @compareParams
@@ -422,15 +441,32 @@ $setParams = @{
WhatIf = $Module.CheckMode
}
foreach ($member in $res.ToAdd) {
+ $lookupParams = if ($dnServerParams.ContainsKey($member)) {
+ $dnServerParams[$member]
+ }
+ else {
+ $ADParams
+ }
if ($ADObject) {
- Add-ADGroupMember -Identity $member -Members $ADObject.ObjectGUID @ADParams @commonParams
+ Set-ADObject -Identity $member -Add @{
+ member = $ADObject.DistinguishedName
+ } @lookupParams @commonParams
+
}
$Module.Result.changed = $true
}
foreach ($member in $res.ToRemove) {
+ $lookupParams = if ($dnServerParams.ContainsKey($member)) {
+ $dnServerParams[$member]
+ }
+ else {
+ $ADParams
+ }
if ($ADObject) {
try {
- Remove-ADGroupMember -Identity $member -Members $ADObject.ObjectGUID @ADParams @commonParams
+ Set-ADObject -Identity $member -Remove @{
+ member = $ADObject.DistinguishedName
+ } @lookupParams @commonParams
}
catch [Microsoft.ActiveDirectory.Management.ADException] {
if ($_.Exception.ErrorCode -eq 0x0000055E) {
diff --git a/ansible_collections/microsoft/ad/plugins/modules/user.py b/ansible_collections/microsoft/ad/plugins/modules/user.py
index a3e7d1ecb..81a48b41d 100644
--- a/ansible_collections/microsoft/ad/plugins/modules/user.py
+++ b/ansible_collections/microsoft/ad/plugins/modules/user.py
@@ -40,14 +40,19 @@ options:
description:
- The principal objects that the current AD object can trust for
delegation to either add, remove or set.
- - The values for each sub option must be specified as a distinguished name
- C(CN=shenetworks,CN=Users,DC=ansible,DC=test)
+ - Each subkey value is a list of values in the form of a
+ C(distinguishedName), C(objectGUID), C(objectSid), C(sAMAccountName),
+ or C(userPrincipalName) string or a dictionary with the I(name) and
+ optional I(server) key.
- This is the value set on the C(msDS-AllowedToActOnBehalfOfOtherIdentity)
LDAP attribute.
- This is a highly sensitive attribute as it allows the principals
specified to impersonate any account when authenticating with the AD
computer object being managed.
- To clear all principals, use I(set) with an empty list.
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information on how DN lookups work.
- See R(Setting list option values,ansible_collections.microsoft.ad.docsite.guide_list_values)
for more information on how to add/remove/set list options.
aliases:
@@ -56,29 +61,36 @@ options:
suboptions:
add:
description:
- - The AD objects by their C(DistinguishedName) to add as a principal
- allowed to delegate.
+ - Adds the principals specified as principals allowed to delegate to.
- Any existing principals not specified by I(add) will be untouched
unless specified by I(remove) or not in I(set).
type: list
- elements: str
+ elements: raw
+ lookup_failure_action:
+ description:
+ - Control the action to take when the lookup fails to find the DN.
+ - C(fail) will cause the task to fail.
+ - C(ignore) will ignore the value and continue.
+ - C(warn) will ignore the value and display a warning.
+ choices: ['fail', 'ignore', 'warn']
+ default: fail
+ type: str
remove:
description:
- - The AD objects by their C(DistinguishedName) to remove as a principal
- allowed to delegate.
+ - Removes the principals specified as principals allowed to delegate to.
- Any existing principals not specified by I(remove) will be untouched
unless I(set) is defined.
type: list
- elements: str
+ elements: raw
set:
description:
- - The AD objects by their C(DistinguishedName) to set as the only
- principals allowed to delegate.
+ - Sets the principals specified as principals allowed to delegate to.
- This will remove any existing principals if not specified in this
list.
- Specify an empty list to remove all principals allowed to delegate.
type: list
- elements: str
+ elements: raw
email:
description:
- Configures the user's email address.
@@ -104,10 +116,20 @@ options:
- To clear all group memberships, use I(set) with an empty list.
- Note that users cannot be removed from their principal group (for
example, "Domain Users"). Attempting to do so will display a warning.
+ - Adding and removing a user from a group is done on the group AD object.
+ If the group is an object in a different domain, then it may require
+ explicit I(server) and I(domain_credentials) for it to work.
- Each subkey is set to a list of groups objects to add, remove or
set as the membership of this AD user respectively. A group can be in
the form of a C(distinguishedName), C(objectGUID), C(objectSid), or
C(sAMAccountName).
+ - Each subkey value is a list of group objects in the form of a
+ C(distinguishedName), C(objectGUID), C(objectSid), C(sAMAccountName),
+ or C(userPrincipalName) string or a dictionary with the I(name) and
+ optional I(server) key.
+ - See
+ R(DN Lookup Attributes,ansible_collections.microsoft.ad.docsite.guide_attributes.dn_lookup_attributes)
+ for more information on how DN lookups work.
- See R(Setting list option values,ansible_collections.microsoft.ad.docsite.guide_list_values)
for more information on how to add/remove/set list options.
type: dict
@@ -116,20 +138,20 @@ options:
description:
- The groups to add the user to.
type: list
- elements: str
+ elements: raw
remove:
description:
- The groups to remove the user from.
type: list
- elements: str
+ elements: raw
set:
description:
- The only groups the user is a member of.
- This will clear out any existing groups if not in the specified list.
- Set to an empty list to clear all group membership of the user.
type: list
- elements: str
- missing_behaviour:
+ elements: raw
+ lookup_failure_action:
description:
- Controls what happens when a group specified by C(groups) is an
invalid group name.
@@ -138,6 +160,8 @@ options:
- C(ignore) will ignore any groups that do not exist.
- C(warn) will display a warning for any groups that do not exist but
will continue without failing.
+ aliases:
+ - missing_behaviour
choices:
- fail
- ignore
@@ -287,7 +311,7 @@ EXAMPLES = r"""
state: present
groups:
set:
- - Domain Admins
+ - Domain Admins
street: 123 4th St.
city: Sometown
state_province: IN
@@ -316,8 +340,8 @@ EXAMPLES = r"""
path: ou=test,dc=domain,dc=local
groups:
set:
- - Domain Admins
- - Domain Users
+ - Domain Admins
+ - Domain Users
- name: Ensure user bob is absent
microsoft.ad.user:
@@ -329,15 +353,15 @@ EXAMPLES = r"""
identity: liz.kenyon
spn:
set:
- - MSSQLSvc/us99db-svr95:1433
- - MSSQLSvc/us99db-svr95.vmware.com:1433
+ - MSSQLSvc/us99db-svr95:1433
+ - MSSQLSvc/us99db-svr95.vmware.com:1433
- name: Ensure user has spn added
microsoft.ad.user:
identity: liz.kenyon
spn:
add:
- - MSSQLSvc/us99db-svr95:2433
+ - MSSQLSvc/us99db-svr95:2433
- name: Ensure user is created with delegates and spn's defined
microsoft.ad.user:
@@ -346,17 +370,17 @@ EXAMPLES = r"""
state: present
groups:
set:
- - Domain Admins
- - Domain Users
- - Enterprise Admins
+ - Domain Admins
+ - Domain Users
+ - Enterprise Admins
delegates:
set:
- - CN=shenetworks,CN=Users,DC=ansible,DC=test
- - CN=mk.ai,CN=Users,DC=ansible,DC=test
- - CN=jessiedotjs,CN=Users,DC=ansible,DC=test
+ - CN=shenetworks,CN=Users,DC=ansible,DC=test
+ - CN=mk.ai,CN=Users,DC=ansible,DC=test
+ - CN=jessiedotjs,CN=Users,DC=ansible,DC=test
spn:
set:
- - MSSQLSvc/us99db-svr95:2433
+ - MSSQLSvc/us99db-svr95:2433
# The name option is the name of the AD object as seen in dsa.msc and not the
# sAMAccountName. For example, this will change the sAMAccountName of the user
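A minimal sketch of the identity formats and C(lookup_failure_action) behaviour documented above; the account name, DNs, group names, and server value are purely illustrative placeholders. Plain string entries are looked up against the default server, while the dictionary form lets a single entry target a different domain controller:

```yaml
- name: set delegates and groups using string and dictionary identities
  microsoft.ad.user:
    name: MyUser
    state: present
    delegates:
      lookup_failure_action: warn  # fail (default), ignore, or warn
      set:
        # plain string identities are looked up on the default server
        - CN=AppHost,CN=Computers,DC=ansible,DC=test
        - AppSvc$
        # dictionary form allows an explicit lookup server per entry
        - name: WebHost$
          server: parent.ansible.test
    groups:
      lookup_failure_action: ignore
      add:
        - Domain Users
        - name: ParentGroup
          server: parent.ansible.test
```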
diff --git a/ansible_collections/microsoft/ad/plugins/plugin_utils/_module_with_reboot.py b/ansible_collections/microsoft/ad/plugins/plugin_utils/_module_with_reboot.py
index ebc46ead6..95e23465d 100644
--- a/ansible_collections/microsoft/ad/plugins/plugin_utils/_module_with_reboot.py
+++ b/ansible_collections/microsoft/ad/plugins/plugin_utils/_module_with_reboot.py
@@ -156,7 +156,7 @@ class ActionModuleWithReboot(ActionBase):
if self._ad_should_rerun(module_res) and not self._task.check_mode:
display.vv(
- "Module result has indicated it should rerun after a reboot has occured, rerunning"
+ "Module result has indicated it should rerun after a reboot has occurred, rerunning"
)
continue
@@ -169,3 +169,38 @@ class ActionModuleWithReboot(ActionBase):
result = merge_hash(result, module_res)
return self._ad_process_result(result)
+
+
+class DomainPromotionWithReboot(ActionModuleWithReboot):
+ """Domain Promotion Action Plugin with Auto Reboot.
+
+ An action plugin that runs a task that can promote the target Windows host
+ to a domain controller. It implements the common reboot handling for that
+ particular task.
+ """
+
+ def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
+ super().__init__(*args, **kwargs)
+ self._ran_once = False
+
+ def _ad_should_rerun(self, result: t.Dict[str, t.Any]) -> bool:
+ ran_once = self._ran_once
+ self._ran_once = True
+
+ if ran_once or not result.get("_do_action_reboot", False):
+ return False
+
+ if self._task.check_mode:
+ # Assume that on a rerun it will not have failed and that it
+ # ran successfully.
+ result["failed"] = False
+ result.pop("msg", None)
+ return False
+
+ else:
+ return True
+
+ def _ad_process_result(self, result: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]:
+ result.pop("_do_action_reboot", None)
+
+ return result
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/computer/tasks/tests.yml b/ansible_collections/microsoft/ad/tests/integration/targets/computer/tasks/tests.yml
index 2a403c3d5..3619df40b 100644
--- a/ansible_collections/microsoft/ad/tests/integration/targets/computer/tasks/tests.yml
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/computer/tasks/tests.yml
@@ -99,14 +99,41 @@
that:
- not remove_comp_again is changed
+- name: expect failure with invalid DN lookup entry - no name
+ computer:
+ name: MyComputer
+ state: present
+ delegates:
+ set:
+ - CN=krbtgt,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ - server: fail
+ register: invalid_dn_lookup_no_name
+ failed_when: >-
+ invalid_dn_lookup_no_name.msg != "Failed to find the AD object DNs for delegates.set. Identity entry does not contain the required name key."
+
+- name: expect failure with invalid DN lookup entry - extra keys
+ computer:
+ name: MyComputer
+ state: present
+ delegates:
+ add:
+ - name: name
+ invalid2: bar
+ invalid1: foo
+ register: invalid_dn_lookup_extra_keys
+ failed_when: >-
+ invalid_dn_lookup_extra_keys.msg != "Failed to find the AD object DNs for delegates.add. Identity entry for 'name' contains extra keys: 'invalid1', 'invalid2'."
+
- name: create computer with custom options
computer:
name: MyComputer
state: present
delegates:
+ lookup_failure_action: ignore
set:
- CN=krbtgt,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
- - CN=Administrator,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ - name: CN=Administrator,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ - CN=Missing,{{ setup_domain_info.output[0].defaultNamingContext }}
kerberos_encryption_types:
set:
- aes128
@@ -188,8 +215,11 @@
name: MyComputer
path: CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
delegates:
+ lookup_failure_action: warn
set:
- - CN=KRBTGT,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ - name: CN=KRBTGT,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ - ''
+ - CN=Missing,{{ setup_domain_info.output[0].defaultNamingContext }}
dns_hostname: other.domain.com
kerberos_encryption_types:
set:
@@ -236,6 +266,9 @@
assert:
that:
- change_comp is changed
+ - change_comp.warnings | length == 1
+ - >-
+ change_comp.warnings[0] == "Failed to find the AD object DNs for delegates.set. Ignoring invalid identities: 'CN=Missing," ~ setup_domain_info.output[0].defaultNamingContext ~ "'"
- change_comp_actual.objects[0].dnsHostName == 'other.domain.com'
- change_comp_actual.objects[0].location == 'comp location'
- change_comp_actual.objects[0]['msDS-SupportedEncryptionTypes'] == 20
@@ -247,6 +280,17 @@
- '"ADS_UF_TRUSTED_FOR_DELEGATION" not in change_comp_actual.objects[0].userAccountControl_AnsibleFlags'
- change_comp_delegates.output == ["krbtgt"]
+- name: fail with invalid delegate identity
+ computer:
+ name: MyComputer
+ path: CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ delegates:
+ set:
+ - CN=Missing,{{ setup_domain_info.output[0].defaultNamingContext }}
+ register: invalid_delegate
+ failed_when: >-
+ invalid_delegate.msg != "Failed to find the AD object DNs for delegates.set. Invalid identities: 'CN=Missing," ~ setup_domain_info.output[0].defaultNamingContext ~ "'"
+
- name: add and remove list options
computer:
name: MyComputer
@@ -254,9 +298,10 @@
delegates:
add:
- CN=Administrator,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
+ - ''
remove:
+ - name: ''
- CN=KRBTGT,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
- - CN=Missing,{{ setup_domain_info.output[0].defaultNamingContext }}
kerberos_encryption_types:
add:
- aes128
@@ -305,7 +350,6 @@
- CN=Administrator,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
remove:
- CN=KRBTGT,CN=Users,{{ setup_domain_info.output[0].defaultNamingContext }}
- - CN=Missing,{{ setup_domain_info.output[0].defaultNamingContext }}
kerberos_encryption_types:
add:
- aes128
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/README.md b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/README.md
new file mode 100644
index 000000000..f7bc08ff1
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/README.md
@@ -0,0 +1,36 @@
+# microsoft.ad.domain_child tests
+
+As these tests cannot be run in CI, this is a brief guide on how to run them locally.
+Run the following:
+
+```bash
+vagrant up
+
+ansible-playbook setup.yml
+```
+
+It is a good idea to create a snapshot of each host before running the tests.
+This allows you to reset the hosts back to a blank starting state if the tests need to be rerun.
+To create the snapshots, run the following:
+
+```bash
+virsh snapshot-create-as --domain "domain_child_PARENT" --name "pretest"
+virsh snapshot-create-as --domain "domain_child_CHILD" --name "pretest"
+virsh snapshot-create-as --domain "domain_child_TREE" --name "pretest"
+```
+
+To restore these snapshots, run the following:
+
+```bash
+virsh snapshot-revert --domain "domain_child_PARENT" --snapshotname "pretest" --running
+virsh snapshot-revert --domain "domain_child_CHILD" --snapshotname "pretest" --running
+virsh snapshot-revert --domain "domain_child_TREE" --snapshotname "pretest" --running
+```
+
+Once you are ready to run the tests, run the following:
+
+```bash
+ansible-playbook test.yml
+```
+
+Run `vagrant destroy` to remove the test VMs.
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/Vagrantfile b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/Vagrantfile
new file mode 100644
index 000000000..13af40311
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/Vagrantfile
@@ -0,0 +1,27 @@
+# -*- mode: ruby -*-
+# vi: set ft=ruby :
+
+require 'yaml'
+
+inventory = YAML.load_file('inventory.yml')
+
+Vagrant.configure("2") do |config|
+ inventory['all']['children'].each do |group,details|
+ details['hosts'].each do |server,host_details|
+ config.vm.define server do |srv|
+ srv.vm.box = host_details['vagrant_box']
+ srv.vm.hostname = server
+ srv.vm.network :private_network,
+ :ip => host_details['ansible_host'],
+ :libvirt__network_name => 'microsoft.ad',
+ :libvirt__domain_name => inventory['all']['vars']['domain_realm']
+
+ srv.vm.provider :libvirt do |l|
+ l.memory = 8192
+ l.cpus = 4
+ end
+ end
+ end
+ end
+end
+
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/aliases b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/aliases
new file mode 100644
index 000000000..435ff207d
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/aliases
@@ -0,0 +1,2 @@
+windows
+unsupported # can never run in CI, see README.md
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/ansible.cfg b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/ansible.cfg
new file mode 100644
index 000000000..cfedec78f
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/ansible.cfg
@@ -0,0 +1,4 @@
+[defaults]
+callback_result_format = yaml
+inventory = inventory.yml
+retry_files_enabled = False
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/inventory.yml b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/inventory.yml
new file mode 100644
index 000000000..e57f75516
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/inventory.yml
@@ -0,0 +1,28 @@
+all:
+ children:
+ windows:
+ hosts:
+ PARENT:
+ ansible_host: 192.168.11.10
+ vagrant_box: jborean93/WindowsServer2022
+ CHILD:
+ ansible_host: 192.168.11.11
+ vagrant_box: jborean93/WindowsServer2022
+ new_hostname: foo
+ child_domain_name: child.ad.test
+ TREE:
+ ansible_host: 192.168.11.12
+ vagrant_box: jborean93/WindowsServer2022
+ new_hostname: bar
+ child_domain_name: tree.test
+ vars:
+ ansible_port: 5985
+ ansible_connection: psrp
+
+ vars:
+ ansible_user: vagrant
+ ansible_password: vagrant
+ domain_username: vagrant-domain
+ domain_user_upn: '{{ domain_username }}@{{ domain_realm | upper }}'
+ domain_password: VagrantPass1
+ domain_realm: ad.test
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/setup.yml b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/setup.yml
new file mode 100644
index 000000000..de0843829
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/setup.yml
@@ -0,0 +1,71 @@
+- name: setup common Windows information
+ hosts: windows
+ gather_facts: no
+
+ tasks:
+ - name: get network connection names
+ ansible.windows.win_powershell:
+ parameters:
+ IPAddress: '{{ ansible_host }}'
+ script: |
+ param ($IPAddress)
+
+ $Ansible.Changed = $false
+
+ Get-CimInstance -ClassName Win32_NetworkAdapter -Filter "Netenabled='True'" |
+ ForEach-Object -Process {
+ $config = Get-CimInstance -ClassName Win32_NetworkAdapterConfiguration -Filter "Index='$($_.Index)'"
+ if ($config.IPAddress -contains $IPAddress) {
+ $_.NetConnectionID
+ }
+ }
+ register: connection_name
+
+- name: create parent forest
+ hosts: PARENT
+ gather_facts: no
+
+ tasks:
+ - name: set the DNS for the internal adapters to localhost
+ ansible.windows.win_dns_client:
+ adapter_names:
+ - '{{ connection_name.output[0] }}'
+ dns_servers:
+ - 127.0.0.1
+
+ - name: ensure domain exists and DC is promoted as a domain controller
+ microsoft.ad.domain:
+ dns_domain_name: '{{ domain_realm }}'
+ safe_mode_password: '{{ domain_password }}'
+ reboot: true
+
+ - name: create parent domain username
+ microsoft.ad.user:
+ name: '{{ domain_username }}'
+ upn: '{{ domain_user_upn }}'
+ description: '{{ domain_username }} Domain Account'
+ password: '{{ domain_password }}'
+ password_never_expires: yes
+ update_password: when_changed
+ groups:
+ add:
+ - Domain Admins
+ - Enterprise Admins
+ state: present
+
+- name: setup test host
+ hosts: CHILD,TREE
+ gather_facts: no
+
+ tasks:
+ - name: set DNS for the private adapter to point to the parent forest DC
+ ansible.windows.win_dns_client:
+ adapter_names:
+ - '{{ connection_name.output[0] }}'
+ dns_servers:
+ - '{{ hostvars["PARENT"]["ansible_host"] }}'
+
+ - name: install RSAT tools for debugging purposes
+ ansible.windows.win_feature:
+ name: RSAT-AD-PowerShell
+ state: present
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/cross_domain.yml b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/cross_domain.yml
new file mode 100644
index 000000000..611beedac
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/cross_domain.yml
@@ -0,0 +1,596 @@
+- name: create test object in parent domain with domain_username creds - check mode
+ microsoft.ad.user:
+ name: ParentUser1
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_username: '{{ domain_user_upn }}'
+ domain_password: '{{ domain_password }}'
+ register: user_with_creds1_check
+ check_mode: true
+ delegate_to: CHILD
+
+- name: get result of create test object in parent domain with domain_username creds - check mode
+ microsoft.ad.object_info:
+ identity: CN=ParentUser1,{{ parent_ou }}
+ register: user_with_creds1_check_actual
+ delegate_to: PARENT
+
+- name: assert create test object in parent domain with domain_username creds - check mode
+ assert:
+ that:
+ - user_with_creds1_check is changed
+ - user_with_creds1_check.distinguished_name == "CN=ParentUser1," ~ parent_ou
+ - user_with_creds1_check_actual.objects == []
+
+- name: create test object in parent domain with domain_username creds
+ microsoft.ad.user:
+ name: ParentUser1
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_username: '{{ domain_user_upn }}'
+ domain_password: '{{ domain_password }}'
+ register: user_with_creds1
+ delegate_to: CHILD
+
+- name: get result of create test object in parent domain with domain_username creds
+ microsoft.ad.object_info:
+ identity: CN=ParentUser1,{{ parent_ou }}
+ register: user_with_creds1_actual
+ delegate_to: PARENT
+
+- name: assert create test object in parent domain with domain_username creds
+ assert:
+ that:
+ - user_with_creds1 is changed
+ - user_with_creds1.distinguished_name == "CN=ParentUser1," ~ parent_ou
+ - user_with_creds1_actual.objects | count == 1
+ - user_with_creds1_actual.objects[0].ObjectGUID == user_with_creds1.object_guid
+ - user_with_creds1_actual.objects[0].DistinguishedName == user_with_creds1.distinguished_name
+
+- name: create test object in parent domain with domain_username creds - idempotent
+ microsoft.ad.user:
+ name: ParentUser1
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_username: '{{ domain_user_upn }}'
+ domain_password: '{{ domain_password }}'
+ register: user_with_creds1_again
+ delegate_to: CHILD
+
+- name: assert create test object in parent domain with domain_username creds - idempotent
+ assert:
+ that:
+ - not user_with_creds1_again is changed
+ - user_with_creds1_again.distinguished_name == user_with_creds1.distinguished_name
+ - user_with_creds1_again.object_guid == user_with_creds1.object_guid
+
+- name: create test object in parent domain with domain_credentials creds - check mode
+ microsoft.ad.user:
+ name: ParentUser2
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_credentials:
+ - username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ register: user_with_creds2_check
+ check_mode: true
+ delegate_to: CHILD
+
+- name: get result of create test object in parent domain with domain_credentials creds - check mode
+ microsoft.ad.object_info:
+ identity: CN=ParentUser2,{{ parent_ou }}
+ register: user_with_creds2_check_actual
+ delegate_to: PARENT
+
+- name: assert create test object in parent domain with domain_credentials creds - check mode
+ assert:
+ that:
+ - user_with_creds2_check is changed
+ - user_with_creds2_check.distinguished_name == "CN=ParentUser2," ~ parent_ou
+ - user_with_creds2_check_actual.objects == []
+
+- name: create test object in parent domain with domain_credentials creds
+ microsoft.ad.user:
+ name: ParentUser2
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_credentials:
+ - username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ register: user_with_creds2
+ delegate_to: CHILD
+
+- name: get result of create test object in parent domain with domain_credentials creds
+ microsoft.ad.object_info:
+ identity: CN=ParentUser2,{{ parent_ou }}
+ register: user_with_creds2_actual
+ delegate_to: PARENT
+
+- name: assert create test object in parent domain with domain_credentials creds
+ assert:
+ that:
+ - user_with_creds2 is changed
+ - user_with_creds2.distinguished_name == "CN=ParentUser2," ~ parent_ou
+ - user_with_creds2_actual.objects | count == 1
+ - user_with_creds2_actual.objects[0].ObjectGUID == user_with_creds2.object_guid
+ - user_with_creds2_actual.objects[0].DistinguishedName == user_with_creds2.distinguished_name
+
+- name: create test object in parent domain with domain_credentials creds - idempotent
+ microsoft.ad.user:
+ name: ParentUser2
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_credentials:
+ - username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ register: user_with_creds2_again
+ delegate_to: CHILD
+
+- name: assert create test object in parent domain with domain_credentials creds - idempotent
+ assert:
+ that:
+ - not user_with_creds2_again is changed
+ - user_with_creds2_again.distinguished_name == user_with_creds2.distinguished_name
+ - user_with_creds2_again.object_guid == user_with_creds2.object_guid
+
+- name: edit user with domain_username creds - check mode
+ microsoft.ad.user:
+ name: ParentUser1
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_username: '{{ domain_user_upn }}'
+ domain_password: '{{ domain_password }}'
+ description: User Description
+ spn:
+ set:
+ - HTTP/ParentUser1
+ attributes:
+ set:
+ comment: My comment
+ register: set_with_creds1_check
+ delegate_to: CHILD
+ check_mode: true
+
+- name: get result of set user with domain_username creds - check mode
+ microsoft.ad.object_info:
+ identity: '{{ user_with_creds1.object_guid }}'
+ properties:
+ - comment
+ - Description
+ - servicePrincipalName
+ register: set_with_creds1_check_actual
+ delegate_to: PARENT
+
+- name: assert set user with domain_username creds - check mode
+ assert:
+ that:
+ - set_with_creds1_check is changed
+ - set_with_creds1_check.distinguished_name == user_with_creds1.distinguished_name
+ - set_with_creds1_check.object_guid == user_with_creds1.object_guid
+ - set_with_creds1_check_actual.objects[0].Description == None
+ - set_with_creds1_check_actual.objects[0].DistinguishedName == user_with_creds1.distinguished_name
+ - set_with_creds1_check_actual.objects[0].Name == 'ParentUser1'
+ - set_with_creds1_check_actual.objects[0].ObjectGUID == user_with_creds1.object_guid
+ - set_with_creds1_check_actual.objects[0].comment == None
+ - set_with_creds1_check_actual.objects[0].servicePrincipalName == None
+
+- name: edit user with domain_username creds
+ microsoft.ad.user:
+ name: ParentUser1
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_username: '{{ domain_user_upn }}'
+ domain_password: '{{ domain_password }}'
+ description: User Description
+ spn:
+ set:
+ - HTTP/ParentUser1
+ attributes:
+ set:
+ comment: My comment
+ register: set_with_creds1
+ delegate_to: CHILD
+
+- name: get result of set user with domain_username creds
+ microsoft.ad.object_info:
+ identity: '{{ user_with_creds1.object_guid }}'
+ properties:
+ - comment
+ - Description
+ - servicePrincipalName
+ register: set_with_creds1_actual
+ delegate_to: PARENT
+
+- name: assert set user with domain_username creds
+ assert:
+ that:
+ - set_with_creds1 is changed
+ - set_with_creds1.distinguished_name == user_with_creds1.distinguished_name
+ - set_with_creds1.object_guid == user_with_creds1.object_guid
+ - set_with_creds1_actual.objects[0].Description == "User Description"
+ - set_with_creds1_actual.objects[0].DistinguishedName == user_with_creds1.distinguished_name
+ - set_with_creds1_actual.objects[0].Name == 'ParentUser1'
+ - set_with_creds1_actual.objects[0].ObjectGUID == user_with_creds1.object_guid
+ - set_with_creds1_actual.objects[0].comment == "My comment"
+ - set_with_creds1_actual.objects[0].servicePrincipalName == "HTTP/ParentUser1"
+
+- name: edit user with domain_credentials creds - check mode
+ microsoft.ad.user:
+ name: ParentUser2
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_credentials:
+ - username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ description: User Description
+ spn:
+ set:
+ - HTTP/ParentUser2
+ attributes:
+ set:
+ comment: My comment
+ register: set_with_creds2_check
+ delegate_to: CHILD
+ check_mode: true
+
+- name: get result of set user with domain_credentials creds - check mode
+ microsoft.ad.object_info:
+ identity: '{{ user_with_creds2.object_guid }}'
+ properties:
+ - comment
+ - Description
+ - servicePrincipalName
+ register: set_with_creds2_check_actual
+ delegate_to: PARENT
+
+- name: assert set user with domain_credentials creds - check mode
+ assert:
+ that:
+ - set_with_creds2_check is changed
+ - set_with_creds2_check.distinguished_name == user_with_creds2.distinguished_name
+ - set_with_creds2_check.object_guid == user_with_creds2.object_guid
+ - set_with_creds2_check_actual.objects[0].Description == None
+ - set_with_creds2_check_actual.objects[0].DistinguishedName == user_with_creds2.distinguished_name
+ - set_with_creds2_check_actual.objects[0].Name == 'ParentUser2'
+ - set_with_creds2_check_actual.objects[0].ObjectGUID == user_with_creds2.object_guid
+ - set_with_creds2_check_actual.objects[0].comment == None
+ - set_with_creds2_check_actual.objects[0].servicePrincipalName == None
+
+- name: edit user with domain_credentials creds
+ microsoft.ad.user:
+ name: ParentUser2
+ path: '{{ parent_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_server: '{{ domain_realm }}'
+ domain_credentials:
+ - username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ description: User Description
+ spn:
+ set:
+ - HTTP/ParentUser2
+ attributes:
+ set:
+ comment: My comment
+ register: set_with_creds2
+ delegate_to: CHILD
+
+- name: get result of set user with domain_credentials creds
+ microsoft.ad.object_info:
+ identity: '{{ user_with_creds2.object_guid }}'
+ properties:
+ - comment
+ - Description
+ - servicePrincipalName
+ register: set_with_creds2_actual
+ delegate_to: PARENT
+
+- name: assert set user with domain_credentials creds
+ assert:
+ that:
+ - set_with_creds2 is changed
+ - set_with_creds2.distinguished_name == user_with_creds2.distinguished_name
+ - set_with_creds2.object_guid == user_with_creds2.object_guid
+ - set_with_creds2_actual.objects[0].Description == "User Description"
+ - set_with_creds2_actual.objects[0].DistinguishedName == user_with_creds2.distinguished_name
+ - set_with_creds2_actual.objects[0].Name == 'ParentUser2'
+ - set_with_creds2_actual.objects[0].ObjectGUID == user_with_creds2.object_guid
+ - set_with_creds2_actual.objects[0].comment == "My comment"
+ - set_with_creds2_actual.objects[0].servicePrincipalName == "HTTP/ParentUser2"
+
+- name: set value with DN lookup and creds
+ microsoft.ad.group:
+ name: Group-CHILD
+ path: '{{ child_ou }}'
+ state: present
+ members:
+ add:
+ - User-CHILD
+ - name: User-PARENT
+ server: '{{ domain_realm }}'
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ register: lookup_with_creds
+ delegate_to: CHILD
+
+- name: get result of set value with DN lookup and creds
+ microsoft.ad.object_info:
+ identity: '{{ lookup_with_creds.object_guid }}'
+ properties:
+ - member
+ register: lookup_with_creds_actual
+ delegate_to: CHILD
+
+- name: assert set value with DN lookup and creds
+ assert:
+ that:
+ - lookup_with_creds is changed
+ - parent_user in lookup_with_creds_actual.objects[0].member
+ - child_user in lookup_with_creds_actual.objects[0].member
+
+- name: set value with DN lookup and creds - idempotent
+ microsoft.ad.group:
+ name: Group-CHILD
+ path: '{{ child_ou }}'
+ state: present
+ members:
+ add:
+ - User-CHILD
+ - name: User-PARENT
+ server: '{{ domain_realm }}'
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ register: lookup_with_creds_again
+ delegate_to: CHILD
+
+- name: assert set value with DN lookup and creds - idempotent
+ assert:
+ that:
+ - not lookup_with_creds_again is changed
+
+- name: create user group with DN lookup and creds - check mode
+ microsoft.ad.user:
+ name: ChildUser1
+ path: '{{ child_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ groups:
+ add:
+ - Group-CHILD
+ - name: Group-PARENT
+ server: '{{ domain_realm }}'
+ register: new_user_with_group_check
+ delegate_to: CHILD
+ check_mode: true
+
+- name: get result of create user group with DN lookup and creds - check mode
+ microsoft.ad.object_info:
+ identity: '{{ new_user_with_group_check.distinguished_name }}'
+ properties:
+ - memberOf
+ register: new_user_with_group_check_actual
+ delegate_to: CHILD
+
+- name: assert set value with DN lookup and creds
+ assert:
+ that:
+ - new_user_with_group_check is changed
+ - new_user_with_group_check.distinguished_name == "CN=ChildUser1," ~ child_ou
+ - new_user_with_group_check_actual.objects == []
+
+- name: create user group with DN lookup and creds
+ microsoft.ad.user:
+ name: ChildUser1
+ path: '{{ child_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ groups:
+ add:
+ - Group-CHILD
+ - name: Group-PARENT
+ server: '{{ domain_realm }}'
+ register: new_user_with_group
+ delegate_to: CHILD
+
+- name: replicate group membership of parent group to child domain after adding group
+ ansible.windows.win_command: >-
+ repadmin.exe
+ /replsingleobj
+ {{ hostvars["CHILD"]["new_hostname"] }}.{{ hostvars["CHILD"]["child_domain_name"] }}
+ parent.{{ domain_realm }}
+ {{ parent_group }}
+ delegate_to: CHILD
+
+- name: get result of create user group with DN lookup and creds
+ microsoft.ad.object_info:
+ identity: '{{ new_user_with_group.distinguished_name }}'
+ properties:
+ - memberOf
+ register: new_user_with_group_actual
+ delegate_to: CHILD
+
+- name: assert create user group with DN lookup and creds
+ assert:
+ that:
+ - new_user_with_group is changed
+ - new_user_with_group.distinguished_name == "CN=ChildUser1," ~ child_ou
+ - new_user_with_group_actual.objects | count == 1
+ - new_user_with_group_actual.objects[0].DistinguishedName == new_user_with_group.distinguished_name
+ - >-
+ "CN=Group-CHILD," ~ child_ou in new_user_with_group_actual.objects[0].memberOf
+ - >-
+ "CN=Group-PARENT," ~ parent_ou in new_user_with_group_actual.objects[0].memberOf
+
+- name: create user group with DN lookup and creds - idempotent
+ microsoft.ad.user:
+ name: ChildUser1
+ path: '{{ child_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ groups:
+ add:
+ - Group-CHILD
+ - name: Group-PARENT
+ server: '{{ domain_realm }}'
+ register: new_user_with_group_again
+ delegate_to: CHILD
+
+- name: assert create user group with DN lookup and creds - idempotent
+ assert:
+ that:
+ - not new_user_with_group_again is changed
+
+- name: remove user group with DN lookup and creds - check mode
+ microsoft.ad.user:
+ name: ChildUser1
+ path: '{{ child_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ groups:
+ remove:
+ - Group-CHILD
+ - name: Group-PARENT
+ server: '{{ domain_realm }}'
+ register: remove_user_with_group_check
+ delegate_to: CHILD
+ check_mode: true
+
+- name: get result of remove user group with DN lookup and creds - check mode
+ microsoft.ad.object_info:
+ identity: '{{ remove_user_with_group_check.distinguished_name }}'
+ properties:
+ - memberOf
+ register: remove_user_with_group_check_actual
+ delegate_to: CHILD
+
+- name: assert remove user group with DN lookup and creds - check mode
+ assert:
+ that:
+ - remove_user_with_group_check is changed
+ - remove_user_with_group_check.distinguished_name == "CN=ChildUser1," ~ child_ou
+ - >-
+ "CN=Group-CHILD," ~ child_ou in remove_user_with_group_check_actual.objects[0].memberOf
+ - >-
+ "CN=Group-PARENT," ~ parent_ou in remove_user_with_group_check_actual.objects[0].memberOf
+
+- name: remove user group with DN lookup and creds
+ microsoft.ad.user:
+ name: ChildUser1
+ path: '{{ child_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ groups:
+ remove:
+ - Group-CHILD
+ - name: Group-PARENT
+ server: '{{ domain_realm }}'
+ register: remove_user_with_group
+ delegate_to: CHILD
+
+- name: replicate group membership of parent group to child domain after removing group
+ ansible.windows.win_command: >-
+ repadmin.exe
+ /replsingleobj
+ {{ hostvars["CHILD"]["new_hostname"] }}.{{ hostvars["CHILD"]["child_domain_name"] }}
+ parent.{{ domain_realm }}
+ {{ parent_group }}
+ delegate_to: CHILD
+
+- name: get result of remove user group with DN lookup and creds
+ microsoft.ad.object_info:
+ identity: '{{ remove_user_with_group.distinguished_name }}'
+ properties:
+ - memberOf
+ register: remove_user_with_group_actual
+ delegate_to: CHILD
+
+- name: assert remove user group with DN lookup and creds
+ assert:
+ that:
+ - remove_user_with_group is changed
+ - remove_user_with_group.distinguished_name == "CN=ChildUser1," ~ child_ou
+ - remove_user_with_group_actual.objects | count == 1
+ - remove_user_with_group_actual.objects[0].DistinguishedName == new_user_with_group.distinguished_name
+ - remove_user_with_group_actual.objects[0].memberOf == None
+
+- name: remove user group with DN lookup and creds - idempotent
+ microsoft.ad.user:
+ name: ChildUser1
+ path: '{{ child_ou }}'
+ state: present
+ password: '{{ domain_password }}'
+ update_password: when_changed
+ domain_credentials:
+ - name: '{{ domain_realm }}'
+ username: '{{ domain_user_upn }}'
+ password: '{{ domain_password }}'
+ groups:
+ remove:
+ - Group-CHILD
+ - name: Group-PARENT
+ server: '{{ domain_realm }}'
+ register: remove_user_with_group_again
+ delegate_to: CHILD
+
+- name: assert remove user group with DN lookup and creds - idempotent
+ assert:
+ that:
+ - not remove_user_with_group_again is changed
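The cross-domain pattern exercised by the tasks above reduces to the following sketch: an identity dictionary's I(server) key selects which domain controller performs the DN lookup, and a I(domain_credentials) entry whose I(name) matches that server supplies the credentials for it. The group, user, realm, and credential variables here are placeholders:

```yaml
- name: add a member from another domain using per-server credentials
  microsoft.ad.group:
    name: LocalGroup
    state: present
    members:
      add:
        # looked up in the domain of the managed host
        - LocalUser
        # looked up against the parent domain server named below
        - name: RemoteUser
          server: parent.ad.test
    domain_credentials:
      # the name here matches the server key used in the identity above
      - name: parent.ad.test
        username: '{{ parent_admin_upn }}'
        password: '{{ parent_admin_password }}'
```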
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_child.yml b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_child.yml
new file mode 100644
index 000000000..40f4f2eda
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_child.yml
@@ -0,0 +1,98 @@
+- name: create child domain - check mode
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ reboot: true
+ register: to_domain_check
+ check_mode: true
+
+- name: get result of promote to domain - check mode
+ ansible.windows.win_powershell:
+ script: '{{ get_role_script }}'
+ register: to_domain_check_actual
+
+- name: assert promote to domain - check mode
+ assert:
+ that:
+ - to_domain_check is changed
+ - to_domain_check_actual.output[0]["Domain"] == None
+ - to_domain_check_actual.output[0]["DomainRole"] == "StandaloneServer"
+
+- name: change hostname to have a pending change before promotion
+ ansible.windows.win_hostname:
+ name: '{{ new_hostname }}'
+
+- name: create child domain with pending reboot
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ reboot: true
+ register: to_domain
+
+- name: get result of promote to domain with pending reboot
+ ansible.windows.win_powershell:
+ script: '{{ get_role_script }}'
+ register: to_domain_actual
+
+- name: assert promote to domain with pending reboot
+ assert:
+ that:
+ - to_domain is changed
+ - to_domain_actual.output[0]["Domain"] == child_domain_name
+ - to_domain_actual.output[0]["DomainRole"] == "PrimaryDC"
+ - to_domain_actual.output[0]["HostName"] == new_hostname | upper
+
+- name: create child domain - idempotent
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ reboot: true
+ register: to_domain_again
+
+- name: assert create child domain - idempotent
+ assert:
+ that:
+ - not to_domain_again is changed
+
+- name: fail to change domain of host
+ domain_child:
+ dns_domain_name: bogus.local
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ reboot: true
+ register: change_domain_fail
+ failed_when:
+ - change_domain_fail.msg != "Host is already a domain controller in another domain " ~ child_domain_name
+
+- name: fail with parent_domain_name with domain_type mode
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ parent_domain_name: other
+ reboot: true
+ register: invalid_parent
+ failed_when:
+ - invalid_parent.msg != "parent_domain_name must not be set when domain_type=child"
+
+- name: fail with invalid domain_mode
+ domain_child:
+ dns_domain_name: bogus.local
+ parent_domain_name: '{{ domain_realm }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ domain_mode: Invalid
+ reboot: true
+ register: change_domain_invalid_mode
+ failed_when:
+ - >-
+ change_domain_invalid_mode.msg.startswith("The parameter 'domain_mode' does not accept 'Invalid', please use one of: ")
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_tree.yml b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_tree.yml
new file mode 100644
index 000000000..01e5e06b5
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/tasks/main_tree.yml
@@ -0,0 +1,91 @@
+- name: create test folders
+ ansible.windows.win_file:
+ path: 'C:\ansible_testing\{{ item }}'
+ state: directory
+ loop:
+ - DB
+ - LogPath
+ - SysVol
+
+- name: create tree domain - check mode
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ parent_domain_name: '{{ domain_realm }}'
+ domain_type: tree
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ domain_mode: WinThreshold
+ database_path: C:\ansible_testing\DB
+ log_path: C:\ansible_testing\LogPath
+ sysvol_path: C:\ansible_testing\SysVol
+ reboot: true
+ register: to_tree_check
+ check_mode: true
+
+- name: get result of promote to tree domain - check mode
+ ansible.windows.win_powershell:
+ script: '{{ get_role_script }}'
+ register: to_tree_check_actual
+
+- name: assert promote to domain - check mode
+ assert:
+ that:
+ - to_tree_check is changed
+ - not to_tree_check.reboot_required
+ - to_tree_check_actual.output[0]["Domain"] == None
+ - to_tree_check_actual.output[0]["DomainRole"] == "StandaloneServer"
+
+- name: change hostname to have a pending change before promotion
+ ansible.windows.win_hostname:
+ name: '{{ new_hostname }}'
+
+- name: create tree domain with pending reboot
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ parent_domain_name: '{{ domain_realm }}'
+ domain_type: tree
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ domain_mode: WinThreshold
+ database_path: C:\ansible_testing\DB
+ log_path: C:\ansible_testing\LogPath
+ sysvol_path: C:\ansible_testing\SysVol
+ reboot: true
+ register: to_tree
+
+- name: get result of promote to domain with pending reboot
+ ansible.windows.win_powershell:
+ script: '{{ get_role_script }}'
+ register: to_tree_actual
+
+- name: assert promote to domain with pending reboot
+ assert:
+ that:
+ - to_tree is changed
+ - not to_tree.reboot_required
+ - to_tree_actual.output[0]["Domain"] == child_domain_name
+ - to_tree_actual.output[0]["DomainRole"] == "PrimaryDC"
+ - to_tree_actual.output[0]["HostName"] == new_hostname | upper
+
+- name: create tree domain - idempotent
+ domain_child:
+ dns_domain_name: '{{ child_domain_name }}'
+ parent_domain_name: '{{ domain_realm }}'
+ domain_type: tree
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ safe_mode_password: '{{ domain_password }}'
+ domain_mode: WinThreshold
+ database_path: C:\ansible_testing\DB
+ log_path: C:\ansible_testing\LogPath
+ sysvol_path: C:\ansible_testing\SysVol
+ reboot: true
+ register: to_tree_again
+
+- name: assert create tree domain - idempotent
+ assert:
+ that:
+ - not to_tree_again is changed
+ - not to_tree_again.reboot_required
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/test.yml b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/test.yml
new file mode 100644
index 000000000..ba936e1d9
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/domain_child/test.yml
@@ -0,0 +1,146 @@
+- name: ensure time is in sync
+ hosts: windows
+ gather_facts: false
+ tasks:
+ - name: get current host datetime
+ command: date +%s
+ changed_when: False
+ delegate_to: localhost
+ run_once: True
+ register: local_time
+
+ - name: set datetime on Windows
+ ansible.windows.win_powershell:
+ parameters:
+ SecondsSinceEpoch: '{{ local_time.stdout | trim }}'
+ script: |
+ param($SecondsSinceEpoch)
+
+ $utc = [System.DateTimeKind]::Utc
+ $epoch = New-Object -TypeName System.DateTime -ArgumentList 1970, 1, 1, 0, 0, 0, 0, $utc
+ $date = $epoch.AddSeconds($SecondsSinceEpoch)
+
+ Set-Date -Date $date
+
+ - name: set common test vars
+ ansible.builtin.set_fact:
+ get_role_script: |
+ $Ansible.Changed = $false
+ Get-CimInstance -ClassName Win32_ComputerSystem -Property Domain, DomainRole, PartOfDomain |
+ Select-Object -Property @{
+ N = 'Domain'
+ E = {
+ if ($_.PartOfDomain) {
+ $_.Domain
+ }
+ else {
+ $null
+ }
+ }
+ }, @{
+ N = 'DomainRole'
+ E = {
+ switch ($_.DomainRole) {
+ 0 { "StandaloneWorkstation" }
+ 1 { "MemberWorkstation" }
+ 2 { "StandaloneServer" }
+ 3 { "MemberServer" }
+ 4 { "BackupDC" }
+ 5 { "PrimaryDC" }
+ }
+ }
+ }, @{
+ N = 'HostName'
+ E = { $env:COMPUTERNAME }
+ }
+
+- name: run microsoft.ad.domain_child child tests
+ hosts: CHILD
+ gather_facts: false
+
+ tasks:
+ - name: check domain status to see if test will run
+ ansible.windows.win_powershell:
+ script: '{{ get_role_script }}'
+ register: domain_status
+
+ - ansible.builtin.include_tasks: tasks/main_child.yml
+ when: domain_status.output[0].Domain != child_domain_name
+
+- name: run microsoft.ad.domain_child tree tests
+ hosts: TREE
+ gather_facts: false
+
+ tasks:
+ - name: check domain status to see if test will run
+ ansible.windows.win_powershell:
+ script: '{{ get_role_script }}'
+ register: domain_status
+
+ - ansible.builtin.include_tasks: tasks/main_tree.yml
+ when: domain_status.output[0].Domain != child_domain_name
+
+- name: run extra tests to test out cross domain functionality in other modules
+ hosts: localhost
+ gather_facts: false
+
+ tasks:
+ - name: create test OU in each domain
+ microsoft.ad.ou:
+ name: Ansible-{{ item }}
+ state: present
+ delegate_to: '{{ item }}'
+ register: ou_info
+ loop:
+ - PARENT
+ - CHILD
+
+ - block:
+ - name: set facts for each OU DN
+ ansible.builtin.set_fact:
+ parent_ou: '{{ ou_info.results[0].distinguished_name }}'
+ child_ou: '{{ ou_info.results[1].distinguished_name }}'
+
+ - name: create test users
+ microsoft.ad.user:
+ name: User-{{ item }}
+ state: present
+ password: '{{ domain_password }}'
+ path: '{{ {"PARENT": parent_ou, "CHILD": child_ou}[item] }}'
+ register: user_info
+ delegate_to: '{{ item }}'
+ loop:
+ - PARENT
+ - CHILD
+
+ - name: create test groups
+ microsoft.ad.group:
+ name: Group-{{ item }}
+ state: present
+ path: '{{ {"PARENT": parent_ou, "CHILD": child_ou}[item] }}'
+ scope: universal
+ register: group_info
+ delegate_to: '{{ item }}'
+ loop:
+ - PARENT
+ - CHILD
+
+ - name: set facts for each test user and group DN
+ ansible.builtin.set_fact:
+ parent_user: '{{ user_info.results[0].distinguished_name }}'
+ parent_group: '{{ group_info.results[0].distinguished_name }}'
+ child_user: '{{ user_info.results[1].distinguished_name }}'
+ child_group: '{{ group_info.results[1].distinguished_name }}'
+
+ - name: run cross domain tests
+ ansible.builtin.import_tasks: tasks/cross_domain.yml
+
+ always:
+ - name: remove test OU in each domain
+ microsoft.ad.ou:
+ name: Ansible-{{ item }}
+ state: absent
+ delegate_to: '{{ item }}'
+ loop:
+ - PARENT
+ - CHILD
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/group/tasks/tests.yml b/ansible_collections/microsoft/ad/tests/integration/targets/group/tasks/tests.yml
index b40041b0d..958398a42 100644
--- a/ansible_collections/microsoft/ad/tests/integration/targets/group/tasks/tests.yml
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/group/tasks/tests.yml
@@ -107,7 +107,8 @@
- my_user_2
- another-user
register: fail_invalid_members
- failed_when: 'fail_invalid_members.msg != "Failed to find the following ad objects for group members: ''fake-user'', ''another-user''"'
+ failed_when: >-
+ fail_invalid_members.msg != "Failed to find the AD object DNs for members.add. Invalid identities: 'fake-user', 'another-user'"
- name: add members to a group - check
group:
@@ -141,7 +142,7 @@
members:
add:
- my_user_1
- - '{{ test_users.results[2].sid }}'
+ - name: '{{ test_users.results[2].sid }}'
- MyGroup2-ReallyLongGroupNameHere
register: add_member
@@ -376,7 +377,8 @@
- my_user_2
- another-user
register: fail_invalid_members
- failed_when: 'fail_invalid_members.msg != "Failed to find the following ad objects for group members: ''fake-user'', ''another-user''"'
+ failed_when: >-
+ fail_invalid_members.msg != "Failed to find the AD object DNs for members.add. Invalid identities: 'fake-user', 'another-user'"
- name: create group with custom options
group:
@@ -388,7 +390,8 @@
scope: domainlocal
category: distribution
homepage: www.ansible.com
- managed_by: Domain Admins
+ managed_by:
+ name: Domain Admins
members:
add:
- my_user_1
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/membership/tasks/main.yml b/ansible_collections/microsoft/ad/tests/integration/targets/membership/tasks/main.yml
index f66985da9..0060179e9 100644
--- a/ansible_collections/microsoft/ad/tests/integration/targets/membership/tasks/main.yml
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/membership/tasks/main.yml
@@ -1,7 +1,7 @@
- set_fact:
get_result_script: |
$Ansible.Changed = $false
- $cs = Get-CimInstance -ClassName Win32_ComputerSystem -Property Domain, PartOfDomain, Workgroup
+ $cs = Get-CimInstance -ClassName Win32_ComputerSystem -Property DNSHostName, Domain, PartOfDomain, Workgroup
$domainName = if ($cs.PartOfDomain) {
try {
[System.DirectoryServices.ActiveDirectory.Domain]::GetComputerDomain().Name
@@ -15,7 +15,8 @@
}
[PSCustomObject]@{
- HostName = $env:COMPUTERNAME
+ HostName = $cs.DNSHostName
+ NetbiosName = $env:COMPUTERNAME
PartOfDomain = $cs.PartOfDomain
DnsDomainName = $domainName
WorkgroupName = $cs.Workgroup
@@ -23,8 +24,13 @@
get_ad_result_script: |
$Ansible.Changed = $false
- Get-ADComputer -Filter { Name -ne 'DC' } -Properties DistinguishedName, Name, Enabled |
- Select-Object -Property DistinguishedName, Name, Enabled
+ Get-ADComputer -Filter { Name -ne 'DC' } -Properties DistinguishedName, DNSHostName, Name, Enabled |
+ Select-Object -Property @(
+ 'DistinguishedName'
+ @{ N = 'DNSHostName'; E = { $_.DNSHostName.Substring(0, $_.DNSHostName.IndexOf('.')) } }
+ 'Name'
+ 'Enabled'
+ )
- name: join domain invalid OU
membership:
@@ -65,6 +71,7 @@
- join_domain_check.reboot_required == False
- join_domain_check_actual.output[0]["DnsDomainName"] == None
- join_domain_check_actual.output[0]["HostName"] == "TEST"
+ - join_domain_check_actual.output[0]["NetbiosName"] == "TEST"
- join_domain_check_actual.output[0]["PartOfDomain"] == False
- join_domain_check_actual.output[0]["WorkgroupName"] == "WORKGROUP"
@@ -95,9 +102,11 @@
- join_domain.reboot_required == False
- join_domain_actual.output[0]["DnsDomainName"] == domain_realm
- join_domain_actual.output[0]["HostName"] == "TEST"
+ - join_domain_actual.output[0]["NetbiosName"] == "TEST"
- join_domain_actual.output[0]["PartOfDomain"] == True
- join_domain_actual.output[0]["WorkgroupName"] == None
- join_domain_ad_actual.output | length == 1
+ - join_domain_ad_actual.output[0]["DNSHostName"] == "TEST"
- join_domain_ad_actual.output[0]["Name"] == "TEST"
- join_domain_ad_actual.output[0]["Enabled"] == True
@@ -132,7 +141,7 @@
dns_domain_name: '{{ domain_realm }}'
domain_admin_user: '{{ domain_user_upn }}'
domain_admin_password: '{{ domain_password }}'
- hostname: OTHER
+ hostname: TEST1-long-HOSTNAME1
state: domain
reboot: true
register: rename_host_domain_check
@@ -156,9 +165,11 @@
- rename_host_domain_check.reboot_required == False
- rename_host_domain_check_actual.output[0]["DnsDomainName"] == domain_realm
- rename_host_domain_check_actual.output[0]["HostName"] == "TEST"
+ - rename_host_domain_check_actual.output[0]["NetbiosName"] == "TEST"
- rename_host_domain_check_actual.output[0]["PartOfDomain"] == True
- rename_host_domain_check_actual.output[0]["WorkgroupName"] == None
- rename_host_domain_check_ad_actual.output | length == 1
+ - rename_host_domain_check_ad_actual.output[0]["DNSHostName"] == "TEST"
- rename_host_domain_check_ad_actual.output[0]["Name"] == "TEST"
- rename_host_domain_check_ad_actual.output[0]["Enabled"] == True
@@ -167,7 +178,7 @@
dns_domain_name: '{{ domain_realm }}'
domain_admin_user: '{{ domain_user_upn }}'
domain_admin_password: '{{ domain_password }}'
- hostname: OTHER
+ hostname: TEST1-long-HOSTNAME1
state: domain
reboot: true
register: rename_host_domain
@@ -183,19 +194,109 @@
delegate_to: DC
register: rename_host_domain_ad_actual
-- name: assert join domain
+- name: assert rename hostname of domain joined host
assert:
that:
- rename_host_domain is changed
- rename_host_domain.reboot_required == False
- rename_host_domain_actual.output[0]["DnsDomainName"] == domain_realm
- - rename_host_domain_actual.output[0]["HostName"] == "OTHER"
+ - rename_host_domain_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME1"
+ - rename_host_domain_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
- rename_host_domain_actual.output[0]["PartOfDomain"] == True
- rename_host_domain_actual.output[0]["WorkgroupName"] == None
- rename_host_domain_ad_actual.output | length == 1
- - rename_host_domain_ad_actual.output[0]["Name"] == "OTHER"
+ - rename_host_domain_ad_actual.output[0]["DNSHostName"] == "TEST1-long-HOSTNAME1"
+ - rename_host_domain_ad_actual.output[0]["Name"] == "TEST1-LONG-HOST"
- rename_host_domain_ad_actual.output[0]["Enabled"] == True
+- name: rename hostname of domain joined host - idempotent
+ membership:
+ dns_domain_name: '{{ domain_realm }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ hostname: TEST1-long-HOSTNAME1
+ state: domain
+ reboot: true
+ register: rename_host_domain_again
+
+- name: assert rename hostname of domain joined host - idempotent
+ assert:
+ that:
+ - not rename_host_domain_again is changed
+
+- name: rename hostname of domain joined host netbios portion - check mode
+ membership:
+ dns_domain_name: '{{ domain_realm }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ hostname: TEST2-long-HOSTNAME1
+ state: domain
+ reboot: true
+ register: rename_host_domain_netbios_check
+ check_mode: True
+
+- name: get result of rename hostname of domain joined host netbios portion - check mode
+ ansible.windows.win_powershell:
+ script: '{{ get_result_script }}'
+ register: rename_host_domain_netbios_check_actual
+
+- name: get ad result of rename hostname of domain joined host netbios portion - check mode
+ ansible.windows.win_powershell:
+ script: '{{ get_ad_result_script }}'
+ delegate_to: DC
+ register: rename_host_domain_netbios_check_ad_actual
+
+- name: assert rename hostname of domain joined host netbios portion - check mode
+ assert:
+ that:
+ - rename_host_domain_netbios_check is changed
+ - rename_host_domain_netbios_check.reboot_required == False
+ - rename_host_domain_netbios_check_actual.output[0]["DnsDomainName"] == domain_realm
+ - rename_host_domain_netbios_check_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME1"
+ - rename_host_domain_netbios_check_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
+ - rename_host_domain_netbios_check_actual.output[0]["PartOfDomain"] == True
+ - rename_host_domain_netbios_check_actual.output[0]["WorkgroupName"] == None
+ - rename_host_domain_netbios_check_ad_actual.output | length == 1
+ - rename_host_domain_netbios_check_ad_actual.output[0]["DNSHostName"] == "TEST1-long-HOSTNAME1"
+ - rename_host_domain_netbios_check_ad_actual.output[0]["Name"] == "TEST1-LONG-HOST"
+ - rename_host_domain_netbios_check_ad_actual.output[0]["Enabled"] == True
+
+- name: rename hostname of domain joined host netbios portion
+ membership:
+ dns_domain_name: '{{ domain_realm }}'
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ hostname: TEST2-long-HOSTNAME1
+ state: domain
+ reboot: true
+ register: rename_host_domain_netbios
+
+- name: get result of rename hostname of domain joined host netbios portion
+ ansible.windows.win_powershell:
+ script: '{{ get_result_script }}'
+ register: rename_host_domain_netbios_actual
+
+- name: get ad result of rename hostname of domain joined host netbios portion
+ ansible.windows.win_powershell:
+ script: '{{ get_ad_result_script }}'
+ delegate_to: DC
+ register: rename_host_domain_netbios_ad_actual
+
+- name: assert rename hostname of domain joined host
+ assert:
+ that:
+ - rename_host_domain_netbios is changed
+ - rename_host_domain_netbios.reboot_required == False
+ - rename_host_domain_netbios_actual.output[0]["DnsDomainName"] == domain_realm
+ - rename_host_domain_netbios_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME1"
+ - rename_host_domain_netbios_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
+ - rename_host_domain_netbios_actual.output[0]["PartOfDomain"] == True
+ - rename_host_domain_netbios_actual.output[0]["WorkgroupName"] == None
+ - rename_host_domain_netbios_ad_actual.output | length == 1
+ - rename_host_domain_netbios_ad_actual.output[0]["DNSHostName"] == "TEST2-long-HOSTNAME1"
+ - rename_host_domain_netbios_ad_actual.output[0]["Name"] == "TEST2-LONG-HOST"
+ - rename_host_domain_netbios_ad_actual.output[0]["Enabled"] == True
+
- name: change domain to workgroup - check mode
membership:
workgroup_name: TEST
@@ -222,11 +323,13 @@
- to_workgroup_check is changed
- to_workgroup_check.reboot_required == True
- to_workgroup_check_actual.output[0]["DnsDomainName"] == domain_realm
- - to_workgroup_check_actual.output[0]["HostName"] == "OTHER"
+ - to_workgroup_check_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME1"
+ - to_workgroup_check_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
- to_workgroup_check_actual.output[0]["PartOfDomain"] == True
- to_workgroup_check_actual.output[0]["WorkgroupName"] == None
- to_workgroup_check_ad_actual.output | length == 1
- - to_workgroup_check_ad_actual.output[0]["Name"] == "OTHER"
+ - to_workgroup_check_ad_actual.output[0]["DNSHostName"] == "TEST2-long-HOSTNAME1"
+ - to_workgroup_check_ad_actual.output[0]["Name"] == "TEST2-LONG-HOST"
- to_workgroup_check_ad_actual.output[0]["Enabled"] == True
- name: change domain to workgroup
@@ -238,7 +341,7 @@
register: to_workgroup
- set_fact:
- local_user: OTHER\{{ ansible_user }}
+ local_user: TEST2-LONG-HOST\{{ ansible_user }}
- ansible.windows.win_reboot:
when: to_workgroup.reboot_required
@@ -266,16 +369,18 @@
- to_workgroup is changed
- to_workgroup.reboot_required == True
- to_workgroup_actual.output[0]["DnsDomainName"] == None
- - to_workgroup_actual.output[0]["HostName"] == "OTHER"
+ - to_workgroup_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME1"
+ - to_workgroup_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
- to_workgroup_actual.output[0]["PartOfDomain"] == False
- to_workgroup_actual.output[0]["WorkgroupName"] == "TEST"
- to_workgroup_ad_actual.output | length == 1
- - to_workgroup_ad_actual.output[0]["Name"] == "OTHER"
+ - to_workgroup_ad_actual.output[0]["DNSHostName"] == "TEST2-long-HOSTNAME1"
+ - to_workgroup_ad_actual.output[0]["Name"] == "TEST2-LONG-HOST"
- to_workgroup_ad_actual.output[0]["Enabled"] == False
- name: remove orphaned AD account for later tests
microsoft.ad.computer:
- name: OTHER
+ name: TEST2-LONG-HOST
state: absent
delegate_to: DC
@@ -315,7 +420,8 @@
- change_workgroup_check is changed
- change_workgroup_check.reboot_required == False
- change_workgroup_check_actual.output[0]["DnsDomainName"] == None
- - change_workgroup_check_actual.output[0]["HostName"] == "OTHER"
+ - change_workgroup_check_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME1"
+ - change_workgroup_check_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
- change_workgroup_check_actual.output[0]["PartOfDomain"] == False
- change_workgroup_check_actual.output[0]["WorkgroupName"] == "TEST"
@@ -339,7 +445,8 @@
- change_workgroup is changed
- change_workgroup.reboot_required == False
- change_workgroup_actual.output[0]["DnsDomainName"] == None
- - change_workgroup_actual.output[0]["HostName"] == "OTHER"
+ - change_workgroup_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME1"
+ - change_workgroup_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
- change_workgroup_actual.output[0]["PartOfDomain"] == False
- change_workgroup_actual.output[0]["WorkgroupName"] == "TEST2"
@@ -350,7 +457,7 @@
domain_admin_password: '{{ domain_password }}'
state: workgroup
reboot: true
- hostname: FOO
+ hostname: TEST1-long-HOSTNAME2
register: change_hostname_check
check_mode: true
@@ -365,7 +472,8 @@
- change_hostname_check is changed
- change_hostname_check.reboot_required == False
- change_hostname_check_actual.output[0]["DnsDomainName"] == None
- - change_hostname_check_actual.output[0]["HostName"] == "OTHER"
+ - change_hostname_check_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME1"
+ - change_hostname_check_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
- change_hostname_check_actual.output[0]["PartOfDomain"] == False
- change_hostname_check_actual.output[0]["WorkgroupName"] == "TEST2"
@@ -376,7 +484,7 @@
domain_admin_password: '{{ domain_password }}'
state: workgroup
reboot: true
- hostname: FOO
+ hostname: TEST1-long-HOSTNAME2
register: change_hostname
- name: get result of change just the hostname
@@ -384,16 +492,85 @@
script: '{{ get_result_script }}'
register: change_hostname_actual
-- name: assert change just the hostname - check mode
+- name: assert change just the hostname
assert:
that:
- change_hostname is changed
- change_hostname.reboot_required == False
- change_hostname_actual.output[0]["DnsDomainName"] == None
- - change_hostname_actual.output[0]["HostName"] == "FOO"
+ - change_hostname_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME2"
+ - change_hostname_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
- change_hostname_actual.output[0]["PartOfDomain"] == False
- change_hostname_actual.output[0]["WorkgroupName"] == "TEST2"
+- name: change just the hostname - idempotent
+ membership:
+ workgroup_name: TEST2
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ state: workgroup
+ reboot: true
+ hostname: TEST1-long-HOSTNAME2
+ register: change_hostname_again
+
+- name: assert change just the hostname - idempotent
+ assert:
+ that:
+ - not change_hostname_again is changed
+
+- name: change just the hostname netbios portion - check mode
+ membership:
+ workgroup_name: TEST2
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ state: workgroup
+ reboot: true
+ hostname: TEST2-long-HOSTNAME2
+ register: change_hostname_netbios_check
+ check_mode: true
+
+- name: get result of change just the hostname netbios portion - check mode
+ ansible.windows.win_powershell:
+ script: '{{ get_result_script }}'
+ register: change_hostname_netbios_check_actual
+
+- name: assert change just the hostname netbios portion - check mode
+ assert:
+ that:
+ - change_hostname_netbios_check is changed
+ - change_hostname_netbios_check.reboot_required == False
+ - change_hostname_netbios_check_actual.output[0]["DnsDomainName"] == None
+ - change_hostname_netbios_check_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME2"
+ - change_hostname_netbios_check_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
+ - change_hostname_netbios_check_actual.output[0]["PartOfDomain"] == False
+ - change_hostname_netbios_check_actual.output[0]["WorkgroupName"] == "TEST2"
+
+- name: change just the hostname netbios portion
+ membership:
+ workgroup_name: TEST2
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ state: workgroup
+ reboot: true
+ hostname: TEST2-long-HOSTNAME2
+ register: change_hostname_netbios
+
+- name: get result of change just the hostname netbios portion
+ ansible.windows.win_powershell:
+ script: '{{ get_result_script }}'
+ register: change_hostname_netbios_actual
+
+- name: assert change just the hostname netbios portion
+ assert:
+ that:
+ - change_hostname_netbios is changed
+ - change_hostname_netbios.reboot_required == False
+ - change_hostname_netbios_actual.output[0]["DnsDomainName"] == None
+ - change_hostname_netbios_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME2"
+ - change_hostname_netbios_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
+ - change_hostname_netbios_actual.output[0]["PartOfDomain"] == False
+ - change_hostname_netbios_actual.output[0]["WorkgroupName"] == "TEST2"
+
- name: create custom OU
ansible.windows.win_powershell:
script: |
@@ -407,7 +584,7 @@
dns_domain_name: '{{ domain_realm }}'
domain_admin_user: '{{ domain_user_upn }}'
domain_admin_password: '{{ domain_password }}'
- hostname: BAR
+ hostname: TEST1-long-HOSTNAME3
domain_ou_path: '{{ custom_ou.output[0] }}'
state: domain
register: join_ou_check
@@ -424,7 +601,8 @@
- join_ou_check is changed
- join_ou_check.reboot_required == True
- join_ou_check_actual.output[0]["DnsDomainName"] == None
- - join_ou_check_actual.output[0]["HostName"] == "FOO"
+ - join_ou_check_actual.output[0]["HostName"] == "TEST2-long-HOSTNAME2"
+ - join_ou_check_actual.output[0]["NetbiosName"] == "TEST2-LONG-HOST"
- join_ou_check_actual.output[0]["PartOfDomain"] == False
- join_ou_check_actual.output[0]["WorkgroupName"] == "TEST2"
@@ -433,7 +611,7 @@
dns_domain_name: '{{ domain_realm }}'
domain_admin_user: '{{ domain_user_upn }}'
domain_admin_password: '{{ domain_password }}'
- hostname: BAR
+ hostname: TEST1-long-HOSTNAME3
domain_ou_path: '{{ custom_ou.output[0] }}'
state: domain
register: join_ou
@@ -452,26 +630,28 @@
register: join_ou_ad_actual
delegate_to: DC
-- name: assert change just the hostname
+- name: assert join domain with hostname and OU
assert:
that:
- join_ou is changed
- join_ou.reboot_required == True
- join_ou_actual.output[0]["DnsDomainName"] == domain_realm
- - join_ou_actual.output[0]["HostName"] == "BAR"
+ - join_ou_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME3"
+ - join_ou_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
- join_ou_actual.output[0]["PartOfDomain"] == True
- join_ou_actual.output[0]["WorkgroupName"] == None
- join_ou_ad_actual.output | length == 1
- - join_ou_ad_actual.output[0]["Name"] == "BAR"
+ - join_ou_ad_actual.output[0]["DNSHostName"] == "TEST1-long-HOSTNAME3"
+ - join_ou_ad_actual.output[0]["Name"] == "TEST1-LONG-HOST"
- join_ou_ad_actual.output[0]["Enabled"] == True
- - join_ou_ad_actual.output[0]["DistinguishedName"] == "CN=BAR," ~ custom_ou.output[0]
+ - join_ou_ad_actual.output[0]["DistinguishedName"] == "CN=TEST1-LONG-HOST," ~ custom_ou.output[0]
- name: change domain to workgroup with hostname change - check mode
membership:
workgroup_name: WORKGROUP
domain_admin_user: '{{ domain_user_upn }}'
domain_admin_password: '{{ domain_password }}'
- hostname: FOO
+ hostname: TEST1-long-HOSTNAME4
state: workgroup
register: to_workgroup_hostname_check
check_mode: true
@@ -493,11 +673,13 @@
- to_workgroup_hostname_check is changed
- to_workgroup_hostname_check.reboot_required == True
- to_workgroup_hostname_check_actual.output[0]["DnsDomainName"] == domain_realm
- - to_workgroup_hostname_check_actual.output[0]["HostName"] == "BAR"
+ - to_workgroup_hostname_check_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME3"
+ - to_workgroup_hostname_check_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
- to_workgroup_hostname_check_actual.output[0]["PartOfDomain"] == True
- to_workgroup_hostname_check_actual.output[0]["WorkgroupName"] == None
- to_workgroup_hostname_check_ad_actual.output | length == 1
- - to_workgroup_hostname_check_ad_actual.output[0]["Name"] == "BAR"
+ - to_workgroup_hostname_check_ad_actual.output[0]["DNSHostName"] == "TEST1-long-HOSTNAME3"
+ - to_workgroup_hostname_check_ad_actual.output[0]["Name"] == "TEST1-LONG-HOST"
- to_workgroup_hostname_check_ad_actual.output[0]["Enabled"] == True
- name: change domain to workgroup with hostname change
@@ -505,7 +687,7 @@
workgroup_name: WORKGROUP
domain_admin_user: '{{ domain_user_upn }}'
domain_admin_password: '{{ domain_password }}'
- hostname: FOO
+ hostname: TEST1-long-HOSTNAME4
state: workgroup
reboot: true
register: to_workgroup_hostname
@@ -527,13 +709,30 @@
- to_workgroup_hostname is changed
- to_workgroup_hostname.reboot_required == False
- to_workgroup_hostname_actual.output[0]["DnsDomainName"] == None
- - to_workgroup_hostname_actual.output[0]["HostName"] == "FOO"
+ - to_workgroup_hostname_actual.output[0]["HostName"] == "TEST1-long-HOSTNAME4"
+ - to_workgroup_hostname_actual.output[0]["NetbiosName"] == "TEST1-LONG-HOST"
- to_workgroup_hostname_actual.output[0]["PartOfDomain"] == False
- to_workgroup_hostname_actual.output[0]["WorkgroupName"] == "WORKGROUP"
- to_workgroup_hostname_ad_actual.output | length == 1
- - to_workgroup_hostname_ad_actual.output[0]["Name"] == "BAR"
+ - to_workgroup_hostname_ad_actual.output[0]["DNSHostName"] == "TEST1-long-HOSTNAME3"
+ - to_workgroup_hostname_ad_actual.output[0]["Name"] == "TEST1-LONG-HOST"
- to_workgroup_hostname_ad_actual.output[0]["Enabled"] == False
+- name: change domain to workgroup with hostname change - idempotent
+ membership:
+ workgroup_name: WORKGROUP
+ domain_admin_user: '{{ domain_user_upn }}'
+ domain_admin_password: '{{ domain_password }}'
+ hostname: TEST1-long-HOSTNAME4
+ state: workgroup
+ reboot: true
+ register: to_workgroup_hostname_again
+
+- name: assert change domain to workgroup with hostname change - idempotent
+ assert:
+ that:
+ - not to_workgroup_hostname_again is changed
+
- name: remove orphaned AD account for later tests
microsoft.ad.computer:
name: BAR
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/ou/tasks/tests.yml b/ansible_collections/microsoft/ad/tests/integration/targets/ou/tasks/tests.yml
index 49d06aefb..b6061b7d5 100644
--- a/ansible_collections/microsoft/ad/tests/integration/targets/ou/tasks/tests.yml
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/ou/tasks/tests.yml
@@ -163,7 +163,8 @@
country: US
description: Custom description
display_name: OU display Name
- managed_by: Domain Users
+ managed_by:
+ name: Domain Users
postal_code: 10001
state_province: ''
street: Main
diff --git a/ansible_collections/microsoft/ad/tests/integration/targets/user/tasks/tests.yml b/ansible_collections/microsoft/ad/tests/integration/targets/user/tasks/tests.yml
index 98718da6f..10261afdf 100644
--- a/ansible_collections/microsoft/ad/tests/integration/targets/user/tasks/tests.yml
+++ b/ansible_collections/microsoft/ad/tests/integration/targets/user/tasks/tests.yml
@@ -510,6 +510,7 @@
user:
name: MyUser
state: present
+ account_locked: False
city: Brisbane
company: Red Hat
country: au
@@ -563,6 +564,7 @@
user:
name: MyUser
state: present
+ account_locked: False
city: Brisbane
company: Red Hat
country: au
@@ -685,6 +687,7 @@
user:
name: MyUser
state: present
+ account_locked: False
city: Brisbane
company: Red Hat
country: au
@@ -1095,7 +1098,8 @@
- Invalid
register: fail_missing_group
failed_when:
- - '"Failed to locate group Invalid: Cannot find an object with identity" not in fail_missing_group.msg'
+ - >-
+ "Failed to find the AD object DNs for groups.add. Invalid identities: 'Invalid'" not in fail_missing_group.msg
- name: warn on group that is missing
user:
@@ -1104,7 +1108,7 @@
groups:
add:
- Invalid
- missing_behaviour: warn
+ lookup_failure_action: warn
register: warn_missing_group
- name: assert warn on group that is missing
@@ -1112,7 +1116,8 @@
that:
- not warn_missing_group is changed
- warn_missing_group.warnings | length == 1
- - '"Failed to locate group Invalid but continuing on" in warn_missing_group.warnings[0]'
+ - >-
+ "Failed to find the AD object DNs for groups.add. Ignoring invalid identities: 'Invalid'" in warn_missing_group.warnings[0]
- name: ignore on group that is missing
user:
@@ -1120,7 +1125,7 @@
path: '{{ setup_domain_info.output[0].defaultNamingContext }}'
groups:
add:
- - Invalid
+ - name: Invalid
missing_behaviour: ignore
register: ignore_missing_group
@@ -1136,7 +1141,7 @@
path: '{{ setup_domain_info.output[0].defaultNamingContext }}'
groups:
remove:
- - domain admins
+ - name: domain admins
- Enterprise Admins
register: groups_remove
diff --git a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.14.txt b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.14.txt
index e69de29bb..a3dca9603 100644
--- a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.14.txt
+++ b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.14.txt
@@ -0,0 +1 @@
+plugins/action/domain_child.py action-plugin-docs # ansible-test is ignoring sidecar docs
\ No newline at end of file
diff --git a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.15.txt b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.15.txt
index e69de29bb..a3dca9603 100644
--- a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.15.txt
+++ b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.15.txt
@@ -0,0 +1 @@
+plugins/action/domain_child.py action-plugin-docs # ansible-test is ignoring sidecar docs
\ No newline at end of file
diff --git a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.16.txt b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.16.txt
index e69de29bb..a3dca9603 100644
--- a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.16.txt
+++ b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.16.txt
@@ -0,0 +1 @@
+plugins/action/domain_child.py action-plugin-docs # ansible-test is ignoring sidecar docs
\ No newline at end of file
diff --git a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.17.txt b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.17.txt
index e69de29bb..a3dca9603 100644
--- a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.17.txt
+++ b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.17.txt
@@ -0,0 +1 @@
+plugins/action/domain_child.py action-plugin-docs # ansible-test is ignoring sidecar docs
\ No newline at end of file
diff --git a/ansible_collections/microsoft/ad/tests/sanity/ignore-2.18.txt b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.18.txt
new file mode 100644
index 000000000..a3dca9603
--- /dev/null
+++ b/ansible_collections/microsoft/ad/tests/sanity/ignore-2.18.txt
@@ -0,0 +1 @@
+plugins/action/domain_child.py action-plugin-docs # ansible-test is ignoring sidecar docs
\ No newline at end of file
diff --git a/ansible_collections/microsoft/ad/tests/utils/shippable/lint.sh b/ansible_collections/microsoft/ad/tests/utils/shippable/lint.sh
index 12b5b4cd2..9d60edb2a 100755
--- a/ansible_collections/microsoft/ad/tests/utils/shippable/lint.sh
+++ b/ansible_collections/microsoft/ad/tests/utils/shippable/lint.sh
@@ -5,6 +5,6 @@ set -o pipefail -eux
# This is aligned with the galaxy-importer used by AH
# https://github.com/ansible/galaxy-importer/blob/d4b5e6d12088ba452f129f4824bd049be5543358/setup.cfg#L22C4-L22C33
python -m pip install \
- 'ansible-lint>=6.2.2,<=6.14.3'
+ 'ansible-lint>=6.2.2,<=6.22.1'
ansible-lint
diff --git a/ansible_collections/netbox/netbox/.ansible-lint b/ansible_collections/netbox/netbox/.ansible-lint
new file mode 100644
index 000000000..2a7754ed4
--- /dev/null
+++ b/ansible_collections/netbox/netbox/.ansible-lint
@@ -0,0 +1,15 @@
+---
+profile: production
+
+exclude_paths:
+ - .cache/ # implicit unless exclude_paths is defined in config
+ - .venv/
+ - .github/
+
+skip_list:
+ - args[module]
+ - no-handler
+ - yaml[key-duplicates]
+ - yaml[line-length]
+
+offline: false
diff --git a/ansible_collections/netbox/netbox/.ansible-lint-ignore b/ansible_collections/netbox/netbox/.ansible-lint-ignore
new file mode 100644
index 000000000..6f4b2dcfd
--- /dev/null
+++ b/ansible_collections/netbox/netbox/.ansible-lint-ignore
@@ -0,0 +1,3 @@
+# This file contains ignored rule violations for ansible-lint
+changelogs/fragments/ yaml[comments]
+changelogs/changelog.yaml yaml[indentation]
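For a quick local check of the new netbox lint setup, a minimal sketch — assuming an ansible-lint release recent enough to pick up .ansible-lint and .ansible-lint-ignore from the collection root on its own; the version range below simply mirrors the microsoft.ad lint.sh pin shown earlier:

    python -m pip install 'ansible-lint>=6.2.2,<=6.22.1'   # same range pinned in tests/utils/shippable/lint.sh
    ansible-lint                                            # run from the collection root; both dotfiles are auto-discovered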
diff --git a/ansible_collections/netbox/netbox/.github/workflows/main.yml b/ansible_collections/netbox/netbox/.github/workflows/main.yml
index 03a827362..dd05b2137 100644
--- a/ansible_collections/netbox/netbox/.github/workflows/main.yml
+++ b/ansible_collections/netbox/netbox/.github/workflows/main.yml
@@ -10,35 +10,26 @@ jobs:
steps:
- name: Checkout repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Run black
uses: psf/black@stable
- - name: 'Yamllint'
- uses: karancode/yamllint-github-action@master
-
- #- name: Run isort
- # uses: jamescurtin/isort-action@master
-
- #- name: Run ruff
- # uses: chartboost/ruff-action@v1
-
unit_testing:
- runs-on: ubuntu-latest
+ runs-on: ubuntu-20.04
needs: linting
strategy:
fail-fast: false
matrix:
- python-version: ["3.9", "3.10", "3.11"]
+ python-version: ["3.10", "3.11"]
steps:
- name: Checkout repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
@@ -71,24 +62,22 @@ jobs:
runs-on: ubuntu-latest
needs: unit_testing
env:
- python-version: "3.9"
+ python-version: "3.10"
strategy:
fail-fast: false
matrix:
include:
- - VERSION: "v3.5"
- NETBOX_DOCKER_VERSION: 2.6.1
- VERSION: "v3.6"
NETBOX_DOCKER_VERSION: 2.7.0
- VERSION: "v3.7"
NETBOX_DOCKER_VERSION: 2.7.0
- # If we want to integration test wiht all supported Python:
- #python-version: ["3.9", "3.10", "3.11"]
+ - VERSION: "v4.0"
+ NETBOX_DOCKER_VERSION: 2.9.1
steps:
- name: Checkout repo
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Clone & Start netbox-docker containers - ${{ matrix.VERSION }}
run: |
@@ -102,7 +91,7 @@ jobs:
cd ..
- name: Set up Python ${{ env.python-version }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ env.python-version }}
diff --git a/ansible_collections/netbox/netbox/.github/workflows/tests.yml b/ansible_collections/netbox/netbox/.github/workflows/tests.yml
new file mode 100644
index 000000000..b077abb14
--- /dev/null
+++ b/ansible_collections/netbox/netbox/.github/workflows/tests.yml
@@ -0,0 +1,50 @@
+---
+name: "CI"
+
+concurrency:
+ group: ${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+on: # yamllint disable-line rule:truthy
+ pull_request:
+ branches: [master, devel]
+ workflow_dispatch:
+ schedule:
+ - cron: '0 0 * * *'
+
+jobs:
+ changelog:
+ uses: ansible/ansible-content-actions/.github/workflows/changelog.yaml@main
+ if: github.event_name == 'pull_request' && github.base_ref == 'devel'
+ build-import:
+ uses: ansible/ansible-content-actions/.github/workflows/build_import.yaml@main
+ ansible-lint:
+ uses: ansible/ansible-content-actions/.github/workflows/ansible_lint.yaml@main
+ sanity:
+ uses: minitriga/ansible-content-actions/.github/workflows/sanity.yaml@patch-1
+ unit-galaxy:
+ uses: minitriga/ansible-content-actions/.github/workflows/unit.yaml@patch-1
+ unit-source:
+ uses: ansible-network/github_actions/.github/workflows/unit_source.yml@main
+ with:
+ collection_pre_install: >-
+ git+https://github.com/ansible-collections/ansible.utils.git
+ all_green:
+ if: ${{ always() }}
+ needs:
+ - changelog
+ - sanity
+ - ansible-lint
+ - unit-galaxy
+ - unit-source
+ runs-on: ubuntu-latest
+ steps:
+ - run: >-
+ python -c "assert 'failure' not in
+ set([
+ '${{ needs.changelog.result }}',
+ '${{ needs.sanity.result }}',
+ '${{ needs.unit-galaxy.result }}',
+ '${{ needs.unit-source.result }}',
+ '${{ needs.ansible-lint.result }}'
+          ])"
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/.readthedocs.yml b/ansible_collections/netbox/netbox/.readthedocs.yml
index a5b2dd66f..f661b0da2 100644
--- a/ansible_collections/netbox/netbox/.readthedocs.yml
+++ b/ansible_collections/netbox/netbox/.readthedocs.yml
@@ -1,7 +1,7 @@
---
version: 2
-#sphinx:
-# configuration: docs/conf.py
+# sphinx:
+# configuration: docs/conf.py
build:
os: ubuntu-22.04
diff --git a/ansible_collections/netbox/netbox/.yamllint b/ansible_collections/netbox/netbox/.yamllint
deleted file mode 100644
index 3e5958e9b..000000000
--- a/ansible_collections/netbox/netbox/.yamllint
+++ /dev/null
@@ -1,32 +0,0 @@
----
-
-yaml-files:
- - '*.yaml'
- - '*.yml'
- - '.yamllint'
-
-rules:
- anchors: enable
- braces: enable
- brackets: enable
- colons: enable
- commas: enable
- comments: disable
- comments-indentation: disable
- document-end: disable
- document-start:
- level: warning
- empty-lines: enable
- empty-values: enable
- float-values: disable
- hyphens: enable
- indentation: disable
- key-duplicates: enable
- key-ordering: disable
- line-length: disable
- new-line-at-end-of-file: enable
- new-lines: enable
- octal-values: disable
- quoted-strings: disable
- trailing-spaces: enable
- truthy: disable
diff --git a/ansible_collections/netbox/netbox/CHANGELOG.rst b/ansible_collections/netbox/netbox/CHANGELOG.rst
index 0d71beeb4..d27d59776 100644
--- a/ansible_collections/netbox/netbox/CHANGELOG.rst
+++ b/ansible_collections/netbox/netbox/CHANGELOG.rst
@@ -4,6 +4,42 @@ NetBox.NetBox Release Notes
.. contents:: Topics
+v3.19.1
+=======
+
+Minor Changes
+-------------
+
+- Bump version 3.19.1
+
+v3.19.0
+=======
+
+Minor Changes
+-------------
+
+- Add cluster host to dynamic inventory response `#1219 <https://github.com/netbox-community/ansible_modules/pull/1219>`_
+- Add galaxy-importer to CI process `#1245 <https://github.com/netbox-community/ansible_modules/issues/1245>`_
+- Adjust modules to support NetBox v4.0.0 `#1234 <https://github.com/netbox-community/ansible_modules/pull/1234>`_
+- Bump jinja2 from 3.1.2 to 3.1.4 `#1226 <https://github.com/netbox-community/ansible_modules/pull/1226>`_
+- Bump requests from 2.31.0 to 2.32.0 `#1236 <https://github.com/netbox-community/ansible_modules/pull/1236>`_
+- Drop obsolete Ansible and Python versions and fix tests `#1241 <https://github.com/netbox-community/ansible_modules/issues/1241>`_
+- Get ansible-lint passing again (sequence after `#1241 <https://github.com/netbox-community/ansible_modules/issues/1241>`_) `#1243 <https://github.com/netbox-community/ansible_modules/issues/1243>`_
+- Update CI process to follow Ansible Collection Standards `#1247 <https://github.com/netbox-community/ansible_modules/issues/1247>`_
+- Update CI to use master instead of main. `#1253 <https://github.com/netbox-community/ansible_modules/issues/1253>`_
+- Update ansible-lint to ignore changelog file for yaml indentation. `#1256 <https://github.com/netbox-community/ansible_modules/issues/1256>`_
+- Update top-level README with new minimum Ansible version (sequence after `#1241 <https://github.com/netbox-community/ansible_modules/issues/1241>`_) `#1244 <https://github.com/netbox-community/ansible_modules/issues/1244>`_
+- Updated CI to only run changelog job if PR into devel branch is detected. `#1251 <https://github.com/netbox-community/ansible_modules/issues/1251>`_
+- Updated CI to support NetBox 4.0 `#1230 <https://github.com/netbox-community/ansible_modules/pull/1230>`_
+- Updates to top-level README.md to align collection with Ansible best practices `#1238 <https://github.com/netbox-community/ansible_modules/issues/1238>`_
+
+Bugfixes
+--------
+
+- Added ALLOWED_QUERY_PARAMS module_bay by device `#1228 <https://github.com/netbox-community/ansible_modules/pull/1228>`_
+- Added label to power outlet `#1222 <https://github.com/netbox-community/ansible_modules/pull/1222>`_
+- Added power outlet type iec-60320-c21 to power outlet template and power outlet modules `#1229 <https://github.com/netbox-community/ansible_modules/issues/1229>`_
+- Extend query param for parent_location `#1233 <https://github.com/netbox-community/ansible_modules/issues/1233>`_
v3.18.0
=======
@@ -449,7 +485,6 @@ Release Summary
 This release should fix obvious broken changes between the collection and NetBox 2.11, but there are most likely more. Please report them as they're encountered.
**packages** is now a required Python package and is already included in Ansible 2.10, but anyone using Ansible 2.9 or below must manually pip install the library.
-
Major Changes
-------------
diff --git a/ansible_collections/netbox/netbox/FILES.json b/ansible_collections/netbox/netbox/FILES.json
index 2c8ac1942..4b5193f87 100644
--- a/ansible_collections/netbox/netbox/FILES.json
+++ b/ansible_collections/netbox/netbox/FILES.json
@@ -8,6 +8,27 @@
"format": 1
},
{
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "867374819319da961efd5218443bf80a319c27e57ead358d82fb925c79e981dd",
+ "format": 1
+ },
+ {
+ "name": "CHANGELOG.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6f54356870c1f1ce5e08d8b3be1b4d2358708df49e81975d8da6410a38d6e6cd",
+ "format": 1
+ },
+ {
+ "name": "CONTRIBUTING.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "11337b573ba4ffe704e9a94e682e018f7a489b4bd9ed5b474643ec2ae136ddf4",
+ "format": 1
+ },
+ {
"name": "docs",
"ftype": "dir",
"chksum_type": null,
@@ -15,31 +36,178 @@
"format": 1
},
{
- "name": "docs/_static",
+ "name": "docs/getting_started",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/_static/pygments.css",
+ "name": "docs/getting_started/how-to-use",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/how-to-use/media",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/how-to-use/media/advanced_dns_name.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "74318b36e2302111536796af5aa86a9ceab09faa3eedc3e7d28878b8751896d6",
+ "chksum_sha256": "3f4c6892f90e4b7a2ef8ce70fbe04c4c1cee8806d2aa37ffe09955045eb96bb2",
"format": 1
},
{
- "name": "docs/_static/ansible.css",
+ "name": "docs/getting_started/how-to-use/media/api_device_post.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0787ca656847c399975c5cca01be77f08d4ff9344b87bec1c1ba1aaa3cff56a8",
+ "chksum_sha256": "abdd29c2a0dc029c4aa6d4176b1a205dbc5d4d1f434d821e04b9a08a26175221",
"format": 1
},
{
- "name": "docs/Makefile",
+ "name": "docs/getting_started/how-to-use/media/advanced_dict.png",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8b6587b859607f200f116e2cb043fc358e1c3a26c326b563bf348453cfc68307",
+ "chksum_sha256": "b0d011fef324a804f6cbb2778e65d69c6373c8123fac576d9f49072471a6e908",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/how-to-use/modules.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "720bd7b5e52bf0923a949930fff93636d166388dee964928f065e67649b6cfe0",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/how-to-use/inventory.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7b28ca7e2a6cf1d0549a4eb2cafc901e3b2adfc449c3de6950bdf165daeb2b35",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/how-to-use/advanced.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aad987bea7cd675b99e0a3224b88b8ce6163ec6626a7c6f1393d0c52fe988c30",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/how-to-use.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "24afa8389fadeb19111920ff8dedfad7bbbe0cf54c9fefa238c564b8809a39e9",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/new_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "992100c8aa6c94c16d82d1196bbf725193ebad97aa7630eec5c2c58556ed363c",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/architecture.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9f1a0fce897bddacaa5a5b979a00bbe6cbecdcd3d0fff4fca484db73ac76e82b",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/media",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/media/post_rt.png",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7c5735f28bbce3e68418d44c9d799bfd9a546304ea826bc31e9da6604817ca51",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/media/vrf_options.png",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c04ff654c85e611c7968264f1596973cd6f28b6d48a71efcafa5a22f5c498daf",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5158e3e659bafe06c7cd9d2897af6e7ac6209207d906fb4bb530b200b390832e",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/modules/update_module.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "afff5f11445cb862a1177e48f0f2e92316056b7781786a368fa960645ccb34bd",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "08326dcd2935d4a33b028eb1dab2a5afda56afb802d9ca6c89e1a59c0c45afd2",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing/inventory/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "698639c0280fc8b9d8a8926baf2e53dbaff581600a4cc3f65cf9ecdc4dc19732",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/installation.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a4a35f73d6e5c37ffbc2e117b4c11a1f9ebeaa78d93b9c36efca62080792c7ca",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "105990e8aebad3487a6e7174313bc26bc4138813416cd4c98c8157f158b6904c",
+ "format": 1
+ },
+ {
+ "name": "docs/getting_started/contributing.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7029fde832d231a3111e5484faac6759377430100b288bc14f6b2670c6022d73",
"format": 1
},
{
@@ -53,42 +221,70 @@
"name": "docs/_extensions/pygments_lexer.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5efd3823b3470001b252cb0f6970477bfc191a884b1a8aaeca235e02570575a",
+ "chksum_sha256": "6fa5a72e224c3cae4a07f42ea993bed348822e31e4435c300f01e707fb961cb8",
"format": 1
},
{
- "name": "docs/module_utils",
+ "name": "docs/media",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/module_utils/netbox_dcim",
+ "name": "docs/media/postresults.PNG",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "12ce503f9c545a573466e75cf028f316afc893e41f056be1ce6838b44db49537",
+ "format": 1
+ },
+ {
+ "name": "docs/Makefile",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8b6587b859607f200f116e2cb043fc358e1c3a26c326b563bf348453cfc68307",
+ "format": 1
+ },
+ {
+ "name": "docs/changelog",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/module_utils/netbox_dcim/index.rst",
+ "name": "docs/changelog/index.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aac738555b1754e47e6a3b632cde1a825c7af6f831382487ccad4eccc8910f13",
+ "chksum_sha256": "1831a65ac37c6d38d1024a11f051b0fb19e95123b9372436ea5d6fd366fe8c54",
"format": 1
},
{
- "name": "docs/module_utils/netbox_extras",
+ "name": "docs/changelog/changelog_include.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d8a74a8989015f6ff8f2e9c52c07518770e91dd69c81b078eec816aafc8ab9a2",
+ "format": 1
+ },
+ {
+ "name": "docs/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/module_utils/netbox_extras/index.rst",
+ "name": "docs/module_utils/netbox_dcim",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/module_utils/netbox_dcim/index.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "91e0d05c958f3fd4bddd1f90fcffd40cb838ca9fe10ddb23fed96c96c29f925b",
+ "chksum_sha256": "aac738555b1754e47e6a3b632cde1a825c7af6f831382487ccad4eccc8910f13",
"format": 1
},
{
@@ -106,17 +302,17 @@
"format": 1
},
{
- "name": "docs/module_utils/netbox_circuits",
+ "name": "docs/module_utils/netbox_ipam",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/module_utils/netbox_circuits/index.rst",
+ "name": "docs/module_utils/netbox_ipam/index.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "69b4f0147da11b460812a18e1a30c46cf783038607e8aa15b8104ebcf745971b",
+ "chksum_sha256": "5c83d597e8b57c1fc6e1b240b361802f04cf5b2b93ed5b163430c2ad2a25c700",
"format": 1
},
{
@@ -134,6 +330,41 @@
"format": 1
},
{
+ "name": "docs/module_utils/netbox_extras",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/module_utils/netbox_extras/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "91e0d05c958f3fd4bddd1f90fcffd40cb838ca9fe10ddb23fed96c96c29f925b",
+ "format": 1
+ },
+ {
+ "name": "docs/module_utils/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b4fdf0597f752c618b999e67fb63624f430780bf693e4b2b408ab5666df9d1bd",
+ "format": 1
+ },
+ {
+ "name": "docs/module_utils/netbox_secrets",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "docs/module_utils/netbox_secrets/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "8fe38c525cf5a8c333411a9c0266960fb55f3360ec7aaf981a0765d126326c89",
+ "format": 1
+ },
+ {
"name": "docs/module_utils/netbox_tenancy",
"ftype": "dir",
"chksum_type": null,
@@ -148,38 +379,52 @@
"format": 1
},
{
- "name": "docs/module_utils/netbox_ipam",
+ "name": "docs/module_utils/netbox_circuits",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/module_utils/netbox_ipam/index.rst",
+ "name": "docs/module_utils/netbox_circuits/index.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5c83d597e8b57c1fc6e1b240b361802f04cf5b2b93ed5b163430c2ad2a25c700",
+ "chksum_sha256": "69b4f0147da11b460812a18e1a30c46cf783038607e8aa15b8104ebcf745971b",
"format": 1
},
{
- "name": "docs/module_utils/netbox_secrets",
+ "name": "docs/conf.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9f66ecb31620add016b1743c7943620b384c4f0acb370aa4479f3b4b38bd6933",
+ "format": 1
+ },
+ {
+ "name": "docs/index.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "13a5ee7e9853fc07707a7cdc745cb837c7b2ee852328f4b4c7ebcf6e3acb881b",
+ "format": 1
+ },
+ {
+ "name": "docs/_static",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/module_utils/netbox_secrets/index.rst",
+ "name": "docs/_static/pygments.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8fe38c525cf5a8c333411a9c0266960fb55f3360ec7aaf981a0765d126326c89",
+ "chksum_sha256": "74318b36e2302111536796af5aa86a9ceab09faa3eedc3e7d28878b8751896d6",
"format": 1
},
{
- "name": "docs/module_utils/index.rst",
+ "name": "docs/_static/ansible.css",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4fdf0597f752c618b999e67fb63624f430780bf693e4b2b408ab5666df9d1bd",
+ "chksum_sha256": "0787ca656847c399975c5cca01be77f08d4ff9344b87bec1c1ba1aaa3cff56a8",
"format": 1
},
{
@@ -204,10 +449,10 @@
"format": 1
},
{
- "name": "docs/conf.py",
+ "name": "docs/requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb3bc957fa2adf51bd249cf90fa963c9223be14357b27bafa65b8da76a15df32",
+ "chksum_sha256": "39988308e7ae66376608418d13d7979ccd50fcdc3e1c3439912ff9943c210ca3",
"format": 1
},
{
@@ -218,2292 +463,2635 @@
"format": 1
},
{
- "name": "docs/plugins/netbox_cluster_group_module.rst",
+ "name": "docs/plugins/netbox_tag_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cf2aa826844f75d5442f4d72f0bac1ff40940173b7918fad8f2a2e6aa0132182",
+ "chksum_sha256": "a97ab7ce58ecd2ad9b898b262cd569baed9fd99d7b9e018c13ce17e03e3ef897",
"format": 1
},
{
- "name": "docs/plugins/netbox_site_module.rst",
+ "name": "docs/plugins/netbox_manufacturer_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19594c11cbd212bc119272381f5a9f2680d1666f278394158ee4943e9c442534",
+ "chksum_sha256": "5722e58b3c307dd1d6418e380407c6bc5da28e61baae914678f1c22f17b9bf8f",
"format": 1
},
{
- "name": "docs/plugins/netbox_contact_module.rst",
+ "name": "docs/plugins/netbox_vlan_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3da0572391fe52cffbeeb7c21da524a4553c82023ecefb2250724f6d9c52df2",
+ "chksum_sha256": "48d1e2fa6c9751d1eb14395682cdcb1e0a4069ab1fe801dbf45b9353ad9fd4dc",
"format": 1
},
{
- "name": "docs/plugins/netbox_module_type_module.rst",
+ "name": "docs/plugins/netbox_circuit_type_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "791c805acdad6a7f881ce1095f89216dc9f795c99edeb84f9eb6c48716d1d1dc",
+ "chksum_sha256": "29fe3da4217a1e02c4e1cd0aee2d5da9d8c72ca7b6271c9e08d53cea1ee3248a",
"format": 1
},
{
- "name": "docs/plugins/netbox_config_template_module.rst",
+ "name": "docs/plugins/nb_inventory_inventory.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13597eb1a80e98fbcc6753bb3de6c4d8ccb69d214aad52443c311c3d8b17dcb0",
+ "chksum_sha256": "b3b197f2bfcb61687d409a10ca657a074a84d0ccde5117f1fdbb67f18e9ec377",
"format": 1
},
{
- "name": "docs/plugins/netbox_interface_module.rst",
+ "name": "docs/plugins/netbox_device_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a81f1860d8e8bf8b1c437ba16c9939419a4ba695ed228f0c726b5944c3690ab3",
+ "chksum_sha256": "e562e2210574d4f7aa3b48df116228a2044a6e2f9e84c3b5f1c5927115e46f9a",
"format": 1
},
{
- "name": "docs/plugins/netbox_power_port_template_module.rst",
+ "name": "docs/plugins/netbox_virtual_disk_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec264ee717d0d32c697abc10ed886848c4ba386e1094eefd588bcb2411b8b07b",
+ "chksum_sha256": "e7b5322cb65f16892d7dc4d659ed50026597086827433d143a2864d92ccbb4ac",
"format": 1
},
{
- "name": "docs/plugins/netbox_circuit_module.rst",
+ "name": "docs/plugins/netbox_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "62ff17ad56ca1ae51bdd2772eac51d1424d29b5b11281d11e4af45d10aec20fd",
+ "chksum_sha256": "bac3e444069da9456bf44a79c764f5456459888f9158c3458a092c7ff4431a65",
"format": 1
},
{
- "name": "docs/plugins/netbox_inventory_item_role_module.rst",
+ "name": "docs/plugins/netbox_console_server_port_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d65116e3a0e0d71b1faa8338a40f7f13df4eaeb0804d8530eb1618207daf3ce3",
+ "chksum_sha256": "3cf74e2863e74649e510ac082ed4abacba90d05dc36ddbff39bbe92cafa3c711",
"format": 1
},
{
- "name": "docs/plugins/netbox_fhrp_group_assignment_module.rst",
+ "name": "docs/plugins/netbox_tenant_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "069b71f5e27cb50477765f8f648c9e8d2b488596699396089ff2346383011e78",
+ "chksum_sha256": "6d4aa3632e8fca16e7627b6b7d490f436aef519dd4e768ca6a1e3fe537caea3c",
"format": 1
},
{
- "name": "docs/plugins/netbox_virtual_disk_module.rst",
+ "name": "docs/plugins/netbox_config_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c7c2700a0266a0d81beb53b8c35bc840aa153613318c6cafa7b4103100f90e8",
+ "chksum_sha256": "4f0c06e0bbb37536e9976122b259b43c9295cd06833084f3dc45a41db1ab4166",
"format": 1
},
{
- "name": "docs/plugins/netbox_export_template_module.rst",
+ "name": "docs/plugins/netbox_service_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00924f4f3f26fe9127ad6a20336a102c7e5be0a6c242e6a3137c51e6928619cc",
+ "chksum_sha256": "6da244a6e53c70121e6e4a82a1678fef951baa26d2af1ccfc7b80afde8a8e8c7",
"format": 1
},
{
- "name": "docs/plugins/netbox_virtual_machine_module.rst",
+ "name": "docs/plugins/netbox_front_port_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a3d75b052e02c931b24cf7dd04b7ca2246b0c43793f95f76b5d8d4f2e972269d",
+ "chksum_sha256": "b2f13d54e6f2bc3f211ab4c529a9b39d85aee24b2ca238cdb620e74a441ec7cd",
"format": 1
},
{
- "name": "docs/plugins/netbox_custom_link_module.rst",
+ "name": "docs/plugins/netbox_circuit_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fd7b23c3ab8e0ddeeea98d8bb9233893a5938e813cc3f35ecb667a9d486d44e5",
+ "chksum_sha256": "2e0137de67ad465b4642cba7a7ffdd12315589aececf49ac70badf6115b6adb6",
"format": 1
},
{
- "name": "docs/plugins/netbox_contact_group_module.rst",
+ "name": "docs/plugins/netbox_virtual_chassis_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2837c6859dfdba133cf1b54259061dbff734f918a2b3d5276c38209283dca4c",
+ "chksum_sha256": "f66968af2f62108e940df3623b567d94ae87bb74e60969d148dad9dfa9871be9",
"format": 1
},
{
- "name": "docs/plugins/netbox_platform_module.rst",
+ "name": "docs/plugins/netbox_provider_network_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9b1ce38fabb69418ced65ac491b00d9087171972d4fe42a35687091f6041a417",
+ "chksum_sha256": "63df2d0530365042b95472518762ec27b40c34b4b04b56c50b015d161d8bbc79",
"format": 1
},
{
- "name": "docs/plugins/netbox_contact_role_module.rst",
+ "name": "docs/plugins/netbox_custom_link_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3df2ce4f83ad06c0073f397497708a99b1103923932eb51d73c9b5f31eb1549",
+ "chksum_sha256": "5864550194796e6c551cd4e17f41e3cc5184dec730e70a0a18357eb8c3503f3a",
"format": 1
},
{
- "name": "docs/plugins/nb_inventory_inventory.rst",
+ "name": "docs/plugins/netbox_device_interface_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a87d71fbc237548249843da9018028e39e249ed79346551b5a9a0fd8ed06856b",
+ "chksum_sha256": "7417d997c30baa6b3b1eb917dd5646c0674e361b12a528211bca5a3318ba0c65",
"format": 1
},
{
- "name": "docs/plugins/netbox_wireless_lan_module.rst",
+ "name": "docs/plugins/netbox_l2vpn_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1908bd1628eb49750019c31ac5356f74c512e95e29ea5ddde054ee63a320d32f",
+ "chksum_sha256": "2a60b86b3424fe47f412fef844b4bb4c5dc23fe19c8cfc89bd3b41238f2532e1",
"format": 1
},
{
- "name": "docs/plugins/netbox_vlan_group_module.rst",
+ "name": "docs/plugins/netbox_console_port_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "744574598f8b98ec0ef926dc2a625ec365630769040cf762ddd5546f9899b41c",
+ "chksum_sha256": "4a81ba46c8f1f4ac438640bf9ca23d2ffd86862c353fb0f587c883339f7348d8",
"format": 1
},
{
- "name": "docs/plugins/netbox_vlan_module.rst",
+ "name": "docs/plugins/netbox_ip_address_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec4bc435f50cc6a504140960df1f73aa3559c3b2b09c80996532be3073b4e117",
+ "chksum_sha256": "9264e93eb8fd9ddfd9287f8929931d624e79ebc6e2add8ca3547eb8c066635b6",
"format": 1
},
{
- "name": "docs/plugins/netbox_power_port_module.rst",
+ "name": "docs/plugins/netbox_rack_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2c7dedd266fec7cb07f5fe55f762f835dd92eb586f108c4c1fda40c8d3ac5116",
+ "chksum_sha256": "5ec3266b5d0ca1e04a62370fe08e24ee1b7daea62c207e147d42b19970de0fed",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_bay_module.rst",
+ "name": "docs/plugins/netbox_console_port_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "725a28bd7995f5bc8f771c66705951e1002b2f4b20b2a3a68ffc08ac41dbed3f",
+ "chksum_sha256": "68951df7af440f38fce10bfa21f53f10506de84a9d14ba4a05984dcf0ca9abee",
"format": 1
},
{
- "name": "docs/plugins/netbox_circuit_termination_module.rst",
+ "name": "docs/plugins/netbox_contact_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac99c0b5b43dadff11bc61c1d38df201a4e58d27bb99b9bd4d60992347d48d38",
+ "chksum_sha256": "b73dd97651a1a49591a7c66bbf9a2d9d41b6930dd3959f3b809ef14f2e575a77",
"format": 1
},
{
- "name": "docs/plugins/netbox_site_group_module.rst",
+ "name": "docs/plugins/netbox_rack_role_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9afac864612171a6cf4737e8667f327f2d86e851859558e957e5192f226def6",
+ "chksum_sha256": "5e69fb0e845987507a6ee349152bf79aeeeb6ec65daaba466cd5f9c1f0b41252",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_interface_module.rst",
+ "name": "docs/plugins/netbox_power_feed_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "09449a2832be423140eb4a7e1723431781506f0f134c9257d6b89f1bdfb8f227",
+ "chksum_sha256": "2310946a748854ea73f5bc85216e062332364a2db381836277e11b746e9b8827",
"format": 1
},
{
- "name": "docs/plugins/netbox_ip_address_module.rst",
+ "name": "docs/plugins/netbox_rear_port_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "78f4a78f16fbad0eda34bf20b7f23d8848a449140d7edf69d6ed4bccf6a64bc7",
+ "chksum_sha256": "619f7201fcb6aab3e91a37aba75e967c8ac231242cefdff9115f7c9f81ee4681",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_interface_template_module.rst",
+ "name": "docs/plugins/netbox_prefix_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf997dc224bfc2493fdb2ba522c40536e84ad7e278f186096232489d945310dd",
+ "chksum_sha256": "33c090974cf17ac907514872269cba7152929841346cad931ff6aa5fb36aaa8f",
"format": 1
},
{
- "name": "docs/plugins/netbox_circuit_type_module.rst",
+ "name": "docs/plugins/netbox_device_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a92878ea19f67e678ad76ab984848c02e59629f24f2477b3f60fe9cdc4a3564a",
+ "chksum_sha256": "3fce5b2e3ddbf9b47b659d1143cb0d3bb6519120da29705a6e52f0f83c83a13f",
"format": 1
},
{
- "name": "docs/plugins/netbox_console_port_module.rst",
+ "name": "docs/plugins/netbox_location_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64bf8db93c7eac612edbeadfdeb160e0e44afb9fe171fe9cbec69211d4e2b7a9",
+ "chksum_sha256": "347b5691f3131f983a2227c7aeeae9cef71c1563d91ea23978b4881c543f8cc8",
"format": 1
},
{
- "name": "docs/plugins/netbox_rir_module.rst",
+ "name": "docs/plugins/netbox_vm_interface_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "95431e29dc63494e75c00a50edc7ef21a465590e2daf5615c438400524660bf4",
+ "chksum_sha256": "28aba6d533870acf30dfd6897a0a283f660e3d4360c68e3c41907f1bc34d4142",
"format": 1
},
{
- "name": "docs/plugins/netbox_console_server_port_template_module.rst",
+ "name": "docs/plugins/netbox_asn_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "56f7d2539820da217c188d7737a045fa22cb24088d06dff47767c1526ea93bcb",
+ "chksum_sha256": "bf6aca4c3711ac2de31dd62c02eceb420fe82af9cc8683ce3baf79835ae2bbd8",
"format": 1
},
{
- "name": "docs/plugins/netbox_front_port_module.rst",
+ "name": "docs/plugins/netbox_rir_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e89863e1559c49a3ffa7c0425b4b94a33cfa19c099ac61aa778ec73b1c45f9a",
+ "chksum_sha256": "ebb4429feabd382488facbcad09de2bf575a09b4f71ce608090e28c57a8800c1",
"format": 1
},
{
- "name": "docs/plugins/netbox_wireless_lan_group_module.rst",
+ "name": "docs/plugins/netbox_power_outlet_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4bf98e78fa29ea2bc047dc7ce54ab84dae42ad7ffdb7923b90e1f0a2c39342ef",
+ "chksum_sha256": "fc7d7941f9f1dfa964a0d109060f5c29d71c6cd160d77256a5f2fe65e60779e0",
"format": 1
},
{
- "name": "docs/plugins/netbox_power_outlet_template_module.rst",
+ "name": "docs/plugins/netbox_module_bay_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "558ae4f998b33362ceaa13e13554851381bce8d991e440adccb9676777605827",
+ "chksum_sha256": "08e2aa5511760dfeaa6ca2cbb781fa2f5dd9bd17d57b382ebf55282d6637303a",
"format": 1
},
{
- "name": "docs/plugins/netbox_prefix_module.rst",
+ "name": "docs/plugins/netbox_console_server_port_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "303c492974bb9bd45ceb30c9d026b88738984d11435aa5ee9fe68fb19fc60641",
+ "chksum_sha256": "efa9a228ad792bdfdba42644a73a8958e944263271f5a97637efcdfa57f79e69",
"format": 1
},
{
- "name": "docs/plugins/netbox_console_server_port_module.rst",
+ "name": "docs/plugins/netbox_tenant_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e42e32c6c2964aacce8f72f4c33413c8435f62419876470f07b39c220baf0d2",
+ "chksum_sha256": "952163b4e0e3eb0141fd73a83a100d4d4fa98f9d291c54630e7bf17728cbaf64",
"format": 1
},
{
- "name": "docs/plugins/netbox_provider_network_module.rst",
+ "name": "docs/plugins/netbox_region_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d62c04a676687c7a396b1e1cd11375059b2b08341b034fdfa21a750d89bed2c2",
+ "chksum_sha256": "f9cb10228cc7ecba3ef6e596b742024d5b9405f8073ae3dc43eb701f8c1f1b97",
"format": 1
},
{
- "name": "docs/plugins/netbox_rack_group_module.rst",
+ "name": "docs/plugins/netbox_virtual_machine_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "02f174938f3cd714a280849e728af82d5e40f8946881566743694bb38f4f38b9",
+ "chksum_sha256": "e621952249adb466955a0fa3393f79ba4e38dfab405ce08c8ca60b8ba0a36c59",
"format": 1
},
{
- "name": "docs/plugins/netbox_tag_module.rst",
+ "name": "docs/plugins/netbox_power_panel_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61930da19d2da3f04b674dd2bf14dc5c31965453947c046f7985ad95d829777a",
+ "chksum_sha256": "edb5de14b9c23b2e64c00d9c82a3c3ed22b39c1caaa9e41ef9b73a2a7dc70176",
"format": 1
},
{
- "name": "docs/plugins/environment_variables.rst",
+ "name": "docs/plugins/netbox_vrf_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e05134cb38bd00d064befb99045c51ae4893762c86b0c2c3007cbc2665090bf",
+ "chksum_sha256": "979b761cf3a7a11c6ed46817681a8a576407cc8f41c83a4ecf43a41d0ca365a8",
"format": 1
},
{
- "name": "docs/plugins/netbox_journal_entry_module.rst",
+ "name": "docs/plugins/index.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b155ca58fe8c0eecf86f6471618d025a401b843dbc32b648e30834c50c728576",
+ "chksum_sha256": "cfc6078d8bc269c544cf3d942d09b6a114a10ce8e88f16c571755086c7d65c60",
"format": 1
},
{
- "name": "docs/plugins/netbox_rack_role_module.rst",
+ "name": "docs/plugins/netbox_contact_role_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "33147df9be51239a74cd04be29591aa12af8fc4c5fa602b0310ea160fa3235a7",
+ "chksum_sha256": "b2ceaa92e04ef846e13e9e01133375728e3f65393bd3c4425fc43d867e9fb487",
"format": 1
},
{
- "name": "docs/plugins/netbox_front_port_template_module.rst",
+ "name": "docs/plugins/nb_lookup_lookup.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ff39b04c1e1ece028987ddf17c968a9294c3f7ae94ab19df5d5084f1240d29c7",
+ "chksum_sha256": "2bef14a52568c6121405016e88c5e2fc897aca3d571a47bf343fd0fb6695beac",
"format": 1
},
{
- "name": "docs/plugins/netbox_config_context_module.rst",
+ "name": "docs/plugins/netbox_route_target_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0f5225ac1bdc2df393b60ffccefa824e198d2e5908880a00411b5d14b8cd0b8",
+ "chksum_sha256": "d2ebcbd13954aa23efb2bf617ba1c3b12f0d762f034dabf3882084962efa01bc",
"format": 1
},
{
- "name": "docs/plugins/netbox_power_feed_module.rst",
+ "name": "docs/plugins/netbox_circuit_termination_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dba1d5783c9f6582967775edaa73c282e6af425bdf004a574f5e370cf1d4a2aa",
+ "chksum_sha256": "9d7d596568b3f18960e07912574d8f144f835b86b8d4d541563c82a6011416be",
"format": 1
},
{
- "name": "docs/plugins/netbox_webhook_module.rst",
+ "name": "docs/plugins/netbox_cluster_type_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d7c55e0270f60ee122a111ba5fd0875aa3ab2ebcea597d3f91d1a3952b35285",
+ "chksum_sha256": "ef30e4cdcc86265248affaa5d80048d1e042a9bdbf292a81b8c35ef761f41ce0",
"format": 1
},
{
- "name": "docs/plugins/nb_lookup_lookup.rst",
+ "name": "docs/plugins/netbox_module_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "818598e0d2adb534fd1901b797761e5ad3b4e4189a09ecda43c1f5aef801c5f9",
+ "chksum_sha256": "a919bed8c9b25864f490765dda8daa29cd466efc1e992de860755b620d6c4765",
"format": 1
},
{
- "name": "docs/plugins/netbox_custom_field_choice_set_module.rst",
+ "name": "docs/plugins/netbox_wireless_lan_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7a2cf9f6db482f6cd1687b12cea0bd4154644ea77620b98b60b176df4607e99",
+ "chksum_sha256": "73bb96346604431a986a1e08b3e09db51bcafcbf56e691f339fefaaf68810e3c",
"format": 1
},
{
- "name": "docs/plugins/netbox_location_module.rst",
+ "name": "docs/plugins/netbox_front_port_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f967b168c78229a485447184b600ea0a6993262921e82516d3594191c98a340",
+ "chksum_sha256": "8fa7f2672790e4a27b837b67501624234569617c10c3f4ccc6fe5acf4f557652",
"format": 1
},
{
- "name": "docs/plugins/netbox_ipam_role_module.rst",
+ "name": "docs/plugins/netbox_webhook_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d7aeea61eeed4d231e037b266922cbd33e765fe360f5094c6871b771bb22c9c2",
+ "chksum_sha256": "cc02772fcbddb4ae813f507c09e53e7d89d9fe00ad00c13773e426a78ec21510",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_module.rst",
+ "name": "docs/plugins/netbox_cluster_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d75b3885f4fb5c283b8bf7593242e9661d124becb93300d4b75f759cce73c25e",
+ "chksum_sha256": "ebad52e6ee70a0137b83ab0cef224fe96a16af1509c50c5d71584ab8166cae16",
"format": 1
},
{
- "name": "docs/plugins/netbox_asn_module.rst",
+ "name": "docs/plugins/netbox_fhrp_group_assignment_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a1b05764239c3ab2cab75bbd24f5a7f3a79c340b87d1f56712e980fc7f95aed",
+ "chksum_sha256": "27403b3044509983b77b855a33987037e7befb26c8935ddf981e8a650cf3a068",
"format": 1
},
{
- "name": "docs/plugins/netbox_wireless_link_module.rst",
+ "name": "docs/plugins/netbox_ipam_role_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ffe0caf6aca220cac8554ef400bba82f4035cbcc1cf4e9eaeabcfe7502603c57",
+ "chksum_sha256": "e9907323114eab10dab4110c4bbba66093f7acb7aa58d0d678de843a6f394092",
"format": 1
},
{
- "name": "docs/plugins/netbox_virtual_chassis_module.rst",
+ "name": "docs/plugins/netbox_vlan_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "faf9d732a702f2c0ad630adf7a8f79066b871bd05fcc005f8807daecd593a396",
+ "chksum_sha256": "98582636ed7e504ef820419feedbf980be9a25a93edaea1ad86feacc4dd2e57b",
"format": 1
},
{
- "name": "docs/plugins/netbox_vm_interface_module.rst",
+ "name": "docs/plugins/netbox_wireless_lan_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a8eaaee3abde466f051bf43bd0b19844b486294849a1d1fc648109ba7b37fc42",
+ "chksum_sha256": "1fea89108e1203e20769b83e9ea67a4d10f65dc7eed8648a2f48430ca83b8320",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_role_module.rst",
+ "name": "docs/plugins/environment_variables.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ab9666e0164ef9e74ca5e2d0c8f1ff5cf2a372b8fd0c201dcb33311df8410d6",
+ "chksum_sha256": "10e745dac34a45446f4c4be0664ca250b0111d404b6a35eadc39bc5b3f3dd06a",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_bay_template_module.rst",
+ "name": "docs/plugins/netbox_rear_port_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0a48a6ba3021c670e5eaf5d867a6c228c98fe0cb575420afb3bf9a510ead0e59",
+ "chksum_sha256": "bd2bd2d3ffa3a1f7b98c6329354e4791cdc6d5b34195bfba8a25a660783685e2",
"format": 1
},
{
- "name": "docs/plugins/netbox_aggregate_module.rst",
+ "name": "docs/plugins/netbox_inventory_item_role_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fff2b35c0f41a6af911dbb64b25fdae3e5df9ddbdb26830bb67d014d3850d0a4",
+ "chksum_sha256": "c5a25b964beb053ff9edf610062899fc25b1b309d09424cc400e898affb6dd5b",
"format": 1
},
{
- "name": "docs/plugins/netbox_rack_module.rst",
+ "name": "docs/plugins/netbox_power_port_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "17df26c35c2238f9a8c58462a8ad30f34ac67c52e686a86c98811c0aef06e7ca",
+ "chksum_sha256": "62b01724e8449c8fe73547a14020fcf8865dbe6a800df5317a0ec34e05ff2228",
"format": 1
},
{
- "name": "docs/plugins/netbox_cluster_type_module.rst",
+ "name": "docs/plugins/netbox_cable_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0eca272ddee6ffceee29cb095cdd4dbb1e29e007e50a3444c16b0e936d8df3a",
+ "chksum_sha256": "e59817abf34d99d4755d3da36468a9872fa8f902aec1041f6242dca8b1bc7afc",
"format": 1
},
{
- "name": "docs/plugins/netbox_cluster_module.rst",
+ "name": "docs/plugins/netbox_custom_field_choice_set_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8d0e1c0e7ef3e92e07f7fde274ed2102fcaf932571fdb81295de788014eccd0",
+ "chksum_sha256": "7b26e451abadcc9d832a21686aaca5279ebc3f81bc47e5aad69c57d91951cdc2",
"format": 1
},
{
- "name": "docs/plugins/netbox_module_module.rst",
+ "name": "docs/plugins/netbox_fhrp_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3e47912adeabef8e593d54d3825b60f37a232e9796093b01cf8743bcc2453a6",
+ "chksum_sha256": "15fee14645708e18acca082b3cf1d0992fd5ec5995d0ad2a059d1bcb60fc575d",
"format": 1
},
{
- "name": "docs/plugins/netbox_region_module.rst",
+ "name": "docs/plugins/netbox_device_type_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8f319c3b75e123423b049eab47ee2c050a9d54f963c571f09aea82ef9325227d",
+ "chksum_sha256": "f64e2831906a386ce1a632a2a836e7ce1789f9f255b9a9160c6b63ecbac0a4c4",
"format": 1
},
{
- "name": "docs/plugins/netbox_manufacturer_module.rst",
+ "name": "docs/plugins/netbox_module_type_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a557d6b6a9eeaefa5f65be4a0eabee6077a311896e01508b12f9374ad3076332",
+ "chksum_sha256": "7e627cd4cdb8d75e8e382e0fcf9f000acb2b142cdc780acf1e699748a1cd218e",
"format": 1
},
{
- "name": "docs/plugins/netbox_module_bay_module.rst",
+ "name": "docs/plugins/netbox_service_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e75ac555fd8d421965cb3f210110dd0c7000155480d42b1631df0d41c346118",
+ "chksum_sha256": "e6e69ae63c79c142a8ebcfa0a853d9b8dd526c9571af08a0712ad52207043459",
"format": 1
},
{
- "name": "docs/plugins/netbox_inventory_item_module.rst",
+ "name": "docs/plugins/netbox_cluster_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "86acffad7be0dc8d7e7996f5acb73741c70ccadb32608f756d2a5845f772cb6a",
+ "chksum_sha256": "5068f575a382a5858e6b0f1fe860f87feea8576c36c81b2f495ee49b296f22bc",
"format": 1
},
{
- "name": "docs/plugins/netbox_provider_module.rst",
+ "name": "docs/plugins/netbox_power_outlet_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ce40cbac9917a243e1365b342a083b7c7438ecf3844f634b25415dfd4680907",
+ "chksum_sha256": "78f4fcb9e202ae6bfe455a52f0ad8a568706d193389358bbbcd591f036447ad4",
"format": 1
},
{
- "name": "docs/plugins/netbox_l2vpn_termination_module.rst",
+ "name": "docs/plugins/netbox_aggregate_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "984e86e689629a4e3090a5cfcb61f3ed77f3ec67c61a74a8a6d613cb4e7f9d03",
+ "chksum_sha256": "8e7280a943bd24ad12425ce7a22050e0b5acf4a3127b50fc6ead2c50c7daa877",
"format": 1
},
{
- "name": "docs/plugins/netbox_route_target_module.rst",
+ "name": "docs/plugins/netbox_power_port_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5ee160f6811124acc96c06b0ccc77737fd4d87c9052c4c793895567125a5496d",
+ "chksum_sha256": "cfc95851e83d59a4484e75c57a4f3cbc313c8963ce3256de273c98732c96fb85",
"format": 1
},
{
- "name": "docs/plugins/netbox_console_port_template_module.rst",
+ "name": "docs/plugins/netbox_device_bay_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8344beaa34b81f73d28dd709d397ef48a1dc9070cfdebdd462344fc95c3d5062",
+ "chksum_sha256": "1d265d84e58b26733e813dde8ea3a272c50338f7da8e71ec6a92ad188f4bd46b",
"format": 1
},
{
- "name": "docs/plugins/netbox_power_panel_module.rst",
+ "name": "docs/plugins/netbox_device_bay_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "43e32df6b480501bf093edb8a3cc25c7d03f53eed96f644024552361d31c0add",
+ "chksum_sha256": "ab6fc008b3d9ddb5d2f2b298526b75f704f8a24526ff77ee71adfc450a2214ee",
"format": 1
},
{
- "name": "docs/plugins/index.rst",
+ "name": "docs/plugins/netbox_rack_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ec1b374e3b70b7b3a02df485bba028d6ec5e45ddeb233c9232ce2d10b4006841",
+ "chksum_sha256": "2dc0fe15897938b5bc845e66b31e769985df5d206e8e568f57343c89784421c5",
"format": 1
},
{
- "name": "docs/plugins/netbox_device_type_module.rst",
+ "name": "docs/plugins/netbox_journal_entry_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7542f9f79902aa07467958a249f70774475de367860867143e3b15c84fecc578",
+ "chksum_sha256": "e770a41021d2c3120eddaaf0e97deb77decc841be494aa18b6f89adabc59f87b",
"format": 1
},
{
- "name": "docs/plugins/netbox_fhrp_group_module.rst",
+ "name": "docs/plugins/netbox_contact_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "59a88a6dd875b6cf337c3c8c2c51b8a49cf0aa21b61af531aa90960d07bf4cfa",
+ "chksum_sha256": "5f3419991a945d47d1dd57ab02245403be9cf8e0555689de39c8aeb848c841a4",
"format": 1
},
{
- "name": "docs/plugins/netbox_rear_port_template_module.rst",
+ "name": "docs/plugins/netbox_site_group_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61930a5ec4e1be9082356a247ff0c3cf7eb4dade7e1b46cd390c9c49c8792a44",
+ "chksum_sha256": "8acbb19fda55662d22611dbceda002027e27ebf75dd28267a1b90b60b09c3e8b",
"format": 1
},
{
- "name": "docs/plugins/netbox_l2vpn_module.rst",
+ "name": "docs/plugins/netbox_provider_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a098ba7976b24e5d6fd8a6e4e4eda80a9fc09b089181be08d9cd5912b6c37b70",
+ "chksum_sha256": "a2c08949e04779767e171c98d478bb22e095ff7ad232249e3ec7e6b042c4dba1",
"format": 1
},
{
- "name": "docs/plugins/netbox_service_module.rst",
+ "name": "docs/plugins/netbox_l2vpn_termination_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00331d976747c19b54821c23896ed3459b7c0e4b5366caef66f85272bb478a6c",
+ "chksum_sha256": "494acd3c966bf49937f0064a387a0d289399b2f89654bc4b7f15d2cf1345e75d",
"format": 1
},
{
- "name": "docs/plugins/netbox_tenant_group_module.rst",
+ "name": "docs/plugins/netbox_export_template_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35782c3d41dde663c3717b6efb2865f222197ba4cfcd30df916c9adb0c263cdd",
+ "chksum_sha256": "14fe06bd1bc07d781c70e27fe6e05ac428bb4c2be14616eeb52caa5f48499680",
"format": 1
},
{
- "name": "docs/plugins/netbox_rear_port_module.rst",
+ "name": "docs/plugins/netbox_device_role_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "34b6bcbd2c3a4e1f2f290cecd7954c4559608553da463eec43348700b9f8d4c1",
+ "chksum_sha256": "4745a922212d4089a015cd4785718f76cf210ef38e89776d1c19985768eb4ba0",
"format": 1
},
{
- "name": "docs/plugins/netbox_power_outlet_module.rst",
+ "name": "docs/plugins/netbox_inventory_item_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "449175237c724791a885357b4069b6ed82132af8a9b142de0fa372ea6f4b8567",
+ "chksum_sha256": "a8839304fa1620dda1332f8e989b7b855ba418edef0524d7890c417efbeff6fe",
"format": 1
},
{
- "name": "docs/plugins/netbox_vrf_module.rst",
+ "name": "docs/plugins/netbox_custom_field_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c3bab4899887b45e9db3bd047082ea7d098e4bbbed22e16837d7c985589a1d97",
+ "chksum_sha256": "18dcdcf6ecdbe74c31a91640a022cf44715910c98cf9f6989f6f1062a962e681",
"format": 1
},
{
- "name": "docs/plugins/netbox_service_template_module.rst",
+ "name": "docs/plugins/netbox_site_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a33217682a2e9a2a1bd6ac5faa812cd31a3087f0857c2703d4178945f8645185",
+ "chksum_sha256": "a7f69c7ba9a94613140c543792b8535d65c9aafc86b8ccabbf0392382fc16a6b",
"format": 1
},
{
- "name": "docs/plugins/netbox_tenant_module.rst",
+ "name": "docs/plugins/netbox_platform_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "545b497d6afb011f8e33794b9ebd048037fa59b55e2425c64f01454e26d93f55",
+ "chksum_sha256": "f02a16a74b17a348dc7798e982eaf3d37549e3bc8c4d088aba766766ff3385ac",
"format": 1
},
{
- "name": "docs/plugins/netbox_cable_module.rst",
+ "name": "docs/plugins/netbox_config_context_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "68a3565de27944eeaaf30adc7026f2cabcbfcdfd1fe9fdc517449719333834d5",
+ "chksum_sha256": "75ed0e5cd64091a8cafd3b896da5228ded27a905832a82184f6c27f7a90db49d",
"format": 1
},
{
- "name": "docs/plugins/netbox_custom_field_module.rst",
+ "name": "docs/plugins/netbox_wireless_link_module.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "aecd88621418419ab133803c3d1d09f14a287508abfed9fd819514a2dca2c539",
+ "chksum_sha256": "9c74db49ca19ceada23c452ce76fdbbf6ad6cca6796c8ecde65843c0182cfd7c",
"format": 1
},
{
- "name": "docs/requirements.txt",
+ "name": ".gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "39988308e7ae66376608418d13d7979ccd50fcdc3e1c3439912ff9943c210ca3",
+ "chksum_sha256": "c0ac384e24827235e0b17021954d2b803c3da263038dfaa24967d3d40a86ea15",
"format": 1
},
{
- "name": "docs/changelog",
+ "name": "changelogs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/changelog/changelog_include.rst",
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d8a74a8989015f6ff8f2e9c52c07518770e91dd69c81b078eec816aafc8ab9a2",
+ "chksum_sha256": "bad0157e7ffee11df87f3adaa5dcfa3f1841e816a27cbcf5b866fa21f63ef1bd",
"format": 1
},
{
- "name": "docs/changelog/index.rst",
+ "name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1831a65ac37c6d38d1024a11f051b0fb19e95123b9372436ea5d6fd366fe8c54",
+ "chksum_sha256": "76b3301e977ee917fcfaa22b3ddde95ffba94e868707303ac015dd78faa77b85",
"format": 1
},
{
- "name": "docs/getting_started",
+ "name": "changelogs/fragments",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/getting_started/how-to-use",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "changelogs/fragments/.gitkeep",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/modules.rst",
+ "name": "ansible.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "77c91048d906e5cbd3a09cff4d53a02cb5f87014e854d209f3c482708ddb6a6b",
+ "chksum_sha256": "b5e219ab076b1e90de2c650d480fe0a4b8724f839ac95f5240ed64a80202dfbf",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/advanced.rst",
+ "name": "LICENSE",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "21fbfd64d3127309d605716700a7c223fd419c9fc7256414e9dd8ff480330a49",
+ "chksum_sha256": "605e9047a563c5c8396ffb18232aa4304ec56586aee537c45064c6fb425e44ad",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/media",
+ "name": "pyproject.toml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4cb7c45388fa048d16fee536d074fc5d802744f9711885475436f73cd41dd1a1",
+ "format": 1
+ },
+ {
+ "name": "hacking",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/media/advanced_dict.png",
+ "name": "hacking/build.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b0d011fef324a804f6cbb2778e65d69c6373c8123fac576d9f49072471a6e908",
+ "chksum_sha256": "c824f229eafb4ad8fcc24c800b6d08ad8af1c8ca866e9cdbfd543429b2a04a24",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/media/api_device_post.png",
+ "name": "hacking/local-test.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abdd29c2a0dc029c4aa6d4176b1a205dbc5d4d1f434d821e04b9a08a26175221",
+ "chksum_sha256": "3c4c16322033b5df7c08d9081098e11416b3bbdebede263ba84a92c4f043d815",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/media/advanced_dns_name.png",
+ "name": "hacking/black.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f4c6892f90e4b7a2ef8ce70fbe04c4c1cee8806d2aa37ffe09955045eb96bb2",
+ "chksum_sha256": "31b38a6d54cf7fd233d647760c6f9b695bde61762b09b0aba5899ea746d14248",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use/inventory.rst",
+ "name": "hacking/make-docs.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b28ca7e2a6cf1d0549a4eb2cafc901e3b2adfc449c3de6950bdf165daeb2b35",
+ "chksum_sha256": "eaf25c88bd9a0d8d8b583c8f65bed04ce553749fe2949e2e07063fc420b08bbb",
"format": 1
},
{
- "name": "docs/getting_started/installation.rst",
+ "name": "hacking/update_test_inventories.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a4a35f73d6e5c37ffbc2e117b4c11a1f9ebeaa78d93b9c36efca62080792c7ca",
+ "chksum_sha256": "c02406514a0a53c1328f5397a4df2aae6ed1096159a3a85db20e34b5c73acdbe",
"format": 1
},
{
- "name": "docs/getting_started/contributing",
+ "name": "tox-ansible.ini",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "6365ac7f012c2abf8fdda1781d0c36ec4f4e166ebd8442492914f5438ec0b9e8",
+ "format": 1
+ },
+ {
+ "name": "poetry.lock",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "37dc342a2eaa02e0d336865a0243ca94b245e3568731bd6c4d42eb62120b3e51",
+ "format": 1
+ },
+ {
+ "name": "requirements.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5dfa35ed19609e37e1629ce816373f8dd766b78c0e65d948bfa6e69da08cd090",
+ "format": 1
+ },
+ {
+ "name": ".readthedocs.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b6bc4a2db269d9e5a9c546a5badfcd26bd56c877c7540278cff7a8f0b870e993",
+ "format": 1
+ },
+ {
+ "name": ".github",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules",
+ "name": ".github/ISSUE_TEMPLATE",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/new_module.rst",
+ "name": ".github/ISSUE_TEMPLATE/housekeeping.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6fac4851042c6541d5cd1ed9ead7c1fc2690a142254a1d069fcdd535d5de272e",
+ "chksum_sha256": "6952213267de18d7a4656f4aa76a8fecd52fad264d38159074ce5fea2ebf3986",
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/architecture.rst",
+ "name": ".github/ISSUE_TEMPLATE/config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce74b06a64b3ab858744db7e940c2996aa102a159fcd97bec7191ee1ea78777f",
+ "chksum_sha256": "417cfe754f2372185d81c8ef435d8ade002ab0f5db9f7f917a9ac26ec7479851",
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/update_module.rst",
+ "name": ".github/ISSUE_TEMPLATE/documentation_change.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00c882e699b1094dc2996aa8a48bd9f824cd3e9ed97c626db7c4bd7df0af9c2f",
+ "chksum_sha256": "a1c372660a10ea2ce252341ddcf8f52b5faf3923b4d3ca2f864609450cfe3534",
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/index.rst",
+ "name": ".github/ISSUE_TEMPLATE/feature_request.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5158e3e659bafe06c7cd9d2897af6e7ac6209207d906fb4bb530b200b390832e",
+ "chksum_sha256": "4c042a9f4b7a1e6a8a3c7962b44e364f5b2cfe5242f17415bb9e7af94103561b",
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/media",
+ "name": ".github/ISSUE_TEMPLATE/bug_report.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c626f2997fc48e16250057238523be1a42788d29eb48755db176bd433c79e13a",
+ "format": 1
+ },
+ {
+ "name": ".github/pull_request_template.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "63489727d4197ef434809dfd6310bcb819a32864f5586f1b7f604338c3b86808",
+ "format": 1
+ },
+ {
+ "name": ".github/workflows",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/media/post_rt.png",
+ "name": ".github/workflows/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c5735f28bbce3e68418d44c9d799bfd9a546304ea826bc31e9da6604817ca51",
+ "chksum_sha256": "0c92711af925ff0e4c24e1173b14497783014373cb175d5b8533a1e75d632b37",
"format": 1
},
{
- "name": "docs/getting_started/contributing/modules/media/vrf_options.png",
+ "name": ".github/workflows/release.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c04ff654c85e611c7968264f1596973cd6f28b6d48a71efcafa5a22f5c498daf",
+ "chksum_sha256": "f60625361566d151303c75db858e6bdeec0d40f1caaf67c1cc832659b29285e4",
"format": 1
},
{
- "name": "docs/getting_started/contributing/index.rst",
+ "name": ".github/workflows/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08326dcd2935d4a33b028eb1dab2a5afda56afb802d9ca6c89e1a59c0c45afd2",
+ "chksum_sha256": "9fb9e9c4ecc4ba1220731997700fb2a0660be9cc29136b4310475138f2fd2e44",
"format": 1
},
{
- "name": "docs/getting_started/contributing/inventory",
+ "name": "meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/getting_started/contributing/inventory/index.rst",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "698639c0280fc8b9d8a8926baf2e53dbaff581600a4cc3f65cf9ecdc4dc19732",
+ "chksum_sha256": "60937475296509a4a4e56f81236215983395396771507b8ce1c7d231d401f1d5",
"format": 1
},
{
- "name": "docs/getting_started/contributing.rst",
+ "name": "meta/execution-environment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7029fde832d231a3111e5484faac6759377430100b288bc14f6b2670c6022d73",
+ "chksum_sha256": "0cd0b966cbb8fefb00f7122190d7a9b42dc5da517a8c5ba9d86e34c51bc3e43a",
"format": 1
},
{
- "name": "docs/getting_started/index.rst",
+ "name": ".ansible-lint",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "105990e8aebad3487a6e7174313bc26bc4138813416cd4c98c8157f158b6904c",
+ "chksum_sha256": "b43ecf9cad4057c044a233c4b4ac61c46f6171284e2760e201cc429b487d1eab",
"format": 1
},
{
- "name": "docs/getting_started/how-to-use.rst",
+ "name": "requirements.txt",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "24afa8389fadeb19111920ff8dedfad7bbbe0cf54c9fefa238c564b8809a39e9",
+ "chksum_sha256": "d003b5dde4117b9ef860b6d4fd67096a2909ea206f63116d5d46d1955fcc94f1",
"format": 1
},
{
- "name": "docs/index.rst",
+ "name": ".ansible-lint-ignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13a5ee7e9853fc07707a7cdc745cb837c7b2ee852328f4b4c7ebcf6e3acb881b",
+ "chksum_sha256": "c9f3858f2f46c33226d8494ece32bb03872c105e228fb32e56b4d4c342b924c9",
"format": 1
},
{
- "name": "docs/media",
+ "name": "test-requirements.txt",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "47f934aaf74138c7491a9fedb7e83b606f40334ed0ad8169e79de138d51933e1",
+ "format": 1
+ },
+ {
+ "name": "plugins",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "docs/media/postresults.PNG",
+ "name": "plugins/module_utils",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/module_utils/netbox_tenancy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12ce503f9c545a573466e75cf028f316afc893e41f056be1ce6838b44db49537",
+ "chksum_sha256": "50a1de4f8a2dd29d4ee73a75fa9ab8e3f5e444a5fc1935a8d09172a56f290d11",
"format": 1
},
{
- "name": "LICENSE",
+ "name": "plugins/module_utils/netbox_ipam.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "605e9047a563c5c8396ffb18232aa4304ec56586aee537c45064c6fb425e44ad",
+ "chksum_sha256": "368bdd616498612f33b9a3903abe22da7df375eb969be91d71e9c21e9fa34398",
"format": 1
},
{
- "name": ".github",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/__init__.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": ".github/pull_request_template.md",
+ "name": "plugins/module_utils/netbox_utils.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "63489727d4197ef434809dfd6310bcb819a32864f5586f1b7f604338c3b86808",
+ "chksum_sha256": "30169185e24d41d9bf843e1471609d2144e5687c171a828482a1a41eee98100e",
"format": 1
},
{
- "name": ".github/workflows",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/netbox_dcim.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ad6b8d8a7537c776e8579cbbf75800c7fc57b1d4ea2d56d39d36e03bb1ec4ff6",
"format": 1
},
{
- "name": ".github/workflows/main.yml",
+ "name": "plugins/module_utils/netbox_circuits.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "88328c699e965b13d17597a87ff4e41dfe2011913d3b0463b83996a2e3eb1fa4",
+ "chksum_sha256": "1e448439cc15b42ac16bf9e390741429fc6293dae59817e680dc5dae4de5683e",
"format": 1
},
{
- "name": ".github/workflows/release.yml",
+ "name": "plugins/module_utils/netbox_extras.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f60625361566d151303c75db858e6bdeec0d40f1caaf67c1cc832659b29285e4",
+ "chksum_sha256": "7caec2ff44747f93ef6f18b1201d4b8abd1ba455f41db4a902d859e819bc99c2",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/module_utils/netbox_vpn.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "af3a42c86663730a585e2853c8bfb36b071b8c75bc33f071579d5471af5b3219",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/housekeeping.yaml",
+ "name": "plugins/module_utils/netbox_wireless.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6952213267de18d7a4656f4aa76a8fecd52fad264d38159074ce5fea2ebf3986",
+ "chksum_sha256": "e962fa78336a3c28fb139d8ef1485b74451d06bcad5bb779d849344e74303d84",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/config.yml",
+ "name": "plugins/module_utils/netbox_virtualization.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "417cfe754f2372185d81c8ef435d8ade002ab0f5db9f7f917a9ac26ec7479851",
+ "chksum_sha256": "4888905fe5f076ebb504c9259b5a7a04975265001c91daab7a144772f6d331cc",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/documentation_change.yaml",
+ "name": "plugins/module_utils/netbox_secrets.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1c372660a10ea2ce252341ddcf8f52b5faf3923b4d3ca2f864609450cfe3534",
+ "chksum_sha256": "b12f0853325dd27f38948cd9da65b72c563ce46612f8cf55cfa5806b1ab784c6",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/bug_report.yaml",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/modules/netbox_virtual_disk.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c626f2997fc48e16250057238523be1a42788d29eb48755db176bd433c79e13a",
+ "chksum_sha256": "213ca5145932f7b848bbc3d93ed282dff9153c50727f51f4747137135d5e2e3f",
"format": 1
},
{
- "name": ".github/ISSUE_TEMPLATE/feature_request.yaml",
+ "name": "plugins/modules/netbox_front_port.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c042a9f4b7a1e6a8a3c7962b44e364f5b2cfe5242f17415bb9e7af94103561b",
+ "chksum_sha256": "3ca3d33bfbee6d950e92ec23cd92876e4d6ef8ebb22bc82118ea9a2ff3ebb527",
"format": 1
},
{
- "name": ".yamllint",
+ "name": "plugins/modules/netbox_config_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ef09bf0cb4cee6f716aa4aab01a4acb6eeaa3245b9d938394b21afaa0fcbdbd",
+ "chksum_sha256": "6c4e8fbba0a4a370f572ce87c4f49559476b2adf1fb75ba1c00ea8c11dfc8afc",
"format": 1
},
{
- "name": "README.md",
+ "name": "plugins/modules/netbox_manufacturer.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2e837942f3d96fbf5ac0daeb978d7eb1923afff1e9ec95d24c7b3da6bafa83a",
+ "chksum_sha256": "7562792a9f7bb0cda5180d4ef2c6127ea6c5250c01bc2a3f8bb0fcb681ac836e",
"format": 1
},
{
- "name": "ansible.cfg",
+ "name": "plugins/modules/netbox_wireless_link.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5e219ab076b1e90de2c650d480fe0a4b8724f839ac95f5240ed64a80202dfbf",
+ "chksum_sha256": "6ff0e8be94d254e8d074e45954755f315fd9edf9137661826abd2b17cf831460",
"format": 1
},
{
- "name": "plugins",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/netbox_device_interface.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3140aed168eaa5609f6d4f3246c671b203adb00576ae7ecdb38e6ff1178cafb",
"format": 1
},
{
- "name": "plugins/modules",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/netbox_console_port_template.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3bad97e5cefb8d56f784bf9e8f2dd7b8c6fabbf737e1acb4e8e0d753bf9903cb",
"format": 1
},
{
- "name": "plugins/modules/netbox_virtual_chassis.py",
+ "name": "plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bd5e27a261c43cb5028367157b89b8370064bca9c65923c4deaa790ed133e252",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/netbox_custom_field_choice_set.py",
+ "name": "plugins/modules/netbox_module.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9523ed8f99eccdd36cc52fc94ccd514d17d6f6bca0aeddf4debe2a781e662697",
+ "chksum_sha256": "05a271bbb128fd2083abf52cf65df145b416340f8020ca1fae0567e1bed2a45d",
"format": 1
},
{
- "name": "plugins/modules/netbox_module.py",
+ "name": "plugins/modules/netbox_journal_entry.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c913a3e3f6aab49d84bedd805ce6b14908424b14383812415ea234913c84887d",
+ "chksum_sha256": "e4d8f7d87a363368d9fe18bda8c77392851e98e9ac46161f89dfa6bacdecb1ed",
"format": 1
},
{
- "name": "plugins/modules/netbox_inventory_item_role.py",
+ "name": "plugins/modules/netbox_site.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1852194cc4a32c5d1ba65d6d7d9af0d7a356a907d87f109716c6937581a8c200",
+ "chksum_sha256": "142edd8247ffb608050e9f140fc83dac895a8a3bbfb736483b97e1d036a2de2a",
"format": 1
},
{
- "name": "plugins/modules/netbox_device.py",
+ "name": "plugins/modules/netbox_power_outlet.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e0cffe878e618b9d77f2c280eaba8e5194034719100cdc77ca69abf8c4eff8c",
+ "chksum_sha256": "05f934116bfba3947288f4f3eef2dbaf705bd077e3bd6fbf15d4937c0b4288d4",
"format": 1
},
{
- "name": "plugins/modules/netbox_cluster_group.py",
+ "name": "plugins/modules/netbox_device_type.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d74dabfe5af59627b1180240a065b01dd0dfc9e212bac12dcca12953ba62ac5",
+ "chksum_sha256": "f419cbd269160a7fa6b6eda4929cfb99b1e7245ce7db44447c8a3eaa9885c231",
"format": 1
},
{
- "name": "plugins/modules/netbox_rear_port_template.py",
+ "name": "plugins/modules/netbox_power_port.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f734ec9a2b49ad1fc0db69e9ce28312cd094b80f63601e2b9d11ee1008754ec8",
+ "chksum_sha256": "810c2aba9e965ecd4a8e0f437e330a5c2d068b06c384c8f3777d1c09f075aa74",
"format": 1
},
{
- "name": "plugins/modules/netbox_cluster_type.py",
+ "name": "plugins/modules/netbox_circuit.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b8e0c7920a4ea33c1afaf8a5b0389e73d57f1b85e08665ed544e8b6eee72428",
+ "chksum_sha256": "9fd9f64d9887eca8ce2a128e3261dd9a900f7a5345470c8e801980680648b480",
"format": 1
},
{
- "name": "plugins/modules/netbox_contact_role.py",
+ "name": "plugins/modules/netbox_custom_field_choice_set.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6de8f2911e87114e1370a6605ab3370b1246cc6fe2a65ba3e04744d2f128e85e",
+ "chksum_sha256": "a4ce60e3b58f719b3e4f577404ed14a4876033850bd83e55ce29371a3e8c45aa",
"format": 1
},
{
- "name": "plugins/modules/netbox_service.py",
+ "name": "plugins/modules/netbox_circuit_type.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e8ea746182e0b927bb8ff2a79ab08271c9dec4f26d099c53d64f786cc8d66e3c",
+ "chksum_sha256": "6636665daf3094c1dfcf1ad6e406f1c545e144d83157d945e28f91864de7bc92",
"format": 1
},
{
- "name": "plugins/modules/netbox_device_interface_template.py",
+ "name": "plugins/modules/netbox_asn.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5eedebc3c39b16aa7c0afef25497ef00aaaadf384c9458db0172d8d60d1b1fb8",
+ "chksum_sha256": "2ea2240131475f5a03e062f7510517cae8d32f2039eee611008cd24ebde770cc",
"format": 1
},
{
- "name": "plugins/modules/netbox_device_role.py",
+ "name": "plugins/modules/netbox_power_outlet_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb5b536ce0d452b4fa8b67d65f8ac2c64de2b0095161b1a8e931565360b4be34",
+ "chksum_sha256": "890b7fb0444b514cd5e3ece079a507b861dbf2891c5ac44770dc36577e975d1a",
"format": 1
},
{
- "name": "plugins/modules/netbox_vrf.py",
+ "name": "plugins/modules/netbox_front_port_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f99d2df272dd6e25db3cb1e8c78c3917f5788da3eccf2826295833a9eade58c",
+ "chksum_sha256": "35b11c253d567c858b7927bc95c9fdae68de7019657021e949536ddd5e4b41bd",
"format": 1
},
{
- "name": "plugins/modules/netbox_site_group.py",
+ "name": "plugins/modules/netbox_device_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ac5d367b28cefebefcfd5464fe3e6564e451f14ae57f63f0b5d54daa8a87b65",
+ "chksum_sha256": "83d9795f3371255a833f3944231b4a5da5ab99cfe4f20e76483ff49856b845d5",
"format": 1
},
{
- "name": "plugins/modules/netbox_l2vpn.py",
+ "name": "plugins/modules/netbox_service_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "19cd42b91c64b572b6ac1b015c642e334ddaf71fa40d9918bb55d1571a8777b0",
+ "chksum_sha256": "04a40a22ba361900ebb90ef35e38972a4db791f2ac4f7f3328de0b477809a47e",
"format": 1
},
{
- "name": "plugins/modules/netbox_circuit_type.py",
+ "name": "plugins/modules/netbox_module_bay.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce1798885b4fa32481cf31c7a476d95a12f6bd93450a798dbe6c99b527ca5578",
+ "chksum_sha256": "a7ad78cf8ad94dee64058b6122ddbc6b7520dfb51d1cab7a75b676f0c33bc45f",
"format": 1
},
{
- "name": "plugins/modules/netbox_tag.py",
+ "name": "plugins/modules/netbox_power_port_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fb7f5f72afff9c96871e51eedd0b228f9f4e21912e26588d64806b4fdcbca654",
+ "chksum_sha256": "970a4d5f21f065104a6585e48d6076a793cde0b87f36974af3019d96b9629e92",
"format": 1
},
{
- "name": "plugins/modules/netbox_device_type.py",
+ "name": "plugins/modules/netbox_console_port.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0c858de59b7a25dbd1c355136f877409100cc71e600cf91ccd3f9e05eb10bf6d",
+ "chksum_sha256": "22d0691edb3978d72f23f4131ddd8520aa7430c43ccceee188e204d39a4cffcf",
"format": 1
},
{
- "name": "plugins/modules/netbox_vm_interface.py",
+ "name": "plugins/modules/netbox_fhrp_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a3691f62ab8f8dd7ff81f1150314c2411eabd9b336e6f8c34d939d8e2c5dd5b",
+ "chksum_sha256": "44a5d39cedb28a79b2b4caea15025abbeca990b5d19e139eadf56bebff50925e",
"format": 1
},
{
- "name": "plugins/modules/netbox_webhook.py",
+ "name": "plugins/modules/netbox_config_context.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fb5d375b6d89f825848959de2a7d2af42a45242b1679232f8f53bb95f19f2b48",
+ "chksum_sha256": "82856c9866618126bb9767651a6a86db8c52240247a761515dde467b621cd140",
"format": 1
},
{
- "name": "plugins/modules/netbox_front_port_template.py",
+ "name": "plugins/modules/netbox_l2vpn_termination.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30151c6e58e9cc87b8d1177d87db2e9eaa6effa6337a3bd6c530ac0f7ed3c230",
+ "chksum_sha256": "19f273e9c69300f75685605c07de32692829c4397913cf2279cfca7da0cda433",
"format": 1
},
{
- "name": "plugins/modules/netbox_power_outlet.py",
+ "name": "plugins/modules/netbox_tenant.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d0231a96945419f3f12635d779707ca4d79e05800da0cc924f1728cd6e2774f4",
+ "chksum_sha256": "4d4517859d7f1fb6492e58cabb50264d5c7fa8b3c3f75030a89989632fae44f2",
"format": 1
},
{
- "name": "plugins/modules/netbox_ip_address.py",
+ "name": "plugins/modules/netbox_cable.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9364f34ebc2159204954a0137a707301488090617420d09f43f316c0ee1b507",
+ "chksum_sha256": "bdcb740914e8b63db284b86b0dd92499526c668ecc4a9385482d268bbc147920",
"format": 1
},
{
- "name": "plugins/modules/netbox_aggregate.py",
+ "name": "plugins/modules/netbox_power_panel.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "da9b8bebd84958d2339492517fc40a550799214d0f8965b68d4002d38a34c0a0",
+ "chksum_sha256": "bf77eeca2ff7495d3dbea12f0848889d332c0c3ad7323dab0f6cd21e536c9f89",
"format": 1
},
{
- "name": "plugins/modules/netbox_cluster.py",
+ "name": "plugins/modules/netbox_aggregate.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b972e239e6442f94214d6f09c9954598e35e7ee3f109b18421734f86ce1fc5a",
+ "chksum_sha256": "f6986e99d152086f462a477c4796585dd6cbbe22258f7595335113a5e4eef0cf",
"format": 1
},
{
- "name": "plugins/modules/netbox_fhrp_group.py",
+ "name": "plugins/modules/netbox_webhook.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "228d08a624bb22b11664a04099ad53d940887609098fa0f0739bac7fb6b4ac2f",
+ "chksum_sha256": "1060bd63ba3d1fc3243fa7585140dfc3030ff0c52dfbf88d75c45b7d8ea99818",
"format": 1
},
{
- "name": "plugins/modules/netbox_power_outlet_template.py",
+ "name": "plugins/modules/netbox_ipam_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "baa096a7af6d17ec2212ee6fee0bc38568260fed4f775960d9fef9945d60148a",
+ "chksum_sha256": "bdae208439bee3f22aecba61e93ce377780ea02e7c39ba3f6df565240d4f6c5f",
"format": 1
},
{
- "name": "plugins/modules/netbox_tenant.py",
+ "name": "plugins/modules/netbox_power_feed.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "96421201ad8e599a976af27bcb95086b71f15d0b44dd60a0f8d4c054c47f5b6a",
+ "chksum_sha256": "be2e18e3463e0be77686b1a9b9b23ab674bc65d8b76757bd08d18315b613ce9d",
"format": 1
},
{
- "name": "plugins/modules/netbox_rack.py",
+ "name": "plugins/modules/netbox_prefix.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b9eea63b31a469f99c96bd9d0ec7a62c712f930467ad925c84cb976320b1ef3",
+ "chksum_sha256": "7219271669ce842f93c91a7ee8b6600f798b4d9806e4a13a314ae077564b826a",
"format": 1
},
{
- "name": "plugins/modules/netbox_contact_group.py",
+ "name": "plugins/modules/netbox_site_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e85c8a0ebd547ec383843e85a47ae589fe5115106122b14566dfa8aae1c35de",
+ "chksum_sha256": "81d354cd6dcdaaa0e4267e628d0e28a20b24d3313edecf7e8cc8b0615028ddc0",
"format": 1
},
{
- "name": "plugins/modules/netbox_export_template.py",
+ "name": "plugins/modules/netbox_provider.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9210933364791e025131d0d43b99906af15e4937351ebc0dd2e19d718c992be8",
+ "chksum_sha256": "d52039cff5a155d8bd27872ef27902356d6cfc3dd56121a9fdb1452a09848ada",
"format": 1
},
{
- "name": "plugins/modules/netbox_asn.py",
+ "name": "plugins/modules/netbox_l2vpn.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "45d12eb4c19b82f00d7852ba4f6397c8991394bb0a6c8efcda7d0fb5b15033ab",
+ "chksum_sha256": "8af6e4df049e34038f1ebadd11f337eaa24e26fbcf8a86834c4bf3ef79fffb56",
"format": 1
},
{
- "name": "plugins/modules/netbox_power_port_template.py",
+ "name": "plugins/modules/netbox_rack.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bfd2823032ae4343c7f2a76c4ad066756f469de058a6f3b716b4562a8f179baf",
+ "chksum_sha256": "8cbe5d65ac1770b62cbaec8f7a5bc5f7aa86268e29d3d04975311e27cb38ed28",
"format": 1
},
{
"name": "plugins/modules/netbox_fhrp_group_assignment.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a38961faaadd6b660adf020a91dae8cb26c144a1f6deeb47013eba3f6cc63251",
+ "chksum_sha256": "a056bc88bba0fed5eaf823a6f8b6bbb11db2de8b7de044dd9e97c843b11ebbcd",
"format": 1
},
{
- "name": "plugins/modules/netbox_location.py",
+ "name": "plugins/modules/netbox_console_server_port.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "99c24c9c1837625b5f9ef70aad85ea7e67595f70b6b2a5d8eee16c40bf957664",
+ "chksum_sha256": "5bfcbf1b1403757095db86810251a73d71d761d7c7e12dd21870bbc80d80bcc9",
"format": 1
},
{
- "name": "plugins/modules/netbox_rack_role.py",
+ "name": "plugins/modules/netbox_rear_port_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "29ca3073512679336f583b9234da5b899912c4f8e05baf40fb840503d7fb4adb",
+ "chksum_sha256": "0774a138a14545017bd78766e461c5f723991aa4c54804eeeca96ac09c3bfb4f",
"format": 1
},
{
- "name": "plugins/modules/netbox_config_context.py",
+ "name": "plugins/modules/netbox_device.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e0f90a1686acd65c632f46b45efa89251e293937423a14bcd0d724a4a46b6d32",
+ "chksum_sha256": "eba8b1469a775d0793b28c259305811172d900f4cad500f826c6012036d67fc3",
"format": 1
},
{
- "name": "plugins/modules/netbox_device_bay_template.py",
+ "name": "plugins/modules/netbox_rir.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d0249f5e6f069d400301c90fc44ae2c5421c9e0cdac60e25731c6f8e65a2ebaa",
+ "chksum_sha256": "623f518cee58aad0d0f8fa70ccbcc23945e9c67dfcd7722004f7958ea4d19456",
"format": 1
},
{
- "name": "plugins/modules/netbox_module_type.py",
+ "name": "plugins/modules/netbox_provider_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f28401a1e8e43e89470a5000eac0e9de2472d291568bd28358a657653df89dd",
+ "chksum_sha256": "e7d61d14305709e298ae538853f531c2fcdf7704bfd5c3bc0d5a2d9bf1780983",
"format": 1
},
{
- "name": "plugins/modules/netbox_rir.py",
+ "name": "plugins/modules/netbox_vrf.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80e822b9412b770273a71f71a65dbb3851641874dea8c5b1259b4e7b83dd85db",
+ "chksum_sha256": "3e118e744fa36ff6b96694c90384ce092eff62534a0b1007b91049d55580e2ad",
"format": 1
},
{
- "name": "plugins/modules/netbox_tenant_group.py",
+ "name": "plugins/modules/netbox_inventory_item.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f1cf231347052f7454411b155525dcf855cea758a5864c4e31673125a5e3cc2e",
+ "chksum_sha256": "b486dc3dcf750bc36bc29e79bb5ef756f57a2970caddc97485fd765fade9489c",
"format": 1
},
{
"name": "plugins/modules/netbox_console_server_port_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "60ba6f1a53543ea3ed0a4bbfad114cc66d199f8ff61957188795388ca4bd81bc",
+ "chksum_sha256": "67d4f7bd5f7c75842829388993cc7ce4a94c183a470baa5f0478b15e1337857e",
"format": 1
},
{
- "name": "plugins/modules/__init__.py",
+ "name": "plugins/modules/netbox_circuit_termination.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "625f56c8b99f27b151f05827a0a3a423688d753bf7b051d07c030eb5f302dae7",
"format": 1
},
{
- "name": "plugins/modules/netbox_wireless_lan_group.py",
+ "name": "plugins/modules/netbox_tag.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "660a54257cf8c3b0d3153853b517b9a47550bd5b3f21e88eee29c75fe18d1ac2",
+ "chksum_sha256": "b57d9ba9911fa979987d82027af3ae416d0a54bae21559f0346d9c6fd9160c6d",
"format": 1
},
{
- "name": "plugins/modules/netbox_contact.py",
+ "name": "plugins/modules/netbox_device_interface_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8068913242d56a40b186be127c577e11a437dd407a41e5858b7fb9f9fb8e048f",
+ "chksum_sha256": "a54862fefebb77674e3bc78d485eeb8a2cb5d0b8f562023999f8ccacaf8543d9",
"format": 1
},
{
- "name": "plugins/modules/netbox_virtual_machine.py",
+ "name": "plugins/modules/netbox_module_type.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fbab9a283047c291bdd92de7eaddfa675917957875b02b82190b64633c907885",
+ "chksum_sha256": "9b50044bae1b05dd580bf6e7620aaba2e5a74f50b271fcf74aa45de46dcda3ae",
"format": 1
},
{
- "name": "plugins/modules/netbox_module_bay.py",
+ "name": "plugins/modules/netbox_rear_port.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7cf07b320895fd9106e2059d2f5c5a93dabbbc3c148ad00e08cba3930babfe49",
+ "chksum_sha256": "2a7278ac9b53aec8726f6110c6e336aea6ca8e8ddc06a9c41aa9e98ea53d31e8",
"format": 1
},
{
- "name": "plugins/modules/netbox_wireless_link.py",
+ "name": "plugins/modules/netbox_vlan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6af33a80fac24b61dc5262cebe8bcdced717eaffd4f89ce4c4e69c2bd66928de",
+ "chksum_sha256": "771f17cfdacf3c1dd13f25c9a341b4c4716396b5ea0e8c8997d25077d7856e31",
"format": 1
},
{
- "name": "plugins/modules/netbox_service_template.py",
+ "name": "plugins/modules/netbox_tenant_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce5f9655c07d5c21448c5e8b420de6d1a066f976095cb68a3354ed13886ae906",
+ "chksum_sha256": "d718548666a1b6ae349c9175f39935bad5af259704a97eb415ad0b6ca273ffc5",
"format": 1
},
{
- "name": "plugins/modules/netbox_rear_port.py",
+ "name": "plugins/modules/netbox_contact_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8ddc6f163389ca5de162ba31eb9bf1a7ddda49e2de9fa5416c5b82377fead3a",
+ "chksum_sha256": "385d48ba1d4b904e1caf01af3e140410b7ebc8b5666ba69a1743eba4044578e9",
"format": 1
},
{
- "name": "plugins/modules/netbox_circuit.py",
+ "name": "plugins/modules/netbox_custom_link.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c578a92b541dc67fa7ddd1fcb3334b10b64ca1d0a135ba76d44c8e731c15bcea",
+ "chksum_sha256": "026a4cd881bae68d2bcde93a69620441905b473d1e6a60cdde110f2bf1db094e",
"format": 1
},
{
- "name": "plugins/modules/netbox_device_bay.py",
+ "name": "plugins/modules/netbox_vlan_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d57b68076b63835ed121f47c904888304b6f6266d84569d170f4784f128639f",
+ "chksum_sha256": "ca6bc993ddb97d20e33461b6192e583a77c5e33901455dd1b6133a6b865476d7",
"format": 1
},
{
- "name": "plugins/modules/netbox_provider_network.py",
+ "name": "plugins/modules/netbox_cluster_type.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5321b303c82d546eebf10d796c4dbda56cda68b261539ae90315e0d9785cb987",
+ "chksum_sha256": "c8e9eaa064b4421aa51a5bb3128d72c277891d08305c730450177e55e12eb1d8",
"format": 1
},
{
- "name": "plugins/modules/netbox_console_server_port.py",
+ "name": "plugins/modules/netbox_platform.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50f33803aab89aec74597c474c0c6daebab488b26f9a55a724f33244b2f0e31a",
+ "chksum_sha256": "287aec55bb1e282c54d9f1ff68dd9f3df05761c19054ba817f5cd8a2e80aed38",
"format": 1
},
{
- "name": "plugins/modules/netbox_virtual_disk.py",
+ "name": "plugins/modules/netbox_inventory_item_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bf88ca47c7f4faa4af71b0d558c3b099c70e756f87cd23637d6954f597f04110",
+ "chksum_sha256": "50c6ef2a713ca82ddf49d34b0c3093e6c01c3048b215ef0ef891906c333b1529",
"format": 1
},
{
- "name": "plugins/modules/netbox_front_port.py",
+ "name": "plugins/modules/netbox_contact_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1948a47d56177ae2929d2e3f74c278d9bd105978b915717e082af4e7ac435ec5",
+ "chksum_sha256": "d83cc16f31751d1ede538d6b6f6be1715713f4c9973c198e805ad24ff580c3ef",
"format": 1
},
{
- "name": "plugins/modules/netbox_l2vpn_termination.py",
+ "name": "plugins/modules/netbox_virtual_chassis.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4449989833f616fad444e2a36883f8625f98a9f25af6e6f6e3d3e194c4c53ce9",
+ "chksum_sha256": "645e5c160dce736090188005632028848087315b802e58fdce8fb0feccc17033",
"format": 1
},
{
- "name": "plugins/modules/netbox_circuit_termination.py",
+ "name": "plugins/modules/netbox_cluster_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "831c5bef092967e002c612dcd546efd519c7e065e5c520c672ccb345136244ab",
+ "chksum_sha256": "99dfdd55163f21679ad39dcbe39906dc2cee1925214f547e48a7784ee2d41e1a",
"format": 1
},
{
- "name": "plugins/modules/netbox_vlan_group.py",
+ "name": "plugins/modules/netbox_wireless_lan.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3182c6ffd3aa41b49a8719bfc3516835f10d243342ae38f04a00fba80150a8b8",
+ "chksum_sha256": "46df15ad06bd919abaa71e0bd5b82aaa00d639246ad49fb548cb098c087371e1",
"format": 1
},
{
- "name": "plugins/modules/netbox_site.py",
+ "name": "plugins/modules/netbox_export_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a97f3d426a08f49df703389bd36e99df062531b93bb2eec9e28b3dcdb963c7d7",
+ "chksum_sha256": "69d6d720adfbf91c1a52374338625267752eb7c394b160da8601fe43d4d13b65",
"format": 1
},
{
- "name": "plugins/modules/netbox_inventory_item.py",
+ "name": "plugins/modules/netbox_custom_field.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "271c9aa0834b7fb854b491a7daeeb6ae51cbda1eee6bc2fb9fa2ded6e8c3b280",
+ "chksum_sha256": "c4e20b77b7fafcab6b08f884044702deb67b48cd5db8f65331b03858f38e1c26",
"format": 1
},
{
- "name": "plugins/modules/netbox_power_feed.py",
+ "name": "plugins/modules/netbox_location.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3db0ec1ce6dbffc8fa03fba804554db49833c20d8202a27e577774ca35c105b4",
+ "chksum_sha256": "46cb99dfe1c8580d4b5d76a09f41b78dbe3f59e8fd6b037b204fba3e299d7750",
"format": 1
},
{
- "name": "plugins/modules/netbox_power_port.py",
+ "name": "plugins/modules/netbox_device_bay_template.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c10b0bbf5340c13c1dc27378b169904679c146cceabdfbff0431540ecdc0cb75",
+ "chksum_sha256": "78b9cb6d4d613b317adad86574dfb910d62177f9394ae9c093e8913ce5b09abc",
"format": 1
},
{
- "name": "plugins/modules/netbox_region.py",
+ "name": "plugins/modules/netbox_contact.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "22f0fcbfeaeb68387b1f10df1338c4a47915b4f4c1abb20414cdb288e0e359b9",
+ "chksum_sha256": "20f811d01fb87b4b207d873c1ce1afa1a5e202d140ed6e1559782aafa22d5722",
"format": 1
},
{
- "name": "plugins/modules/netbox_console_port_template.py",
+ "name": "plugins/modules/netbox_rack_role.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2f290656792e035efde8a5ecd0af70074a8926c5201f0dcc265a4da448bb39ed",
+ "chksum_sha256": "b8adaac5cdc7ebdcdfa35adc9e6be3f781706e2b0cdeffc26313dba4ceea3be0",
"format": 1
},
{
- "name": "plugins/modules/netbox_console_port.py",
+ "name": "plugins/modules/netbox_vm_interface.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f6fa00dd3919c60835a614be01a910e0d6f55da44f786e505a9c005d0ee0644",
+ "chksum_sha256": "6a7d1475ca5a998ed2903b62d17b83622c98a8dcd7925096d091477c9ee28d5d",
"format": 1
},
{
- "name": "plugins/modules/netbox_custom_link.py",
+ "name": "plugins/modules/netbox_virtual_machine.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ecd977417da31442926eddd59402a4577071dd250ed9e2e2b25969560fa6c968",
+ "chksum_sha256": "a7e34783a4b677503cef053372c015c78daeaf863132c725da18b5db82212434",
"format": 1
},
{
- "name": "plugins/modules/netbox_power_panel.py",
+ "name": "plugins/modules/netbox_region.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6b5fd67d569c845312ee6fa677be80baf6ba8867a08cbb9cead057892f304607",
+ "chksum_sha256": "4dc8b9cbcd390de75587ce141d16a5424259306e90dc604869d90dca3c13345d",
"format": 1
},
{
- "name": "plugins/modules/netbox_vlan.py",
+ "name": "plugins/modules/netbox_service.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ad12d8175ab978f7c7e739f64fc44f478f86f378fb02b10aeb1182bafaa4ad12",
+ "chksum_sha256": "1633413ba3a58551f1b0d47077d726726dd7f021241efdb759bd1a25d9ef0e98",
"format": 1
},
{
- "name": "plugins/modules/netbox_device_interface.py",
+ "name": "plugins/modules/netbox_rack_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ee29f4bfd532625d90eb7e4e2a4798ba6d6a66cbd279c177fad747c657f2178",
+ "chksum_sha256": "868c00a12cb69124a62f12ee70dac8c9ee5fc6e7ac300405f121bd8310c08c43",
"format": 1
},
{
- "name": "plugins/modules/netbox_manufacturer.py",
+ "name": "plugins/modules/netbox_device_bay.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "872a353cec37ea7c4d8eee2de211f81ea1e74f1debb2544c3d53c97fad576cde",
+ "chksum_sha256": "9c3f2d1215abb817dd04ba6515530b3114fe290262f734eee3f1bf0d80ea6113",
"format": 1
},
{
- "name": "plugins/modules/netbox_provider.py",
+ "name": "plugins/modules/netbox_ip_address.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fef797794b81ff82648624931815433e7c9ca2be8cc4bc69d9248ac7b43926e0",
+ "chksum_sha256": "bfb2a61a2716406a54aafb2817d8501661412575ef8fa03a92f73e363323107e",
"format": 1
},
{
- "name": "plugins/modules/netbox_prefix.py",
+ "name": "plugins/modules/netbox_route_target.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "091f9f737a07daad5c3e35d6cbb100f58e32c01099fc16ad3667aa353fa42014",
+ "chksum_sha256": "80e0c903f3c8c84d6f80ef78e5212bd5469d1e8fcce822c745d84e3c53300c09",
"format": 1
},
{
- "name": "plugins/modules/netbox_cable.py",
+ "name": "plugins/modules/netbox_wireless_lan_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5a085df4f078e073421d4163306d9c505db961850629d28cd5ac7bdcee395480",
+ "chksum_sha256": "f421c362d197fc435a38fc22205968857dff44c42daa7d9a13214d9cec7725ee",
"format": 1
},
{
- "name": "plugins/modules/netbox_wireless_lan.py",
+ "name": "plugins/modules/netbox_cluster.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0be96fc84fdb21b809d4d8dd85888e4c2e6cd65ba1750cd5e523c57690f9945d",
+ "chksum_sha256": "e3f39e39d7e59888141e4325a3269e324c2f216c08881e22596d0cd3d03c1b99",
"format": 1
},
{
- "name": "plugins/modules/netbox_route_target.py",
+ "name": "plugins/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/inventory/nb_inventory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9bb0ce2ad909ff7d9ad719b303943489ef609a8480eaf9f16fba654028486c7f",
+ "chksum_sha256": "9942727128363d0a785781e14dd23051e434464adc01463ef58dea3ea35bdbc3",
"format": 1
},
{
- "name": "plugins/modules/netbox_config_template.py",
+ "name": "plugins/lookup",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/lookup/nb_lookup.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed8ae46bc9f109bc86fe5b32d705749710d99f7f0c07a5e7abe25649338eeaff",
+ "chksum_sha256": "50eed1cbe9447594e74b50a9e63079489e16dcf3c9d11ca51d3c81da915b0fd4",
"format": 1
},
{
- "name": "plugins/modules/netbox_ipam_role.py",
+ "name": "plugins/doc_fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "plugins/doc_fragments/common.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9d8a0f0ef133e7fc3753981e2dd3db572ac34a3fa6077c5068cd0d7cce6cb193",
+ "chksum_sha256": "b4d6dcd47f317aeaf14303eca8299617b4c493b3fe233b30c6bfeb4eda9fdf46",
"format": 1
},
{
- "name": "plugins/modules/netbox_journal_entry.py",
+ "name": "tests",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/config.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3dfa3404b94a5d0860855b971b44daec97bb849773d00eb9f0756c6a4ef7b63d",
+ "chksum_sha256": "71ff5426485cc74b3c360a0941dc40661ef17ffc8983435481f55f9f4143ae4f",
"format": 1
},
{
- "name": "plugins/modules/netbox_rack_group.py",
+ "name": "tests/integration",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/integration.cfg",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eeb755f6d2df6171075f79b8c7d27f4f7c77eb2d469ff9c1911ed68f2ac21eec",
+ "chksum_sha256": "b5e219ab076b1e90de2c650d480fe0a4b8724f839ac95f5240ed64a80202dfbf",
"format": 1
},
{
- "name": "plugins/modules/netbox_custom_field.py",
+ "name": "tests/integration/render_config.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a3827e864ab004396ee154f798ebd875bcfb3e9505d623229bdb91b94930d476",
+ "chksum_sha256": "30d66afe6eb2e85dbbcba336f2f49db90499b8f6e2a382139a8ffba2ba3aaec5",
"format": 1
},
{
- "name": "plugins/modules/netbox_platform.py",
+ "name": "tests/integration/netbox-deploy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04dc885698a9e683fb749114d049e2e08d11f6e3480d7db8db1a4d1fea6a966b",
+ "chksum_sha256": "c1f1d876d55d22c6d9315bd4e5b5a2cd99ef811d6278d76177ef5c4428b59f78",
"format": 1
},
{
- "name": "plugins/module_utils",
+ "name": "tests/integration/targets",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/module_utils/netbox_ipam.py",
+ "name": "tests/integration/targets/regression-v3.5",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/regression-v3.5/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/regression-v3.5/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "368bdd616498612f33b9a3903abe22da7df375eb969be91d71e9c21e9fa34398",
+ "chksum_sha256": "d353cfb6cab6f4a778a9fcbb44ab5730d1150d866c96bdea997014d39fc1c831",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_tenancy.py",
+ "name": "tests/integration/targets/regression-v3.6",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/regression-v3.6/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/regression-v3.6/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50a1de4f8a2dd29d4ee73a75fa9ab8e3f5e444a5fc1935a8d09172a56f290d11",
+ "chksum_sha256": "d353cfb6cab6f4a778a9fcbb44ab5730d1150d866c96bdea997014d39fc1c831",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_circuits.py",
+ "name": "tests/integration/targets/inventory-v3.7",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v3.7/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e448439cc15b42ac16bf9e390741429fc6293dae59817e680dc5dae4de5683e",
+ "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_wireless.py",
+ "name": "tests/integration/targets/inventory-v3.7/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e962fa78336a3c28fb139d8ef1485b74451d06bcad5bb779d849344e74303d84",
+ "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_virtualization.py",
+ "name": "tests/integration/targets/inventory-v3.7/files",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4888905fe5f076ebb504c9259b5a7a04975265001c91daab7a144772f6d331cc",
+ "chksum_sha256": "2ebe08e8fa91b5d960239258dc4e38a0ede81f73da3b4f55f07c6db61177cb4b",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_secrets.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b12f0853325dd27f38948cd9da65b72c563ce46612f8cf55cfa5806b1ab784c6",
+ "chksum_sha256": "71f03a61506d106d1985bf9c237f78b900dcbd1f360558a23e6bf5fac6e77e17",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_vpn.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af3a42c86663730a585e2853c8bfb36b071b8c75bc33f071579d5471af5b3219",
+ "chksum_sha256": "13df164774b2a43a0b6cca377c3e17869a0b1c5394dcc18a3c67cf271fc3c618",
"format": 1
},
{
- "name": "plugins/module_utils/__init__.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "af1bb368a12123645f179057976ab6ad11049c655c4f96f18185f1c2d4c53705",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_utils.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1c2b16cae39cf62fde683ea909f7d0b7ec75a5f8d1f8e34b72a6ab7eb5bb54b7",
+ "chksum_sha256": "abe32d82135b4cb211e594c2a789bea69427808de1a261f016bd68d82e31f6ad",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_extras.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7caec2ff44747f93ef6f18b1201d4b8abd1ba455f41db4a902d859e819bc99c2",
+ "chksum_sha256": "66fef6cc9a2d8d69a7d2eea0b52cfef5d6d0bb7681f11580065bab398f7ee441",
"format": 1
},
{
- "name": "plugins/module_utils/netbox_dcim.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ad6b8d8a7537c776e8579cbbf75800c7fc57b1d4ea2d56d39d36e03bb1ec4ff6",
+ "chksum_sha256": "4fa1b3d0e6aae52974d9c5595bced609448e3c71d4183332fca6f9eaa5541162",
"format": 1
},
{
- "name": "plugins/lookup",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "45f7aba5d2f5500f7b43dc461a91140c4069910b2ba7c54a126aceb082d0c9d9",
"format": 1
},
{
- "name": "plugins/lookup/nb_lookup.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "408f08203be7b512771b38b514fe7a29c60743b931262a610a1b9c622bb163d3",
+ "chksum_sha256": "a1240c3ef4e6fca185f723107c089e3079996b23e8d630e25a48623a1b77b706",
"format": 1
},
{
- "name": "plugins/doc_fragments",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "abe32d82135b4cb211e594c2a789bea69427808de1a261f016bd68d82e31f6ad",
"format": 1
},
{
- "name": "plugins/doc_fragments/common.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b4d6dcd47f317aeaf14303eca8299617b4c493b3fe233b30c6bfeb4eda9fdf46",
+ "chksum_sha256": "fa8fa9856a8cb16c7e5fbe45826e20fa319f62a0f2672aa907285d6f81940f4f",
"format": 1
},
{
- "name": "plugins/inventory",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d2f73a8cf0695c20b2d1b47bea80ecca284484c13da2e055e23f09fa9837dbd1",
"format": 1
},
{
- "name": "plugins/inventory/nb_inventory.py",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1f6866857ed68627ad2d5ee14f3e1ee2f55d163e5f43f94595023acd1ee4a951",
+ "chksum_sha256": "e1bb426120346d183fd6496e2d4db53ee3613e16bebd5ab804b322848ac4a318",
"format": 1
},
{
- "name": "pyproject.toml",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba0b5e3fb7ba0fa1902c7bf060e295dedc18c86f67503b592d6a208ec0c8f401",
+ "chksum_sha256": "e60953c71b6fd825dc552ee6306a82a395f653677b94d6b75a22610003cd33e6",
"format": 1
},
{
- "name": "hacking",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "07bf21906dd0fdad49a7da12b85dbe91bff6614b5205dc31c1514bdc4f684119",
"format": 1
},
{
- "name": "hacking/build.sh",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c824f229eafb4ad8fcc24c800b6d08ad8af1c8ca866e9cdbfd543429b2a04a24",
+ "chksum_sha256": "628f7f5d1979ac14d573921f294a16f2e806435c0531b3c4812a4d451245c811",
"format": 1
},
{
- "name": "hacking/local-test.sh",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3c4c16322033b5df7c08d9081098e11416b3bbdebede263ba84a92c4f043d815",
+ "chksum_sha256": "108e879a651ae65ef30040c4ab7910160f9ca5e47ef6d5c3d7ce8b48a5dc2d6d",
"format": 1
},
{
- "name": "hacking/black.sh",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31b38a6d54cf7fd233d647760c6f9b695bde61762b09b0aba5899ea746d14248",
+ "chksum_sha256": "8c0f26aca1c87eafa7524f177a6ab8dd1c3616a5bc1889e8de0a2ed036521d74",
"format": 1
},
{
- "name": "hacking/make-docs.sh",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eaf25c88bd9a0d8d8b583c8f65bed04ce553749fe2949e2e07063fc420b08bbb",
+ "chksum_sha256": "132a27a2b7b0112317290b60310c7b651631af9769cf07e139bbb01bc7b8657a",
"format": 1
},
{
- "name": "hacking/update_test_inventories.sh",
+ "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c02406514a0a53c1328f5397a4df2aae6ed1096159a3a85db20e34b5c73acdbe",
+ "chksum_sha256": "3a53d46d9eafdedf2909441f1963e48bb986e07fbeacd96ceb1a337f85b2de65",
"format": 1
},
{
- "name": "meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v3.7/.gitignore",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
"format": 1
},
{
- "name": "meta/execution-environment.yml",
+ "name": "tests/integration/targets/inventory-v3.7/compare_inventory_json.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0cd0b966cbb8fefb00f7122190d7a9b42dc5da517a8c5ba9d86e34c51bc3e43a",
+ "chksum_sha256": "b124d3651bbc95d0f9a91bf941603d005fb84cc6732f092f10f5fdb7e0606cc7",
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "tests/integration/targets/inventory-v3.7/runme_config.template",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "36b1216fc6aa52034a9eb81aeb6ce92e29b77a61ce63ca17fb3b508434a73880",
+ "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
"format": 1
},
{
- "name": "changelogs",
+ "name": "tests/integration/targets/v3.7",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments",
+ "name": "tests/integration/targets/v3.7/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments/issue-951.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_rack.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "050f90608d55a316e87a32ab09547d8f85e6efd0b2b0d5839d53c6610ebe4388",
+ "chksum_sha256": "05008a27b746cbc423241ccf119122a4504c88622a804eb92fd9180daa652416",
"format": 1
},
{
- "name": "changelogs/fragments/619-v3.0-ci-fixes.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3444f1db81ccc1127d586342c8b97e95dfc000fac01623f525e4d299ebd30df1",
+ "chksum_sha256": "a6e10d83c29843e16136c44d1c83e59f5710af722516ee1a50a50d01ada9936c",
"format": 1
},
{
- "name": "changelogs/fragments/allow-jinja2.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_cable.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e53a34d2115046c1544201b3f5828506593ff8249f44b93357b814738073fb47",
+ "chksum_sha256": "949d8b64df5d75dd5aef8c8a4cb6fd2130a545bf3f2c4dff3b555d585bfbcf2f",
"format": 1
},
{
- "name": "changelogs/fragments/.gitkeep",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_export_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "25758f818b6c78e918db571c0e88e8e8472e62ef2eea93e739f552b6c3df8a62",
"format": 1
},
{
- "name": "changelogs/fragments/serial_and_asset.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_lookup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7b73ab2cd318b44b84da88311f5ebdab247f1baa4bddcf30f389f20d5aeb905d",
+ "chksum_sha256": "72696bdd3be84fa6a7a2af55a69c8febb5acba9ce9e92797cc476b5421261ffb",
"format": 1
},
{
- "name": "changelogs/fragments/issue-558.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9550e7ec1bce5adc7c348f053df4a296bb8f4ebd62aeb75eed39a682ea89ad32",
+ "chksum_sha256": "8bf622400692606b11595726a5bdf15d8c3698d3cd3fb576389c953a3aed8cda",
"format": 1
},
{
- "name": "changelogs/config.yaml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_ip_address.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bad0157e7ffee11df87f3adaa5dcfa3f1841e816a27cbcf5b866fa21f63ef1bd",
+ "chksum_sha256": "d8cd7d8a69f8a032b0a72ba9fbfe48869eb38a7fa57f6b57003e94c7e91451f7",
"format": 1
},
{
- "name": "changelogs/changelog.yaml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7d495cca776b949e1943c08646aaa9a0e275eeea77ae7fffcf5d673573207cb4",
+ "chksum_sha256": "ce033d9508643e8c49526a974501176855693695ec4df51fa939158759ee2f51",
"format": 1
},
{
- "name": "poetry.lock",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_cluster.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7ee82a085b1160c16be841b85dda1f16310e88bd9293710d41b8a02953c170f",
+ "chksum_sha256": "8a792917906c74e992cdbf619a5e4b21f262e19cf2608ada8c525816c88d0b6f",
"format": 1
},
{
- "name": "CHANGELOG.rst",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_power_panel.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2014457bc901a9f727a9e53a499627ea50dcb3b98ac5cece15052e05ccd5cb5",
+ "chksum_sha256": "17165ceb389769d30297e5d28cd61517f1753a3b1f0f86eea7b3dc2b698a66ba",
"format": 1
},
{
- "name": "requirements.txt",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d003b5dde4117b9ef860b6d4fd67096a2909ea206f63116d5d46d1955fcc94f1",
+ "chksum_sha256": "f7fee772a4a9b6f48163ffec16cfee663e8e6747e2c26f9698b5074e97af1442",
"format": 1
},
{
- "name": ".readthedocs.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_platform.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41d982a373e0e4da1176899b9f0df905cb5a780c37555ddda1aa9f8f962a1985",
+ "chksum_sha256": "ccb6154491d5ba029d390674c7bcfaaea27abfd9be491874d3051bbfe5b3a724",
"format": 1
},
{
- "name": "CONTRIBUTING.md",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_aggregate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "11337b573ba4ffe704e9a94e682e018f7a489b4bd9ed5b474643ec2ae136ddf4",
+ "chksum_sha256": "0ac4b09ba8fc47cbbdb3b5d4fc4830e6ab5325c5e283ea5424bb5665fea1bb97",
"format": 1
},
{
- "name": ".gitignore",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0ac384e24827235e0b17021954d2b803c3da263038dfaa24967d3d40a86ea15",
+ "chksum_sha256": "3a6524c6a048a0282c890f8216ca1eb19d40fa377842f2a5afcac395f11bf53a",
"format": 1
},
{
- "name": "tests",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3d0f64ef7ae2061a2447205db494ed8367c9646d2b10ae5549c3f10cccda6503",
"format": 1
},
{
- "name": "tests/sanity",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_vrf.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3f1a5dab109569be7a5aa4a7b7b568d8bfbd7d5b17fc0e2424a4d47f0b321866",
"format": 1
},
{
- "name": "tests/sanity/ignore-2.11.txt",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_contact_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7ee1c1645a80d2c61de52c79e3bd9fd33be5f33567688af2a16c2b6784c8826f",
+ "chksum_sha256": "d7a5644fe75789ad3cc615de2c5d23aaa262763b80e078757e1a2247f64de617",
"format": 1
},
{
- "name": "tests/netbox-docker",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_circuit.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5a4518332beaffdecd006ea6f56082c0091990ecd6e1e07acaaf3f364a879a30",
"format": 1
},
{
- "name": "tests/netbox-docker/v3.6",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "64f26adbd0e58e8ade42fded522650f129b94ca54b85e0e19ecbf2c742a37f9c",
"format": 1
},
{
- "name": "tests/netbox-docker/v3.6/docker-compose.override.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_module_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "20ca1270c545e81ecf0419e124f8980ce137ac880fbc42e1e4b033107e4d7e97",
+ "chksum_sha256": "4003a39a5132206a0f4b256bb466fbef857bcadbceedcd43a6fe9042be30619a",
"format": 1
},
{
- "name": "tests/netbox-docker/v3.5",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2ba7477427c6e0c5e4fb95f8731cf9134e0ae1ea7830de9c4d9aa930ab647508",
"format": 1
},
{
- "name": "tests/netbox-docker/v3.5/docker-compose.override.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_console_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "434cca0751c8c9b5cbceb4cdbf07402aad4037fea150fa6e79e5d4b420272d1a",
+ "chksum_sha256": "97a3b6773243884f57fdeb3f8e5b7759a774bdd49a5245edbf879ba158985370",
"format": 1
},
{
- "name": "tests/netbox-docker/v3.7",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c447858ce56b010ade5e270a4946c136f55ce62f1acf537794bf5ecfd0bd764c",
"format": 1
},
{
- "name": "tests/netbox-docker/v3.7/docker-compose.override.yml",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a86ebe548262e0150fe37695b70a346e562c1fdcc3e03324d7ed78ccb7e714f5",
+ "chksum_sha256": "7540de23db3cbc7fdb0fdf7ee8b401d1329a06ce86406b6af8149f527c497434",
"format": 1
},
{
- "name": "tests/unit",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_module_type.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3390662b64aa51fd25c120ba98815eba45eb2a7389dc828eb2eb985e791a7cce",
"format": 1
},
{
- "name": "tests/unit/module_utils",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72993bb4e4c7519b83e30babedd32dabb4d90a9768d00d6b70f274aeac3ee45b",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9dd8966d59ed6d3b9ed7250956715a6b79116e38851df7d4fd77c549490d5356",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_route_target.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9142507b1fc0cf09182ce2a2df17a5ba4b10e02f4aef5e8221181d8a6ea4fe92",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/racks.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "710913087d5fe7c6250ac7a6bbdb378fd2a98f5425595ed92a452e6789ea62e1",
+ "chksum_sha256": "9995e0004b5473922fd605b5734a2bacb4a8ee28bd7d1a329a71081c6368fdeb",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/interfaces.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_rir.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "69edd276a4d6ff23a24aa290de80ed861b94cba637207a9edb481d7b4dfa9a59",
+ "chksum_sha256": "11e4da868d00d8de767f052be4d103d8cc5c3bb36b999f369a46db3456d15a58",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/device_types.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "065d95e75aa2b700f265e33133b94b5c9f93bd30d9be9a0fdcfe4a17671421ee",
+ "chksum_sha256": "fa1bd5454f23ddb0f68440d46081384bc59d518065c482d82760722c53ae1148",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/devices.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d28c8ab2e5a53cf617e96c37caa10d4881e19f2756e59ce036610d3ac13935af",
+ "chksum_sha256": "34f6827b41414f45567db756890c96017801791620a0f5f3fd34957d52cea41e",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/virtual_machines.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_webhook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a019714226dd0208003a775d05c48aed8195b0f09a23ad4c5e976a31f2f4b91",
+ "chksum_sha256": "e6f3c29c240fec28b69d6bea5d9db984d2ed6a4ee54ac4b51078d2060c0ce895",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/services.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1eee733c9069414f6294f64ecf0bd65c0afb42053c0ee8e1d9bf0ac48ca9e59c",
+ "chksum_sha256": "c6013a1c956d588d0f927d7c513be42c8fc1a91e4deb016ddc23a251a9067245",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/sites.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31d58287f5b07e8aa0c0e1c1ca12b907f88d8ab987b98b5ef09beef01a49a090",
+ "chksum_sha256": "602806156756634f7b9ca2d8b2a24364ef8385aa68f85a0cbe56ae6fed45be90",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/vlans.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c4b951095ee4c7443616cda2f26ed9e778226bf4b8e193e0f67cc5b0b96a6636",
+ "chksum_sha256": "cf6717d7f30ef367d4049c7d8087b88032e56c5041093e50dde12e3cc323fd82",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/prefixes.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_contact.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a7f82c633273e126c2455161a1c832460b368d8e40677a0cb290ec9910e2bcf",
+ "chksum_sha256": "b0a3264d210d7a3fecb8edca38167f8615b842a4b54995e141aa35f7eaff1e4c",
"format": 1
},
{
- "name": "tests/unit/module_utils/fixtures/choices/circuits.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eef93b5c65458cac7b33b1e1365518d381d3e12bcc73b7afbdd163a42350a521",
+ "chksum_sha256": "3d08ca60364615906bd685d55c4528058a507a37cab50378376e0b1cf356056c",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_site.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "892a39c82540cf0ee8315ec090fc01f8f7c71b51b06340cb460efd008a5a9294",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/normalize_data",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "25034eac8b5e4c8cf293055a90906e70d807d3fd5789ba9dc87adbf7861a55a5",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/normalize_data/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fb8e60b069fb6b847cc286cf83c8df8ea5c8b19078f614be83983e85616036aa",
+ "chksum_sha256": "841c2460203179530e0a46216a7e82297446ffc96d8c3bc33455644b755ebe4d",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/build_query_params_child",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_config_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4500c8b3a396ccf6590e426f4c3d8c67824fc180a486a93445375dfffb52a43c",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/build_query_params_child/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb4a47f18fb6fcab7557a036a71612f65e75d22972139f8916012ebcbb8dcce0",
+ "chksum_sha256": "fcc2f0371f0f8995d81aeff8a3703657ae4fc9aab338b7580141728588fae777",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/arg_spec_default",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_module.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "702abfdc17f0d47b34325b99870e63e2d0a74a3baa5e8a35d875b3c65fe27b05",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/arg_spec_default/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_rear_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f48dd7aea115db209d32d4ce0fc6b9836c877a08bf035ee588bbc3f436b4a076",
+ "chksum_sha256": "17e9b9eafcac50f2b934c23bfc94213f84fe4eb23f5c80ef8e651b021fac7550",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/choices_id",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_front_port.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "234f840e4ccc8bf521f75e11089fa14656919a1a85d1e04a083c7b15cfb1925e",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/choices_id/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_tenant.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0ef637ce49cb7fbf3fb67f39641b2db2ddddae41a4a5c772e65cb401fc21d595",
+ "chksum_sha256": "374db9ef14a5ad3f28eb748e5a3e1a695ed622c89ecfe8c190264c934b2d3160",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/build_query_params_no_child",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "75a31cd47526a885482c3d44901674b3f33c8415978c343410b4897ed5423079",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/build_query_params_no_child/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2808a6706a4ba94c964b0bd5ccb27036fb6f4ae58402fc388c4a8a56229f758",
+ "chksum_sha256": "c942b0b528cea2de144b744e3d84b33223b9bd5c5b3110ffa9a4cbac926ee092",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/find_app",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9aea5c2b0b02bfcf481b23201853cd354d04452b17507b5b3fdc9cb494c81530",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/find_app/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f57619e431da1db5f3459f0d390e0f98ed8422dd1dfaea0a322faa6689762e2f",
+ "chksum_sha256": "803d79b36bf3618e3283242d8dadae714c09913755288dc43c3bda9d19c1f6ac",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/build_query_params_user_query_params",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_custom_link.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a1594b8640a23cb5ab804b64906ac62161696276dd8467c1ee2cab380e11726c",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/build_query_params_user_query_params/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_config_context.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "362739ebc62d83999764392653b6347662f5fadf3f88b5b38a9ce0924db2acea",
+ "chksum_sha256": "df4a1f9cd476ce57438b2c478667c9914e71923495fdebf489dd7811a51f55da",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/slug",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_region.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ccd04021429ee77b69edb2e219cec7cb9d4ad291029bf735eb73860b70492c25",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_data/slug/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d38464486cef0b272cd7879626d2fbccaba6359c5166c2aed483fcd0c108645c",
+ "chksum_sha256": "8cd6756a0407f04ab8790222293b3835ab242094ffb958a4c365ee6beffc93ba",
"format": 1
},
{
- "name": "tests/unit/module_utils/__init__.py",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_power_feed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "9791bbf4cda16f4189caa2f9910d48a57790baf9f00c06b3b2de33d7f8ec148c",
"format": 1
},
{
- "name": "tests/unit/module_utils/test_netbox_base_class.py",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b0dc56f4039832885f094fd6afe72322d991372b113e600ad2f52d0f60a54e74",
+ "chksum_sha256": "d68cfbb7e8d0efeb2c683bd1a6cdd891de8c00cab74af5ccd08493551cfb55ce",
"format": 1
},
{
- "name": "tests/unit/inventory",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_power_port.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cfbebf7401c3d835bc8d2245803b093be89ab9747375704065a997713eef1865",
"format": 1
},
{
- "name": "tests/unit/inventory/test_nb_inventory.py",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2598c9a69dac190a21c54203db31729ee16acc0b4f08a62fe6130dea5230c2c8",
+ "chksum_sha256": "1c5554999363ab282f9eeac5f4a171a7192a129f88ea5598cda624d35c146332",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cf464a2bd2d8fe3338add9e782e37869ac573d2ca4b0070215cc6c5c18a53e82",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/refresh_url",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c2114d5a5403f08d7c8e805fc5f5deaaadff30f72dc41965a7869b513caa8a33",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/refresh_url/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26ce71fb70dc9b712c6c61495f8e03308f03a1d38efce476737100cde2664d93",
+ "chksum_sha256": "5f65f18257e67029d5c7bb97aa06a8c3f2a75d662c0d3135333007e11db87dc8",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/group_extractors",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fc0477e2d1e41c7364e3afad1a0f880284ab4dca487d3a1dd8af8614de0c872c",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/group_extractors/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_prefix.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a70575200ab06266b0d57856884be9e187cb5fe4737bb93883024dcb66ac8849",
+ "chksum_sha256": "aeb27e31ec707d92e3c6702c9f79d74d8eda59466e0d0be48ffe0956f80f6e41",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/get_resource_list_chunked",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d1502b94dac2f7d32bf66caa19f82fe273f7caa1044034d0d8ea5c5db4db715b",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/get_resource_list_chunked/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "09bee34133057a8261ff63b1392f689076a161cefca45e489fe9ed0b9846dee4",
+ "chksum_sha256": "c4111d64926c954d64ecfa36bf0b19609b11ed079d181edcceaa7040efc04460",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_rack_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3894903f8c1c89c1f679f9860e22dd3adb3d3e4c2de310e2b91ddcdf32673411",
+ "chksum_sha256": "38a8451d8064030aea164f006ef382c20f92f34909571eb3fd5f6e545c76b025",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/extract_custom_fields",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.7/tasks/netbox_vlan.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "24675cf58ccbc62e103e2b12d58b1879626fdc4060e10883fcc0221654e4d57c",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/extract_custom_fields/data.json",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b7b970702d1d2884cf0115bc7fef7a54a0c9b49c01d138e568e27902280afd21",
+ "chksum_sha256": "7ceb5fbc06093cfb74e70b9890cd7ee2a63400abde4859c232ae2696ed8c4a8e",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/validate_query_parameter",
+ "name": "tests/integration/targets/v3.7/tasks/netbox_service_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3de123df2afca03aabda0c72df05dbe648539bd85dafa557e0e9875a3d187c88",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_asn.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e166e0e9b57f80051a393c89d84e217aa3749040f9779b58bcfd0c6668756b42",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_rack_role.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f38ea66d9f75164b06ad3d3da8b7a4826dc03a3c22dd2dc20e6a1ab76c5d3446",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_provider_network.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9b0cad326050fd05d658114529543fd266660b18c56ac1dafdc645304c7c82ca",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_service.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "284ba7a8c0c4289324906f530321a35d97ea04881d12b11dd72da2e187fa97aa",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_site_group.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0dc3566f5851e868d35dd673565b243652f09cfe2de7089f0b605241c11ab5c1",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_location.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "eb2c983ee5af371a527fca7e20de968debb8cb00ab00a3288e91530d0e95edf1",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_provider.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9d0e8dcec59fd43ad5c4fd25e8b5cc3402f11d31981e4ef183a19be5cd204141",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_device_type.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b59c642074bb81a361367906f132fda8bd32479418a35d8c0aa56ff1831e80b8",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_custom_field.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "aedb9b9c8a7ed00249fe494435614c5108d97a7dae79d844b9c1e5020a78d4ac",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dcb63cfbe1c5e4bc03fd99c2acc5748166b17f9b515e74722a14a707a35af52c",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v3.7/tasks/netbox_tag.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0dd53a692f8af842a04c746f97259aefa6b9e33a02ac66f979a35616d0348043",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/validate_query_parameter/data.json",
+ "name": "tests/integration/targets/inventory-v4.0/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c38afa75bd45672caa3cae3eef0306f045828f98f47f3ee8f41e2a51296e4f79",
+ "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/filter_query_parameters",
+ "name": "tests/integration/targets/inventory-v4.0/runme.sh",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/inventory/test_data/filter_query_parameters/data.json",
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3ecebe7994f1acfae3e196b3f8bce30dab18114fa92f662a4e06a57f338870be",
+ "chksum_sha256": "2ebe08e8fa91b5d960239258dc4e38a0ede81f73da3b4f55f07c6db61177cb4b",
"format": 1
},
{
- "name": "tests/unit/inventory/__init__.py",
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "71f03a61506d106d1985bf9c237f78b900dcbd1f360558a23e6bf5fac6e77e17",
"format": 1
},
{
- "name": "tests/test_data.py",
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5c34afc0e8ddbb4fc388802015f63ff1628e2f3335f8cce280c0e38933acd60",
+ "chksum_sha256": "1065a83e0e3b43e8b977c17edb8dae3922f0a069348248376c97952ff81b1bc8",
"format": 1
},
{
- "name": "tests/integration",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "bef705d7a4bb0e838d5d175a5eeafd74a167e2fcef903ed15a3870df30fde263",
"format": 1
},
{
- "name": "tests/integration/render_config.sh",
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-options.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "76c5a19289c4ca86c9124b748e1a7d7d338ba92a46a451dfede864a91c96075c",
+ "chksum_sha256": "abe32d82135b4cb211e594c2a789bea69427808de1a261f016bd68d82e31f6ad",
"format": 1
},
{
- "name": "tests/integration/integration.cfg",
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b5e219ab076b1e90de2c650d480fe0a4b8724f839ac95f5240ed64a80202dfbf",
+ "chksum_sha256": "0eac007057f5196403d3372c9d67af60dde0647e2dce63f5c232f18a3cd31747",
"format": 1
},
{
- "name": "tests/integration/netbox-deploy.py",
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eba03e4920c85a9e0ac4583ce7d3966d610af963600892f94f324e56cd0a4143",
+ "chksum_sha256": "6bbfd5fddf4478d645866f3407acfb809f8143a1440ef85e9ea5395843958433",
"format": 1
},
{
- "name": "tests/integration/targets",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "45f7aba5d2f5500f7b43dc461a91140c4069910b2ba7c54a126aceb082d0c9d9",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "a1240c3ef4e6fca185f723107c089e3079996b23e8d630e25a48623a1b77b706",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "abe32d82135b4cb211e594c2a789bea69427808de1a261f016bd68d82e31f6ad",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "fa8fa9856a8cb16c7e5fbe45826e20fa319f62a0f2672aa907285d6f81940f4f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d2f73a8cf0695c20b2d1b47bea80ecca284484c13da2e055e23f09fa9837dbd1",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-options.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e1bb426120346d183fd6496e2d4db53ee3613e16bebd5ab804b322848ac4a318",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "970d63d35cf9f73edcd7d192a9ff74a57bee700745ade35c9985515855f6d966",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "07bf21906dd0fdad49a7da12b85dbe91bff6614b5205dc31c1514bdc4f684119",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "628f7f5d1979ac14d573921f294a16f2e806435c0531b3c4812a4d451245c811",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2130c775b7edfc15ff9faf019f985f3c043d0a8dded624fd3b02c00f5eba95a6",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "24721f38cc28e5e5b92b2aa9c2b8cd194375537599ec1e0953e57349a3224e4f",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "132a27a2b7b0112317290b60310c7b651631af9769cf07e139bbb01bc7b8657a",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3a53d46d9eafdedf2909441f1963e48bb986e07fbeacd96ceb1a337f85b2de65",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/.gitignore",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/compare_inventory_json.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b124d3651bbc95d0f9a91bf941603d005fb84cc6732f092f10f5fdb7e0606cc7",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v4.0/runme_config.template",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
"format": 1
},
{
@@ -2521,2355 +3109,2565 @@
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_site.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_rack.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ff74a2f2ec0eb44cccf7e545d11d5a1cdeded86afeee1061757117594b38b2",
+ "chksum_sha256": "05008a27b746cbc423241ccf119122a4504c88622a804eb92fd9180daa652416",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_console_port.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f22bf91272f6001c1a9d8819ca38a0524c8a442640e5ba2eef2d1d53f4f4340e",
+ "chksum_sha256": "a6e10d83c29843e16136c44d1c83e59f5710af722516ee1a50a50d01ada9936c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/main.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_cable.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ea338364a67fc2261c3255b0e7c4bdbc18c0854744b5fdc70fa515f8e60ffcb5",
+ "chksum_sha256": "949d8b64df5d75dd5aef8c8a4cb6fd2130a545bf3f2c4dff3b555d585bfbcf2f",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_export_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0de602de86254d9c7f4c09f7b9d87da55dd0ddba4c7dbef6bc68bd041669151d",
+ "chksum_sha256": "25758f818b6c78e918db571c0e88e8e8472e62ef2eea93e739f552b6c3df8a62",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_aggregate.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_lookup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ea02627b92f257b58e16c77e038930542b949e6cfc7acb38beac52330318d8c",
+ "chksum_sha256": "72696bdd3be84fa6a7a2af55a69c8febb5acba9ce9e92797cc476b5421261ffb",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_platform.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2bdd6dd59eb4dcbfb1e285aef3bfbfe944552c3dd2bd20e17b58b5798926221e",
+ "chksum_sha256": "8bf622400692606b11595726a5bdf15d8c3698d3cd3fb576389c953a3aed8cda",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_ip_address.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "807da638ea8dc21080bc21a3033dd065570ff93e3dfe9edc4963922505a3613a",
+ "chksum_sha256": "0eefd696969ede9ce7708ffceca830598936edd409af86e0d392b49279cf647c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_vlan.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3de3e0d80daef5d6def7c55b974a6c88d4322ac35cb397ca1cbbfe90ef2fd8e",
+ "chksum_sha256": "ce033d9508643e8c49526a974501176855693695ec4df51fa939158759ee2f51",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_cluster.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f94df7d99ba01a989c829231e3a2eab8ff955cb31e8bd1a19afa1b89c266f5f",
+ "chksum_sha256": "8a792917906c74e992cdbf619a5e4b21f262e19cf2608ada8c525816c88d0b6f",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_tag.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_power_panel.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32e11eea93510a83b15b736bdf46995492dd0dcbdb025993f16532aad6de2b22",
+ "chksum_sha256": "17165ceb389769d30297e5d28cd61517f1753a3b1f0f86eea7b3dc2b698a66ba",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_rack_role.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e18dfdc340ea38186fe5fa2e420ee9982f6bbec423964490865ec6683f304cba",
+ "chksum_sha256": "f7fee772a4a9b6f48163ffec16cfee663e8e6747e2c26f9698b5074e97af1442",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_rear_port.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_platform.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a14bdfe2e9b2c5dc723a6da6b812da1163057f03cf19fe2e5aec663ee6f1452e",
+ "chksum_sha256": "ccb6154491d5ba029d390674c7bcfaaea27abfd9be491874d3051bbfe5b3a724",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_module_type.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_aggregate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b78dc0f37cb445ca99d589c3f296ec1bb6b999dddbcbc2fe8ba64c78e700c53f",
+ "chksum_sha256": "0ac4b09ba8fc47cbbdb3b5d4fc4830e6ab5325c5e283ea5424bb5665fea1bb97",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_provider.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4deb86969d18e570533d5a2738afddf66725d6d4f4abc3576384aaf226c82ba2",
+ "chksum_sha256": "3a6524c6a048a0282c890f8216ca1eb19d40fa377842f2a5afcac395f11bf53a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "318daab4470ec628ff83d4dfb2a4b7270a42863eb3a80ca142ef9094fa9cb898",
+ "chksum_sha256": "3d0f64ef7ae2061a2447205db494ed8367c9646d2b10ae5549c3f10cccda6503",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_lookup.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_vrf.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12bc28911f599b5e292425de7d0d6f5f596fee407befbabbc0e7de33f9a1d9e0",
+ "chksum_sha256": "3f1a5dab109569be7a5aa4a7b7b568d8bfbd7d5b17fc0e2424a4d47f0b321866",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_vrf.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_contact_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0a17e30d5154ca2aa90285ab251f52d457e89ced6509d83366101ad36e0da",
+ "chksum_sha256": "d7a5644fe75789ad3cc615de2c5d23aaa262763b80e078757e1a2247f64de617",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_circuit.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6315cc64905295c66bc9143b67b6ce4e52e089c85953c0e8729d8ffbd435f0d2",
+ "chksum_sha256": "5a4518332beaffdecd006ea6f56082c0091990ecd6e1e07acaaf3f364a879a30",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_power_panel.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eac755f52d8df88bb2100cf44cd7557739922838f04ded5da4aff356c75e172b",
+ "chksum_sha256": "64f26adbd0e58e8ade42fded522650f129b94ca54b85e0e19ecbf2c742a37f9c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_module_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f45b1dfe652beff573994e37d534d6c421f428d984f43d1bab497618009fef8",
+ "chksum_sha256": "4003a39a5132206a0f4b256bb466fbef857bcadbceedcd43a6fe9042be30619a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_custom_link.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e9c75e22e6e2540065ed53d26657677c927af1bf3a73c29064c0e3361382e3f",
+ "chksum_sha256": "2ba7477427c6e0c5e4fb95f8731cf9134e0ae1ea7830de9c4d9aa930ab647508",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_service.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_console_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e3384a81b3bc49bcaf3f7d82268cc03a9dd609e2a88e18eeac2677bcf604154",
+ "chksum_sha256": "97a3b6773243884f57fdeb3f8e5b7759a774bdd49a5245edbf879ba158985370",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device_bay.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "482fe4849d9caff69a8604ab3e8324b0beaceb49e66360d0c3eb7c79d7ac8c34",
+ "chksum_sha256": "c447858ce56b010ade5e270a4946c136f55ce62f1acf537794bf5ecfd0bd764c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_front_port.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b56521d53c9a818916cb718e018214e7123db6b3feb0456532d476955d0aa28",
+ "chksum_sha256": "7540de23db3cbc7fdb0fdf7ee8b401d1329a06ce86406b6af8149f527c497434",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_prefix.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_module_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f55ed9fcbda0d371e51828c3ecee2e103ac45f60ce5e5d6c28a04bb16ba64d76",
+ "chksum_sha256": "3390662b64aa51fd25c120ba98815eba45eb2a7389dc828eb2eb985e791a7cce",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_site_group.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5770e6c7f178cfed77e7cc9b4b7c7f7d8ed7557420c8e1ee3a221f54e2cbaaf",
+ "chksum_sha256": "72993bb4e4c7519b83e30babedd32dabb4d90a9768d00d6b70f274aeac3ee45b",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0526255ed056eea4324dc0615f4f1b18be2c3ce66951850736b9d6533ae2660a",
+ "chksum_sha256": "9dd8966d59ed6d3b9ed7250956715a6b79116e38851df7d4fd77c549490d5356",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_route_target.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "707e0f1c5e2fb4486d778314239c4eaa353dbc6a09ec6199e0aa4c52994642b8",
+ "chksum_sha256": "9142507b1fc0cf09182ce2a2df17a5ba4b10e02f4aef5e8221181d8a6ea4fe92",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_rir.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280fb5eb850e14e7874ada263f1c03f15da3ed24f4730f960967cf8993eb81e3",
+ "chksum_sha256": "9995e0004b5473922fd605b5734a2bacb4a8ee28bd7d1a329a71081c6368fdeb",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_power_feed.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_rir.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a784756ec7c4cf50c065187f7c92e6f14117e59f86f32d148304f8ebb4deb46",
+ "chksum_sha256": "11e4da868d00d8de767f052be4d103d8cc5c3bb36b999f369a46db3456d15a58",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31efff083088eb516aa5e7769b52024bee646ab8f996a708fc09d2445c16ce7d",
+ "chksum_sha256": "fa1bd5454f23ddb0f68440d46081384bc59d518065c482d82760722c53ae1148",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1c5d0d6e586d445667dd44b70a72e7d50564ba7e95cd2321289859eee3e3f6a",
+ "chksum_sha256": "34f6827b41414f45567db756890c96017801791620a0f5f3fd34957d52cea41e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_power_port.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_webhook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d51e0c69d26bf3bb64ae02399ceb1a59f05105e73d17c9c1b0144b85657f5bac",
+ "chksum_sha256": "e6f3c29c240fec28b69d6bea5d9db984d2ed6a4ee54ac4b51078d2060c0ce895",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_contact_role.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52d72f63e31c7de7bc1dc2276112f491466ddbad59628833213f0ce78fb59cb3",
+ "chksum_sha256": "c6013a1c956d588d0f927d7c513be42c8fc1a91e4deb016ddc23a251a9067245",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_custom_field.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46cecc1067a4995a8820e51f451e9d5291864b79876cc8551b60d81baed55b53",
+ "chksum_sha256": "602806156756634f7b9ca2d8b2a24364ef8385aa68f85a0cbe56ae6fed45be90",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_export_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e3cdf0b433461d0bc0db2cc67e0fd22150ff9922732ac023fb7afc5203216fc",
+ "chksum_sha256": "cf6717d7f30ef367d4049c7d8087b88032e56c5041093e50dde12e3cc323fd82",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_ip_address.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_contact.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2608d42fc6e56d8ab7d52c28d33658dcca6d8fb350fe2a92ede8d3e852c13ea7",
+ "chksum_sha256": "b0a3264d210d7a3fecb8edca38167f8615b842a4b54995e141aa35f7eaff1e4c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c88469e151627ba4f18c90ec7dc96449ebc38e0b10c3715b2527e8ed2317ff87",
+ "chksum_sha256": "3d08ca60364615906bd685d55c4528058a507a37cab50378376e0b1cf356056c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_route_target.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_site.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1265c144e813a4fe745934790f9283d30558187bef8d58f705754719ec480dc",
+ "chksum_sha256": "892a39c82540cf0ee8315ec090fc01f8f7c71b51b06340cb460efd008a5a9294",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_config_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "632cc2291e1afc3307a450b9ec6b5e75b321facf5c0a3881230edd834cb72d43",
+ "chksum_sha256": "43b89204a87c831e7464537c2947367aecd277f332a530b8d8a0d6e6f18e2665",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "572c930adbd6f88811ac62e0616ef5228216368b450a9401445bcc18df53bf11",
+ "chksum_sha256": "25034eac8b5e4c8cf293055a90906e70d807d3fd5789ba9dc87adbf7861a55a5",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml",
+ "name": "tests/integration/targets/v3.6/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b630f638bb826104bdc423470761fbf67d8495f64f4907deec67d37d7e02a743",
+ "chksum_sha256": "78f3c85c06c64676267bc8abc233bee99c6cbf5f0cf3cba8944fef82eb21b82a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_config_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ae1dc4a42a5fb65dcc1272f424a87cbdb6984dfe1787b310583059567fd181d",
+ "chksum_sha256": "4500c8b3a396ccf6590e426f4c3d8c67824fc180a486a93445375dfffb52a43c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87927259bdf063bc202fa76992dc4ea7fdf3953cb181261271af32c0464fef6b",
+ "chksum_sha256": "fcc2f0371f0f8995d81aeff8a3703657ae4fc9aab338b7580141728588fae777",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_module.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0f5b420ba0e259d28ed11d0f9a81f3e6e67744ae3bfdaaf5beb737343a44cef",
+ "chksum_sha256": "702abfdc17f0d47b34325b99870e63e2d0a74a3baa5e8a35d875b3c65fe27b05",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_rear_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d86fdee3d125962731b3d98a027a30f4d7ba445b0fdc74cd0b0fc3cda5a1d23c",
+ "chksum_sha256": "17e9b9eafcac50f2b934c23bfc94213f84fe4eb23f5c80ef8e651b021fac7550",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_front_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7076e4492f1e5a9320edee905c9d9b29c0ca0a9222adac0903cd31e424a97edb",
+ "chksum_sha256": "234f840e4ccc8bf521f75e11089fa14656919a1a85d1e04a083c7b15cfb1925e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_location.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_tenant.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fde47ce5a7ad1ad91120d23a53db43cf885d640fab3013fcdc4201faf0fccdc",
+ "chksum_sha256": "374db9ef14a5ad3f28eb748e5a3e1a695ed622c89ecfe8c190264c934b2d3160",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_contact.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10800b08efc61984903e8d350ce3ade71ef7888bd67b75875f04ab463545704e",
+ "chksum_sha256": "75a31cd47526a885482c3d44901674b3f33c8415978c343410b4897ed5423079",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_region.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64c202f343c3b22244058176bdbe027d1cbc629d189d8dca03f8e2797758d93c",
+ "chksum_sha256": "c942b0b528cea2de144b744e3d84b33223b9bd5c5b3110ffa9a4cbac926ee092",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_tenant.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12293e5057c80b6b491b0c99ec05d3556d2642f5fcb981fa0f215857b5855b3a",
+ "chksum_sha256": "9aea5c2b0b02bfcf481b23201853cd354d04452b17507b5b3fdc9cb494c81530",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_config_context.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e1dbc4f90c9607e997c2b11c1cd58b37be0363a75d2e791034aaeb6302e1e263",
+ "chksum_sha256": "803d79b36bf3618e3283242d8dadae714c09913755288dc43c3bda9d19c1f6ac",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_custom_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a804425ab778b2dc44ba33cb1637ee3a336b20f543e6ac9160d0c81d57de0099",
+ "chksum_sha256": "a1594b8640a23cb5ab804b64906ac62161696276dd8467c1ee2cab380e11726c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_module.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_config_context.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "884e2cd1c2a1912fa485b7362c2d50a6ba36035d43c020f644ef0323631aa5ef",
+ "chksum_sha256": "df4a1f9cd476ce57438b2c478667c9914e71923495fdebf489dd7811a51f55da",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_asn.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_region.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ebdd0a1b9d7008523121a7c14be66d1400a9338828c231272c7f74d5726edce",
+ "chksum_sha256": "ccd04021429ee77b69edb2e219cec7cb9d4ad291029bf735eb73860b70492c25",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "952d715fc4c1dbca7f8d83128b67a0234a37666b9f553506ac749684dd80ebfb",
+ "chksum_sha256": "8cd6756a0407f04ab8790222293b3835ab242094ffb958a4c365ee6beffc93ba",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_power_feed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1bbf7b2aabc7b200fb8a612ee092629492c4a0a8cfd5e859c6cd9aadfa09d40",
+ "chksum_sha256": "9791bbf4cda16f4189caa2f9910d48a57790baf9f00c06b3b2de33d7f8ec148c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "961155a3a4abfda44ec2fef8512e7bf78304aa0c7aa366ce6756a054fe55a668",
+ "chksum_sha256": "d68cfbb7e8d0efeb2c683bd1a6cdd891de8c00cab74af5ccd08493551cfb55ce",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device_role.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_power_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fa2e8d09e82a3f099913dc9cf23356a22626895123e59ef9528a183fa5d18dec",
+ "chksum_sha256": "cfbebf7401c3d835bc8d2245803b093be89ab9747375704065a997713eef1865",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_cable.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "480243fd666d163294704b2c74b9034115284efdeb6e0b73b308425b454bfa7f",
+ "chksum_sha256": "cf464a2bd2d8fe3338add9e782e37869ac573d2ca4b0070215cc6c5c18a53e82",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_webhook.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "094744978836b9c9e5859b0a927c6d6088c2c1fa2c69366158a3c53cb44db5b0",
+ "chksum_sha256": "c2114d5a5403f08d7c8e805fc5f5deaaadff30f72dc41965a7869b513caa8a33",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80bf09f6c370f9575209a81f28e5ed2717c05dd14345e395efc615e4884b1979",
+ "chksum_sha256": "5f65f18257e67029d5c7bb97aa06a8c3f2a75d662c0d3135333007e11db87dc8",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_provider_network.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d85ad4ca9dd85d75489fbdb13ef60bc0cead57e5b0161308dcb5625e75c919b4",
+ "chksum_sha256": "fc0477e2d1e41c7364e3afad1a0f880284ab4dca487d3a1dd8af8614de0c872c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_prefix.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b013bde7259d29fb3bc35de81fec4f2a0c24c09d54ada22355a39e406d0a9554",
+ "chksum_sha256": "aeb27e31ec707d92e3c6702c9f79d74d8eda59466e0d0be48ffe0956f80f6e41",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7385675cfb10a26888c09d7822315d06d9aaf3daa73a9ba5b1745190affa1ae",
+ "chksum_sha256": "d1502b94dac2f7d32bf66caa19f82fe273f7caa1044034d0d8ea5c5db4db715b",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_cluster.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2967ce7f5f94401bfba564fa22901be8a7c81ba4eb848eb018c317538a5cc22e",
+ "chksum_sha256": "c4111d64926c954d64ecfa36bf0b19609b11ed079d181edcceaa7040efc04460",
"format": 1
},
{
"name": "tests/integration/targets/v3.6/tasks/netbox_rack_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8e3c495f289093c94d3612213c45a6f89624e4d762fd04e74d3062dab34cf1a",
+ "chksum_sha256": "38a8451d8064030aea164f006ef382c20f92f34909571eb3fd5f6e545c76b025",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device_interface.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_vlan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b97b575dba1385ba7361f881a9b4b553982f470eac4183e880c2cf71927e572b",
+ "chksum_sha256": "24675cf58ccbc62e103e2b12d58b1879626fdc4060e10883fcc0221654e4d57c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a103ba1cd08739e44dd0be15e9ce1d88ba143b0efd88d47e78155409dfae3e",
+ "chksum_sha256": "7ceb5fbc06093cfb74e70b9890cd7ee2a63400abde4859c232ae2696ed8c4a8e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_service_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5e071bccd038ddbae0dc33ed5b2ea7b9cce00cd28f29d8160f0d308784d30dc",
+ "chksum_sha256": "3de123df2afca03aabda0c72df05dbe648539bd85dafa557e0e9875a3d187c88",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_service_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_asn.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dd7572f2ea6c0a92edce50caa99145ea1065fa4800e71afe9c7bfa5fb16f021",
+ "chksum_sha256": "e166e0e9b57f80051a393c89d84e217aa3749040f9779b58bcfd0c6668756b42",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_rack_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b1e4ff5a8a0341bf0e4e123a523abfb9d194d420b83563536ea837750aa5ec1",
+ "chksum_sha256": "d49b5ef42b54cad2c517e85d50c095796a3f14ce4a675044cca9ba9cf5d99ac9",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_rack.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_provider_network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acd85ca6fd6944c33a6a58f14c96fa598e6ec6d19cc38e0ef36cacb1ef973660",
+ "chksum_sha256": "9b0cad326050fd05d658114529543fd266660b18c56ac1dafdc645304c7c82ca",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_service.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "beb8453c7e490b5b8b9412c0d8bc4cd6c234e24368aa1f7c6a3c2a7e651b61c6",
+ "chksum_sha256": "284ba7a8c0c4289324906f530321a35d97ea04881d12b11dd72da2e187fa97aa",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_site_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a9328d196fd3e810b0827ea67aac0093b0b1414bf94b865d45181892b42dd6",
+ "chksum_sha256": "0dc3566f5851e868d35dd673565b243652f09cfe2de7089f0b605241c11ab5c1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_module_bay.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_location.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b8ad8989f26551b4d5f7015dd202aed037bcc247bc5f04b1c8aa5d713019831",
+ "chksum_sha256": "eb2c983ee5af371a527fca7e20de968debb8cb00ab00a3288e91530d0e95edf1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_provider.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9137743d782b3f5a7c93da6008c5c1251ef3d48f18926d4a6d1e2888fd3ea383",
+ "chksum_sha256": "9d0e8dcec59fd43ad5c4fd25e8b5cc3402f11d31981e4ef183a19be5cd204141",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_circuit.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_device_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "afb572e4f30a6abcbb2cc5a4441643bc8da05919f825de0681c5e886d6100f30",
+ "chksum_sha256": "b59c642074bb81a361367906f132fda8bd32479418a35d8c0aa56ff1831e80b8",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_custom_field.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3d62cd318b253c9f41f11d93ce4ccfa348c61bd0c5f281e7b33110ff9ca1d0f",
+ "chksum_sha256": "13b999462bc7082e25e787402f2b31734e9ed509bef4c90f13870e4f698c5d39",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3e31a81a5f25b51c1df6cc5d3f9376be8c93b54888bbcf9b957661792b9e526",
+ "chksum_sha256": "dcb63cfbe1c5e4bc03fd99c2acc5748166b17f9b515e74722a14a707a35af52c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.6/tasks/netbox_device_type.yml",
+ "name": "tests/integration/targets/v3.6/tasks/netbox_tag.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d6794c8ed22e4f2b14d881a98724c3152a4f03484307254b831cbf416e594ec",
+ "chksum_sha256": "0dd53a692f8af842a04c746f97259aefa6b9e33a02ac66f979a35616d0348043",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5",
+ "name": "tests/integration/targets/regression-v4.0",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks",
+ "name": "tests/integration/targets/regression-v4.0/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_site.yml",
+ "name": "tests/integration/targets/regression-v4.0/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ff74a2f2ec0eb44cccf7e545d11d5a1cdeded86afeee1061757117594b38b2",
+ "chksum_sha256": "d353cfb6cab6f4a778a9fcbb44ab5730d1150d866c96bdea997014d39fc1c831",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_console_port.yml",
+ "name": "tests/integration/targets/v4.0",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v4.0/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/v4.0/tasks/netbox_rack.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f22bf91272f6001c1a9d8819ca38a0524c8a442640e5ba2eef2d1d53f4f4340e",
+ "chksum_sha256": "05008a27b746cbc423241ccf119122a4504c88622a804eb92fd9180daa652416",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/main.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_virtual_chassis.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "54bc62b0f16212e0dc2f60b91ee8dc8c5cbb579d4d2b180a70bd0c068e7b42ca",
+ "chksum_sha256": "a6e10d83c29843e16136c44d1c83e59f5710af722516ee1a50a50d01ada9936c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_cable.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0de602de86254d9c7f4c09f7b9d87da55dd0ddba4c7dbef6bc68bd041669151d",
+ "chksum_sha256": "949d8b64df5d75dd5aef8c8a4cb6fd2130a545bf3f2c4dff3b555d585bfbcf2f",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_aggregate.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_export_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ea02627b92f257b58e16c77e038930542b949e6cfc7acb38beac52330318d8c",
+ "chksum_sha256": "5eca00fd8b84237b66ec86e00cfc228fdd53b30006ed0d83ae534f020aac0af2",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_platform.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_lookup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "35583509824aac0905c990d980a1e677ce4f259a82c1d2cd3addb0975f6a007d",
+ "chksum_sha256": "72696bdd3be84fa6a7a2af55a69c8febb5acba9ce9e92797cc476b5421261ffb",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "807da638ea8dc21080bc21a3033dd065570ff93e3dfe9edc4963922505a3613a",
+ "chksum_sha256": "d1ca0b9611dfe3bcfa427f8996e1c56ecdcae5e05769a35c3023d0e3cf99eea2",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_vlan.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_ip_address.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3de3e0d80daef5d6def7c55b974a6c88d4322ac35cb397ca1cbbfe90ef2fd8e",
+ "chksum_sha256": "d8cd7d8a69f8a032b0a72ba9fbfe48869eb38a7fa57f6b57003e94c7e91451f7",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f94df7d99ba01a989c829231e3a2eab8ff955cb31e8bd1a19afa1b89c266f5f",
+ "chksum_sha256": "ce033d9508643e8c49526a974501176855693695ec4df51fa939158759ee2f51",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_tag.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_cluster.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32e11eea93510a83b15b736bdf46995492dd0dcbdb025993f16532aad6de2b22",
+ "chksum_sha256": "8a792917906c74e992cdbf619a5e4b21f262e19cf2608ada8c525816c88d0b6f",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_rack_role.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_power_panel.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61501632bf0456ba39fbcebc22660f51d3e3000c6bf497c05e207851456537b6",
+ "chksum_sha256": "17165ceb389769d30297e5d28cd61517f1753a3b1f0f86eea7b3dc2b698a66ba",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_rear_port.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_inventory_item_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a14bdfe2e9b2c5dc723a6da6b812da1163057f03cf19fe2e5aec663ee6f1452e",
+ "chksum_sha256": "f7fee772a4a9b6f48163ffec16cfee663e8e6747e2c26f9698b5074e97af1442",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_module_type.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_platform.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b78dc0f37cb445ca99d589c3f296ec1bb6b999dddbcbc2fe8ba64c78e700c53f",
+ "chksum_sha256": "ccb6154491d5ba029d390674c7bcfaaea27abfd9be491874d3051bbfe5b3a724",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_provider.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_aggregate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4deb86969d18e570533d5a2738afddf66725d6d4f4abc3576384aaf226c82ba2",
+ "chksum_sha256": "0ac4b09ba8fc47cbbdb3b5d4fc4830e6ab5325c5e283ea5424bb5665fea1bb97",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_journal_entry.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "318daab4470ec628ff83d4dfb2a4b7270a42863eb3a80ca142ef9094fa9cb898",
+ "chksum_sha256": "3a6524c6a048a0282c890f8216ca1eb19d40fa377842f2a5afcac395f11bf53a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_lookup.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_l2vpn.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12bc28911f599b5e292425de7d0d6f5f596fee407befbabbc0e7de33f9a1d9e0",
+ "chksum_sha256": "3d0f64ef7ae2061a2447205db494ed8367c9646d2b10ae5549c3f10cccda6503",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_vrf.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_vrf.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0a17e30d5154ca2aa90285ab251f52d457e89ced6509d83366101ad36e0da",
+ "chksum_sha256": "3f1a5dab109569be7a5aa4a7b7b568d8bfbd7d5b17fc0e2424a4d47f0b321866",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_contact_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6315cc64905295c66bc9143b67b6ce4e52e089c85953c0e8729d8ffbd435f0d2",
+ "chksum_sha256": "d7a5644fe75789ad3cc615de2c5d23aaa262763b80e078757e1a2247f64de617",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_power_panel.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_circuit.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eac755f52d8df88bb2100cf44cd7557739922838f04ded5da4aff356c75e172b",
+ "chksum_sha256": "5a4518332beaffdecd006ea6f56082c0091990ecd6e1e07acaaf3f364a879a30",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_wireless_lan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f45b1dfe652beff573994e37d534d6c421f428d984f43d1bab497618009fef8",
+ "chksum_sha256": "64f26adbd0e58e8ade42fded522650f129b94ca54b85e0e19ecbf2c742a37f9c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_custom_link.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_module_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e9c75e22e6e2540065ed53d26657677c927af1bf3a73c29064c0e3361382e3f",
+ "chksum_sha256": "4003a39a5132206a0f4b256bb466fbef857bcadbceedcd43a6fe9042be30619a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_service.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_circuit_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e3384a81b3bc49bcaf3f7d82268cc03a9dd609e2a88e18eeac2677bcf604154",
+ "chksum_sha256": "2ba7477427c6e0c5e4fb95f8731cf9134e0ae1ea7830de9c4d9aa930ab647508",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device_bay.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_console_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "482fe4849d9caff69a8604ab3e8324b0beaceb49e66360d0c3eb7c79d7ac8c34",
+ "chksum_sha256": "97a3b6773243884f57fdeb3f8e5b7759a774bdd49a5245edbf879ba158985370",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_front_port.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_rear_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b56521d53c9a818916cb718e018214e7123db6b3feb0456532d476955d0aa28",
+ "chksum_sha256": "c447858ce56b010ade5e270a4946c136f55ce62f1acf537794bf5ecfd0bd764c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_prefix.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_cluster_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f55ed9fcbda0d371e51828c3ecee2e103ac45f60ce5e5d6c28a04bb16ba64d76",
+ "chksum_sha256": "7540de23db3cbc7fdb0fdf7ee8b401d1329a06ce86406b6af8149f527c497434",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_site_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_module_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c5770e6c7f178cfed77e7cc9b4b7c7f7d8ed7557420c8e1ee3a221f54e2cbaaf",
+ "chksum_sha256": "3390662b64aa51fd25c120ba98815eba45eb2a7389dc828eb2eb985e791a7cce",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_l2vpn_termination.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0526255ed056eea4324dc0615f4f1b18be2c3ce66951850736b9d6533ae2660a",
+ "chksum_sha256": "72993bb4e4c7519b83e30babedd32dabb4d90a9768d00d6b70f274aeac3ee45b",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_rir.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_fhrp_group_assignment.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280fb5eb850e14e7874ada263f1c03f15da3ed24f4730f960967cf8993eb81e3",
+ "chksum_sha256": "9dd8966d59ed6d3b9ed7250956715a6b79116e38851df7d4fd77c549490d5356",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_power_feed.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_route_target.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2a784756ec7c4cf50c065187f7c92e6f14117e59f86f32d148304f8ebb4deb46",
+ "chksum_sha256": "9142507b1fc0cf09182ce2a2df17a5ba4b10e02f4aef5e8221181d8a6ea4fe92",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_console_server_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31efff083088eb516aa5e7769b52024bee646ab8f996a708fc09d2445c16ce7d",
+ "chksum_sha256": "9995e0004b5473922fd605b5734a2bacb4a8ee28bd7d1a329a71081c6368fdeb",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_rir.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1c5d0d6e586d445667dd44b70a72e7d50564ba7e95cd2321289859eee3e3f6a",
+ "chksum_sha256": "11e4da868d00d8de767f052be4d103d8cc5c3bb36b999f369a46db3456d15a58",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_power_port.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d51e0c69d26bf3bb64ae02399ceb1a59f05105e73d17c9c1b0144b85657f5bac",
+ "chksum_sha256": "fa1bd5454f23ddb0f68440d46081384bc59d518065c482d82760722c53ae1148",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_contact_role.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_console_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52d72f63e31c7de7bc1dc2276112f491466ddbad59628833213f0ce78fb59cb3",
+ "chksum_sha256": "34f6827b41414f45567db756890c96017801791620a0f5f3fd34957d52cea41e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_custom_field.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_webhook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46cecc1067a4995a8820e51f451e9d5291864b79876cc8551b60d81baed55b53",
+ "chksum_sha256": "e6f3c29c240fec28b69d6bea5d9db984d2ed6a4ee54ac4b51078d2060c0ce895",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_export_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_tenant_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e3cdf0b433461d0bc0db2cc67e0fd22150ff9922732ac023fb7afc5203216fc",
+ "chksum_sha256": "c6013a1c956d588d0f927d7c513be42c8fc1a91e4deb016ddc23a251a9067245",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_ip_address.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_manufacturer.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2608d42fc6e56d8ab7d52c28d33658dcca6d8fb350fe2a92ede8d3e852c13ea7",
+ "chksum_sha256": "602806156756634f7b9ca2d8b2a24364ef8385aa68f85a0cbe56ae6fed45be90",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_vlan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c88469e151627ba4f18c90ec7dc96449ebc38e0b10c3715b2527e8ed2317ff87",
+ "chksum_sha256": "cf6717d7f30ef367d4049c7d8087b88032e56c5041093e50dde12e3cc323fd82",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_route_target.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_contact.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1265c144e813a4fe745934790f9283d30558187bef8d58f705754719ec480dc",
+ "chksum_sha256": "b0a3264d210d7a3fecb8edca38167f8615b842a4b54995e141aa35f7eaff1e4c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "572c930adbd6f88811ac62e0616ef5228216368b450a9401445bcc18df53bf11",
+ "chksum_sha256": "3d08ca60364615906bd685d55c4528058a507a37cab50378376e0b1cf356056c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_site.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b630f638bb826104bdc423470761fbf67d8495f64f4907deec67d37d7e02a743",
+ "chksum_sha256": "892a39c82540cf0ee8315ec090fc01f8f7c71b51b06340cb460efd008a5a9294",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_wireless_lan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ae1dc4a42a5fb65dcc1272f424a87cbdb6984dfe1787b310583059567fd181d",
+ "chksum_sha256": "25034eac8b5e4c8cf293055a90906e70d807d3fd5789ba9dc87adbf7861a55a5",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml",
+ "name": "tests/integration/targets/v4.0/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87927259bdf063bc202fa76992dc4ea7fdf3953cb181261271af32c0464fef6b",
+ "chksum_sha256": "bde20c254d567e709b7eebb6817890925ab77a7b57d7111f6a2172ba0f9f0ac0",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_config_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0f5b420ba0e259d28ed11d0f9a81f3e6e67744ae3bfdaaf5beb737343a44cef",
+ "chksum_sha256": "4500c8b3a396ccf6590e426f4c3d8c67824fc180a486a93445375dfffb52a43c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_inventory_item.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d86fdee3d125962731b3d98a027a30f4d7ba445b0fdc74cd0b0fc3cda5a1d23c",
+ "chksum_sha256": "fcc2f0371f0f8995d81aeff8a3703657ae4fc9aab338b7580141728588fae777",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_module.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7076e4492f1e5a9320edee905c9d9b29c0ca0a9222adac0903cd31e424a97edb",
+ "chksum_sha256": "702abfdc17f0d47b34325b99870e63e2d0a74a3baa5e8a35d875b3c65fe27b05",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_location.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_rear_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fde47ce5a7ad1ad91120d23a53db43cf885d640fab3013fcdc4201faf0fccdc",
+ "chksum_sha256": "17e9b9eafcac50f2b934c23bfc94213f84fe4eb23f5c80ef8e651b021fac7550",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_contact.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_front_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10800b08efc61984903e8d350ce3ade71ef7888bd67b75875f04ab463545704e",
+ "chksum_sha256": "234f840e4ccc8bf521f75e11089fa14656919a1a85d1e04a083c7b15cfb1925e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_region.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_tenant.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64c202f343c3b22244058176bdbe027d1cbc629d189d8dca03f8e2797758d93c",
+ "chksum_sha256": "374db9ef14a5ad3f28eb748e5a3e1a695ed622c89ecfe8c190264c934b2d3160",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_tenant.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_power_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12293e5057c80b6b491b0c99ec05d3556d2642f5fcb981fa0f215857b5855b3a",
+ "chksum_sha256": "75a31cd47526a885482c3d44901674b3f33c8415978c343410b4897ed5423079",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_config_context.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device_bay_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e1dbc4f90c9607e997c2b11c1cd58b37be0363a75d2e791034aaeb6302e1e263",
+ "chksum_sha256": "c942b0b528cea2de144b744e3d84b33223b9bd5c5b3110ffa9a4cbac926ee092",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_console_server_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a804425ab778b2dc44ba33cb1637ee3a336b20f543e6ac9160d0c81d57de0099",
+ "chksum_sha256": "9aea5c2b0b02bfcf481b23201853cd354d04452b17507b5b3fdc9cb494c81530",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_module.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_wireless_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "884e2cd1c2a1912fa485b7362c2d50a6ba36035d43c020f644ef0323631aa5ef",
+ "chksum_sha256": "803d79b36bf3618e3283242d8dadae714c09913755288dc43c3bda9d19c1f6ac",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_asn.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_custom_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ebdd0a1b9d7008523121a7c14be66d1400a9338828c231272c7f74d5726edce",
+ "chksum_sha256": "6e8639a6b80b9885ceb706b2f3af4516bd300db7e60b44178a78c27a4a0c0211",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_config_context.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "952d715fc4c1dbca7f8d83128b67a0234a37666b9f553506ac749684dd80ebfb",
+ "chksum_sha256": "df4a1f9cd476ce57438b2c478667c9914e71923495fdebf489dd7811a51f55da",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_region.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1bbf7b2aabc7b200fb8a612ee092629492c4a0a8cfd5e859c6cd9aadfa09d40",
+ "chksum_sha256": "ccd04021429ee77b69edb2e219cec7cb9d4ad291029bf735eb73860b70492c25",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device_interface_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "961155a3a4abfda44ec2fef8512e7bf78304aa0c7aa366ce6756a054fe55a668",
+ "chksum_sha256": "8cd6756a0407f04ab8790222293b3835ab242094ffb958a4c365ee6beffc93ba",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device_role.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_power_feed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fa2e8d09e82a3f099913dc9cf23356a22626895123e59ef9528a183fa5d18dec",
+ "chksum_sha256": "9791bbf4cda16f4189caa2f9910d48a57790baf9f00c06b3b2de33d7f8ec148c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_cable.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_power_outlet_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "480243fd666d163294704b2c74b9034115284efdeb6e0b73b308425b454bfa7f",
+ "chksum_sha256": "d68cfbb7e8d0efeb2c683bd1a6cdd891de8c00cab74af5ccd08493551cfb55ce",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_webhook.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_power_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "094744978836b9c9e5859b0a927c6d6088c2c1fa2c69366158a3c53cb44db5b0",
+ "chksum_sha256": "cfbebf7401c3d835bc8d2245803b093be89ab9747375704065a997713eef1865",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_virtual_disk.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80bf09f6c370f9575209a81f28e5ed2717c05dd14345e395efc615e4884b1979",
+ "chksum_sha256": "1c5554999363ab282f9eeac5f4a171a7192a129f88ea5598cda624d35c146332",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_provider_network.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_cluster_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d85ad4ca9dd85d75489fbdb13ef60bc0cead57e5b0161308dcb5625e75c919b4",
+ "chksum_sha256": "cf464a2bd2d8fe3338add9e782e37869ac573d2ca4b0070215cc6c5c18a53e82",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_power_outlet.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b013bde7259d29fb3bc35de81fec4f2a0c24c09d54ada22355a39e406d0a9554",
+ "chksum_sha256": "c2114d5a5403f08d7c8e805fc5f5deaaadff30f72dc41965a7869b513caa8a33",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_circuit_termination.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7385675cfb10a26888c09d7822315d06d9aaf3daa73a9ba5b1745190affa1ae",
+ "chksum_sha256": "5f65f18257e67029d5c7bb97aa06a8c3f2a75d662c0d3135333007e11db87dc8",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_cluster.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_front_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2967ce7f5f94401bfba564fa22901be8a7c81ba4eb848eb018c317538a5cc22e",
+ "chksum_sha256": "fc0477e2d1e41c7364e3afad1a0f880284ab4dca487d3a1dd8af8614de0c872c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_rack_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_prefix.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8e3c495f289093c94d3612213c45a6f89624e4d762fd04e74d3062dab34cf1a",
+ "chksum_sha256": "aeb27e31ec707d92e3c6702c9f79d74d8eda59466e0d0be48ffe0956f80f6e41",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device_interface.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_vm_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cb352542fd0d9fb8d6eaf215bb6702e2f1fa131b18405c5a410936b49af156a2",
+ "chksum_sha256": "cb904924261d3e7e3e815dd73aa1d29c96c29d7e1d0ce4af324058023e7c9a30",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_fhrp_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a103ba1cd08739e44dd0be15e9ce1d88ba143b0efd88d47e78155409dfae3e",
+ "chksum_sha256": "c4111d64926c954d64ecfa36bf0b19609b11ed079d181edcceaa7040efc04460",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_rack_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5e071bccd038ddbae0dc33ed5b2ea7b9cce00cd28f29d8160f0d308784d30dc",
+ "chksum_sha256": "38a8451d8064030aea164f006ef382c20f92f34909571eb3fd5f6e545c76b025",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_service_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_vlan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dd7572f2ea6c0a92edce50caa99145ea1065fa4800e71afe9c7bfa5fb16f021",
+ "chksum_sha256": "24675cf58ccbc62e103e2b12d58b1879626fdc4060e10883fcc0221654e4d57c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_ipam_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b1e4ff5a8a0341bf0e4e123a523abfb9d194d420b83563536ea837750aa5ec1",
+ "chksum_sha256": "7ceb5fbc06093cfb74e70b9890cd7ee2a63400abde4859c232ae2696ed8c4a8e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_rack.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_service_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a5cc467d406405bb9458401645826acc394819d6b994fec2f27477d25677a6b4",
+ "chksum_sha256": "3de123df2afca03aabda0c72df05dbe648539bd85dafa557e0e9875a3d187c88",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_asn.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "beb8453c7e490b5b8b9412c0d8bc4cd6c234e24368aa1f7c6a3c2a7e651b61c6",
+ "chksum_sha256": "e166e0e9b57f80051a393c89d84e217aa3749040f9779b58bcfd0c6668756b42",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_rack_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a9328d196fd3e810b0827ea67aac0093b0b1414bf94b865d45181892b42dd6",
+ "chksum_sha256": "f38ea66d9f75164b06ad3d3da8b7a4826dc03a3c22dd2dc20e6a1ab76c5d3446",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_module_bay.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_provider_network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b8ad8989f26551b4d5f7015dd202aed037bcc247bc5f04b1c8aa5d713019831",
+ "chksum_sha256": "9b0cad326050fd05d658114529543fd266660b18c56ac1dafdc645304c7c82ca",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_service.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9137743d782b3f5a7c93da6008c5c1251ef3d48f18926d4a6d1e2888fd3ea383",
+ "chksum_sha256": "284ba7a8c0c4289324906f530321a35d97ea04881d12b11dd72da2e187fa97aa",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_circuit.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_site_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "afb572e4f30a6abcbb2cc5a4441643bc8da05919f825de0681c5e886d6100f30",
+ "chksum_sha256": "0dc3566f5851e868d35dd673565b243652f09cfe2de7089f0b605241c11ab5c1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_location.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3d62cd318b253c9f41f11d93ce4ccfa348c61bd0c5f281e7b33110ff9ca1d0f",
+ "chksum_sha256": "eb2c983ee5af371a527fca7e20de968debb8cb00ab00a3288e91530d0e95edf1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_provider.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3e31a81a5f25b51c1df6cc5d3f9376be8c93b54888bbcf9b957661792b9e526",
+ "chksum_sha256": "9d0e8dcec59fd43ad5c4fd25e8b5cc3402f11d31981e4ef183a19be5cd204141",
"format": 1
},
{
- "name": "tests/integration/targets/v3.5/tasks/netbox_device_type.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_device_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d6794c8ed22e4f2b14d881a98724c3152a4f03484307254b831cbf416e594ec",
+ "chksum_sha256": "b59c642074bb81a361367906f132fda8bd32479418a35d8c0aa56ff1831e80b8",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.6",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v4.0/tasks/netbox_custom_field.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "059b52971a9beac0b9197eca68e2adf37094244b7e9141c0a329a617cce6a8b4",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.6/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v4.0/tasks/netbox_virtual_machine.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "dcb63cfbe1c5e4bc03fd99c2acc5748166b17f9b515e74722a14a707a35af52c",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.6/tasks/main.yml",
+ "name": "tests/integration/targets/v4.0/tasks/netbox_tag.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41cdf99d833f0b3525d59fee564411727dc3f89e908eaf1240f064ec051291be",
+ "chksum_sha256": "0dd53a692f8af842a04c746f97259aefa6b9e33a02ac66f979a35616d0348043",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5",
+ "name": "tests/integration/targets/v3.5",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files",
+ "name": "tests/integration/targets/v3.5/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_rack.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b10179aa589cbcd92340b4dbfd4c816af473e22c2ca6b2edcd60ee9ade7c3b7a",
+ "chksum_sha256": "bab9c5fe6d207a0e8cf593fc3b83f98c2538053c8def4f22421abacb12b01eb3",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
+ "chksum_sha256": "a6e10d83c29843e16136c44d1c83e59f5710af722516ee1a50a50d01ada9936c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_cable.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
+ "chksum_sha256": "949d8b64df5d75dd5aef8c8a4cb6fd2130a545bf3f2c4dff3b555d585bfbcf2f",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_export_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e9c03704abbec6258adf89461e897f3ea277970a5f172f651d879fb1da921ed",
+ "chksum_sha256": "25758f818b6c78e918db571c0e88e8e8472e62ef2eea93e739f552b6c3df8a62",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_lookup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83e2fb444512331c6bc9e0677cd7d71450dffcb2625dfa7c759612c254af5c81",
+ "chksum_sha256": "72696bdd3be84fa6a7a2af55a69c8febb5acba9ce9e92797cc476b5421261ffb",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ba0e5ede35b7822fd1658b50fd9bc80fa259bee9989b0818c8af95b53577999",
+ "chksum_sha256": "8bf622400692606b11595726a5bdf15d8c3698d3cd3fb576389c953a3aed8cda",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_ip_address.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f3f9031d9c1d74774e7580fe9676c7f218de99fb8a1fe5e917db51f3bf3f4af",
+ "chksum_sha256": "0eefd696969ede9ce7708ffceca830598936edd409af86e0d392b49279cf647c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
+ "chksum_sha256": "ce033d9508643e8c49526a974501176855693695ec4df51fa939158759ee2f51",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_cluster.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65f7c6327d4cbfd7c59d903100681fd668b50e441fb280d3ad38c26d0d03d915",
+ "chksum_sha256": "8a792917906c74e992cdbf619a5e4b21f262e19cf2608ada8c525816c88d0b6f",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_power_panel.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46884cd5540b0fef87c808434d64cf34d11b4cb2a837083c024a4280a444e457",
+ "chksum_sha256": "17165ceb389769d30297e5d28cd61517f1753a3b1f0f86eea7b3dc2b698a66ba",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd27110ff787e21cddc8beaa564db9b7e29a8ac77ce10c99ed1c166006708802",
+ "chksum_sha256": "f7fee772a4a9b6f48163ffec16cfee663e8e6747e2c26f9698b5074e97af1442",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_platform.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3b673c5a5b01a862fef9114d9a4ba7dcbebcb135efb86f33072ff49f144ddb9",
+ "chksum_sha256": "e331ca0e2444f971c11d74ecbc724d6ccc60a6ed4fad06dcad4abe4e8575f7e7",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_aggregate.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d0971c39294eb2c759bfaed12e5c266b1606216d6c384b3ed322240e644d3e2",
+ "chksum_sha256": "0ac4b09ba8fc47cbbdb3b5d4fc4830e6ab5325c5e283ea5424bb5665fea1bb97",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06c2028c7743ed3ac39e2ff50f7db2cdf81096bf8412cc67af25f0041c9aae1b",
+ "chksum_sha256": "3a6524c6a048a0282c890f8216ca1eb19d40fa377842f2a5afcac395f11bf53a",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e74d737990bde83f01d36ba03891d575d347b9d522c8bed244851ec6001338c0",
+ "chksum_sha256": "3d0f64ef7ae2061a2447205db494ed8367c9646d2b10ae5549c3f10cccda6503",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_vrf.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
+ "chksum_sha256": "3f1a5dab109569be7a5aa4a7b7b568d8bfbd7d5b17fc0e2424a4d47f0b321866",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_contact_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c504c26d63da38458f77027c0e542aa2bb34eef183829623de02086e79c031c",
+ "chksum_sha256": "d7a5644fe75789ad3cc615de2c5d23aaa262763b80e078757e1a2247f64de617",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_circuit.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08ff16818cf6b69f12219eec23a73b8dd4ed7c10b59869628df74daa8d3c414f",
+ "chksum_sha256": "5a4518332beaffdecd006ea6f56082c0091990ecd6e1e07acaaf3f364a879a30",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8315a7e1f2c4aaade12b7eab7f2eba9e9763e45fb846730775107aea87b2d86",
+ "chksum_sha256": "64f26adbd0e58e8ade42fded522650f129b94ca54b85e0e19ecbf2c742a37f9c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_module_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b9009a12cfbed3386c6a7d857edc56c8fa88b1917fb6fee6334eb4209f7e349",
+ "chksum_sha256": "4003a39a5132206a0f4b256bb466fbef857bcadbceedcd43a6fe9042be30619a",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/compare_inventory_json.py",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30c33ec36a02ba2483003189f86f977cc29e460ab660e040f70ec4136770fec1",
+ "chksum_sha256": "2ba7477427c6e0c5e4fb95f8731cf9134e0ae1ea7830de9c4d9aa930ab647508",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/runme.sh",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_console_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
+ "chksum_sha256": "97a3b6773243884f57fdeb3f8e5b7759a774bdd49a5245edbf879ba158985370",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/runme_config.template",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
+ "chksum_sha256": "c447858ce56b010ade5e270a4946c136f55ce62f1acf537794bf5ecfd0bd764c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/.gitignore",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
+ "chksum_sha256": "7540de23db3cbc7fdb0fdf7ee8b401d1329a06ce86406b6af8149f527c497434",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.5/aliases",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_module_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
+ "chksum_sha256": "3390662b64aa51fd25c120ba98815eba45eb2a7389dc828eb2eb985e791a7cce",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.7",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72993bb4e4c7519b83e30babedd32dabb4d90a9768d00d6b70f274aeac3ee45b",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.7/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9dd8966d59ed6d3b9ed7250956715a6b79116e38851df7d4fd77c549490d5356",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.7/tasks/main.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_route_target.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41cdf99d833f0b3525d59fee564411727dc3f89e908eaf1240f064ec051291be",
+ "chksum_sha256": "9142507b1fc0cf09182ce2a2df17a5ba4b10e02f4aef5e8221181d8a6ea4fe92",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "9995e0004b5473922fd605b5734a2bacb4a8ee28bd7d1a329a71081c6368fdeb",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.5/tasks/netbox_rir.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "11e4da868d00d8de767f052be4d103d8cc5c3bb36b999f369a46db3456d15a58",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b10179aa589cbcd92340b4dbfd4c816af473e22c2ca6b2edcd60ee9ade7c3b7a",
+ "chksum_sha256": "09129e207163d826de851d3f9db9cb7a3fadcad76000691d1806acf6477ce11b",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
+ "chksum_sha256": "34f6827b41414f45567db756890c96017801791620a0f5f3fd34957d52cea41e",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_webhook.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
+ "chksum_sha256": "e6f3c29c240fec28b69d6bea5d9db984d2ed6a4ee54ac4b51078d2060c0ce895",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e9c03704abbec6258adf89461e897f3ea277970a5f172f651d879fb1da921ed",
+ "chksum_sha256": "c6013a1c956d588d0f927d7c513be42c8fc1a91e4deb016ddc23a251a9067245",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "83e2fb444512331c6bc9e0677cd7d71450dffcb2625dfa7c759612c254af5c81",
+ "chksum_sha256": "602806156756634f7b9ca2d8b2a24364ef8385aa68f85a0cbe56ae6fed45be90",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4ba0e5ede35b7822fd1658b50fd9bc80fa259bee9989b0818c8af95b53577999",
+ "chksum_sha256": "cf6717d7f30ef367d4049c7d8087b88032e56c5041093e50dde12e3cc323fd82",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_contact.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f3f9031d9c1d74774e7580fe9676c7f218de99fb8a1fe5e917db51f3bf3f4af",
+ "chksum_sha256": "b0a3264d210d7a3fecb8edca38167f8615b842a4b54995e141aa35f7eaff1e4c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device_bay.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
+ "chksum_sha256": "3d08ca60364615906bd685d55c4528058a507a37cab50378376e0b1cf356056c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_site.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "65f7c6327d4cbfd7c59d903100681fd668b50e441fb280d3ad38c26d0d03d915",
+ "chksum_sha256": "892a39c82540cf0ee8315ec090fc01f8f7c71b51b06340cb460efd008a5a9294",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46884cd5540b0fef87c808434d64cf34d11b4cb2a837083c024a4280a444e457",
+ "chksum_sha256": "25034eac8b5e4c8cf293055a90906e70d807d3fd5789ba9dc87adbf7861a55a5",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.json",
+ "name": "tests/integration/targets/v3.5/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "cd27110ff787e21cddc8beaa564db9b7e29a8ac77ce10c99ed1c166006708802",
+ "chksum_sha256": "6628d276d3203da9eb781f395879ae93b0e3445ba92a9c60c7c63c80069e68fc",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3b673c5a5b01a862fef9114d9a4ba7dcbebcb135efb86f33072ff49f144ddb9",
+ "chksum_sha256": "fcc2f0371f0f8995d81aeff8a3703657ae4fc9aab338b7580141728588fae777",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_module.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d0971c39294eb2c759bfaed12e5c266b1606216d6c384b3ed322240e644d3e2",
+ "chksum_sha256": "702abfdc17f0d47b34325b99870e63e2d0a74a3baa5e8a35d875b3c65fe27b05",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_rear_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06c2028c7743ed3ac39e2ff50f7db2cdf81096bf8412cc67af25f0041c9aae1b",
+ "chksum_sha256": "17e9b9eafcac50f2b934c23bfc94213f84fe4eb23f5c80ef8e651b021fac7550",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_front_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e74d737990bde83f01d36ba03891d575d347b9d522c8bed244851ec6001338c0",
+ "chksum_sha256": "234f840e4ccc8bf521f75e11089fa14656919a1a85d1e04a083c7b15cfb1925e",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_tenant.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
+ "chksum_sha256": "374db9ef14a5ad3f28eb748e5a3e1a695ed622c89ecfe8c190264c934b2d3160",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.json",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7c504c26d63da38458f77027c0e542aa2bb34eef183829623de02086e79c031c",
+ "chksum_sha256": "75a31cd47526a885482c3d44901674b3f33c8415978c343410b4897ed5423079",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08ff16818cf6b69f12219eec23a73b8dd4ed7c10b59869628df74daa8d3c414f",
+ "chksum_sha256": "c942b0b528cea2de144b744e3d84b33223b9bd5c5b3110ffa9a4cbac926ee092",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f8315a7e1f2c4aaade12b7eab7f2eba9e9763e45fb846730775107aea87b2d86",
+ "chksum_sha256": "9aea5c2b0b02bfcf481b23201853cd354d04452b17507b5b3fdc9cb494c81530",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b9009a12cfbed3386c6a7d857edc56c8fa88b1917fb6fee6334eb4209f7e349",
+ "chksum_sha256": "803d79b36bf3618e3283242d8dadae714c09913755288dc43c3bda9d19c1f6ac",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/compare_inventory_json.py",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_custom_link.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30c33ec36a02ba2483003189f86f977cc29e460ab660e040f70ec4136770fec1",
+ "chksum_sha256": "a1594b8640a23cb5ab804b64906ac62161696276dd8467c1ee2cab380e11726c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/runme.sh",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_config_context.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
+ "chksum_sha256": "df4a1f9cd476ce57438b2c478667c9914e71923495fdebf489dd7811a51f55da",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/runme_config.template",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_region.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
+ "chksum_sha256": "ccd04021429ee77b69edb2e219cec7cb9d4ad291029bf735eb73860b70492c25",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/.gitignore",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
+ "chksum_sha256": "8cd6756a0407f04ab8790222293b3835ab242094ffb958a4c365ee6beffc93ba",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.6/aliases",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_power_feed.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
+ "chksum_sha256": "9791bbf4cda16f4189caa2f9910d48a57790baf9f00c06b3b2de33d7f8ec148c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d68cfbb7e8d0efeb2c683bd1a6cdd891de8c00cab74af5ccd08493551cfb55ce",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/v3.5/tasks/netbox_power_port.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "cfbebf7401c3d835bc8d2245803b093be89ab9747375704065a997713eef1865",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_site.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58ff74a2f2ec0eb44cccf7e545d11d5a1cdeded86afeee1061757117594b38b2",
+ "chksum_sha256": "cf464a2bd2d8fe3338add9e782e37869ac573d2ca4b0070215cc6c5c18a53e82",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_console_port.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f22bf91272f6001c1a9d8819ca38a0524c8a442640e5ba2eef2d1d53f4f4340e",
+ "chksum_sha256": "c2114d5a5403f08d7c8e805fc5f5deaaadff30f72dc41965a7869b513caa8a33",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/main.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50df8b90eb941e06a9a1c738966928ce2efbde1bc418228ed735f1c9024a6759",
+ "chksum_sha256": "5f65f18257e67029d5c7bb97aa06a8c3f2a75d662c0d3135333007e11db87dc8",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0de602de86254d9c7f4c09f7b9d87da55dd0ddba4c7dbef6bc68bd041669151d",
+ "chksum_sha256": "fc0477e2d1e41c7364e3afad1a0f880284ab4dca487d3a1dd8af8614de0c872c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_aggregate.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_prefix.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ea02627b92f257b58e16c77e038930542b949e6cfc7acb38beac52330318d8c",
+ "chksum_sha256": "aeb27e31ec707d92e3c6702c9f79d74d8eda59466e0d0be48ffe0956f80f6e41",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_platform.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2bdd6dd59eb4dcbfb1e285aef3bfbfe944552c3dd2bd20e17b58b5798926221e",
+ "chksum_sha256": "d1502b94dac2f7d32bf66caa19f82fe273f7caa1044034d0d8ea5c5db4db715b",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "807da638ea8dc21080bc21a3033dd065570ff93e3dfe9edc4963922505a3613a",
+ "chksum_sha256": "c4111d64926c954d64ecfa36bf0b19609b11ed079d181edcceaa7040efc04460",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_vlan.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_rack_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f3de3e0d80daef5d6def7c55b974a6c88d4322ac35cb397ca1cbbfe90ef2fd8e",
+ "chksum_sha256": "38a8451d8064030aea164f006ef382c20f92f34909571eb3fd5f6e545c76b025",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_vlan.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4f94df7d99ba01a989c829231e3a2eab8ff955cb31e8bd1a19afa1b89c266f5f",
+ "chksum_sha256": "24675cf58ccbc62e103e2b12d58b1879626fdc4060e10883fcc0221654e4d57c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_tag.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "32e11eea93510a83b15b736bdf46995492dd0dcbdb025993f16532aad6de2b22",
+ "chksum_sha256": "7ceb5fbc06093cfb74e70b9890cd7ee2a63400abde4859c232ae2696ed8c4a8e",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_rack_role.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_service_template.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "61501632bf0456ba39fbcebc22660f51d3e3000c6bf497c05e207851456537b6",
+ "chksum_sha256": "3de123df2afca03aabda0c72df05dbe648539bd85dafa557e0e9875a3d187c88",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_rear_port.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_asn.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a14bdfe2e9b2c5dc723a6da6b812da1163057f03cf19fe2e5aec663ee6f1452e",
+ "chksum_sha256": "e166e0e9b57f80051a393c89d84e217aa3749040f9779b58bcfd0c6668756b42",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_module_type.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_rack_role.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b78dc0f37cb445ca99d589c3f296ec1bb6b999dddbcbc2fe8ba64c78e700c53f",
+ "chksum_sha256": "f38ea66d9f75164b06ad3d3da8b7a4826dc03a3c22dd2dc20e6a1ab76c5d3446",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_provider.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_provider_network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4deb86969d18e570533d5a2738afddf66725d6d4f4abc3576384aaf226c82ba2",
+ "chksum_sha256": "9b0cad326050fd05d658114529543fd266660b18c56ac1dafdc645304c7c82ca",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_service.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "318daab4470ec628ff83d4dfb2a4b7270a42863eb3a80ca142ef9094fa9cb898",
+ "chksum_sha256": "284ba7a8c0c4289324906f530321a35d97ea04881d12b11dd72da2e187fa97aa",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_lookup.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_site_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12bc28911f599b5e292425de7d0d6f5f596fee407befbabbc0e7de33f9a1d9e0",
+ "chksum_sha256": "0dc3566f5851e868d35dd673565b243652f09cfe2de7089f0b605241c11ab5c1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_vrf.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_location.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0a17e30d5154ca2aa90285ab251f52d457e89ced6509d83366101ad36e0da",
+ "chksum_sha256": "eb2c983ee5af371a527fca7e20de968debb8cb00ab00a3288e91530d0e95edf1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_provider.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6315cc64905295c66bc9143b67b6ce4e52e089c85953c0e8729d8ffbd435f0d2",
+ "chksum_sha256": "9d0e8dcec59fd43ad5c4fd25e8b5cc3402f11d31981e4ef183a19be5cd204141",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_power_panel.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_device_type.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eac755f52d8df88bb2100cf44cd7557739922838f04ded5da4aff356c75e172b",
+ "chksum_sha256": "b59c642074bb81a361367906f132fda8bd32479418a35d8c0aa56ff1831e80b8",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_custom_field.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6f45b1dfe652beff573994e37d534d6c421f428d984f43d1bab497618009fef8",
+ "chksum_sha256": "13b999462bc7082e25e787402f2b31734e9ed509bef4c90f13870e4f698c5d39",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_custom_link.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e9c75e22e6e2540065ed53d26657677c927af1bf3a73c29064c0e3361382e3f",
+ "chksum_sha256": "dcb63cfbe1c5e4bc03fd99c2acc5748166b17f9b515e74722a14a707a35af52c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_service.yml",
+ "name": "tests/integration/targets/v3.5/tasks/netbox_tag.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e3384a81b3bc49bcaf3f7d82268cc03a9dd609e2a88e18eeac2677bcf604154",
+ "chksum_sha256": "0dd53a692f8af842a04c746f97259aefa6b9e33a02ac66f979a35616d0348043",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device_bay.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "482fe4849d9caff69a8604ab3e8324b0beaceb49e66360d0c3eb7c79d7ac8c34",
+ "name": "tests/integration/targets/regression-v3.7",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_front_port.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "3b56521d53c9a818916cb718e018214e7123db6b3feb0456532d476955d0aa28",
+ "name": "tests/integration/targets/regression-v3.7/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_prefix.yml",
+ "name": "tests/integration/targets/regression-v3.7/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f55ed9fcbda0d371e51828c3ecee2e103ac45f60ce5e5d6c28a04bb16ba64d76",
+ "chksum_sha256": "d353cfb6cab6f4a778a9fcbb44ab5730d1150d866c96bdea997014d39fc1c831",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_site_group.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c5770e6c7f178cfed77e7cc9b4b7c7f7d8ed7557420c8e1ee3a221f54e2cbaaf",
+ "name": "tests/integration/targets/inventory-v3.6",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml",
+ "name": "tests/integration/targets/inventory-v3.6/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0526255ed056eea4324dc0615f4f1b18be2c3ce66951850736b9d6533ae2660a",
+ "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_rir.yml",
+ "name": "tests/integration/targets/inventory-v3.6/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "280fb5eb850e14e7874ada263f1c03f15da3ed24f4730f960967cf8993eb81e3",
+ "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_power_feed.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2a784756ec7c4cf50c065187f7c92e6f14117e59f86f32d148304f8ebb4deb46",
+ "name": "tests/integration/targets/inventory-v3.6/files",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "31efff083088eb516aa5e7769b52024bee646ab8f996a708fc09d2445c16ce7d",
+ "chksum_sha256": "19ede9a14420befc0ac1168f21c6d2c5dcaf16bef094d6c20cf766dacce2c1ba",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d1c5d0d6e586d445667dd44b70a72e7d50564ba7e95cd2321289859eee3e3f6a",
+ "chksum_sha256": "71f03a61506d106d1985bf9c237f78b900dcbd1f360558a23e6bf5fac6e77e17",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_power_port.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d51e0c69d26bf3bb64ae02399ceb1a59f05105e73d17c9c1b0144b85657f5bac",
+ "chksum_sha256": "65f7c6327d4cbfd7c59d903100681fd668b50e441fb280d3ad38c26d0d03d915",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_contact_role.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52d72f63e31c7de7bc1dc2276112f491466ddbad59628833213f0ce78fb59cb3",
+ "chksum_sha256": "4ba0e5ede35b7822fd1658b50fd9bc80fa259bee9989b0818c8af95b53577999",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_custom_field.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e072cc7a5577ea307dfdb9458bc68ffc98d848afb42afb4824765133804cbb3",
+ "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_export_template.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3e3cdf0b433461d0bc0db2cc67e0fd22150ff9922732ac023fb7afc5203216fc",
+ "chksum_sha256": "cd27110ff787e21cddc8beaa564db9b7e29a8ac77ce10c99ed1c166006708802",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_ip_address.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52a3c82dcf21c3421932677215737f10a3abe07f2a6cb3357b5574695ecc0902",
+ "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c88469e151627ba4f18c90ec7dc96449ebc38e0b10c3715b2527e8ed2317ff87",
+ "chksum_sha256": "45f7aba5d2f5500f7b43dc461a91140c4069910b2ba7c54a126aceb082d0c9d9",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_route_target.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a1265c144e813a4fe745934790f9283d30558187bef8d58f705754719ec480dc",
+ "chksum_sha256": "a1240c3ef4e6fca185f723107c089e3079996b23e8d630e25a48623a1b77b706",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_config_template.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "632cc2291e1afc3307a450b9ec6b5e75b321facf5c0a3881230edd834cb72d43",
+ "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "572c930adbd6f88811ac62e0616ef5228216368b450a9401445bcc18df53bf11",
+ "chksum_sha256": "fa8fa9856a8cb16c7e5fbe45826e20fa319f62a0f2672aa907285d6f81940f4f",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b630f638bb826104bdc423470761fbf67d8495f64f4907deec67d37d7e02a743",
+ "chksum_sha256": "d2f73a8cf0695c20b2d1b47bea80ecca284484c13da2e055e23f09fa9837dbd1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1ae1dc4a42a5fb65dcc1272f424a87cbdb6984dfe1787b310583059567fd181d",
+ "chksum_sha256": "e1bb426120346d183fd6496e2d4db53ee3613e16bebd5ab804b322848ac4a318",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87927259bdf063bc202fa76992dc4ea7fdf3953cb181261271af32c0464fef6b",
+ "chksum_sha256": "83e2fb444512331c6bc9e0677cd7d71450dffcb2625dfa7c759612c254af5c81",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a0f5b420ba0e259d28ed11d0f9a81f3e6e67744ae3bfdaaf5beb737343a44cef",
+ "chksum_sha256": "07bf21906dd0fdad49a7da12b85dbe91bff6614b5205dc31c1514bdc4f684119",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d86fdee3d125962731b3d98a027a30f4d7ba445b0fdc74cd0b0fc3cda5a1d23c",
+ "chksum_sha256": "b10179aa589cbcd92340b4dbfd4c816af473e22c2ca6b2edcd60ee9ade7c3b7a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7076e4492f1e5a9320edee905c9d9b29c0ca0a9222adac0903cd31e424a97edb",
+ "chksum_sha256": "7c504c26d63da38458f77027c0e542aa2bb34eef183829623de02086e79c031c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_location.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fde47ce5a7ad1ad91120d23a53db43cf885d640fab3013fcdc4201faf0fccdc",
+ "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_contact.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10800b08efc61984903e8d350ce3ade71ef7888bd67b75875f04ab463545704e",
+ "chksum_sha256": "132a27a2b7b0112317290b60310c7b651631af9769cf07e139bbb01bc7b8657a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_region.yml",
+ "name": "tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64c202f343c3b22244058176bdbe027d1cbc629d189d8dca03f8e2797758d93c",
+ "chksum_sha256": "3a53d46d9eafdedf2909441f1963e48bb986e07fbeacd96ceb1a337f85b2de65",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_tenant.yml",
+ "name": "tests/integration/targets/inventory-v3.6/.gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "12293e5057c80b6b491b0c99ec05d3556d2642f5fcb981fa0f215857b5855b3a",
+ "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_config_context.yml",
+ "name": "tests/integration/targets/inventory-v3.6/compare_inventory_json.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e1dbc4f90c9607e997c2b11c1cd58b37be0363a75d2e791034aaeb6302e1e263",
+ "chksum_sha256": "b124d3651bbc95d0f9a91bf941603d005fb84cc6732f092f10f5fdb7e0606cc7",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml",
+ "name": "tests/integration/targets/inventory-v3.6/runme_config.template",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a804425ab778b2dc44ba33cb1637ee3a336b20f543e6ac9160d0c81d57de0099",
+ "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_module.yml",
+ "name": "tests/integration/targets/inventory-v3.5",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v3.5/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "884e2cd1c2a1912fa485b7362c2d50a6ba36035d43c020f644ef0323631aa5ef",
+ "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_asn.yml",
+ "name": "tests/integration/targets/inventory-v3.5/runme.sh",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8ebdd0a1b9d7008523121a7c14be66d1400a9338828c231272c7f74d5726edce",
+ "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "952d715fc4c1dbca7f8d83128b67a0234a37666b9f553506ac749684dd80ebfb",
+ "chksum_sha256": "19ede9a14420befc0ac1168f21c6d2c5dcaf16bef094d6c20cf766dacce2c1ba",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1bbf7b2aabc7b200fb8a612ee092629492c4a0a8cfd5e859c6cd9aadfa09d40",
+ "chksum_sha256": "71f03a61506d106d1985bf9c237f78b900dcbd1f360558a23e6bf5fac6e77e17",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "961155a3a4abfda44ec2fef8512e7bf78304aa0c7aa366ce6756a054fe55a668",
+ "chksum_sha256": "65f7c6327d4cbfd7c59d903100681fd668b50e441fb280d3ad38c26d0d03d915",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device_role.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fa2e8d09e82a3f099913dc9cf23356a22626895123e59ef9528a183fa5d18dec",
+ "chksum_sha256": "4ba0e5ede35b7822fd1658b50fd9bc80fa259bee9989b0818c8af95b53577999",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_cable.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "480243fd666d163294704b2c74b9034115284efdeb6e0b73b308425b454bfa7f",
+ "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_webhook.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "094744978836b9c9e5859b0a927c6d6088c2c1fa2c69366158a3c53cb44db5b0",
+ "chksum_sha256": "cd27110ff787e21cddc8beaa564db9b7e29a8ac77ce10c99ed1c166006708802",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80bf09f6c370f9575209a81f28e5ed2717c05dd14345e395efc615e4884b1979",
+ "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_provider_network.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d85ad4ca9dd85d75489fbdb13ef60bc0cead57e5b0161308dcb5625e75c919b4",
+ "chksum_sha256": "45f7aba5d2f5500f7b43dc461a91140c4069910b2ba7c54a126aceb082d0c9d9",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b013bde7259d29fb3bc35de81fec4f2a0c24c09d54ada22355a39e406d0a9554",
+ "chksum_sha256": "a1240c3ef4e6fca185f723107c089e3079996b23e8d630e25a48623a1b77b706",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c7385675cfb10a26888c09d7822315d06d9aaf3daa73a9ba5b1745190affa1ae",
+ "chksum_sha256": "c17f5eef88e43f08288f480719439acd00330c5196d1c14134fa6194103c1519",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_cluster.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2967ce7f5f94401bfba564fa22901be8a7c81ba4eb848eb018c317538a5cc22e",
+ "chksum_sha256": "fa8fa9856a8cb16c7e5fbe45826e20fa319f62a0f2672aa907285d6f81940f4f",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_rack_group.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b8e3c495f289093c94d3612213c45a6f89624e4d762fd04e74d3062dab34cf1a",
+ "chksum_sha256": "d2f73a8cf0695c20b2d1b47bea80ecca284484c13da2e055e23f09fa9837dbd1",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device_interface.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b97b575dba1385ba7361f881a9b4b553982f470eac4183e880c2cf71927e572b",
+ "chksum_sha256": "e1bb426120346d183fd6496e2d4db53ee3613e16bebd5ab804b322848ac4a318",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a103ba1cd08739e44dd0be15e9ce1d88ba143b0efd88d47e78155409dfae3e",
+ "chksum_sha256": "83e2fb444512331c6bc9e0677cd7d71450dffcb2625dfa7c759612c254af5c81",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e5e071bccd038ddbae0dc33ed5b2ea7b9cce00cd28f29d8160f0d308784d30dc",
+ "chksum_sha256": "07bf21906dd0fdad49a7da12b85dbe91bff6614b5205dc31c1514bdc4f684119",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_service_template.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2dd7572f2ea6c0a92edce50caa99145ea1065fa4800e71afe9c7bfa5fb16f021",
+ "chksum_sha256": "b10179aa589cbcd92340b4dbfd4c816af473e22c2ca6b2edcd60ee9ade7c3b7a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b1e4ff5a8a0341bf0e4e123a523abfb9d194d420b83563536ea837750aa5ec1",
+ "chksum_sha256": "7c504c26d63da38458f77027c0e542aa2bb34eef183829623de02086e79c031c",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_rack.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "acd85ca6fd6944c33a6a58f14c96fa598e6ec6d19cc38e0ef36cacb1ef973660",
+ "chksum_sha256": "d1009b6685415e91a9a11dfaa939cacb03fd5f3efd6adff0eb7fa2958ddbdbee",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "beb8453c7e490b5b8b9412c0d8bc4cd6c234e24368aa1f7c6a3c2a7e651b61c6",
+ "chksum_sha256": "132a27a2b7b0112317290b60310c7b651631af9769cf07e139bbb01bc7b8657a",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml",
+ "name": "tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "94a9328d196fd3e810b0827ea67aac0093b0b1414bf94b865d45181892b42dd6",
+ "chksum_sha256": "3a53d46d9eafdedf2909441f1963e48bb986e07fbeacd96ceb1a337f85b2de65",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_module_bay.yml",
+ "name": "tests/integration/targets/inventory-v3.5/.gitignore",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b8ad8989f26551b4d5f7015dd202aed037bcc247bc5f04b1c8aa5d713019831",
+ "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml",
+ "name": "tests/integration/targets/inventory-v3.5/compare_inventory_json.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9137743d782b3f5a7c93da6008c5c1251ef3d48f18926d4a6d1e2888fd3ea383",
+ "chksum_sha256": "b124d3651bbc95d0f9a91bf941603d005fb84cc6732f092f10f5fdb7e0606cc7",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_circuit.yml",
+ "name": "tests/integration/targets/inventory-v3.5/runme_config.template",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "afb572e4f30a6abcbb2cc5a4441643bc8da05919f825de0681c5e886d6100f30",
+ "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml",
+ "name": "tests/integration/inventory",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3d62cd318b253c9f41f11d93ce4ccfa348c61bd0c5f281e7b33110ff9ca1d0f",
+ "chksum_sha256": "ba4658da440ac3efc0a81e148618c1c38eff956943329403bc863b8c26ab79fb",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml",
+ "name": "tests/test_data.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3e31a81a5f25b51c1df6cc5d3f9376be8c93b54888bbcf9b957661792b9e526",
+ "chksum_sha256": "b5c34afc0e8ddbb4fc388802015f63ff1628e2f3335f8cce280c0e38933acd60",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml",
+ "name": "tests/unit",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1d9a63d51a8c013d78872361871d75dafc46f01305a08c0c9cff37cf3d271729",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/v3.7/tasks/netbox_device_type.yml",
+ "name": "tests/unit/module_utils/test_netbox_base_class.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d6794c8ed22e4f2b14d881a98724c3152a4f03484307254b831cbf416e594ec",
+ "chksum_sha256": "b0dc56f4039832885f094fd6afe72322d991372b113e600ad2f52d0f60a54e74",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7",
+ "name": "tests/unit/module_utils/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files",
+ "name": "tests/unit/module_utils/fixtures/choices",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.json",
+ "name": "tests/unit/module_utils/fixtures/choices/racks.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "628f7f5d1979ac14d573921f294a16f2e806435c0531b3c4812a4d451245c811",
+ "chksum_sha256": "710913087d5fe7c6250ac7a6bbdb378fd2a98f5425595ed92a452e6789ea62e1",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory.json",
+ "name": "tests/unit/module_utils/fixtures/choices/virtual_machines.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4fa1b3d0e6aae52974d9c5595bced609448e3c71d4183332fca6f9eaa5541162",
+ "chksum_sha256": "8a019714226dd0208003a775d05c48aed8195b0f09a23ad4c5e976a31f2f4b91",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options.json",
+ "name": "tests/unit/module_utils/fixtures/choices/interfaces.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abe32d82135b4cb211e594c2a789bea69427808de1a261f016bd68d82e31f6ad",
+ "chksum_sha256": "69edd276a4d6ff23a24aa290de80ed861b94cba637207a9edb481d7b4dfa9a59",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml",
+ "name": "tests/unit/module_utils/fixtures/choices/circuits.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e9c03704abbec6258adf89461e897f3ea277970a5f172f651d879fb1da921ed",
+ "chksum_sha256": "eef93b5c65458cac7b33b1e1365518d381d3e12bcc73b7afbdd163a42350a521",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.json",
+ "name": "tests/unit/module_utils/fixtures/choices/services.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e60953c71b6fd825dc552ee6306a82a395f653677b94d6b75a22610003cd33e6",
+ "chksum_sha256": "1eee733c9069414f6294f64ecf0bd65c0afb42053c0ee8e1d9bf0ac48ca9e59c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.json",
+ "name": "tests/unit/module_utils/fixtures/choices/devices.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "af1bb368a12123645f179057976ab6ad11049c655c4f96f18185f1c2d4c53705",
+ "chksum_sha256": "d28c8ab2e5a53cf617e96c37caa10d4881e19f2756e59ce036610d3ac13935af",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml",
+ "name": "tests/unit/module_utils/fixtures/choices/sites.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3f3f9031d9c1d74774e7580fe9676c7f218de99fb8a1fe5e917db51f3bf3f4af",
+ "chksum_sha256": "31d58287f5b07e8aa0c0e1c1ca12b907f88d8ab987b98b5ef09beef01a49a090",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.json",
+ "name": "tests/unit/module_utils/fixtures/choices/device_types.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8c0f26aca1c87eafa7524f177a6ab8dd1c3616a5bc1889e8de0a2ed036521d74",
+ "chksum_sha256": "065d95e75aa2b700f265e33133b94b5c9f93bd30d9be9a0fdcfe4a17671421ee",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.json",
+ "name": "tests/unit/module_utils/fixtures/choices/vlans.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "13df164774b2a43a0b6cca377c3e17869a0b1c5394dcc18a3c67cf271fc3c618",
+ "chksum_sha256": "c4b951095ee4c7443616cda2f26ed9e778226bf4b8e193e0f67cc5b0b96a6636",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml",
+ "name": "tests/unit/module_utils/fixtures/choices/prefixes.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "46884cd5540b0fef87c808434d64cf34d11b4cb2a837083c024a4280a444e457",
+ "chksum_sha256": "3a7f82c633273e126c2455161a1c832460b368d8e40677a0cb290ec9910e2bcf",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.json",
+ "name": "tests/unit/module_utils/test_data",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/build_query_params_no_child",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/build_query_params_no_child/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "66fef6cc9a2d8d69a7d2eea0b52cfef5d6d0bb7681f11580065bab398f7ee441",
+ "chksum_sha256": "a2808a6706a4ba94c964b0bd5ccb27036fb6f4ae58402fc388c4a8a56229f758",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml",
+ "name": "tests/unit/module_utils/test_data/build_query_params_user_query_params",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/build_query_params_user_query_params/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3b673c5a5b01a862fef9114d9a4ba7dcbebcb135efb86f33072ff49f144ddb9",
+ "chksum_sha256": "362739ebc62d83999764392653b6347662f5fadf3f88b5b38a9ce0924db2acea",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml",
+ "name": "tests/unit/module_utils/test_data/arg_spec_default",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/arg_spec_default/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d0971c39294eb2c759bfaed12e5c266b1606216d6c384b3ed322240e644d3e2",
+ "chksum_sha256": "f48dd7aea115db209d32d4ce0fc6b9836c877a08bf035ee588bbc3f436b4a076",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml",
+ "name": "tests/unit/module_utils/test_data/find_app",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/find_app/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "06c2028c7743ed3ac39e2ff50f7db2cdf81096bf8412cc67af25f0041c9aae1b",
+ "chksum_sha256": "f57619e431da1db5f3459f0d390e0f98ed8422dd1dfaea0a322faa6689762e2f",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml",
+ "name": "tests/unit/module_utils/test_data/build_query_params_child",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/build_query_params_child/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e74d737990bde83f01d36ba03891d575d347b9d522c8bed244851ec6001338c0",
+ "chksum_sha256": "bb4a47f18fb6fcab7557a036a71612f65e75d22972139f8916012ebcbb8dcce0",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.json",
+ "name": "tests/unit/module_utils/test_data/slug",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/slug/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "abe32d82135b4cb211e594c2a789bea69427808de1a261f016bd68d82e31f6ad",
+ "chksum_sha256": "d38464486cef0b272cd7879626d2fbccaba6359c5166c2aed483fcd0c108645c",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.json",
+ "name": "tests/unit/module_utils/test_data/normalize_data",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/normalize_data/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "108e879a651ae65ef30040c4ab7910160f9ca5e47ef6d5c3d7ce8b48a5dc2d6d",
+ "chksum_sha256": "fb8e60b069fb6b847cc286cf83c8df8ea5c8b19078f614be83983e85616036aa",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml",
+ "name": "tests/unit/module_utils/test_data/choices_id",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/module_utils/test_data/choices_id/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "08ff16818cf6b69f12219eec23a73b8dd4ed7c10b59869628df74daa8d3c414f",
+ "chksum_sha256": "0ef637ce49cb7fbf3fb67f39641b2db2ddddae41a4a5c772e65cb401fc21d595",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory.yml",
+ "name": "tests/unit/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ed8145f07aad7f80e1d546e815b9bddca58d678466b7cee8f04cae6cf95a3c89",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml",
+ "name": "tests/unit/inventory/test_nb_inventory.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b9009a12cfbed3386c6a7d857edc56c8fa88b1917fb6fee6334eb4209f7e349",
+ "chksum_sha256": "2598c9a69dac190a21c54203db31729ee16acc0b4f08a62fe6130dea5230c2c8",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/compare_inventory_json.py",
+ "name": "tests/unit/inventory/test_data",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/refresh_url",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/refresh_url/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30c33ec36a02ba2483003189f86f977cc29e460ab660e040f70ec4136770fec1",
+ "chksum_sha256": "26ce71fb70dc9b712c6c61495f8e03308f03a1d38efce476737100cde2664d93",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/runme.sh",
+ "name": "tests/unit/inventory/test_data/group_extractors",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/group_extractors/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2d26a7388590de5e1126420c402289975eded15a914293329f046406a82ed1d",
+ "chksum_sha256": "a70575200ab06266b0d57856884be9e187cb5fe4737bb93883024dcb66ac8849",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/runme_config.template",
+ "name": "tests/unit/inventory/test_data/filter_query_parameters",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/filter_query_parameters/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de40fd34a465f7da8925f1e9ba52359e10ac740e08f2dfa398691810fcc03bd7",
+ "chksum_sha256": "3ecebe7994f1acfae3e196b3f8bce30dab18114fa92f662a4e06a57f338870be",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/.gitignore",
+ "name": "tests/unit/inventory/test_data/validate_query_parameter",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/validate_query_parameter/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "deacc9eea815485333d506e5bd64fb0c4a4a9980a33ff1b443a2deb0ca488209",
+ "chksum_sha256": "c38afa75bd45672caa3cae3eef0306f045828f98f47f3ee8f41e2a51296e4f79",
"format": 1
},
{
- "name": "tests/integration/targets/inventory-v3.7/aliases",
+ "name": "tests/unit/inventory/test_data/get_resource_list_chunked",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/get_resource_list_chunked/data.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3efbf182d151ffe527a8a250a044a83997eec84d250c8651dcdfe6f62c60a97d",
+ "chksum_sha256": "09bee34133057a8261ff63b1392f689076a161cefca45e489fe9ed0b9846dee4",
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.5",
+ "name": "tests/unit/inventory/test_data/extract_custom_fields",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.5/tasks",
+ "name": "tests/unit/inventory/test_data/extract_custom_fields/data.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b7b970702d1d2884cf0115bc7fef7a54a0c9b49c01d138e568e27902280afd21",
+ "format": 1
+ },
+ {
+ "name": "tests/unit/inventory/test_data/data.json",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3894903f8c1c89c1f679f9860e22dd3adb3d3e4c2de310e2b91ddcdf32673411",
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/regression-v3.5/tasks/main.yml",
+ "name": "tests/netbox-docker/v3.7",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker/v3.7/docker-compose.override.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41cdf99d833f0b3525d59fee564411727dc3f89e908eaf1240f064ec051291be",
+ "chksum_sha256": "7fc06b91653afc69ad21714fb8ca94fdc4cc1e54314ab3e7181b697d63138ba8",
"format": 1
},
{
- "name": "tests/integration/inventory",
+ "name": "tests/netbox-docker/v3.6",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker/v3.6/docker-compose.override.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ba4658da440ac3efc0a81e148618c1c38eff956943329403bc863b8c26ab79fb",
+ "chksum_sha256": "f4e3d8d388ffd7513cf75a361a9050ca073b86c426b3a8ca9197861186cc2bbf",
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker/v4.0",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker/v4.0/docker-compose.override.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "27b3a14ab0d3014c101ed8fe6ecd6a8b366c166950c2cdee1cc3647a8ac41986",
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker/v3.5",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/netbox-docker/v3.5/docker-compose.override.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "4a01949c8618fc7aaa163441798292a3b6add189a351f7ac99731ce42d87d3b5",
"format": 1
}
],
diff --git a/ansible_collections/netbox/netbox/MANIFEST.json b/ansible_collections/netbox/netbox/MANIFEST.json
index 6c3628492..7309d129a 100644
--- a/ansible_collections/netbox/netbox/MANIFEST.json
+++ b/ansible_collections/netbox/netbox/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "netbox",
"name": "netbox",
- "version": "3.18.0",
+ "version": "3.19.1",
"authors": [
"Mikhail Yohman <mikhail.yohman@gmail.com>",
"Martin R\u00f8dvand <martin@rodvand.net>"
@@ -27,7 +27,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "80b7bfa1c27a0cf3fd04c12eb8d74317ed4fe2ac8b217a38ec5a83db94fb6f05",
+ "chksum_sha256": "ef58e2c38c73d708b18334b0f4547c80cb4f66d000382ab4237314b30b35383d",
"format": 1
},
"format": 1
diff --git a/ansible_collections/netbox/netbox/README.md b/ansible_collections/netbox/netbox/README.md
index c5869cf23..a86a48264 100644
--- a/ansible_collections/netbox/netbox/README.md
+++ b/ansible_collections/netbox/netbox/README.md
@@ -1,29 +1,165 @@
-![Devel CI Status](https://github.com/netbox-community/ansible_modules/workflows/All%20CI%20related%20tasks/badge.svg?branch=devel)
-[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black)
-![Release](https://img.shields.io/github/v/release/netbox-community/ansible_modules)
-[![Talk to us: Slack](https://img.shields.io/badge/Slack-blue.svg)](https://netdev-community.slack.com/join/shared_invite/zt-mtts8g0n-Sm6Wutn62q_M4OdsaIycrQ#/shared-invite/email)
+# Ansible Modules for NetBox
-# NetBox modules for Ansible using Ansible Collections
+## Description
+The NetBox Ansible project provides an Ansible collection for interacting with NetBox, the leading solution for modeling and documenting modern networks. By combining the traditional disciplines of IP address management (IPAM) and datacenter infrastructure management (DCIM) with powerful APIs and extensions, NetBox provides the ideal "source of truth" to power network automation.
-We have moved this collection to a different namespace and collection name on Ansible Galaxy. The new versions will be at `netbox.netbox`.
-
-To keep the code simple, we only officially support the two latest releases of NetBox and don't guarantee backwards compatibility beyond that. We do try and keep these breaking changes to a minimum, but sometimes changes to NetBox's API cause us to have to make breaking changes.
+This Ansible collection consists of a set of modules to define the intended network state in NetBox, along with plugins to drive automation of the network using data from NetBox.
## Requirements
-- The two latest NetBox releases
-- Python 3.8+
+- You must be running one of the two most recent releases of NetBox
+- A NetBox write-enabled API token when using modules, or a read-only token for the `nb_lookup` and `nb_inventory` plugins.
+- Python 3.10+
- Python modules:
- - `pytz`
- - `pynetbox`
- - `packaging` if using Ansible < 2.10, as it's included in Ansible 2.10+
-- Ansible 2.12+
-- NetBox write-enabled token when using modules or read-only token for `nb_lookup/nb_inventory`
+ - pytz
+ - pynetbox
+- Ansible 2.15+
+
+## Installation
+
+### Python Modules and Ansible
+```
+pip install pytz
+pip install pynetbox
+pip install ansible
+```
+
+### NetBox Ansible Collection
+Before using this collection, you need to install it with the Ansible Galaxy command-line tool:
+
+```
+ansible-galaxy collection install netbox.netbox
+```
+
+You can also include it in a `requirements.yml` file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format:
+```
+collections:
+ - name: netbox.netbox
+```
+
+To upgrade the collection to the latest available version, run the following command:
+```
+ansible-galaxy collection install netbox.netbox --upgrade
+```
+You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 3.19.1:
+
+```
+ansible-galaxy collection install netbox.netbox:==3.19.1
+```
+See [Using Ansible collections](https://docs.ansible.com/ansible/latest/user_guide/collections_using.html#installing-collections) for more details.
+
+### Other Installation Options
+
+#### Build From Source
+
+Follow these steps to install from source:
+
+1. ``git clone git@github.com:netbox-community/ansible_modules.git``
+2. ``cd ansible_modules``
+3. ``ansible-galaxy collection build .``
+4. ``ansible-galaxy collection install netbox-netbox*.tar.gz``
+
+#### Build From Source (Pull Request)
+
+This is useful for testing code within PRs.
+
+1. ``git clone git@github.com:netbox-community/ansible_modules.git``
+2. ``cd ansible_modules``
+3. ``git fetch origin pull/<pr #>/head:<whatever-name-you-want>``
+4. ``git checkout <whatever-name-you-want>``
+5. ``ansible-galaxy collection build .``
+6. ``ansible-galaxy collection install netbox-netbox*.tar.gz``
+
+**_Note:_** This [GitHub link](https://docs.github.com/en/free-pro-team@latest/github/collaborating-with-issues-and-pull-requests/checking-out-pull-requests-locally) provides detailed information on checking out pull requests locally.
+
+## Use Cases
+
+### Use Case 1 - Define Intended Network State in NetBox
+Define the intended state of your network in NetBox, by interacting with the NetBox database to define objects and their associated state in the following ways:
+
+- Make sure objects exist
+- Update objects if they do exist
+- Remove objects if they should not exist (see the second example below)
+
+For example, to make sure a new aggregate network prefix exists:
+```
+tasks:
+ - name: Create aggregate within NetBox with only required information
+ netbox.netbox.netbox_aggregate:
+ netbox_url: http://netbox.local
+ netbox_token: thisIsMyToken
+ data:
+ prefix: 192.168.0.0/16
+ rir: Test RIR
+ state: present
+```
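+
+Conversely, setting `state: absent` removes the object when it should no longer exist. A minimal sketch, reusing the same placeholder URL and token as the example above:
+```
+tasks:
+  - name: Remove the aggregate from NetBox if it is present
+    netbox.netbox.netbox_aggregate:
+      netbox_url: http://netbox.local
+      netbox_token: thisIsMyToken
+      data:
+        prefix: 192.168.0.0/16
+      state: absent
+```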
+
+### Use Case 2 - NetBox as a Dynamic Inventory Source for Ansible
+Use the Inventory Plugin to dynamically generate Ansible inventory from device data in NetBox. Use query filters, groups and mappings to tailor the generated inventory to your specific needs.
+
+The following example builds an Ansible inventory that groups devices by `role`, and filters for only devices that have the `network-edge-router` role, have a primary IP address and do not belong to the `internal` tenant:
+```
+# netbox_inventory.yml file in YAML format
+# Example command line: ansible-inventory -v --list -i netbox_inventory.yml
+
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:8000
+validate_certs: True
+config_context: False
+group_by:
+ - device_roles
+query_filters:
+ - role: network-edge-router
+device_query_filters:
+ - has_primary_ip: 'true'
+ - tenant__n: internal
+```
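+
+The plugin also accepts the standard `compose` and `keyed_groups` options from Ansible's constructed-inventory framework, so extra host variables and groups can be derived from the data NetBox returns. The sketch below assumes each host exposes a `status.value` variable; check your own output with `ansible-inventory --host <hostname>` before relying on it:
+```
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:8000
+group_by:
+  - device_roles
+# Create groups such as status_active / status_offline from each host's NetBox status
+# (assumes a status.value host variable is present in your inventory output)
+keyed_groups:
+  - prefix: status
+    key: status.value
+```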
+
+### Use Case 3 - Query and Return Elements from NetBox
+Use the Lookup plugin to query NetBox and return data to drive network automation, such as lists of devices, device configurations, prefixes, and IP addresses.
+
+The following example returns a list of devices and their manufacturers, using an API filter to only return devices with the `management` role and the NetBox tag of `Dell`:
+```
+tasks:
+ # query a list of devices
+ - name: Obtain list of devices from NetBox
+ debug:
+ msg: >
+ "Device {{ item.value.display_name }} (ID: {{ item.key }}) was
+ manufactured by {{ item.value.device_type.manufacturer.name }}"
+ loop: "{{ query('netbox.netbox.nb_lookup', 'devices',
+ api_endpoint='http://localhost/',
+ api_filter='role=management tag=Dell',
+ token='<redacted>') }}"
+
+```
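+
+If the query results are needed later in the play rather than only inside a loop, the same lookup can be stored with `set_fact`. A minimal sketch, reusing the placeholder endpoint and token from the example above:
+```
+tasks:
+  - name: Store the list of management devices returned by NetBox
+    ansible.builtin.set_fact:
+      management_devices: "{{ query('netbox.netbox.nb_lookup', 'devices',
+                              api_endpoint='http://localhost/',
+                              api_filter='role=management tag=Dell',
+                              token='<redacted>') }}"
+```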
+## Testing
+Tested with Ansible Core v2.15+. Ansible Core versions prior to 2.15 are not supported.
+
+## Contributing
+If you would like to contribute to the project, you can find out how to get started [here](https://github.com/netbox-community/ansible_modules/blob/devel/CONTRIBUTING.md).
+
+## Support
+There are various options to get support for the collection:
+- Search through previous [GitHub Discussions](https://github.com/netbox-community/ansible_modules/discussions) or start a new one.
+- Raise a [GitHub Issue](https://github.com/netbox-community/ansible_modules/issues)
+- Read the module [documentation](https://netbox-ansible-collection.readthedocs.io/en/latest/)
+- Join the discussion on the dedicated `#ansible` Slack Channel on [netdev-community.slack.com](https://netdev-community.slack.com/join/shared_invite/zt-mtts8g0n-Sm6Wutn62q_M4OdsaIycrQ#/shared-invite/email)
+
+Customers of NetBox Labs and Ansible using the officially certified version of the collection can get support via the usual Ansible channels. Escalation to the NetBox Labs support team will be provided as needed.
+
+## Release Notes
+The collection release notes and changelog can be found [here](https://github.com/netbox-community/ansible_modules/releases).
-## Docs
+## Related Information
+Some extra resources you might find useful for both the Ansible collection and for NetBox itself:
+- [NetBox Zero to Hero](https://netboxlabs.com/zero-to-hero/) - a free 12-part course that takes you from an empty NetBox through to a fully deployed branch site, using the Ansible collection extensively along the way.
+- [Network Configuration Assurance with NetBox and Ansible](https://netboxlabs.com/blog/network-configuration-assurance-with-netbox-and-ansible/) - a blog post featuring the Inventory plugin being used in a simple network automation use case to compare actual network state vs. intended state as defined in NetBox.
+- Official NetBox [documentation](https://docs.netbox.dev/en/stable/).
-Module documentation exists on [netbox-ansible-collection.readthedocs.io](https://netbox-ansible-collection.readthedocs.io/en/latest/).
+## License Information
+GNU General Public License v3.0 or later.
-## Join the discussion
+See [LICENSE](https://github.com/netbox-community/ansible_modules/blob/devel/LICENSE) for the full text of the license.
-We have a dedicated Slack channel `#ansible` on [netdev-community.slack.com](https://netdev-community.slack.com/join/shared_invite/zt-mtts8g0n-Sm6Wutn62q_M4OdsaIycrQ#/shared-invite/email)
diff --git a/ansible_collections/netbox/netbox/changelogs/changelog.yaml b/ansible_collections/netbox/netbox/changelogs/changelog.yaml
index 1c1567a80..03360ec39 100644
--- a/ansible_collections/netbox/netbox/changelogs/changelog.yaml
+++ b/ansible_collections/netbox/netbox/changelogs/changelog.yaml
@@ -1,4 +1,3 @@
----
ancestor: null
releases:
0.1.0:
@@ -541,342 +540,397 @@ releases:
3.1.2:
changes:
minor_changes:
- - "Release v3.1.2 [#594](https://github.com/netbox-community/ansible_modules/pull/594)"
- - "CI testing & integration tests now leverage ansible-core 2.11 - Fixes #583: Move to Ansible-core for CI tests [#591](https://github.com/netbox-community/ansible_modules/pull/591)"
- - "README: Slack link and tidyup [#584](https://github.com/netbox-community/ansible_modules/pull/584)"
- - "Fixes to CI due to not pinning NetBox & NetBox-Docker version CI among other minor CI corrections - General CI Fix [573](https://github.com/netbox-community/ansible_modules/pull/573)"
- - "Correct Invalid NetBox readthedocs URL in nb_inventory docs [#568](https://github.com/netbox-community/ansible_modules/pull/568)"
- - "Add ansible-core support - Quick fix to support ansible-core 2.11 [#558](https://github.com/netbox-community/ansible_modules/pull/558)"
- - "Update netbox_region documentation - Documentation: netbox_region - Correct examples [#548](https://github.com/netbox-community/ansible_modules/pull/548)"
- - "Added custom certificate support [#534](https://github.com/netbox-community/ansible_modules/pull/534)"
- - "Add private_key option to nb_lookup for secret decryption [#532](https://github.com/netbox-community/ansible_modules/pull/532)"
- release_summary:
- "This release focuses on ansible-core 2.11 compatibility, general CI fixes for NetBox 2.10-2.11, and doc updates.
- \nThis release works with NetBox 3.0, but needs further investigation with regards to idopmentcy (nothing really major) & some CI.
- \nThe next release will idenify and resolve any issues with NetBox 3.0."
+ - Add ansible-core support - Quick fix to support ansible-core 2.11 [#558](https://github.com/netbox-community/ansible_modules/pull/558)
+ - Add private_key option to nb_lookup for secret decryption [#532](https://github.com/netbox-community/ansible_modules/pull/532)
+ - Added custom certificate support [#534](https://github.com/netbox-community/ansible_modules/pull/534)
+ - 'CI testing & integration tests now leverage ansible-core 2.11 - Fixes #583:
+ Move to Ansible-core for CI tests [#591](https://github.com/netbox-community/ansible_modules/pull/591)'
+ - Correct Invalid NetBox readthedocs URL in nb_inventory docs [#568](https://github.com/netbox-community/ansible_modules/pull/568)
+ - Fixes to CI due to not pinning NetBox & NetBox-Docker version CI among other
+ minor CI corrections - General CI Fix [573](https://github.com/netbox-community/ansible_modules/pull/573)
+ - 'README: Slack link and tidyup [#584](https://github.com/netbox-community/ansible_modules/pull/584)'
+ - Release v3.1.2 [#594](https://github.com/netbox-community/ansible_modules/pull/594)
+ - 'Update netbox_region documentation - Documentation: netbox_region - Correct
+ examples [#548](https://github.com/netbox-community/ansible_modules/pull/548)'
+ release_summary: "This release focuses on ansible-core 2.11 compatibility, general
+ CI fixes for NetBox 2.10-2.11, and doc updates. \nThis release works with
+ NetBox 3.0, but needs further investigation with regards to idempotency (nothing
+ really major) & some CI. \nThe next release will identify and resolve any issues
+ with NetBox 3.0."
release_date: '2021-09-20'
- 3.2.0:
+ 3.10.0:
+ changes:
+ bugfixes:
+ - nb_inventory - Raise exception on missing packaging [#900](https://github.com/netbox-community/ansible_modules/pull/900)
+ minor_changes:
+ - Add options for NetBox 3.4 [#905](https://github.com/netbox-community/ansible_modules/pull/905)
+ - netbox_custom_field - Add group_name [#882](https://github.com/netbox-community/ansible_modules/pull/882)
+ - netbox_device_type and netbox_device - Add airflow [#907](https://github.com/netbox-community/ansible_modules/pull/907)
+ - netbox_invventory_item_role - Add module [#885](https://github.com/netbox-community/ansible_modules/pull/885)
+ - netbox_module_type - Add module [#887](https://github.com/netbox-community/ansible_modules/pull/887)
+ - netbox_service_template - Add module [#908](https://github.com/netbox-community/ansible_modules/pull/908)
+ trivial:
+ - Update CI for NetBox 3.4 [#906](https://github.com/netbox-community/ansible_modules/pull/906)
+ - Update documentation to use FQCN [#881](https://github.com/netbox-community/ansible_modules/pull/881)
+ modules:
+ - description: Create, update or delete inventory item roles in NetBox
+ name: netbox_inventory_item_role
+ namespace: ''
+ - description: Create, update or delete module types in NetBox
+ name: netbox_module_type
+ namespace: ''
+ - description: Create, update or delete service templates in NetBox
+ name: netbox_service_template
+ namespace: ''
+ release_date: '2022-12-16'
+ 3.11.0:
changes:
+ bugfixes:
+ - nb_lookup - Fix pynetbox 7.0.1 compatibility [#934](https://github.com/netbox-community/ansible_modules/pull/934)
+ - netbox_cable - Fix NetBox 3.3 compatibility [#938](https://github.com/netbox-community/ansible_modules/pull/938)
+ - netbox_webhook - Fix conditions bug [#926](https://github.com/netbox-community/ansible_modules/pull/926)
minor_changes:
- - netbox_device_interface and netbox_vm_interface - Add parent interface to modules [#604](https://github.com/netbox-community/ansible_modules/pull/604)
- - netbox_virtual_machine - Change vCPU to float from int (to reflect NetBox 3.0) [#605](https://github.com/netbox-community/ansible_modules/pull/605)
- - Add location and power panel as lookup keys to nb_lookup [#599](https://github.com/netbox-community/ansible_modules/pull/599)
- - Add connected-devices to nb_lookup [#540](https://github.com/netbox-community/ansible_modules/pull/540)
+ - nb_inventory - Add serial and asset tag to extracted attributes [#826](https://github.com/netbox-community/ansible_modules/pull/826)
+ release_date: '2023-02-12'
+ 3.12.0:
+ changes:
bugfixes:
- - Make attached_ips subscriptable. [#609](https://github.com/netbox-community/ansible_modules/pull/609)
- - Copy interfaces before processing [#556](https://github.com/netbox-community/ansible_modules/pull/556)
+ - Fix partial updates of custom_fields [#944](https://github.com/netbox-community/ansible_modules/pull/944)
+ - nb_inventory - Fix nb_inventory group_by by site_group [#952](https://github.com/netbox-community/ansible_modules/pull/952)
+ - netbox_location - Add multiple filter options to make sure we find the unique
+ location [#963](https://github.com/netbox-community/ansible_modules/pull/963)
+ minor_changes:
+ - netbox_aggregate - Add tenant as parameter to module [#968](https://github.com/netbox-community/ansible_modules/pull/968)
+ - netbox_asn - Add module [#947](https://github.com/netbox-community/ansible_modules/pull/947)
+ - netbox_fhrp_group - Add module [#957](https://github.com/netbox-community/ansible_modules/pull/957)
+ - netbox_journal_entry - Add module [#961](https://github.com/netbox-community/ansible_modules/pull/961)
+ modules:
+ - description: Create, update or delete ASN in NetBox
+ name: netbox_asn
+ namespace: ''
+ - description: Create, update or delete FHRP groups in NetBox
+ name: netbox_fhrp_group
+ namespace: ''
+ - description: Create journal entries in NetBox
+ name: netbox_journal_entry
+ namespace: ''
+ release_date: '2023-04-04'
+ 3.13.0:
+ changes:
+ bugfixes:
+ - netbox_cable - Fix idempotency [#990](https://github.com/netbox-community/ansible_modules/pull/990)
+ minor_changes:
+ - nb_inventory - Enable NetBox 3.5 support [#999](https://github.com/netbox-community/ansible_modules/pull/999)
+ - netbox_fhrp_group_assignment - Add module [#974](https://github.com/netbox-community/ansible_modules/pull/974)
+ - netbox_lsvpn_termination - Add module [#994](https://github.com/netbox-community/ansible_modules/pull/994)
+ modules:
+ - description: Creates, updates or removes FHRP group assignments from NetBox
+ name: netbox_fhrp_group_assignment
+ namespace: ''
+ - description: Creates, updates or removes L2VPNs terminations from NetBox
+ name: netbox_lsvpn_termination
+ namespace: ''
+ release_date: '2023-05-11'
+ 3.14.0:
+ changes:
+ bugfixes:
+ - Fix schema caching [#1053](https://github.com/netbox-community/ansible_modules/pull/1053)
+ minor_changes:
+ - API - Add possibility to use Bearer token [#1023](https://github.com/netbox-community/ansible_modules/pull/1023)
+ - custom fields - Add datetime as an custom field option [#1019](https://github.com/netbox-community/ansible_modules/pull/1019)
+ - netbox_cable - Add tenant [#1027](https://github.com/netbox-community/ansible_modules/pull/1027)
+ - netbox_circuit_type, netbox_device_interface - Add missing options [#1025](https://github.com/netbox-community/ansible_modules/pull/1025)
+ - netbox_custom_field - Add hidden-ifunset option [#1048](https://github.com/netbox-community/ansible_modules/pull/1048)
+ - netbox_inventory_item - Add role to module [#1050](https://github.com/netbox-community/ansible_modules/pull/1050)
+ - netbox_power_port - Add missing power port option [#1049](https://github.com/netbox-community/ansible_modules/pull/1049)
+ release_date: '2023-08-18'
+ 3.15.0:
+ changes:
+ bugfixes:
+ - netbox_ device - Adjust device_role to role for NetBox 3.6 [#1066](https://github.com/netbox-community/ansible_modules/pull/1066)
+ minor_changes:
+ - netbox_config_template - New module [#1090](https://github.com/netbox-community/ansible_modules/pull/1090)
+ - netbox_device - Add oob_ip to device [#1085](https://github.com/netbox-community/ansible_modules/pull/1085)
+ - netbox_device_type - Add default_platform [#1092](https://github.com/netbox-community/ansible_modules/pull/1092)
+ modules:
+ - description: Creates, updates, or removed a config template from NetBox
+ name: netbox_config_template
+ namespace: ''
+ release_date: '2023-10-18'
+ 3.16.0:
+ changes:
+ bugfixes:
+ - Improve error reporting for missing module [#1126](https://github.com/netbox-community/ansible_modules/pull/1126)
+ - nb_inventory - Fix API cache failure [#1111](https://github.com/netbox-community/ansible_modules/pull/1111)
+ - nb_lookup - Allow multiple IDs in nb_lookup [#1042](https://github.com/netbox-community/ansible_modules/pull/1042)
+ minor_changes:
+ - nb_inventory - Add facility group_by option [#1059](https://github.com/netbox-community/ansible_modules/pull/1059)
+ - nb_inventory - Enable ansible-vault strings in config-context data [#1114](https://github.com/netbox-community/ansible_modules/pull/1114)
+ - netbox_platform - Add config_template option to netbox_platform [#1119](https://github.com/netbox-community/ansible_modules/pull/1119)
+ - netbox_power_port_template - Add option module_type to netbox_power_port_template
+ [#1105](https://github.com/netbox-community/ansible_modules/pull/1105)
+ release_date: '2023-12-10'
+ 3.17.0:
+ changes:
+ bugfixes:
+ - netbox_vlan - Fix documentation of vlan_group [#1138](https://github.com/netbox-community/ansible_modules/pull/1138)
+ minor_changes:
+ - CI - CI adjustments [#1154](https://github.com/netbox-community/ansible_modules/pull/1154)
+ [#1155](https://github.com/netbox-community/ansible_modules/pull/1155) [#1157](https://github.com/netbox-community/ansible_modules/pull/1157)
+ - nb_lookup - Add new VPN endpoints for NetBox 3.7 support [#1162](https://github.com/netbox-community/ansible_modules/pull/1162)
+ - netbox_rack_role - Add description option [#1143](https://github.com/netbox-community/ansible_modules/pull/1143)
+ - netbox_virtual_disk - New module [#1153](https://github.com/netbox-community/ansible_modules/pull/1153)
+ - netbox_virtual_machine and netbox_device - Add option config_template [#1171](https://github.com/netbox-community/ansible_modules/pull/1171)
+ modules:
+ - description: Create, updates, or removes a disk from a Virtual Machine
+ name: netbox_virtual_disk
+ namespace: ''
+ release_date: '2024-02-14'
+ 3.18.0:
+ changes:
+ minor_changes:
+ - nb_inventory - Add Virtual Disks to inventory [#1188](https://github.com/netbox-community/ansible_modules/pull/1188)
+ - nb_inventory - Don't extract null values from custom fields [#1184](https://github.com/netbox-community/ansible_modules/pull/1184)
+ - nb_inventory - Improve documentation for oob_ip_as_primary_ip [#1218](https://github.com/netbox-community/ansible_modules/pull/1218)
+ - nb_inventory - Make oob_ip available regardless of oob_ip_as_primary_ip option
+ [#1211](https://github.com/netbox-community/ansible_modules/pull/1211)
+ - nb_lookup - Add custom field choice set [#1186](https://github.com/netbox-community/ansible_modules/pull/1186)
+ - nb_lookup - Add endpoint for Virtual Disks [#1177](https://github.com/netbox-community/ansible_modules/pull/1177)
+ - netbox_device_type and netbox_rack - Change u_height to float [#1200](https://github.com/netbox-community/ansible_modules/pull/1200)
+ - netbox_export_templates - Update documentation [#1214](https://github.com/netbox-community/ansible_modules/pull/1214)
+ - netbox_power_port - Add label [#1202](https://github.com/netbox-community/ansible_modules/pull/1202)
+ modules:
+ - description: Create, updates, or removes Custom Field Choice sets
+ name: netbox_custom_field_choice_set
+ namespace: ''
+ - description: Create, updates, or removes Module Bay
+ name: netbox_module_bay
+ namespace: ''
+ release_date: '2024-05-05'
+ 3.19.0:
+ changes:
+ bugfixes:
+ - Added ALLOWED_QUERY_PARAMS module_bay by device `#1228 <https://github.com/netbox-community/ansible_modules/pull/1228>`_
+ - Added label to power outlet `#1222 <https://github.com/netbox-community/ansible_modules/pull/1222>`_
+ - Added power outlet type iec-60320-c21 to power outlet template and power outlet
+ modules `#1229 <https://github.com/netbox-community/ansible_modules/issues/1229>`_
+ - Extend query param for parent_location `#1233 <https://github.com/netbox-community/ansible_modules/issues/1233>`_
+ minor_changes:
+ - Add cluster host to dynamic inventory response `#1219 <https://github.com/netbox-community/ansible_modules/pull/1219>`_
+ - Add galaxy-importer to CI process `#1245 <https://github.com/netbox-community/ansible_modules/issues/1245>`_
+ - Adjust modules to support NetBox v4.0.0 `#1234 <https://github.com/netbox-community/ansible_modules/pull/1234>`_
+ - Bump jinja2 from 3.1.2 to 3.1.4 `#1226 <https://github.com/netbox-community/ansible_modules/pull/1226>`_
+ - Bump requests from 2.31.0 to 2.32.0 `#1236 <https://github.com/netbox-community/ansible_modules/pull/1236>`_
+ - Drop obsolete Ansible and Python versions and fix tests `#1241 <https://github.com/netbox-community/ansible_modules/issues/1241>`_
+ - Get ansible-lint passing again (sequence after `#1241 <https://github.com/netbox-community/ansible_modules/issues/1241>`_)
+ `#1243 <https://github.com/netbox-community/ansible_modules/issues/1243>`_
+ - Update CI process to follow Ansible Collection Standards `#1247 <https://github.com/netbox-community/ansible_modules/issues/1247>`_
+ - Update CI to use master instead of main. `#1253 <https://github.com/netbox-community/ansible_modules/issues/1253>`_
+ - Update ansible-lint to ignore changelog file for yaml indentation. `#1256
+ <https://github.com/netbox-community/ansible_modules/issues/1256>`_
+ - Update top-level README with new minimum Ansible version (sequence after `#1241
+ <https://github.com/netbox-community/ansible_modules/issues/1241>`_ `#1244
+ <https://github.com/netbox-community/ansible_modules/issues/1244>`_
+ - Updated CI to only run changelog job if PR into devel branch is detected.
+ `#1251 <https://github.com/netbox-community/ansible_modules/issues/1251>`_
+ - Updated CI to support NetBox 4.0 `#1230 <https://github.com/netbox-community/ansible_modules/pull/1230>`_
+ - Updates to top-level README.md to align collection with Ansible best practices
+ `#1238 <https://github.com/netbox-community/ansible_modules/issues/1238>`_
+ fragments:
+ - fix_ci.yml
+ - housekeeping.yml
+ - issue_1249.yml
+ - issue_1253.yml
+ - issue_1256.yml
+ release_date: '2024-05-28'
+ 3.19.1:
+ changes:
+ minor_changes:
+ - Bump version 3.19.1
+ fragments:
+ - bump.yml
+ release_date: '2024-05-29'
+ 3.2.0:
+ changes:
+ bugfixes:
+ - Copy interfaces before processing [#556](https://github.com/netbox-community/ansible_modules/pull/556)
+ - Make attached_ips subscriptable. [#609](https://github.com/netbox-community/ansible_modules/pull/609)
+ minor_changes:
+ - Add connected-devices to nb_lookup [#540](https://github.com/netbox-community/ansible_modules/pull/540)
+ - Add location and power panel as lookup keys to nb_lookup [#599](https://github.com/netbox-community/ansible_modules/pull/599)
+ - netbox_device_interface and netbox_vm_interface - Add parent interface to
+ modules [#604](https://github.com/netbox-community/ansible_modules/pull/604)
+ - netbox_virtual_machine - Change vCPU to float from int (to reflect NetBox
+ 3.0) [#605](https://github.com/netbox-community/ansible_modules/pull/605)
release_date: '2021-10-18'
3.3.0:
changes:
minor_changes:
- - Improve speed of netbox_cable module on NetBox version 3.0.6 or later [#624](https://github.com/netbox-community/ansible_modules/pull/624)
- - netbox_location - add module for handling Location [#543](https://github.com/netbox-community/ansible_modules/pull/543)
- - netbox_site_group - add module for handling Site Group [#547](https://github.com/netbox-community/ansible_modules/pull/547)
- - netbox_config_context - add module for handling Config Context [#610](https://github.com/netbox-community/ansible_modules/pull/610)
+ - Improve speed of netbox_cable module on NetBox version 3.0.6 or later [#624](https://github.com/netbox-community/ansible_modules/pull/624)
+ - netbox_config_context - add module for handling Config Context [#610](https://github.com/netbox-community/ansible_modules/pull/610)
+ - netbox_location - add module for handling Location [#543](https://github.com/netbox-community/ansible_modules/pull/543)
+ - netbox_site_group - add module for handling Site Group [#547](https://github.com/netbox-community/ansible_modules/pull/547)
modules:
+ - description: Create, update or delete Config Context in NetBox
+ name: netbox_config_context
+ namespace: ''
- description: Create, update or delete Location in NetBox
name: netbox_location
namespace: ''
- description: Create, update or delete Site Group in NetBox
name: netbox_site_group
namespace: ''
- - description: Create, update or delete Config Context in NetBox
- name: netbox_config_context
- namespace: ''
release_date: '2021-10-25'
3.4.0:
changes:
- minor_changes:
- - netbox_inventory_item - Add label and custom fields to module [#632](https://github.com/netbox-community/ansible_modules/pull/632)
- - nb_inventory - Cache OpenAPI locally to speed up inventory [#617](https://github.com/netbox-community/ansible_modules/pull/617)
- - netbox_vm_interface - Add custom fields to module [#637](https://github.com/netbox-community/ansible_modules/pull/637)
- - netbox_provider_network - Add module for handling provider networks [#653](https://github.com/netbox-community/ansible_modules/pull/653)
- - nb_lookup - Add missing endpoints to nb_lookup [#655](https://github.com/netbox-community/ansible_modules/pull/655)
- - netbox_virtual_chassis - Add custom_fields to netbox_virtual_chassis [#657](https://github.com/netbox-community/ansible_modules/pull/657)
- - nb_inventory - Add documentation for use of inventory plugin in Tower/AWX [#648](https://github.com/netbox-community/ansible_modules/pull/648)
- - netbox_cable - Improve lookup speed on NetBox versions earlier than 3.0.6 [#645](https://github.com/netbox-community/ansible_modules/pull/645)
bugfixes:
- - netbox_site - Ensure idempotency between NetBox version 2.11 and 3.00 [#631](https://github.com/netbox-community/ansible_modules/pull/631)
- - netbox_virtual_machine - Ensure idempotency between NetBox version 2.11 and 3.00 [#633](https://github.com/netbox-community/ansible_modules/pull/633)
- - nb_lookup - Fix documentation of validate_cert [#629](https://github.com/netbox-community/ansible_modules/pull/629)
- - netbox_virtual_chassis - Fix issue with virtual chassis creation [#657](https://github.com/netbox-community/ansible_modules/pull/657)
+ - nb_lookup - Fix documentation of validate_cert [#629](https://github.com/netbox-community/ansible_modules/pull/629)
+ - netbox_site - Ensure idempotency between NetBox version 2.11 and 3.00 [#631](https://github.com/netbox-community/ansible_modules/pull/631)
+ - netbox_virtual_chassis - Fix issue with virtual chassis creation [#657](https://github.com/netbox-community/ansible_modules/pull/657)
+ - netbox_virtual_machine - Ensure idempotency between NetBox version 2.11 and
+ 3.00 [#633](https://github.com/netbox-community/ansible_modules/pull/633)
+ minor_changes:
+ - nb_inventory - Add documentation for use of inventory plugin in Tower/AWX
+ [#648](https://github.com/netbox-community/ansible_modules/pull/648)
+ - nb_inventory - Cache OpenAPI locally to speed up inventory [#617](https://github.com/netbox-community/ansible_modules/pull/617)
+ - nb_lookup - Add missing endpoints to nb_lookup [#655](https://github.com/netbox-community/ansible_modules/pull/655)
+ - netbox_cable - Improve lookup speed on NetBox versions earlier than 3.0.6
+ [#645](https://github.com/netbox-community/ansible_modules/pull/645)
+ - netbox_inventory_item - Add label and custom fields to module [#632](https://github.com/netbox-community/ansible_modules/pull/632)
+ - netbox_provider_network - Add module for handling provider networks [#653](https://github.com/netbox-community/ansible_modules/pull/653)
+ - netbox_virtual_chassis - Add custom_fields to netbox_virtual_chassis [#657](https://github.com/netbox-community/ansible_modules/pull/657)
+ - netbox_vm_interface - Add custom fields to module [#637](https://github.com/netbox-community/ansible_modules/pull/637)
modules:
- - description: Create, update or delete Provider Network in NetBox
- name: netbox_provider_network
- namespace: ''
+ - description: Create, update or delete Provider Network in NetBox
+ name: netbox_provider_network
+ namespace: ''
release_date: '2021-11-28'
3.5.0:
changes:
- release_summary:
- First release with some NetBox 3.1 modules, and new wireless endpoint support. Remember to update your pynetbox
- to utilise the wireless endpoint support.
- minor_changes:
- - netbox_contact, netbox_contact_group, netbox_contact_role - Add modules [#671](https://github.com/netbox-community/ansible_modules/pull/671)
- - netbox_wireless_lan, netbox_wireless_lan_group, netbox_wireless_link - Add modules [#678](https://github.com/netbox-community/ansible_modules/pull/678)
- - nb_inventory - Pull extended inventory data for prefixes and site [#646](https://github.com/netbox-community/ansible_modules/pull/646)
- - nb_lookup - Add endpoints for wireless (new in NetBox 3.1) [#673](https://github.com/netbox-community/ansible_modules/pull/673)
- - netbox_inventory_item - Add parent field to module [#682](https://github.com/netbox-community/ansible_modules/pull/682)
- - netbox_circuit_termination - Add mark_connected field to module [#686](https://github.com/netbox-community/ansible_modules/pull/686)
- - netbox_region - Add description, tags, custom_fields to module [#689](https://github.com/netbox-community/ansible_modules/pull/689)
-
bugfixes:
- - Use individual list items when looking for objects [#570](https://github.com/netbox-community/ansible_modules/pull/570)
-
+ - Use individual list items when looking for objects [#570](https://github.com/netbox-community/ansible_modules/pull/570)
+ minor_changes:
+ - nb_inventory - Pull extended inventory data for prefixes and site [#646](https://github.com/netbox-community/ansible_modules/pull/646)
+ - nb_lookup - Add endpoints for wireless (new in NetBox 3.1) [#673](https://github.com/netbox-community/ansible_modules/pull/673)
+ - netbox_circuit_termination - Add mark_connected field to module [#686](https://github.com/netbox-community/ansible_modules/pull/686)
+ - netbox_contact, netbox_contact_group, netbox_contact_role - Add modules [#671](https://github.com/netbox-community/ansible_modules/pull/671)
+ - netbox_inventory_item - Add parent field to module [#682](https://github.com/netbox-community/ansible_modules/pull/682)
+ - netbox_region - Add description, tags, custom_fields to module [#689](https://github.com/netbox-community/ansible_modules/pull/689)
+ - netbox_wireless_lan, netbox_wireless_lan_group, netbox_wireless_link - Add
+ modules [#678](https://github.com/netbox-community/ansible_modules/pull/678)
+ release_summary: First release with some NetBox 3.1 modules, and new wireless
+ endpoint support. Remember to update your pynetbox to utilise the wireless
+ endpoint support.
modules:
- - description: Create, update or delete Contact objects in NetBox
- name: netbox_contact
- namespace: ''
- - description: Create, update or delete Contact Group objects in NetBox
- name: netbox_contact_group
- namespace: ''
- - description: Create, update or delete Contact Role objects in NetBox
- name: netbox_contact_role
- - description: Create, update or delete Wireless LAN objects in NetBox
- name: netbox_wireless_lan
- namespace: ''
- - description: Create, update or delete Wireless LAN Group objects in NetBox
- name: netbox_wireless_lan_group
- namespace: ''
- - description: Create, update or delete Wireless Link objects in NetBox
- name: netbox_wireless_link
- namespace: ''
+ - description: Create, update or delete Contact objects in NetBox
+ name: netbox_contact
+ namespace: ''
+ - description: Create, update or delete Contact Group objects in NetBox
+ name: netbox_contact_group
+ namespace: ''
+ - description: Create, update or delete Wireless LAN objects in NetBox
+ name: netbox_wireless_lan
+ namespace: ''
+ - description: Create, update or delete Wireless LAN Group objects in NetBox
+ name: netbox_wireless_lan_group
+ namespace: ''
+ - description: Create, update or delete Wireless Link objects in NetBox
+ name: netbox_wireless_link
+ namespace: ''
release_date: '2022-01-11'
3.5.1:
changes:
bugfixes:
- - Fix prefix_count error on older NetBox versions in nb_inventory [#696](https://github.com/netbox-community/ansible_modules/pull/696)
+ - Fix prefix_count error on older NetBox versions in nb_inventory [#696](https://github.com/netbox-community/ansible_modules/pull/696)
release_date: '2022-01-19'
3.6.0:
changes:
- minor_changes:
- - netbox_webhook - Add module [#738](https://github.com/netbox-community/ansible_modules/pull/738)
- - netbox_export_template - Add module [#727](https://github.com/netbox-community/ansible_modules/pull/727)
- - Add tags to modules missing it [#725](https://github.com/netbox-community/ansible_modules/pull/725)
- - Add custom fields to modules missing it [#723](https://github.com/netbox-community/ansible_modules/pull/723)
- - netbox_custom_link - Add module [#722](https://github.com/netbox-community/ansible_modules/pull/722)
- - netbox_custom_field - Add module [#719](https://github.com/netbox-community/ansible_modules/pull/719)
- - netbox_service - Add virtual_machine as an allowed query parameter for ipaddresses [#718](https://github.com/netbox-community/ansible_modules/pull/718)
- - netbox_device_interface, netbox_vm_interface - Add bridge to netbox_device_interface and netbox_vm_interface [#713](https://github.com/netbox-community/ansible_modules/pull/713)
- - nb_inventory - Add a racks option [#701](https://github.com/netbox-community/ansible_modules/pull/701)
- bugfixes:
- - Fix prefixes option in nb_inventory to ensure all prefixes are returned [#742](https://github.com/netbox-community/ansible_modules/pull/742)
- - Ensure proper filtering for VLAN group [#741](https://github.com/netbox-community/ansible_modules/pull/741)
- - Config Context is now able to be added to cluster [#715](https://github.com/netbox-community/ansible_modules/pull/715)
- - Make sure API calls on versions without the /api/status endpoint [#707](https://github.com/netbox-community/ansible_modules/pull/707)
+ bugfixes:
+ - Config Context is now able to be added to cluster [#715](https://github.com/netbox-community/ansible_modules/pull/715)
+ - Ensure proper filtering for VLAN group [#741](https://github.com/netbox-community/ansible_modules/pull/741)
+ - Fix prefixes option in nb_inventory to ensure all prefixes are returned [#742](https://github.com/netbox-community/ansible_modules/pull/742)
+ - Make sure API calls on versions without the /api/status endpoint [#707](https://github.com/netbox-community/ansible_modules/pull/707)
+ minor_changes:
+ - Add custom fields to modules missing it [#723](https://github.com/netbox-community/ansible_modules/pull/723)
+ - Add tags to modules missing it [#725](https://github.com/netbox-community/ansible_modules/pull/725)
+ - nb_inventory - Add a racks option [#701](https://github.com/netbox-community/ansible_modules/pull/701)
+ - netbox_custom_field - Add module [#719](https://github.com/netbox-community/ansible_modules/pull/719)
+ - netbox_custom_link - Add module [#722](https://github.com/netbox-community/ansible_modules/pull/722)
+ - netbox_device_interface, netbox_vm_interface - Add bridge to netbox_device_interface
+ and netbox_vm_interface [#713](https://github.com/netbox-community/ansible_modules/pull/713)
+ - netbox_export_template - Add module [#727](https://github.com/netbox-community/ansible_modules/pull/727)
+ - netbox_service - Add virtual_machine as an allowed query parameter for ipaddresses
+ [#718](https://github.com/netbox-community/ansible_modules/pull/718)
+ - netbox_webhook - Add module [#738](https://github.com/netbox-community/ansible_modules/pull/738)
modules:
- - description: Create, update or delete Custom fields in NetBox
- name: netbox_custom_field
- namespace: ''
- - description: Create, update or delete Custom links in NetBox
- name: netbox_custom_link
- namespace: ''
- - description: Create, update or delete Export templates in NetBox
- name: netbox_export_template
- namespace: ''
- - description: Create, update or delete Webhooks in NetBox
- name: netbox_webhook
- namespace: ''
+ - description: Create, update or delete Custom fields in NetBox
+ name: netbox_custom_field
+ namespace: ''
+ - description: Create, update or delete Custom links in NetBox
+ name: netbox_custom_link
+ namespace: ''
+ - description: Create, update or delete Export templates in NetBox
+ name: netbox_export_template
+ namespace: ''
+ - description: Create, update or delete Webhooks in NetBox
+ name: netbox_webhook
+ namespace: ''
release_date: '2022-02-27'
3.7.0:
changes:
- minor_changes:
- - Multiple modules - add new parameters added in NetBox 3.2 [#768](https://github.com/netbox-community/ansible_modules/pull/768)
- - netbox_front_port and netbox_rear_port - Add label as parameter [#766](https://github.com/netbox-community/ansible_modules/pull/766)
- - nb_inventory - Add site_group as an option [#755](https://github.com/netbox-community/ansible_modules/pull/755)
- - Add meta information for use in Execution Environments [#753](https://github.com/netbox-community/ansible_modules/pull/753)
bugfixes:
- - netbox_rack - Add location as a query parameter for uniqueness check [#751](https://github.com/netbox-community/ansible_modules/pull/751)
- - netbox_contact_group - Fix field description [#762](https://github.com/netbox-community/ansible_modules/pull/762)
+ - netbox_contact_group - Fix field description [#762](https://github.com/netbox-community/ansible_modules/pull/762)
+ - netbox_rack - Add location as a query parameter for uniqueness check [#751](https://github.com/netbox-community/ansible_modules/pull/751)
+ minor_changes:
+ - Add meta information for use in Execution Environments [#753](https://github.com/netbox-community/ansible_modules/pull/753)
+ - Multiple modules - add new parameters added in NetBox 3.2 [#768](https://github.com/netbox-community/ansible_modules/pull/768)
+ - nb_inventory - Add site_group as an option [#755](https://github.com/netbox-community/ansible_modules/pull/755)
+ - netbox_front_port and netbox_rear_port - Add label as parameter [#766](https://github.com/netbox-community/ansible_modules/pull/766)
trivial:
- - Updated CI to test against NetBox 3.2 [#759](https://github.com/netbox-community/ansible_modules/pull/759)
+ - Updated CI to test against NetBox 3.2 [#759](https://github.com/netbox-community/ansible_modules/pull/759)
release_date: '2022-04-18'
3.7.1:
changes:
bugfixes:
- - nb_inventory - Fix netbox_inventory site_group group_by @ryanmerolle in [#780](https://github.com/netbox-community/ansible_modules/pull/780)
- - nb_inventory - Ensure inventory works on NetBox versions without the site group model [#781](https://github.com/netbox-community/ansible_modules/pull/781)
+ - nb_inventory - Ensure inventory works on NetBox versions without the site
+ group model [#781](https://github.com/netbox-community/ansible_modules/pull/781)
+ - nb_inventory - Fix netbox_inventory site_group group_by @ryanmerolle in [#780](https://github.com/netbox-community/ansible_modules/pull/780)
release_date: '2022-04-26'
3.8.0:
changes:
minor_changes:
- - Add action_group to enable module defaults groups [#800](https://github.com/netbox-community/ansible_modules/pull/800)
- - netbox_device_interface - Add PoE attribute [#820](https://github.com/netbox-community/ansible_modules/pull/820)
- - Expand on query_filter for site [#824](https://github.com/netbox-community/ansible_modules/pull/824)
- - nb_inventory - Allow API token to be templated [#806](https://github.com/netbox-community/ansible_modules/pull/806)
- - netbox_prefix - Add mark_utilized to module [#827](https://github.com/netbox-community/ansible_modules/pull/827)
- - netbox_cable - Change length to float from int [#828](https://github.com/netbox-community/ansible_modules/pull/828)
- - netbox_location - Add tenant to module [#829](https://github.com/netbox-community/ansible_modules/pull/829)
+ - Add action_group to enable module defaults groups [#800](https://github.com/netbox-community/ansible_modules/pull/800)
+ - Expand on query_filter for site [#824](https://github.com/netbox-community/ansible_modules/pull/824)
+ - nb_inventory - Allow API token to be templated [#806](https://github.com/netbox-community/ansible_modules/pull/806)
+ - netbox_cable - Change length to float from int [#828](https://github.com/netbox-community/ansible_modules/pull/828)
+ - netbox_device_interface - Add PoE attribute [#820](https://github.com/netbox-community/ansible_modules/pull/820)
+ - netbox_location - Add tenant to module [#829](https://github.com/netbox-community/ansible_modules/pull/829)
+ - netbox_prefix - Add mark_utilized to module [#827](https://github.com/netbox-community/ansible_modules/pull/827)
release_date: '2022-09-19'
3.8.1:
changes:
bugfixes:
- - Fix idempotency with custom_fields [#839](https://github.com/netbox-community/ansible_modules/pull/839)
+ - Fix idempotency with custom_fields [#839](https://github.com/netbox-community/ansible_modules/pull/839)
minor_changes:
- - nb_inventory - Allow for jinja templating [#834](https://github.com/netbox-community/ansible_modules/pull/834)
+ - nb_inventory - Allow for jinja templating [#834](https://github.com/netbox-community/ansible_modules/pull/834)
trivial:
- - Update CI to include NetBox 3.3 [#841](https://github.com/netbox-community/ansible_modules/pull/841)
- - Update documentation to show exclusion filter [#837](https://github.com/netbox-community/ansible_modules/pull/837)
+ - Update CI to include NetBox 3.3 [#841](https://github.com/netbox-community/ansible_modules/pull/841)
+ - Update documentation to show exclusion filter [#837](https://github.com/netbox-community/ansible_modules/pull/837)
release_date: '2022-10-17'
3.9.0:
changes:
bugfixes:
- - netbox_virtual_machine - Fix idempotency with virtual machine and NetBox 3.0 [#859](https://github.com/netbox-community/ansible_modules/pull/859)
- - nb_inventory - Make sure inventory works with ansible < 2.11 [#861](https://github.com/netbox-community/ansible_modules/pull/861)
- - nb_inventory - Fix site_group in inventory plugin [#872](https://github.com/netbox-community/ansible_modules/pull/872)
+ - nb_inventory - Fix site_group in inventory plugin [#872](https://github.com/netbox-community/ansible_modules/pull/872)
+ - nb_inventory - Make sure inventory works with ansible < 2.11 [#861](https://github.com/netbox-community/ansible_modules/pull/861)
+ - netbox_virtual_machine - Fix idempotency with virtual machine and NetBox 3.0
+ [#859](https://github.com/netbox-community/ansible_modules/pull/859)
minor_changes:
- - netbox_l2vpn - Add module [#846](https://github.com/netbox-community/ansible_modules/pull/846)
- - nb_lookup - Add 3.3 endpoints for lookup [#865](https://github.com/netbox-community/ansible_modules/pull/865)
- - netbox_device_bay - Add label [#868](https://github.com/netbox-community/ansible_modules/pull/868)
- - netbox_console_server and netbox_console_server_port - Add new field [#866](https://github.com/netbox-community/ansible_modules/pull/866)
+ - nb_lookup - Add 3.3 endpoints for lookup [#865](https://github.com/netbox-community/ansible_modules/pull/865)
+ - netbox_console_server and netbox_console_server_port - Add new field [#866](https://github.com/netbox-community/ansible_modules/pull/866)
+ - netbox_device_bay - Add label [#868](https://github.com/netbox-community/ansible_modules/pull/868)
+ - netbox_l2vpn - Add module [#846](https://github.com/netbox-community/ansible_modules/pull/846)
modules:
- - description: Create, update or delete L2VPN objects in NetBox
- name: netbox_l2vpn
- namespace: ''
+ - description: Create, update or delete L2VPN objects in NetBox
+ name: netbox_l2vpn
+ namespace: ''
release_date: '2022-11-14'
- 3.10.0:
- changes:
- bugfixes:
- - nb_inventory - Raise exception on missing packaging [#900](https://github.com/netbox-community/ansible_modules/pull/900)
- minor_changes:
- - netbox_service_template - Add module [#908](https://github.com/netbox-community/ansible_modules/pull/908)
- - netbox_module_type - Add module [#887](https://github.com/netbox-community/ansible_modules/pull/887)
- - netbox_invventory_item_role - Add module [#885](https://github.com/netbox-community/ansible_modules/pull/885)
- - Add options for NetBox 3.4 [#905](https://github.com/netbox-community/ansible_modules/pull/905)
- - netbox_custom_field - Add group_name [#882](https://github.com/netbox-community/ansible_modules/pull/882)
- - netbox_device_type and netbox_device - Add airflow [#907](https://github.com/netbox-community/ansible_modules/pull/907)
- trivial:
- - Update CI for NetBox 3.4 [#906](https://github.com/netbox-community/ansible_modules/pull/906)
- - Update documentation to use FQCN [#881](https://github.com/netbox-community/ansible_modules/pull/881)
- modules:
- - description: Create, update or delete service templates in NetBox
- name: netbox_service_template
- namespace: ''
- - description: Create, update or delete module types in NetBox
- name: netbox_module_type
- namespace: ''
- - description: Create, update or delete inventory item roles in NetBox
- name: netbox_inventory_item_role
- namespace: ''
- release_date: '2022-12-16'
- 3.11.0:
- changes:
- bugfixes:
- - netbox_cable - Fix NetBox 3.3 compatibility [#938](https://github.com/netbox-community/ansible_modules/pull/938)
- - netbox_webhook - Fix conditions bug [#926](https://github.com/netbox-community/ansible_modules/pull/926)
- - nb_lookup - Fix pynetbox 7.0.1 compatibility [#934](https://github.com/netbox-community/ansible_modules/pull/934)
- minor_changes:
- - nb_inventory - Add serial and asset tag to extracted attributes [#826](https://github.com/netbox-community/ansible_modules/pull/826)
- release_date: '2023-02-12'
- 3.12.0:
- changes:
- bugfixes:
- - nb_inventory - Fix nb_inventory group_by by site_group [#952](https://github.com/netbox-community/ansible_modules/pull/952)
- - netbox_location - Add multiple filter options to make sure we find the unique location [#963](https://github.com/netbox-community/ansible_modules/pull/963)
- - Fix partial updates of custom_fields [#944](https://github.com/netbox-community/ansible_modules/pull/944)
- minor_changes:
- - netbox_asn - Add module [#947](https://github.com/netbox-community/ansible_modules/pull/947)
- - netbox_journal_entry - Add module [#961](https://github.com/netbox-community/ansible_modules/pull/961)
- - netbox_fhrp_group - Add module [#957](https://github.com/netbox-community/ansible_modules/pull/957)
- - netbox_aggregate - Add tenant as parameter to module [#968](https://github.com/netbox-community/ansible_modules/pull/968)
- modules:
- - description: Create journal entries in NetBox
- name: netbox_journal_entry
- namespace: ''
- - description: Create, update or delete FHRP groups in NetBox
- name: netbox_fhrp_group
- namespace: ''
- - description: Create, update or delete ASN in NetBox
- name: netbox_asn
- namespace: ''
- release_date: '2023-04-04'
- 3.13.0:
- changes:
- bugfixes:
- - netbox_cable - Fix idempotency [#990](https://github.com/netbox-community/ansible_modules/pull/990)
- minor_changes:
- - netbox_fhrp_group_assignment - Add module [#974](https://github.com/netbox-community/ansible_modules/pull/974)
- - netbox_lsvpn_termination - Add module [#994](https://github.com/netbox-community/ansible_modules/pull/994)
- - nb_inventory - Enable NetBox 3.5 support [#999](https://github.com/netbox-community/ansible_modules/pull/999)
- modules:
- - description: Creates, updates or removes FHRP group assignments from NetBox
- name: netbox_fhrp_group_assignment
- namespace: ''
- - description: Creates, updates or removes L2VPNs terminations from NetBox
- name: netbox_lsvpn_termination
- namespace: ''
- release_date: '2023-05-11'
- 3.14.0:
- changes:
- bugfixes:
- - Fix schema caching [#1053](https://github.com/netbox-community/ansible_modules/pull/1053)
- minor_changes:
- - custom fields - Add datetime as an custom field option [#1019](https://github.com/netbox-community/ansible_modules/pull/1019)
- - API - Add possibility to use Bearer token [#1023](https://github.com/netbox-community/ansible_modules/pull/1023)
- - netbox_circuit_type, netbox_device_interface - Add missing options [#1025](https://github.com/netbox-community/ansible_modules/pull/1025)
- - netbox_cable - Add tenant [#1027](https://github.com/netbox-community/ansible_modules/pull/1027)
- - netbox_custom_field - Add hidden-ifunset option [#1048](https://github.com/netbox-community/ansible_modules/pull/1048)
- - netbox_power_port - Add missing power port option [#1049](https://github.com/netbox-community/ansible_modules/pull/1049)
- - netbox_inventory_item - Add role to module [#1050](https://github.com/netbox-community/ansible_modules/pull/1050)
- release_date: '2023-08-18'
- 3.15.0:
- changes:
- bugfixes:
- - netbox_device - Adjust device_role to role for NetBox 3.6 [#1066](https://github.com/netbox-community/ansible_modules/pull/1066)
- minor_changes:
- - netbox_device - Add oob_ip to device [#1085](https://github.com/netbox-community/ansible_modules/pull/1085)
- - netbox_device_type - Add default_platform [#1092](https://github.com/netbox-community/ansible_modules/pull/1092)
- - netbox_config_template - New module [#1090](https://github.com/netbox-community/ansible_modules/pull/1090)
- modules:
- - description: Creates, updates, or removes a config template from NetBox
- name: netbox_config_template
- namespace: ''
- release_date: '2023-10-18'
- 3.16.0:
- changes:
- bugfixes:
- - nb_lookup - Allow multiple IDs in nb_lookup [#1042](https://github.com/netbox-community/ansible_modules/pull/1042)
- - Improve error reporting for missing module [#1126](https://github.com/netbox-community/ansible_modules/pull/1126)
- - nb_inventory - Fix API cache failure [#1111](https://github.com/netbox-community/ansible_modules/pull/1111)
- minor_changes:
- - nb_inventory - Add facility group_by option [#1059](https://github.com/netbox-community/ansible_modules/pull/1059)
- - netbox_power_port_template - Add option module_type to netbox_power_port_template [#1105](https://github.com/netbox-community/ansible_modules/pull/1105)
- - nb_inventory - Enable ansible-vault strings in config-context data [#1114](https://github.com/netbox-community/ansible_modules/pull/1114)
- - netbox_platform - Add config_template option to netbox_platform [#1119](https://github.com/netbox-community/ansible_modules/pull/1119)
- release_date: '2023-12-10'
- 3.17.0:
- changes:
- bugfixes:
- - netbox_vlan - Fix documentation of vlan_group [#1138](https://github.com/netbox-community/ansible_modules/pull/1138)
- minor_changes:
- - nb_lookup - Add new VPN endpoints for NetBox 3.7 support [#1162](https://github.com/netbox-community/ansible_modules/pull/1162)
- - netbox_rack_role - Add description option [#1143](https://github.com/netbox-community/ansible_modules/pull/1143)
- - netbox_virtual_disk - New module [#1153](https://github.com/netbox-community/ansible_modules/pull/1153)
- - CI - CI adjustments [#1154](https://github.com/netbox-community/ansible_modules/pull/1154) [#1155](https://github.com/netbox-community/ansible_modules/pull/1155) [#1157](https://github.com/netbox-community/ansible_modules/pull/1157)
- - netbox_virtual_machine and netbox_device - Add option config_template [#1171](https://github.com/netbox-community/ansible_modules/pull/1171)
- modules:
- - description: Creates, updates, or removes a disk from a Virtual Machine
- name: netbox_virtual_disk
- namespace: ''
- release_date: '2024-02-14'
- 3.18.0:
- changes:
- minor_changes:
- - nb_lookup - Add endpoint for Virtual Disks [#1177](https://github.com/netbox-community/ansible_modules/pull/1177)
- - nb_lookup - Add custom field choice set [#1186](https://github.com/netbox-community/ansible_modules/pull/1186)
- - nb_inventory - Don't extract null values from custom fields [#1184](https://github.com/netbox-community/ansible_modules/pull/1184)
- - nb_inventory - Add Virtual Disks to inventory [#1188](https://github.com/netbox-community/ansible_modules/pull/1188)
- - netbox_device_type and netbox_rack - Change u_height to float [#1200](https://github.com/netbox-community/ansible_modules/pull/1200)
- - netbox_power_port - Add label [#1202](https://github.com/netbox-community/ansible_modules/pull/1202)
- - nb_inventory - Make oob_ip available regardless of oob_ip_as_primary_ip option [#1211](https://github.com/netbox-community/ansible_modules/pull/1211)
- - netbox_export_templates - Update documentation [#1214](https://github.com/netbox-community/ansible_modules/pull/1214)
- - nb_inventory - Improve documentation for oob_ip_as_primary_ip [#1218](https://github.com/netbox-community/ansible_modules/pull/1218)
- modules:
- - description: Creates, updates, or removes Custom Field Choice sets
- name: netbox_custom_field_choice_set
- namespace: ''
- - description: Creates, updates, or removes Module Bay
- name: netbox_module_bay
- namespace: ''
- - description: Creates, updates, or removes Module
- name: netbox_module
- namespace: ''
- release_date: '2024-05-05'
diff --git a/ansible_collections/netbox/netbox/changelogs/fragments/619-v3.0-ci-fixes.yml b/ansible_collections/netbox/netbox/changelogs/fragments/619-v3.0-ci-fixes.yml
deleted file mode 100644
index 97d10b9e9..000000000
--- a/ansible_collections/netbox/netbox/changelogs/fragments/619-v3.0-ci-fixes.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-minor_changes:
- - netbox.netbox - Fixed integration tests with v3.0
diff --git a/ansible_collections/netbox/netbox/changelogs/fragments/allow-jinja2.yml b/ansible_collections/netbox/netbox/changelogs/fragments/allow-jinja2.yml
deleted file mode 100644
index f2699e086..000000000
--- a/ansible_collections/netbox/netbox/changelogs/fragments/allow-jinja2.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-minor_changes:
- - nb_inventory - Allow Jinja2 template on filter
diff --git a/ansible_collections/netbox/netbox/changelogs/fragments/issue-558.yml b/ansible_collections/netbox/netbox/changelogs/fragments/issue-558.yml
deleted file mode 100644
index 72bb43242..000000000
--- a/ansible_collections/netbox/netbox/changelogs/fragments/issue-558.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-bugfixes:
- - Fix to the argspec for support of ansible-core 2.11 (#558)
diff --git a/ansible_collections/netbox/netbox/changelogs/fragments/issue-951.yml b/ansible_collections/netbox/netbox/changelogs/fragments/issue-951.yml
deleted file mode 100644
index 55b307e43..000000000
--- a/ansible_collections/netbox/netbox/changelogs/fragments/issue-951.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-bugfixes:
- - Fix nb_inventory group_by by site_group (#951)
diff --git a/ansible_collections/netbox/netbox/changelogs/fragments/serial_and_asset.yml b/ansible_collections/netbox/netbox/changelogs/fragments/serial_and_asset.yml
deleted file mode 100644
index 46a225807..000000000
--- a/ansible_collections/netbox/netbox/changelogs/fragments/serial_and_asset.yml
+++ /dev/null
@@ -1,3 +0,0 @@
----
-minor_changes:
- - nb_inventory - extract serial and asset_tag from devices
diff --git a/ansible_collections/netbox/netbox/docs/_extensions/pygments_lexer.py b/ansible_collections/netbox/netbox/docs/_extensions/pygments_lexer.py
index 46ce6f81b..a1c0ec8af 100644
--- a/ansible_collections/netbox/netbox/docs/_extensions/pygments_lexer.py
+++ b/ansible_collections/netbox/netbox/docs/_extensions/pygments_lexer.py
@@ -38,8 +38,6 @@ from __future__ import absolute_import, division, print_function
__metaclass__ = type
from pygments.lexer import (
- LexerContext,
- ExtendedRegexLexer,
DelegatingLexer,
RegexLexer,
bygroups,
@@ -48,8 +46,6 @@ from pygments.lexer import (
from pygments.lexers import DiffLexer
from pygments import token
-import re
-
class AnsibleOutputPrimaryLexer(RegexLexer):
name = "Ansible-output-primary"
diff --git a/ansible_collections/netbox/netbox/docs/conf.py b/ansible_collections/netbox/netbox/docs/conf.py
index 87cfe5e7a..47651bd12 100644
--- a/ansible_collections/netbox/netbox/docs/conf.py
+++ b/ansible_collections/netbox/netbox/docs/conf.py
@@ -27,7 +27,7 @@ copyright = "2020, Mikhail Yohman"
author = "Mikhail Yohman <@FragmentedPacket>"
# The full version, including alpha/beta/rc tags
-release = "3.18.0"
+release = "3.19.1"
# -- General configuration ---------------------------------------------------
diff --git a/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/architecture.rst b/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/architecture.rst
index cc3f46de9..edeac7a56 100644
--- a/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/architecture.rst
+++ b/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/architecture.rst
@@ -28,7 +28,7 @@ Let's take a look at the output of the ``tree`` command within the ``plugins/``
│ ├── netbox_device.py
│ ... omitted
│ └── netbox_vrf.py
-
+
128 directories, 357 files
As you can see, we have a handful of ``module_utils`` that correspond to each application in **NetBox** as well as a ``netbox_utils`` module that provides a common interface for the collection.
@@ -51,21 +51,21 @@ Let's take a look at some of the code within ``netbox_dcim.py``.
# Copyright: (c) 2020, Nokia, Tobias Groß (@toerb) <tobias.gross@nokia.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
-
+
__metaclass__ = type
-
+
from ansible_collections.netbox.netbox.plugins.module_utils.netbox_utils import (
NetboxModule,
ENDPOINT_NAME_MAPPING,
SLUG_REQUIRED,
)
-
+
NB_CABLES = "cables"
NB_CONSOLE_PORTS = "console_ports"
NB_CONSOLE_PORT_TEMPLATES = "console_port_templates"
...
-The top of the code is importing the ``NetboxModule`` class, ``ENDPOINT_NAME_MAPPING``, and ``SLUG_REQUIRED`` from ``netbox_utils.py``.
+The top of the code is importing the ``NetboxModule`` class, ``ENDPOINT_NAME_MAPPING``, and ``SLUG_REQUIRED`` from ``netbox_utils.py``.
After the imports, we define constants to define the endpoints that are supported as well as these being passed into the initialization of ``NetboxModule``. We'll see these within the actual modules themselves when we take a look later.
@@ -76,7 +76,7 @@ Now let's take a look at the class definition.
class NetboxDcimModule(NetboxModule):
def __init__(self, module, endpoint):
super().__init__(module, endpoint)
-
+
def run(self):
...
@@ -94,9 +94,9 @@ The ``run`` method contains all the logic for executing the module and we'll sta
...
# Used to dynamically set key when returning results
endpoint_name = ENDPOINT_NAME_MAPPING[self.endpoint]
-
+
self.result = {"changed": False}
-
+
application = self._find_app(self.endpoint)
nb_app = getattr(self.nb, application)
nb_endpoint = getattr(nb_app, self.endpoint)
@@ -259,14 +259,14 @@ The ``NetboxModule`` is the cornerstone of this collection and contains most of
:params endpoint (str): Used to tell class which endpoint the logic needs to follow
:params nb_client (obj): pynetbox.api object passed in (not required)
"""
-
+
def __init__(self, module, endpoint, nb_client=None):
self.module = module
self.state = self.module.params["state"]
self.check_mode = self.module.check_mode
self.endpoint = endpoint
query_params = self.module.params.get("query_params")
-
+
if not HAS_PYNETBOX:
self.module.fail_json(
msg=missing_required_lib("pynetbox"), exception=PYNETBOX_IMP_ERR
@@ -284,7 +284,7 @@ We set several instance attributes that are used within other methods throughout
url = self.module.params["netbox_url"]
token = self.module.params["netbox_token"]
ssl_verify = self.module.params["validate_certs"]
-
+
# Attempt to initiate connection to NetBox
if nb_client is None:
self.nb = self._connect_netbox_api(url, token, ssl_verify)
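The excerpt above shows how each module hands its endpoint to a per-application class (NetboxDcimModule, NetboxIpamModule, and so on) whose run() resolves the pynetbox application and endpoint. A minimal playbook sketch of two such calls, one per application; the URL, token and object names are placeholders, and a brand-new device would normally also need device_type, role and site:

    # handled by NetboxDcimModule via the dcim application/endpoint
    - name: Create a device
      netbox.netbox.netbox_device:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: Test Device
        state: present

    # handled by NetboxIpamModule via the ipam application/endpoint
    - name: Create a VRF
      netbox.netbox.netbox_vrf:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: Test VRF
        state: present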
diff --git a/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/new_module.rst b/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/new_module.rst
index 073984aab..43da00381 100644
--- a/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/new_module.rst
+++ b/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/new_module.rst
@@ -32,7 +32,7 @@ Open ``plugins/module_utils/netbox_ipam.py`` file and update the following.
...
NB_ROUTE_TARGETS = "route_targets"
...
-
+
class NetboxIpamModule(NetboxModule):
...
def run(self):
@@ -195,7 +195,7 @@ The next step is to update the ``EXAMPLES`` variable.
loop:
- { name: "65000:65001", description: "management" }
- { name: "65000:65002", description: "tunnel" }
-
+
- name: Update Description on Route Targets
netbox.netbox.netbox_route_target:
netbox_url: http://netbox.local
@@ -209,7 +209,7 @@ The next step is to update the ``EXAMPLES`` variable.
loop:
- { name: "65000:65001", description: "management" }
- { name: "65000:65002", description: "tunnel" }
-
+
- name: Delete Route Targets
netbox.netbox.netbox_route_target:
netbox_url: http://netbox.local
@@ -322,30 +322,30 @@ Here is the output of a playbook I created using the examples we documented
Skipping callback 'default', as we already have a stdout callback.
Skipping callback 'minimal', as we already have a stdout callback.
Skipping callback 'oneline', as we already have a stdout callback.
-
+
PLAYBOOK: pb.test-rt.yml *********************************************************************************************************************************************************************************************************************************************************************************************
1 plays in pb.test-rt.yml
-
+
PLAY [Test route target creation/deletion] ***************************************************************************************************************************************************************************************************************************************************************************
META: ran handlers
-
+
TASK [Create Route Targets] ******************************************************************************************************************************************************************************************************************************************************************************************
task path: /Users/myohman/cloned-repos/ansible_modules/pb.test-rt.yml:7
changed: [localhost] => (item={'name': '65000:65001', 'description': 'management'}) => {"ansible_loop_var": "item", "changed": true, "item": {"description": "management", "name": "65000:65001"}, "msg": "route_target 65000:65001 updated", "route_target": {"created": "2021-01-13", "custom_fields": {}, "description": "", "id": 1, "last_updated": "2021-01-13T23:06:40.211082Z", "name": "65000:65001", "tags": [4], "tenant": 1, "url": "http://192.168.50.10:8000/api/ipam/route-targets/1/"}}
changed: [localhost] => (item={'name': '65000:65002', 'description': 'tunnel'}) => {"ansible_loop_var": "item", "changed": true, "item": {"description": "tunnel", "name": "65000:65002"}, "msg": "route_target 65000:65002 created", "route_target": {"created": "2021-01-13", "custom_fields": {}, "description": "", "id": 2, "last_updated": "2021-01-13T23:59:29.946943Z", "name": "65000:65002", "tags": [4], "tenant": 1, "url": "http://192.168.50.10:8000/api/ipam/route-targets/2/"}}
-
+
TASK [Update Description on Route Targets] ***************************************************************************************************************************************************************************************************************************************************************************
task path: /Users/myohman/cloned-repos/ansible_modules/pb.test-rt.yml:20
changed: [localhost] => (item={'name': '65000:65001', 'description': 'management'}) => {"ansible_loop_var": "item", "changed": true, "item": {"description": "management", "name": "65000:65001"}, "msg": "route_target 65000:65001 updated", "route_target": {"created": "2021-01-13", "custom_fields": {}, "description": "management", "id": 1, "last_updated": "2021-01-13T23:59:29.146435Z", "name": "65000:65001", "tags": [4], "tenant": 1, "url": "http://192.168.50.10:8000/api/ipam/route-targets/1/"}}
changed: [localhost] => (item={'name': '65000:65002', 'description': 'tunnel'}) => {"ansible_loop_var": "item", "changed": true, "item": {"description": "tunnel", "name": "65000:65002"}, "msg": "route_target 65000:65002 updated", "route_target": {"created": "2021-01-13", "custom_fields": {}, "description": "tunnel", "id": 2, "last_updated": "2021-01-13T23:59:29.946943Z", "name": "65000:65002", "tags": [4], "tenant": 1, "url": "http://192.168.50.10:8000/api/ipam/route-targets/2/"}}
-
+
TASK [Delete Route Targets] ******************************************************************************************************************************************************************************************************************************************************************************************
task path: /Users/myohman/cloned-repos/ansible_modules/pb.test-rt.yml:34
changed: [localhost] => (item=65000:65001) => {"ansible_loop_var": "item", "changed": true, "item": "65000:65001", "msg": "route_target 65000:65001 deleted", "route_target": {"created": "2021-01-13", "custom_fields": {}, "description": "management", "id": 1, "last_updated": "2021-01-13T23:59:30.829004Z", "name": "65000:65001", "tags": [4], "tenant": 1, "url": "http://192.168.50.10:8000/api/ipam/route-targets/1/"}}
changed: [localhost] => (item=65000:65002) => {"ansible_loop_var": "item", "changed": true, "item": "65000:65002", "msg": "route_target 65000:65002 deleted", "route_target": {"created": "2021-01-13", "custom_fields": {}, "description": "tunnel", "id": 2, "last_updated": "2021-01-13T23:59:31.748181Z", "name": "65000:65002", "tags": [4], "tenant": 1, "url": "http://192.168.50.10:8000/api/ipam/route-targets/2/"}}
META: ran handlers
META: ran handlers
-
+
PLAY RECAP ***********************************************************************************************************************************************************************************************************************************************************************************************************
localhost : ok=3 changed=3 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
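The run log above comes from tasks like the EXAMPLES documented earlier on this page. A condensed sketch of the create task that produces the first two "changed" results; the URL and token are placeholders:

    - name: Create Route Targets
      netbox.netbox.netbox_route_target:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: "{{ item.name }}"
          description: "{{ item.description }}"
        state: present
      loop:
        - { name: "65000:65001", description: "management" }
        - { name: "65000:65002", description: "tunnel" }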
diff --git a/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/update_module.rst b/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/update_module.rst
index 6f6568dd0..8d148ce03 100644
--- a/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/update_module.rst
+++ b/ansible_collections/netbox/netbox/docs/getting_started/contributing/modules/update_module.rst
@@ -129,7 +129,7 @@ Edit ``tests/integration/netbox-deploy.py``.
{"name": "6000:6000"},
]
created_route_targets = make_netbox_calls(nb.ipam.route_targets, route_targets)
-
+
if ERRORS:
sys.exit(
"Errors have occurred when creating objects, and should have been printed out. Check previous output."
@@ -147,7 +147,7 @@ Next we'll update ``netbox_vrf.yml`` for the **latest** integration target.
│ ├── ...
│ ├── netbox_vm_interface.yml
│ └── netbox_vrf.yml
-
+
12 directories, 143 files
.. code-block:: yaml
@@ -170,7 +170,7 @@ Next we'll update ``netbox_vrf.yml`` for the **latest** integration target.
- "Schnozzberry"
state: present
register: test_four
-
+
- name: "VRF 4: ASSERT - Updated"
assert:
that:
diff --git a/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/advanced.rst b/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/advanced.rst
index a7d91db1c..d1129fc37 100644
--- a/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/advanced.rst
+++ b/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/advanced.rst
@@ -35,9 +35,9 @@ Let's take a look at specifying just the IP address in CIDR notation and see wha
No config file found; using defaults
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
-
+
PLAY [localhost] **********************************************************************************************************************
-
+
TASK [Add ip address to netbox] *******************************************************************************************************
fatal: [localhost]: FAILED! => {"changed": false, "msg": "More than one result returned for nat_inside"}
@@ -117,12 +117,12 @@ Here are the results and they're identical to when we specified a dictionary
No config file found; using defaults
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
-
+
PLAY [localhost] **********************************************************************************************************************
-
+
TASK [Add ip address to netbox] *******************************************************************************************************
changed: [localhost] => {"changed": true, "ip_address": {"address": "192.168.10.60/24", "assigned_object": null, "assigned_object_id": null, "assigned_object_type": null, "created": "2021-01-01", "custom_fields": {}, "description": "", "dns_name": "", "family": 4, "id": 12, "last_updated": "2021-01-01T17:28:29.770142Z", "nat_inside": 10, "nat_outside": null, "role": null, "status": "active", "tags": [], "tenant": null, "url": "http://192.168.50.10:8000/api/ipam/ip-addresses/12/", "vrf": 1}, "msg": "ip_address 192.168.10.60/24 created"}
-
+
PLAY RECAP ****************************************************************************************************************************
localhost : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
@@ -162,12 +162,12 @@ Let's go ahead and look at the first way by just passing in the complete object
No config file found; using defaults
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
-
+
PLAY [localhost] **********************************************************************************************************************
-
+
TASK [Add ip address to netbox] *******************************************************************************************************
changed: [localhost] => {"changed": true, "ip_address": {"address": "192.168.10.60/24", "assigned_object": null, "assigned_object_id": null, "assigned_object_type": null, "created": "2021-01-01", "custom_fields": {}, "description": "", "dns_name": "", "family": 4, "id": 12, "last_updated": "2021-01-01T17:28:29.770142Z", "nat_inside": 10, "nat_outside": null, "role": null, "status": "active", "tags": [], "tenant": null, "url": "http://192.168.50.10:8000/api/ipam/ip-addresses/12/", "vrf": 1}, "msg": "ip_address 192.168.10.60/24 created"}
-
+
PLAY RECAP ****************************************************************************************************************************
localhost : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
@@ -206,12 +206,12 @@ The collection will actually convert the string it receives for the ``id`` and t
No config file found; using defaults
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
-
+
PLAY [localhost] **********************************************************************************************************************
-
+
TASK [Add ip address to netbox] *******************************************************************************************************
changed: [localhost] => {"changed": true, "ip_address": {"address": "192.168.10.60/24", "assigned_object": null, "assigned_object_id": null, "assigned_object_type": null, "created": "2021-01-01", "custom_fields": {}, "description": "", "dns_name": "", "family": 4, "id": 12, "last_updated": "2021-01-01T17:28:29.770142Z", "nat_inside": 10, "nat_outside": null, "role": null, "status": "active", "tags": [], "tenant": null, "url": "http://192.168.50.10:8000/api/ipam/ip-addresses/12/", "vrf": 1}, "msg": "ip_address 192.168.10.60/24 created"}
-
+
PLAY RECAP ****************************************************************************************************************************
localhost : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
@@ -245,13 +245,13 @@ the global IP address space within NetBox. This task should fail saying there wa
No config file found; using defaults
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
-
+
PLAY [localhost] ****************************************************************************************************************************************************************************************************************
-
+
TASK [Update non-unique IP address] *********************************************************************************************************************************************************************************************
[WARNING]: Skipping plugin (/Users/myohman/Documents/local-dev/ansible/filter_plugins/my_filter_plugin.py) as it seems to be invalid: expected an indented block (my_filter_plugin.py, line 19)
fatal: [localhost]: FAILED! => {"changed": false, "msg": "More than one result returned for 192.168.100.1/24"}
-
+
PLAY RECAP **********************************************************************************************************************************************************************************************************************
localhost : ok=0 changed=0 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
@@ -287,12 +287,12 @@ IP address to be within **Test VRF 2**.
No config file found; using defaults
[WARNING]: No inventory was parsed, only implicit localhost is available
[WARNING]: provided hosts list is empty, only localhost is available. Note that the implicit localhost does not match 'all'
-
+
PLAY [localhost] **********************************************************************************************************************
-
+
TASK [Update non-unique IP address] ***************************************************************************************************
changed: [localhost] => {"changed": true, "ip_address": {"address": "192.168.100.1/24", "assigned_object": null, "assigned_object_id": null, "assigned_object_type": null, "created": "2021-01-01", "custom_fields": {}, "description": "", "dns_name": "docs.netbox-modules.com", "family": 4, "id": 15, "last_updated": "2021-01-01T19:16:49.756265Z", "nat_inside": null, "nat_outside": null, "role": null, "status": "active", "tags": [], "tenant": null, "url": "http://192.168.50.10:8000/api/ipam/ip-addresses/15/", "vrf": 2}, "msg": "ip_address 192.168.100.1/24 updated"}
-
+
PLAY RECAP ****************************************************************************************************************************
localhost : ok=1 changed=1 unreachable=0 failed=0 skipped=0 rescued=0 ignored=0
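The task whose result is shown above scopes the otherwise non-unique 192.168.100.1/24 to Test VRF 2 so a single object can be matched and updated. A sketch of that task, with placeholder URL and token:

    - name: Update non-unique IP address
      netbox.netbox.netbox_ip_address:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          address: 192.168.100.1/24
          vrf: Test VRF 2
          dns_name: docs.netbox-modules.com
        state: present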
diff --git a/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/modules.rst b/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/modules.rst
index 514b9f882..1e3837c5b 100644
--- a/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/modules.rst
+++ b/ansible_collections/netbox/netbox/docs/getting_started/how-to-use/modules.rst
@@ -14,7 +14,7 @@ There are two methods when using a collection in a playbook (in preferred order)
---
- hosts: "localhost"
-
+
tasks:
- name: "Configure a device in NetBox"
netbox.netbox.netbox_device:
@@ -27,7 +27,7 @@ There are two methods when using a collection in a playbook (in preferred order)
- hosts: "localhost"
collections:
- netbox.netbox
-
+
tasks:
- name: "Configure a device in NetBox"
netbox_device:
@@ -52,7 +52,7 @@ Module Arguments & States
This section will provide details on why some module arguments are required for certain states or even change if an object already exists.
-Before we go any further, let's provide some preliminary knowledge of how Ansible works when accepting arguments into a module.
+Before we go any further, let's provide some preliminary knowledge of how Ansible works when accepting arguments into a module.
Ansible provides several builtin methods when initializing the ``AnsibleModule`` to help build flexible module argument requirements. We aren't going to explore all the options in depth, but there are three that we currently use within this collection.
@@ -225,7 +225,7 @@ that specify fields that are unique to each tag. Name can be used, but we always
Using module default groups
+++++++++++++++++++++++++++++
-To avoid having to define the ``netbox_url`` and ``netbox_token`` in each task you can use the module default group feature.
+To avoid having to define the ``netbox_url`` and ``netbox_token`` in each task you can use the module default group feature.
.. code-block:: yaml
@@ -246,7 +246,7 @@ To avoid having to define the ``netbox_url`` and ``netbox_token`` in each task y
state: "present"
- name: "Example state: present - Update"
- netbox.netbox.netbox_device:
+ netbox.netbox.netbox_device:
data:
name: "Test Device"
serial: "FXS110011"
diff --git a/ansible_collections/netbox/netbox/docs/plugins/environment_variables.rst b/ansible_collections/netbox/netbox/docs/plugins/environment_variables.rst
index da663dd63..bbe6a8f3a 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/environment_variables.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/environment_variables.rst
@@ -2,7 +2,7 @@
:orphan:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. _list_of_collection_env_vars:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/index.rst b/ansible_collections/netbox/netbox/docs/plugins/index.rst
index 81f04d1cf..92df1dcd2 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/index.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/index.rst
@@ -1,7 +1,7 @@
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. _plugins_in_netbox.netbox:
@@ -9,7 +9,7 @@
Netbox.Netbox
=============
-Collection version 3.18.0
+Collection version 3.19.1
.. contents::
:local:
@@ -27,7 +27,7 @@ This is a collection of NetBox Ansible modules
**Supported ansible-core versions:**
-* 2.9.10 or newer
+* 2.15.0 or newer
.. ansible-links::
@@ -44,7 +44,6 @@ This is a collection of NetBox Ansible modules
.. toctree::
:maxdepth: 1
-
Plugin Index
------------
diff --git a/ansible_collections/netbox/netbox/docs/plugins/nb_inventory_inventory.rst b/ansible_collections/netbox/netbox/docs/plugins/nb_inventory_inventory.rst
index ea6711489..c4695d224 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/nb_inventory_inventory.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/nb_inventory_inventory.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.nb_inventory inventory -- NetBox inventory source
.. Collection note
.. note::
- This inventory plugin is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This inventory plugin is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -437,6 +437,21 @@ Parameters
.. code-block::
+ [default]
+ fact_caching_prefix = ansible_inventory_
+
+
+ Removed in: version 2.16 of ansible.builtin
+
+
+ Why: Fixes typing error in INI section name
+
+ Alternative: Use the 'defaults' section instead
+
+
+
+ .. code-block::
+
[defaults]
fact_caching_prefix = ansible_inventory_
@@ -1340,7 +1355,7 @@ Parameters
The default value when the host variable's value is an empty string.
- This option is mutually exclusive with \ :ansopt:`netbox.netbox.nb\_inventory#inventory:keyed\_groups[].trailing\_separator`\ .
+ This option is mutually exclusive with \ :literal:`trailing\_separator`\ .
.. raw:: html
@@ -1576,9 +1591,9 @@ Parameters
<div class="ansible-option-indent-desc"></div><div class="ansible-option-cell">
- Set this option to \ :ansval:`False`\ to omit the \ :ansopt:`netbox.netbox.nb\_inventory#inventory:keyed\_groups[].separator`\ after the host variable when the value is an empty string.
+ Set this option to \ :emphasis:`False`\ to omit the \ :literal:`separator`\ after the host variable when the value is an empty string.
- This option is mutually exclusive with \ :ansopt:`netbox.netbox.nb\_inventory#inventory:keyed\_groups[].default\_value`\ .
+ This option is mutually exclusive with \ :literal:`default\_value`\ .
.. rst-class:: ansible-option-line
@@ -2108,7 +2123,7 @@ Parameters
<div class="ansible-option-cell">
- If \ :ansval:`yes`\ make invalid entries a fatal error, otherwise skip and continue.
+ If \ :literal:`yes`\ make invalid entries a fatal error, otherwise skip and continue.
Since it is possible to use facts in the expressions they might not always be available and we ignore those errors by default.
@@ -2487,8 +2502,8 @@ Examples
plugin: netbox.netbox.nb_inventory
api_endpoint: http://localhost:8000
- validate_certs: True
- config_context: False
+ validate_certs: true
+ config_context: false
group_by:
- device_roles
query_filters:
@@ -2560,8 +2575,8 @@ Examples
plugin: netbox.netbox.nb_inventory
api_endpoint: http://localhost:8000
token: <insert token>
- validate_certs: True
- config_context: True
+ validate_certs: true
+ config_context: true
group_by:
- site
- role
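Pulling the option changes above together, a small nb_inventory source file sketch; the keyed_groups key is a placeholder, and default_value is shown because it is mutually exclusive with trailing_separator as noted in the option docs:

    plugin: netbox.netbox.nb_inventory
    api_endpoint: http://localhost:8000
    token: <insert token>
    validate_certs: false
    config_context: false
    strict: false                # skip invalid keyed_groups entries instead of failing
    group_by:
      - site
      - role
    keyed_groups:
      - key: status.value        # placeholder host variable
        prefix: status
        default_value: unknown   # cannot be combined with trailing_separator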
diff --git a/ansible_collections/netbox/netbox/docs/plugins/nb_lookup_lookup.rst b/ansible_collections/netbox/netbox/docs/plugins/nb_lookup_lookup.rst
index 426b77580..7ac5ef3e5 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/nb_lookup_lookup.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/nb_lookup_lookup.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.nb_lookup lookup -- Queries and returns elements from NetBox
.. Collection note
.. note::
- This lookup plugin is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This lookup plugin is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -514,10 +514,7 @@ Examples
api_endpoint='http://localhost/',
token='<redacted>') }}"
- # This example uses an API Filter
-
- tasks:
- # query a list of devices
+ # This example uses an API Filter
- name: Obtain list of devices from NetBox
debug:
msg: >
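Following the reworked example above, a sketch of an API-filtered device query via the lookup; the filter values are placeholders:

    - name: Obtain list of devices from NetBox
      debug:
        msg: >
          "{{ query('netbox.netbox.nb_lookup', 'devices',
                    api_endpoint='http://localhost/',
                    api_filter='role=leaf site=mysite',
                    token='<redacted>') }}"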
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_aggregate_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_aggregate_module.rst
index fe57cd6f7..417e141c3 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_aggregate_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_aggregate_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_aggregate module -- Creates or removes aggregates from NetB
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -717,7 +717,7 @@ Examples
- name: "Test NetBox aggregate module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create aggregate within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_asn_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_asn_module.rst
index 8a9337e7a..228644804 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_asn_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_asn_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_asn module -- Create, update or delete ASNs within NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -627,7 +627,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create ASN within NetBox with only required information
@@ -651,7 +651,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_cable_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_cable_module.rst
index 3bbcd412d..07d8cee5c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_cable_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_cable_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_cable module -- Create, update or delete cables within NetB
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1092,7 +1092,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cable within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_module.rst
index 4c00f4555..af32b52f7 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_circuit module -- Create, update or delete circuits within
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -837,7 +837,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create circuit within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_termination_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_termination_module.rst
index dfcd65f8f..898b1a27b 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_termination_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_termination_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_circuit_termination module -- Create, update or delete circ
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -814,7 +814,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create circuit termination within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_type_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_type_module.rst
index 9ad126b18..10fc1239b 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_type_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_circuit_type_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_circuit_type module -- Create, update or delete circuit typ
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -641,7 +641,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create type within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_group_module.rst
index 547713794..bad767f49 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_cluster_group module -- Create, update or delete cluster gr
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -551,7 +551,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cluster group within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_module.rst
index f38502ebe..39eb8f3c1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_cluster module -- Create, update or delete clusters within
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -756,7 +756,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cluster within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_type_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_type_module.rst
index 65bdb5e20..63a98a426 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_type_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_cluster_type_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_cluster_type module -- Create, update or delete cluster typ
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -593,7 +593,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cluster type within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_config_context_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_config_context_module.rst
index e1e8705fa..9f4efd94c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_config_context_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_config_context_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_config_context module -- Creates, updates or deletes config
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1097,7 +1097,7 @@ Examples
- name: "Test NetBox config_context module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create config context and apply it to sites euc1-az1, euc1-az2 with the default weight of 1000
netbox.netbox.netbox_config_context:
@@ -1107,7 +1107,7 @@ Examples
name: "dns_nameservers-quadnine"
description: "9.9.9.9"
data: "{ \"dns\": { \"nameservers\": [ \"9.9.9.9\" ] } }"
- sites: [ euc1-az1, euc1-az2 ]
+ sites: [euc1-az1, euc1-az2]
- name: Detach config context from euc1-az1, euc1-az2 and attach to euc1-az3
netbox.netbox.netbox_config_context:
@@ -1116,7 +1116,7 @@ Examples
data:
name: "dns_nameservers-quadnine"
data: "{ \"dns\": { \"nameservers\": [ \"9.9.9.9\" ] } }"
- sites: [ euc1-az3 ]
+ sites: [euc1-az3]
- name: Delete config context
netbox.netbox.netbox_config_context:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_config_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_config_template_module.rst
index 33c652098..24fbfb9d9 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_config_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_config_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_config_template module -- Creates or removes config templat
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -584,7 +584,7 @@ Examples
- name: "Test config template creation/deletion"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create config template
netbox.netbox.netbox_config_template:
@@ -596,7 +596,7 @@ Examples
- Cloud
template_code: |
#cloud-config
- packages:
+ packages:
- ansible
- name: Delete config template
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_module.rst
index 75f18bcd8..cda040f35 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_console_port module -- Create, update or delete console por
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -884,7 +884,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_template_module.rst
index 08dd9215f..011d1a8a3 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_port_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_console_port_template module -- Create, update or delete co
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -520,7 +520,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_module.rst
index eb14d265b..d3d358181 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_console_server_port module -- Create, update or delete cons
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -839,7 +839,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console server port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_template_module.rst
index a36a72d5b..2990775f6 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_console_server_port_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_console_server_port_template module -- Create, update or de
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -520,7 +520,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console server port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_group_module.rst
index af898bc34..e3504abbe 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_contact_group module -- Creates or removes contact groups f
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -627,7 +627,7 @@ Examples
- name: "Test NetBox contact group module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create contact group within NetBox with only required information
netbox.netbox.netbox_contact_group:
@@ -649,7 +649,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_module.rst
index 37d57552f..c80e046b7 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_contact module -- Creates or removes contacts from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -843,7 +843,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create contact within NetBox with only required information
netbox.netbox.netbox_contact:
@@ -866,10 +866,10 @@ Examples
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- name: contact ABC
+ name: contact ABC
title: Mr Contact
phone: 123456789
- email: contac@contact.com
+ email: contac@contact.com
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_role_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_role_module.rst
index e17d765e1..6ac8f61fe 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_role_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_contact_role_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_contact_role module -- Creates or removes contact roles fro
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -587,7 +587,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create contact role within NetBox with only required information
netbox.netbox.netbox_contact_role:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_choice_set_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_choice_set_module.rst
index 8b4e168b0..7674415a8 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_choice_set_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_choice_set_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_custom_field_choice_set module -- Creates, updates or delet
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -604,7 +604,7 @@ Examples
- name: "Test Netbox custom_field_choice_set module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a choice set with choices
netbox.netbox.netbox_custom_field_choice_set:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_module.rst
index cae37424e..0b3d5b74b 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_field_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_custom_field module -- Creates, updates or deletes custom f
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -544,6 +544,51 @@ Parameters
* - .. raw:: html
<div class="ansible-option-indent"></div><div class="ansible-option-cell">
+ <div class="ansibleOptionAnchor" id="parameter-data/object_types"></div>
+
+ .. raw:: latex
+
+ \hspace{0.02\textwidth}\begin{minipage}[t]{0.3\textwidth}
+
+ .. _ansible_collections.netbox.netbox.netbox_custom_field_module__parameter-data/object_types:
+
+ .. rst-class:: ansible-option-title
+
+ **object_types**
+
+ .. raw:: html
+
+ <a class="ansibleOptionLink" href="#parameter-data/object_types" title="Permalink to this option"></a>
+
+ .. ansible-option-type-line::
+
+ :ansible-option-type:`list` / :ansible-option-elements:`elements=any`
+
+ :ansible-option-versionadded:`added in netbox.netbox 3.19.0`
+
+
+ .. raw:: html
+
+ </div>
+
+ .. raw:: latex
+
+ \end{minipage}
+
+ - .. raw:: html
+
+ <div class="ansible-option-indent-desc"></div><div class="ansible-option-cell">
+
+ The content type(s) to apply this custom field to (NetBox 4.0+)
+
+
+ .. raw:: html
+
+ </div>
+
+ * - .. raw:: html
+
+ <div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-data/required"></div>
.. raw:: latex
@@ -1136,7 +1181,7 @@ Examples
- name: "Test NetBox custom_fields module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a custom field on device and virtual machine
netbox.netbox.netbox_custom_field:
@@ -1167,7 +1212,7 @@ Examples
netbox_token: thisIsMyToken
data:
name: A Custom Field
- required: yes
+ required: true
- name: Update the custom field to make it read only
netbox.netbox.netbox_custom_field:
@@ -1175,7 +1220,7 @@ Examples
netbox_token: thisIsMyToken
data:
name: A Custom Field
- ui_visibility: read-only
+ ui_visibility: read-only
- name: Delete the custom field
netbox.netbox.netbox_custom_field:
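
The object_types option added in the hunk above (netbox.netbox 3.19.0) is the NetBox 4.0+ way of scoping a custom field to particular models. A minimal task sketch, assuming a reachable NetBox at http://netbox.local with a placeholder token, and reusing only suboptions that appear elsewhere in these docs (the type suboption is an assumption, not shown in the hunk):

    - name: Create a custom field scoped to devices and virtual machines (NetBox 4.0+)
      netbox.netbox.netbox_custom_field:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: A Custom Field
          object_types: ["dcim.device", "virtualization.virtualmachine"]
          type: text        # assumed suboption; not part of the hunk above
          required: true
        state: present
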
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_link_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_link_module.rst
index d5da98511..18ea45e60 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_link_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_custom_link_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_custom_link module -- Creates, updates or deletes custom li
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -560,6 +560,51 @@ Parameters
* - .. raw:: html
<div class="ansible-option-indent"></div><div class="ansible-option-cell">
+ <div class="ansibleOptionAnchor" id="parameter-data/object_types"></div>
+
+ .. raw:: latex
+
+ \hspace{0.02\textwidth}\begin{minipage}[t]{0.3\textwidth}
+
+ .. _ansible_collections.netbox.netbox.netbox_custom_link_module__parameter-data/object_types:
+
+ .. rst-class:: ansible-option-title
+
+ **object_types**
+
+ .. raw:: html
+
+ <a class="ansibleOptionLink" href="#parameter-data/object_types" title="Permalink to this option"></a>
+
+ .. ansible-option-type-line::
+
+ :ansible-option-type:`list` / :ansible-option-elements:`elements=any`
+
+ :ansible-option-versionadded:`added in netbox.netbox 3.19.0`
+
+
+ .. raw:: html
+
+ </div>
+
+ .. raw:: latex
+
+ \end{minipage}
+
+ - .. raw:: html
+
+ <div class="ansible-option-indent-desc"></div><div class="ansible-option-cell">
+
+ The object type(s) to apply this custom link to (NetBox 4.0+)
+
+
+ .. raw:: html
+
+ </div>
+
+ * - .. raw:: html
+
+ <div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-data/weight"></div>
.. raw:: latex
@@ -816,24 +861,24 @@ Examples
- name: "Test NetBox custom_link module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a custom link on device
netbox.netbox.netbox_custom_link:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- content_type: "dcim.device"
+ content_type: "dcim.device"
name: Custom Link
link_text: "Open Web Management"
- link_url: !unsafe https://{{ obj.name }}.domain.local
+ link_url: !unsafe https://{{ obj.name }}.domain.local
- name: Delete the custom link
netbox.netbox.netbox_custom_link:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- content_type: "dcim.device"
+ content_type: "dcim.device"
name: Custom Link
link_text: "Open Web Management"
link_url: !unsafe https://{{ obj.name }}.domain.local
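
The custom link hunk above likewise adds object_types for NetBox 4.0+, alongside the older singular content_type still shown in the existing examples. A hedged sketch of the 4.0+ form, reusing the URL, token, and link fields from those examples:

    - name: Create a custom link on devices (NetBox 4.0+ object_types form)
      netbox.netbox.netbox_custom_link:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          object_types: ["dcim.device"]
          name: Custom Link
          link_text: "Open Web Management"
          link_url: !unsafe https://{{ obj.name }}.domain.local
        state: present
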
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_module.rst
index 64ca07256..e5b946fc7 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device_bay module -- Create, update or delete device bays w
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -630,7 +630,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device bay within NetBox with only required information
@@ -664,7 +664,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_template_module.rst
index cbd59637f..549897ea3 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_bay_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device_bay_template module -- Create, update or delete devi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -459,7 +459,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device bay template within NetBox with only required information
@@ -483,7 +483,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_module.rst
index a1b1126d5..228a3d84d 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device_interface module -- Creates or removes interfaces on
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1577,7 +1577,7 @@ Examples
- name: "Test NetBox interface module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create interface within NetBox with only required information
netbox.netbox.netbox_device_interface:
@@ -1656,7 +1656,7 @@ Examples
device: test100
name: GigabitEthernet2/0/1
enabled: false
- update_vc_child: True
+ update_vc_child: true
- name: Mark interface as connected without a cable (netbox >= 2.11 required)
netbox.netbox.netbox_device_interface:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_template_module.rst
index 02f883336..88b661de2 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_interface_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device_interface_template module -- Creates or removes inte
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -644,7 +644,7 @@ Examples
- name: "Test NetBox interface template module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create interface template within NetBox with only required information
netbox.netbox.netbox_device_interface_template:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_module.rst
index 55fbde240..3e1943f1c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device module -- Create, update or delete devices within Ne
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1547,7 +1547,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_role_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_role_module.rst
index d2b2c6fae..a40d701ee 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_role_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_role_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device_role module -- Create, update or delete devices role
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -685,7 +685,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device role within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_type_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_type_module.rst
index 4412fac8d..39b807a68 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_device_type_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_device_type_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_device_type module -- Create, update or delete device types
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1065,7 +1065,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device type within NetBox with only required information
@@ -1088,7 +1088,7 @@ Examples
manufacturer: Test Manufacturer
part_number: ws-3750g-v2
u_height: 1.5
- is_full_depth: False
+ is_full_depth: false
subdevice_role: parent
state: present
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_export_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_export_template_module.rst
index 82827bb79..bfa549727 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_export_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_export_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_export_template module -- Creates, updates or deletes expor
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -465,6 +465,51 @@ Parameters
* - .. raw:: html
<div class="ansible-option-indent"></div><div class="ansible-option-cell">
+ <div class="ansibleOptionAnchor" id="parameter-data/object_types"></div>
+
+ .. raw:: latex
+
+ \hspace{0.02\textwidth}\begin{minipage}[t]{0.3\textwidth}
+
+ .. _ansible_collections.netbox.netbox.netbox_export_template_module__parameter-data/object_types:
+
+ .. rst-class:: ansible-option-title
+
+ **object_types**
+
+ .. raw:: html
+
+ <a class="ansibleOptionLink" href="#parameter-data/object_types" title="Permalink to this option"></a>
+
+ .. ansible-option-type-line::
+
+ :ansible-option-type:`list` / :ansible-option-elements:`elements=any`
+
+ :ansible-option-versionadded:`added in netbox.netbox 3.19.0`
+
+
+ .. raw:: html
+
+ </div>
+
+ .. raw:: latex
+
+ \end{minipage}
+
+ - .. raw:: html
+
+ <div class="ansible-option-indent-desc"></div><div class="ansible-option-cell">
+
+ The object type to apply this export template to (NetBox 4.0+)
+
+
+ .. raw:: html
+
+ </div>
+
+ * - .. raw:: html
+
+ <div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-data/template_code"></div>
.. raw:: latex
@@ -721,14 +766,14 @@ Examples
- name: "Test NetBox export_templates module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: "Ensure export template for /etc/hosts entries exists"
netbox.netbox.netbox_export_template:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- content_types: ["dcim.device", "virtualization.virtualmachine"]
+ object_types: ["dcim.device", "virtualization.virtualmachine"]
name: /etc/hosts
description: "Generate entries for /etc/hosts"
as_attachment: true
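
The export template hunks above both add the object_types list (3.19.0) and switch the example from content_types to object_types, matching the NetBox 4.0 rename. A sketch of the same pattern for a second template, assuming template_code accepts an inline Jinja2 string rendered against the standard export-template queryset:

    - name: Ensure a device inventory export template exists (NetBox 4.0+)
      netbox.netbox.netbox_export_template:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          object_types: ["dcim.device"]
          name: device-inventory
          template_code: "{% for device in queryset %}{{ device.name }}\n{% endfor %}"  # assumed inline template
        state: present
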
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_assignment_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_assignment_module.rst
index 5abb70084..1837d0e12 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_assignment_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_assignment_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_fhrp_group_assignment module -- Create, update or delete FH
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -552,7 +552,8 @@ Examples
.. code-block:: yaml+jinja
- - hosts: localhost
+ - name: "Test NetBox modules"
+ hosts: localhost
connection: local
module_defaults:
group/netbox.netbox.netbox:
@@ -580,7 +581,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_module.rst
index 8afcf6262..b7900ccf3 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_fhrp_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_fhrp_group module -- Create, update or delete FHRP groups w
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -694,7 +694,8 @@ Examples
.. code-block:: yaml+jinja
- - hosts: localhost
+ - name: "Test NetBox modules"
+ hosts: localhost
connection: local
module_defaults:
group/netbox.netbox.netbox:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_module.rst
index 0333c2ccb..b0f50c52c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_front_port module -- Create, update or delete front ports w
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -734,7 +734,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create front port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_template_module.rst
index 3fd593dc6..a953e2709 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_front_port_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_front_port_template module -- Create, update or delete fron
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -695,7 +695,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create front port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_interface_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_interface_module.rst
index 2aa24f73a..673714bf6 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_interface_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_interface_module.rst
@@ -4,7 +4,7 @@
:orphan:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -18,7 +18,7 @@ netbox.netbox.netbox_interface
.. Collection note
.. note::
- This plugin was part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This plugin was part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
This module has been removed
in version 0.1.0 of netbox.netbox.
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_module.rst
index 018c4318e..3fc9ec1f1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_inventory_item module -- Creates or removes inventory items
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1129,7 +1129,7 @@ Examples
- name: "Test NetBox inventory_item module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create inventory item within NetBox with only required information
netbox.netbox.netbox_inventory_item:
@@ -1154,7 +1154,7 @@ Examples
description: "New SFP"
inventory_item_role: NIC
state: present
-
+
- name: Create inventory item with parent
netbox.netbox.netbox_inventory_item:
netbox_url: http://netbox.local
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_role_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_role_module.rst
index 933d52e5f..6c8576e43 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_role_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_inventory_item_role_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_inventory_item_role module -- Create, update or delete devi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -629,7 +629,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create inventory item role within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_ip_address_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_ip_address_module.rst
index 7820c28ee..3531d8d99 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_ip_address_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_ip_address_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_ip_address module -- Creates or removes IP addresses from N
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1175,7 +1175,7 @@ Examples
- name: "Test NetBox IP address module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create IP address within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_ipam_role_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_ipam_role_module.rst
index 72c1a7795..82658f8f5 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_ipam_role_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_ipam_role_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_ipam_role module -- Creates or removes ipam roles from NetB
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -635,7 +635,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create ipam role within NetBox with only required information
netbox.netbox.netbox_ipam_role:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_journal_entry_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_journal_entry_module.rst
index b93be3f61..a19ce2807 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_journal_entry_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_journal_entry_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_journal_entry module -- Creates a journal entry
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -587,7 +587,7 @@ Parameters
<div class="ansible-option-cell">
- Use \ :literal:`new`\ for adding a journal entry.
+ Use \ :literal:`new`\ for adding a journal entry.
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_module.rst
index 51aae8bad..1e15e1181 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_l2vpn module -- Create, update or delete L2VPNs within NetB
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -798,7 +798,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create L2VPN within NetBox with only required information
@@ -830,8 +830,8 @@ Examples
import_targets:
- "65000:1"
export_targets:
- - "65000:2"
- tenant: Test Tenant
+ - "65000:2"
+ tenant: Test Tenant
description: Just a test
tags:
- Schnozzberry
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_termination_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_termination_module.rst
index 456635103..43636937d 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_termination_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_l2vpn_termination_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_l2vpn_termination module -- Create, update or delete L2VPNs
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -598,7 +598,8 @@ Examples
.. code-block:: yaml+jinja
- - hosts: localhost
+ - name: "Test NetBox modules"
+ hosts: localhost
connection: local
module_defaults:
group/netbox.netbox.netbox:
@@ -624,7 +625,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_location_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_location_module.rst
index cc792a21c..f40ec5e9b 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_location_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_location_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_location module -- Create, update or delete locations withi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -722,7 +722,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create location within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_manufacturer_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_manufacturer_module.rst
index 338b46357..b13e9e412 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_manufacturer_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_manufacturer_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_manufacturer module -- Create or delete manufacturers withi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -593,7 +593,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create manufacturer within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_module_bay_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_module_bay_module.rst
index 613de7741..b9ed205e4 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_module_bay_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_module_bay_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_module_bay module -- Create, update or delete module bay wi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -388,7 +388,7 @@ Parameters
.. ansible-option-type-line::
- :ansible-option-type:`string` / :ansible-option-required:`required`
+ :ansible-option-type:`string`
.. raw:: html
@@ -669,7 +669,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create module bay within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_module_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_module_module.rst
index e014a8f2f..5cd29e3be 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_module_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_module_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_module module -- Create, update or delete module within Net
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -808,14 +808,14 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create module type within NetBox with only required information
netbox.netbox.netbox_module:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
device: C9300-DEMO
module_bay: Network Module
module_type: C9300-NM-8X
@@ -825,7 +825,7 @@ Examples
netbox.netbox.netbox_module:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
device:
name: C9300-DEMO
site: EUPARIS
@@ -845,7 +845,7 @@ Examples
device: C9300-DEMO
module_bay: Network Module
module_type: C9300-NM-8X
- asset_tag: 00001
+ asset_tag: "00001"
serial: XXXNNNNXXXX
state: absent
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_module_type_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_module_type_module.rst
index b139455b6..987fd0236 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_module_type_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_module_type_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_module_type module -- Create, update or delete module types
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -724,14 +724,14 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create module type within NetBox with only required information
netbox.netbox.netbox_module_type:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
model: ws-test-3750
manufacturer: Test Manufacturer
state: present
@@ -740,10 +740,10 @@ Examples
netbox.netbox.netbox_module_type:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
model: ws-test-3750
manufacturer: Test Manufacturer
- part_number: ws-3750g-v2
+ part_number: ws-3750g-v2
state: present
- name: Delete module type within netbox
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_platform_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_platform_module.rst
index b7280a3ef..d1871e299 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_platform_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_platform_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_platform module -- Create or delete platforms within NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -764,7 +764,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create platform within NetBox with only required information
@@ -774,7 +774,7 @@ Examples
data:
name: Test Platform
state: present
-
+
- name: Create platform within NetBox with a config template
netbox.netbox.netbox_platform:
netbox_url: http://netbox.local
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_feed_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_feed_module.rst
index e75e1191c..69792d7ce 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_feed_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_feed_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_power_feed module -- Create, update or delete power feeds w
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1000,7 +1000,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power feed within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_module.rst
index 4069dde6f..20ff3bdb4 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_power_outlet module -- Create, update or delete power outle
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -295,6 +295,51 @@ Parameters
* - .. raw:: html
<div class="ansible-option-indent"></div><div class="ansible-option-cell">
+ <div class="ansibleOptionAnchor" id="parameter-data/label"></div>
+
+ .. raw:: latex
+
+ \hspace{0.02\textwidth}\begin{minipage}[t]{0.3\textwidth}
+
+ .. _ansible_collections.netbox.netbox.netbox_power_outlet_module__parameter-data/label:
+
+ .. rst-class:: ansible-option-title
+
+ **label**
+
+ .. raw:: html
+
+ <a class="ansibleOptionLink" href="#parameter-data/label" title="Permalink to this option"></a>
+
+ .. ansible-option-type-line::
+
+ :ansible-option-type:`string`
+
+ :ansible-option-versionadded:`added in netbox.netbox 3.19.0`
+
+
+ .. raw:: html
+
+ </div>
+
+ .. raw:: latex
+
+ \end{minipage}
+
+ - .. raw:: html
+
+ <div class="ansible-option-indent-desc"></div><div class="ansible-option-cell">
+
+ The label of the power outlet
+
+
+ .. raw:: html
+
+ </div>
+
+ * - .. raw:: html
+
+ <div class="ansible-option-indent"></div><div class="ansible-option-cell">
<div class="ansibleOptionAnchor" id="parameter-data/name"></div>
.. raw:: latex
@@ -465,6 +510,7 @@ Parameters
- :ansible-option-choices-entry:`"iec-60320-c13"`
- :ansible-option-choices-entry:`"iec-60320-c15"`
- :ansible-option-choices-entry:`"iec-60320-c19"`
+ - :ansible-option-choices-entry:`"iec-60320-c21"`
- :ansible-option-choices-entry:`"iec-60309-p-n-e-4h"`
- :ansible-option-choices-entry:`"iec-60309-p-n-e-6h"`
- :ansible-option-choices-entry:`"iec-60309-p-n-e-9h"`
@@ -738,7 +784,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power port within NetBox with only required information
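
The power outlet hunks above add a label suboption and an iec-60320-c21 type choice (both 3.19.0). A hedged sketch combining the two, assuming the module's existing device suboption (not shown in the hunks above):

    - name: Create a labelled C21 power outlet
      netbox.netbox.netbox_power_outlet:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          device: Test Device   # assumed suboption; not shown in the hunk above
          name: Outlet 1
          label: Rack 1 PSU 1   # new in 3.19.0
          type: iec-60320-c21   # new choice in 3.19.0
        state: present
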
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_template_module.rst
index 8455ed9d8..7a37688ae 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_outlet_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_power_outlet_template module -- Create, update or delete po
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -381,6 +381,7 @@ Parameters
- :ansible-option-choices-entry:`"iec-60320-c13"`
- :ansible-option-choices-entry:`"iec-60320-c15"`
- :ansible-option-choices-entry:`"iec-60320-c19"`
+ - :ansible-option-choices-entry:`"iec-60320-c21"`
- :ansible-option-choices-entry:`"iec-60309-p-n-e-4h"`
- :ansible-option-choices-entry:`"iec-60309-p-n-e-6h"`
- :ansible-option-choices-entry:`"iec-60309-p-n-e-9h"`
@@ -654,7 +655,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power outlet template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_panel_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_panel_module.rst
index 34c656caf..b5aae5968 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_panel_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_panel_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_power_panel module -- Create, update or delete power panels
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -728,7 +728,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power panel within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_module.rst
index 52529d411..a6d4fd88c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_power_port module -- Create, update or delete power ports w
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -820,7 +820,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_template_module.rst
index cf841798f..81d469ba2 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_power_port_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_power_port_template module -- Create, update or delete powe
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -740,7 +740,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_prefix_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_prefix_module.rst
index 41790ddba..d300754a1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_prefix_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_prefix_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_prefix module -- Creates or removes prefixes from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1113,7 +1113,7 @@ Examples
- name: "Test NetBox prefix module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create prefix within NetBox with only required information
@@ -1163,7 +1163,7 @@ Examples
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
- name: Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
@@ -1181,7 +1181,7 @@ Examples
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
- name: Get a new /24 inside 10.157.0.0/19 within NetBox with additional values
netbox.netbox.netbox_prefix:
@@ -1193,7 +1193,7 @@ Examples
vrf: Test VRF
site: Test Site
state: present
- first_available: yes
+ first_available: true
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_module.rst
index ff113b098..cd8ca3de6 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_provider module -- Create, update or delete providers withi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -798,7 +798,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create provider within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_network_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_network_module.rst
index 6e8561392..836c2a9c1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_network_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_provider_network_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_provider_network module -- Create, update or delete provide
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -672,7 +672,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create provider network within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_group_module.rst
index 32cb29754..03776b7c1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_rack_group module -- Create, update or delete racks groups
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -503,7 +503,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rack group within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_module.rst
index 2af844e18..929273ff1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_rack module -- Create, update or delete racks within NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1492,7 +1492,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rack within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_role_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_role_module.rst
index 47113e874..17a4cfb95 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_role_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_rack_role_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_rack_role module -- Create, update or delete racks roles wi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -638,7 +638,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rack role within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_module.rst
index 9af01eeff..b894fc221 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_rear_port module -- Create, update or delete rear ports wit
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -692,7 +692,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rear port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_template_module.rst
index c7d50c12f..b73c13eb5 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_rear_port_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_rear_port_template module -- Create, update or delete rear
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -653,7 +653,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rear port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_region_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_region_module.rst
index 1f1f5b5b7..58ac03c28 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_region_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_region_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_region module -- Creates or removes regions from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -638,7 +638,7 @@ Examples
- name: "Test NetBox region module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create region within NetBox with only required information
netbox.netbox.netbox_region:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_rir_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_rir_module.rst
index adfa932bc..8e7616f84 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_rir_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_rir_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_rir module -- Create, update or delete RIRs within NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -643,7 +643,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create RIR within NetBox with only required information
@@ -660,7 +660,7 @@ Examples
netbox_token: thisIsMyToken
data:
name: Test RIR One
- is_private: True
+ is_private: true
state: present
- name: Delete RIR within netbox
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_route_target_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_route_target_module.rst
index be1d95ae4..8c37378ea 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_route_target_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_route_target_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_route_target module -- Creates or removes route targets fro
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -630,7 +630,7 @@ Examples
- name: "Test route target creation/deletion"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create Route Targets
netbox.netbox.netbox_route_target:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_service_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_service_module.rst
index 7ebcb52eb..8c74bdbc5 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_service_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_service_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_service module -- Creates or removes service from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -839,7 +839,7 @@ Examples
- name: "Create netbox service"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create service
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_service_template_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_service_template_module.rst
index a8eaa5549..5880545e6 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_service_template_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_service_template_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_service_template module -- Create, update or delete service
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -678,7 +678,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create service template within NetBox with only required information
@@ -709,7 +709,7 @@ Examples
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- name: SSH
+ name: SSH
state: absent
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_site_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_site_group_module.rst
index 95b91ae78..7975300f2 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_site_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_site_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_site_group module -- Create, update, or delete site groups
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -635,7 +635,7 @@ Examples
- name: "Test NetBox site group module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create site group within NetBox with only required information
netbox.netbox.netbox_site_group:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_site_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_site_module.rst
index 4b55f9402..5d97e3d2c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_site_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_site_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_site module -- Creates or removes sites from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1218,7 +1218,7 @@ Examples
- name: "Test NetBox site module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create site within NetBox with only required information
netbox.netbox.netbox_site:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_tag_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_tag_module.rst
index 40d60705c..b283e1d28 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_tag_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_tag_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_tag module -- Creates or removes tags from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -545,7 +545,7 @@ Examples
- name: "Test tags creation/deletion"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create tags
netbox.netbox.netbox_tag:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_group_module.rst
index 3ed872aa4..bc29df22b 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_tenant_group module -- Creates or removes tenant groups fro
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -633,7 +633,7 @@ Examples
- name: "Test NetBox tenant group module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create tenant within NetBox with only required information
netbox_tenant_group:
@@ -655,7 +655,6 @@ Examples
-
.. Facts
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_module.rst
index 8a606978d..4f7dbdb58 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_tenant_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_tenant module -- Creates or removes tenants from NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -669,7 +669,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create tenant within NetBox with only required information
netbox_tenant:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_chassis_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_chassis_module.rst
index ae5726200..77b970898 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_chassis_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_chassis_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_virtual_chassis module -- Create, update or delete virtual
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -675,7 +675,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create virtual chassis within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_disk_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_disk_module.rst
index c259b5764..325fa866e 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_disk_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_disk_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_virtual_disk module -- Creates or removes disks from virtua
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -627,10 +627,10 @@ Examples
- name: "Test NetBox virtual disk module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create virtual disk
- netbox_virtual_disk:
+ netbox_virtual_disk:
data:
virtual_machine: test100
name: disk0
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_machine_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_machine_module.rst
index 8c591f537..4ecbc7016 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_machine_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_virtual_machine_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_virtual_machine module -- Create, update or delete virtual\
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1182,7 +1182,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create virtual machine within NetBox with only required information
netbox_virtual_machine:
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_group_module.rst
index cc2c938dc..950b8d6d9 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_vlan_group module -- Create, update or delete vlans groups
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -833,7 +833,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create vlan group within NetBox with only required information - Pre 2.11
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_module.rst
index b96da4814..e76f2722b 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_vlan_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_vlan module -- Create, update or delete vlans within NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -840,7 +840,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create vlan within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_vm_interface_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_vm_interface_module.rst
index af0171388..a12c89916 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_vm_interface_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_vm_interface_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_vm_interface module -- Creates or removes interfaces from v
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -983,7 +983,7 @@ Examples
- name: "Test NetBox interface module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create interface within NetBox with only required information
netbox_vm_interface:
@@ -1022,7 +1022,7 @@ Examples
mtu: 1600
mode: Tagged
state: present
-
+
- name: Create bridge interface within NetBox
netbox_vm_interface:
netbox_url: http://netbox.local
@@ -1031,7 +1031,7 @@ Examples
virtual_machine: test100
name: br1000
state: present
-
+
- name: Connect bridge interface within NetBox
netbox_vm_interface:
netbox_url: http://netbox.local
@@ -1039,7 +1039,7 @@ Examples
data:
virtual_machine: test100
name: br1001
- vm_bridge: br1000
+ vm_bridge: br1000
state: present
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_vrf_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_vrf_module.rst
index 8107990ea..0d0fa27c1 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_vrf_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_vrf_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_vrf module -- Create, update or delete vrfs within NetBox
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -812,7 +812,7 @@ Examples
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create vrf within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_webhook_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_webhook_module.rst
index fa20b238e..c0f24966d 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_webhook_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_webhook_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_webhook module -- Creates, updates or deletes webhook confi
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -1048,7 +1048,7 @@ Examples
- name: "Test NetBox webhook module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a webhook
netbox_webhook:
@@ -1056,9 +1056,9 @@ Examples
netbox_token: thisIsMyToken
data:
content_types:
- - dcim.device
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url/
body_template: !unsafe >-
{{ data }}
@@ -1069,11 +1069,11 @@ Examples
netbox_token: thisIsMyToken
data:
name: Example Webhook
- type_create: yes
- type_delete: yes
+ type_create: true
+ type_delete: true
payload_url: https://payload.url/
body_template: !unsafe >-
- {{ data }}
+ {{ data }}
- name: Delete the webhook
netbox_webhook:
@@ -1081,11 +1081,11 @@ Examples
netbox_token: thisIsMyToken
data:
name: Example Webhook
- type_create: yes
- type_delete: yes
+ type_create: true
+ type_delete: true
payload_url: https://payload.url/
body_template: !unsafe >-
- {{ data }}
+ {{ data }}
state: absent
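
The netbox_webhook examples in the hunks above wrap body_template in YAML's !unsafe tag so that Ansible leaves {{ data }} unrendered and NetBox receives the literal Jinja2 template. A minimal sketch of that pattern (illustrative only, not part of the diff; the URL and token are the placeholder values used in the example above):

    - name: Create a webhook that forwards the raw event payload
      netbox.netbox.netbox_webhook:
        netbox_url: http://netbox.local          # placeholder
        netbox_token: thisIsMyToken              # placeholder
        data:
          content_types:
            - dcim.device
          name: Example Webhook
          type_create: true
          payload_url: https://payload.url/
          body_template: !unsafe >-
            {{ data }}
        state: present
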
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_group_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_group_module.rst
index 479fa8a36..b4eaaddee 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_group_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_group_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_wireless_lan_group module -- Creates or removes Wireless LA
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -626,7 +626,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create Wireless LAN Group within NetBox with only required information
netbox_wireless_lan_group:
@@ -650,7 +650,7 @@ Examples
netbox_token: thisIsMyToken
data:
name: Wireless LAN Group One
- description: Wireless LAN Group description
+ description: Wireless LAN Group description
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_module.rst
index 285a54ea6..c75251d8c 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_lan_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_wireless_lan module -- Creates or removes Wireless LANs fro
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -858,7 +858,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create Wireless LAN within NetBox with only required information
netbox_wireless_lan:
@@ -881,11 +881,11 @@ Examples
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- ssid: Wireless Network One
+ ssid: Wireless Network One
description: Cool Wireless Network
auth_type: wpa-enterprise
auth_cipher: aes
- auth_psk: psk123456
+ auth_psk: psk123456
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_link_module.rst b/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_link_module.rst
index adf0384ce..b4cbe76f4 100644
--- a/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_link_module.rst
+++ b/ansible_collections/netbox/netbox/docs/plugins/netbox_wireless_link_module.rst
@@ -7,7 +7,7 @@
:trim:
.. meta::
- :antsibull-docs: 2.7.0
+ :antsibull-docs: 2.11.0
.. Anchors
@@ -23,7 +23,7 @@ netbox.netbox.netbox_wireless_link module -- Creates or removes Wireless links f
.. Collection note
.. note::
- This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.18.0).
+ This module is part of the `netbox.netbox collection <https://galaxy.ansible.com/ui/repo/published/netbox/netbox/>`_ (version 3.19.1).
It is not included in ``ansible-core``.
To check whether it is installed, run :code:`ansible-galaxy collection list`.
@@ -867,7 +867,7 @@ Examples
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create wireless link within NetBox with only required information
netbox_wireless_link:
@@ -906,11 +906,11 @@ Examples
interface_b:
device: Device Two
name: wireless_link_0
- ssid: Wireless Network One
+ ssid: Wireless Network One
description: Cool Wireless Network
auth_type: wpa-enterprise
auth_cipher: aes
- auth_psk: psk123456
+ auth_psk: psk123456
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/meta/runtime.yml b/ansible_collections/netbox/netbox/meta/runtime.yml
index 5c69ef1db..ec581926c 100644
--- a/ansible_collections/netbox/netbox/meta/runtime.yml
+++ b/ansible_collections/netbox/netbox/meta/runtime.yml
@@ -1,11 +1,11 @@
---
-requires_ansible: ">=2.9.10"
+requires_ansible: ">=2.15.0"
plugin_routing:
modules:
netbox_interface:
tombstone:
removal_version: "0.1.0"
- warning_text: "netbox_interface has been superseceded by netbox.netbox.netbox_device_interface"
+ warning_text: netbox_interface has been superseceded by netbox.netbox.netbox_device_interface
redirect: netbox.netbox.plugins.modules.netbox_device_interface
action_groups:
diff --git a/ansible_collections/netbox/netbox/plugins/inventory/nb_inventory.py b/ansible_collections/netbox/netbox/plugins/inventory/nb_inventory.py
index 77c501939..0aac76a8d 100644
--- a/ansible_collections/netbox/netbox/plugins/inventory/nb_inventory.py
+++ b/ansible_collections/netbox/netbox/plugins/inventory/nb_inventory.py
@@ -23,30 +23,30 @@ DOCUMENTATION = """
options:
plugin:
description: token that ensures this is a source file for the 'netbox' plugin.
- required: True
+ required: true
choices: ['netbox.netbox.nb_inventory']
api_endpoint:
description: Endpoint of the NetBox API
- required: True
+ required: true
env:
- name: NETBOX_API
validate_certs:
description:
- Allows connection when SSL certificates are not valid. Set to C(false) when certificates are not trusted.
- default: True
+ default: true
type: boolean
cert:
description:
- Certificate path
- default: False
+ default: false
key:
description:
- Certificate key path
- default: False
+ default: false
ca_path:
description:
- CA path
- default: False
+ default: false
follow_redirects:
description:
- Determine how redirects are followed.
@@ -58,31 +58,31 @@ DOCUMENTATION = """
- If True, it adds config_context in host vars.
- Config-context enables the association of arbitrary data to devices and virtual machines grouped by
region, site, role, platform, and/or tenant. Please check official netbox docs for more info.
- default: False
+ default: false
type: boolean
flatten_config_context:
description:
- If I(config_context) is enabled, by default it's added as a host var named config_context.
- If flatten_config_context is set to True, the config context variables will be added directly to the host instead.
- default: False
+ default: false
type: boolean
version_added: "0.2.1"
flatten_local_context_data:
description:
- If I(local_context_data) is enabled, by default it's added as a host var named local_context_data.
- If flatten_local_context_data is set to True, the config context variables will be added directly to the host instead.
- default: False
+ default: false
type: boolean
version_added: "0.3.0"
flatten_custom_fields:
description:
- By default, host custom fields are added as a dictionary host var named custom_fields.
- If flatten_custom_fields is set to True, the fields will be added directly to the host instead.
- default: False
+ default: false
type: boolean
version_added: "0.2.1"
token:
- required: False
+ required: false
description:
- NetBox API token to be able to read against NetBox.
- This may not be required depending on the NetBox setup.
@@ -97,48 +97,48 @@ DOCUMENTATION = """
- If True, all host vars are contained inside single-element arrays for legacy compatibility with old versions of this plugin.
- Group names will be plural (ie. "sites_mysite" instead of "site_mysite")
- The choices of I(group_by) will be changed by this option.
- default: True
+ default: true
type: boolean
version_added: "0.2.1"
virtual_disks:
description:
- If True, it adds the virtual disks information in host vars.
- default: False
+ default: false
type: boolean
version_added: "3.18.0"
interfaces:
description:
- If True, it adds the device or virtual machine interface information in host vars.
- default: False
+ default: false
type: boolean
version_added: "0.1.7"
site_data:
description:
- If True, sites' full data structures returned from Netbox API are included in host vars.
- default: False
+ default: false
type: boolean
version_added: "3.5.0"
prefixes:
description:
- If True, it adds the device or virtual machine prefixes to hostvars nested under "site".
- Must match selection for "site_data", as this changes the structure of "site" in hostvars
- default: False
+ default: false
type: boolean
version_added: "3.5.0"
services:
description:
- If True, it adds the device or virtual machine services information in host vars.
- default: True
+ default: true
type: boolean
version_added: "0.2.0"
fetch_all:
description:
- By default, fetching interfaces and services will get all of the contents of NetBox regardless of query_filters applied to devices and VMs.
- When set to False, separate requests will be made fetching interfaces, services, and IP addresses for each device_id and virtual_machine_id.
- - If you are using the various query_filters options to reduce the number of devices, you may find querying NetBox faster with fetch_all set to False.
+ - If you are using the various query_filters options to reduce the number of devices, you may find querying NetBox faster with fetch_all set to False. # noqa: E501
- For efficiency, when False, these requests will be batched, for example /api/dcim/interfaces?limit=0&device_id=1&device_id=2&device_id=3
- - These GET request URIs can become quite large for a large number of devices. If you run into HTTP 414 errors, you can adjust the max_uri_length option to suit your web server.
- default: True
+ - These GET request URIs can become quite large for a large number of devices. If you run into HTTP 414 errors, you can adjust the max_uri_length option to suit your web server. # noqa: E501
+ default: true
type: boolean
version_added: "0.2.1"
group_by:
@@ -182,7 +182,7 @@ DOCUMENTATION = """
default: []
group_names_raw:
description: Will not add the group_by choice name to the group names
- default: False
+ default: false
type: boolean
version_added: "0.2.0"
query_filters:
@@ -222,18 +222,18 @@ DOCUMENTATION = """
- When a device is part of a virtual chassis, use the virtual chassis name as the Ansible inventory hostname.
- The host var values will be from the virtual chassis master.
type: boolean
- default: False
+ default: false
dns_name:
description:
- Force IP Addresses to be fetched so that the dns_name for the primary_ip of each device or VM is set as a host_var.
- Setting interfaces will also fetch IP addresses and the dns_name host_var will be set.
type: boolean
- default: False
+ default: false
ansible_host_dns_name:
description:
- If True, sets DNS Name (fetched from primary_ip) to be used in ansible_host variable, instead of IP Address.
type: boolean
- default: False
+ default: false
compose:
description: List of custom ansible host vars to create from the device object fetched from NetBox
default: {}
@@ -243,12 +243,12 @@ DOCUMENTATION = """
- If False, skip querying the racks for information, which can be slow with great amounts of racks.
- The choices of I(group_by) will be changed by this option.
type: boolean
- default: True
+ default: true
version_added: "3.6.0"
oob_ip_as_primary_ip:
description: Use out of band IP as `ansible host`
type: boolean
- default: False
+ default: false
"""
EXAMPLES = """
@@ -257,8 +257,8 @@ EXAMPLES = """
plugin: netbox.netbox.nb_inventory
api_endpoint: http://localhost:8000
-validate_certs: True
-config_context: False
+validate_certs: true
+config_context: false
group_by:
- device_roles
query_filters:
@@ -330,8 +330,8 @@ env:
plugin: netbox.netbox.nb_inventory
api_endpoint: http://localhost:8000
token: <insert token>
-validate_certs: True
-config_context: True
+validate_certs: true
+config_context: true
group_by:
- site
- role
@@ -447,9 +447,8 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
# Prevent inventory from failing completely if the token does not have the proper permissions for specific URLs
if e.code == 403:
self.display.display(
- "Permission denied: {0}. This may impair functionality of the inventory plugin.".format(
- url
- ),
+ "Permission denied: {0}. This may impair functionality of the"
+ " inventory plugin.".format(url),
color="red",
)
# Need to return mock response data that is empty to prevent any failures downstream
@@ -516,14 +515,14 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
if self.api_version in specifiers.SpecifierSet("~=2.6.0"):
# Issue netbox-community/netbox#3507 was fixed in v2.7.5
# If using NetBox v2.7.0-v2.7.4 will have to manually set max_uri_length to 0,
- # but it's probably faster to keep fetch_all: True
+ # but it's probably faster to keep fetch_all: true
# (You should really just upgrade your NetBox install)
chunk_size = 1
resources = []
for i in range(0, len(query_values), chunk_size):
- chunk = query_values[i : i + chunk_size]
+ chunk = query_values[i : i + chunk_size] # noqa: E203
# process chunk of size <= chunk_size
url = api_url
for value in chunk:
@@ -550,6 +549,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
"cluster": self.extract_cluster,
"cluster_group": self.extract_cluster_group,
"cluster_type": self.extract_cluster_type,
+ "cluster_device": self.extract_cluster_device,
"is_virtual": self.extract_is_virtual,
"serial": self.extract_serial,
"asset_tag": self.extract_asset_tag,
@@ -991,6 +991,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
except Exception:
return
+ def extract_cluster_device(self, host):
+ return host.get("device")
+
def extract_is_virtual(self, host):
return host.get("is_virtual")
@@ -1787,12 +1790,9 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
if grouping not in self.group_extractors:
raise AnsibleError(
- (
- 'group_by option "%s" is not valid.'
- " Check group_by documentation or check the plurals option, as well as the racks options."
- " It can determine what group_by options are valid."
- )
- % grouping
+ 'group_by option "%s" is not valid. Check group_by documentation or'
+ " check the plurals option, as well as the racks options. It can"
+ " determine what group_by options are valid." % grouping
)
groups_for_host = self.group_extractors[grouping](host)
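
The nb_inventory hunks above normalize the documented option defaults to lowercase booleans and register a new cluster_device extractor; because group_by validation checks self.group_extractors, that name also becomes an accepted group_by choice. A minimal inventory source sketch using options documented in these hunks (illustrative only; the endpoint and token are placeholders taken from the plugin's own examples):

    # netbox_inventory.yml -- illustrative sketch, not part of the diff
    plugin: netbox.netbox.nb_inventory
    api_endpoint: http://localhost:8000        # placeholder
    token: <insert token>                      # placeholder
    validate_certs: true
    config_context: false
    fetch_all: true
    group_by:
      - site
      - device_roles
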
diff --git a/ansible_collections/netbox/netbox/plugins/lookup/nb_lookup.py b/ansible_collections/netbox/netbox/plugins/lookup/nb_lookup.py
index b70cb22aa..99de7323a 100644
--- a/ansible_collections/netbox/netbox/plugins/lookup/nb_lookup.py
+++ b/ansible_collections/netbox/netbox/plugins/lookup/nb_lookup.py
@@ -20,12 +20,12 @@ DOCUMENTATION = """
short_description: Queries and returns elements from NetBox
description:
- Queries NetBox via its API to return virtually any information
- capable of being held in NetBox.
+ capable of being held in NetBox.
options:
_terms:
description:
- The NetBox object type to query
- required: True
+ required: true
api_endpoint:
description:
- The URL to the NetBox instance to query
@@ -33,15 +33,15 @@ DOCUMENTATION = """
# in order of precendence
- name: NETBOX_API
- name: NETBOX_URL
- required: True
+ required: true
api_filter:
description:
- The api_filter to use. Filters should be key value pairs separated by a space.
- required: False
+ required: false
plugin:
description:
- The NetBox plugin to query
- required: False
+ required: false
token:
description:
- The API token created through NetBox
@@ -50,27 +50,27 @@ DOCUMENTATION = """
# in order of precendence
- name: NETBOX_TOKEN
- name: NETBOX_API_TOKEN
- required: False
+ required: false
validate_certs:
description:
- Whether or not to validate SSL of the NetBox instance
- required: False
- default: True
+ required: false
+ default: true
private_key:
description:
- (DEPRECATED) - NetBox 2.11 and earlier only
- The private key as a string. Mutually exclusive with I(key_file).
- required: False
+ required: false
key_file:
description:
- (DEPRECATED) - NetBox 2.11 and earlier only
- The location of the private key tied to user account. Mutually exclusive with I(private_key).
- required: False
+ required: false
raw_data:
type: bool
description:
- Whether to return raw API data with the lookup/query or whether to return a key/value dict
- required: False
+ required: false
requirements:
- pynetbox
"""
@@ -87,10 +87,7 @@ tasks:
api_endpoint='http://localhost/',
token='<redacted>') }}"
-# This example uses an API Filter
-
-tasks:
- # query a list of devices
+ # This example uses an API Filter
- name: Obtain list of devices from NetBox
debug:
msg: >
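
The nb_lookup DOCUMENTATION hunks above mark _terms and api_endpoint as required and api_filter, plugin, and token as optional. A task-level sketch of that usage (illustrative only; the endpoint, token, and filter values are placeholders consistent with the examples in this file):

    - name: Obtain the list of devices with a given role from NetBox
      ansible.builtin.debug:
        msg: >
          {{ query('netbox.netbox.nb_lookup', 'devices',
                   api_endpoint='http://localhost/',
                   token='<redacted>',
                   api_filter='role=leaf') }}
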
diff --git a/ansible_collections/netbox/netbox/plugins/module_utils/netbox_utils.py b/ansible_collections/netbox/netbox/plugins/module_utils/netbox_utils.py
index 8e9215ef8..345786884 100644
--- a/ansible_collections/netbox/netbox/plugins/module_utils/netbox_utils.py
+++ b/ansible_collections/netbox/netbox/plugins/module_utils/netbox_utils.py
@@ -492,13 +492,13 @@ ALLOWED_QUERY_PARAMS = {
"lag": set(["name"]),
"location": set(["name", "slug", "site"]),
"module": set(["device", "module_bay", "module_type"]),
- "module_bay": set(["name"]),
+ "module_bay": set(["device", "name"]),
"module_type": set(["model"]),
"manufacturer": set(["slug"]),
"master": set(["name"]),
"nat_inside": set(["vrf", "address"]),
"parent_contact_group": set(["name"]),
- "parent_location": set(["slug"]),
+ "parent_location": set(["slug", "name", "site"]),
"parent_interface": set(["name"]),
"parent_inventory_item": set(["name", "device"]),
"parent_vm_interface": set(["name"]),
@@ -1105,7 +1105,9 @@ class NetboxModule(object):
user_query_params if user_query_params else query_params
)
self._handle_errors(
- f"One or more of the kwargs provided are invalid for {parent}, provided kwargs: {', '.join(sorted(provided_kwargs))}. Acceptable kwargs: {', '.join(sorted(acceptable_query_params))}"
+ f"One or more of the kwargs provided are invalid for {parent}, provided"
+ f" kwargs: {', '.join(sorted(provided_kwargs))}. Acceptable kwargs:"
+ f" {', '.join(sorted(acceptable_query_params))}"
)
query_dict = self._convert_identical_keys(query_dict)
@@ -1119,7 +1121,12 @@ class NetboxModule(object):
endpoint_choices = nb_endpoint.choices()
except ValueError:
self._handle_errors(
- msg="Failed to fetch endpoint choices to validate against. This requires a write-enabled token. Make sure the token is write-enabled. If looking to fetch only information, use either the inventory or lookup plugin."
+ msg=(
+ "Failed to fetch endpoint choices to validate against. This"
+ " requires a write-enabled token. Make sure the token is"
+ " write-enabled. If looking to fetch only information, use either"
+ " the inventory or lookup plugin."
+ )
)
choices = list(chain.from_iterable(endpoint_choices.values()))
@@ -1470,11 +1477,14 @@ class NetboxModule(object):
data_after[key] = updated_obj[key]
except KeyError:
if key == "form_factor":
- msg = "form_factor is not valid for NetBox 2.7 onward. Please use the type key instead."
+ msg = (
+ "form_factor is not valid for NetBox 2.7 onward. Please use"
+ " the type key instead."
+ )
else:
msg = (
- "%s does not exist on existing object. Check to make sure valid field."
- % (key)
+ "%s does not exist on existing object. Check to make sure"
+ " valid field." % (key)
)
self._handle_errors(msg=msg)
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_aggregate.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_aggregate.py
index ecd742455..19ffee32c 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_aggregate.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_aggregate.py
@@ -79,7 +79,7 @@ EXAMPLES = r"""
- name: "Test NetBox aggregate module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create aggregate within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_asn.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_asn.py
index bb526449a..b709934e6 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_asn.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_asn.py
@@ -67,7 +67,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create ASN within NetBox with only required information
@@ -87,7 +87,6 @@ EXAMPLES = r"""
data:
asn: 1111111111
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_cable.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_cable.py
index 133da73b4..8fb2ec4ae 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_cable.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_cable.py
@@ -167,7 +167,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cable within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit.py
index e2e506d1f..ed86f736a 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit.py
@@ -92,7 +92,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create circuit within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_termination.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_termination.py
index f5494440e..4a05d62cf 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_termination.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_termination.py
@@ -44,7 +44,7 @@ options:
required: true
type: str
mark_connected:
- description:
+ description:
- Treat as if cable is connected
required: false
type: bool
@@ -90,7 +90,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create circuit termination within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_type.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_type.py
index 5b0958afd..b4cb336db 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_type.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_circuit_type.py
@@ -72,7 +72,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create type within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster.py
index 7c2951d5d..81677fd6e 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster.py
@@ -83,7 +83,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cluster within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_group.py
index 5d3d1f3d3..fa2f38de7 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_group.py
@@ -60,7 +60,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cluster group within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_type.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_type.py
index f2d6b4f3a..8f19ad886 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_type.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_cluster_type.py
@@ -65,7 +65,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create cluster type within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_config_context.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_config_context.py
index cb3da325f..0f96dab00 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_config_context.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_config_context.py
@@ -133,7 +133,7 @@ EXAMPLES = r"""
- name: "Test NetBox config_context module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create config context and apply it to sites euc1-az1, euc1-az2 with the default weight of 1000
netbox.netbox.netbox_config_context:
@@ -143,7 +143,7 @@ EXAMPLES = r"""
name: "dns_nameservers-quadnine"
description: "9.9.9.9"
data: "{ \"dns\": { \"nameservers\": [ \"9.9.9.9\" ] } }"
- sites: [ euc1-az1, euc1-az2 ]
+ sites: [euc1-az1, euc1-az2]
- name: Detach config context from euc1-az1, euc1-az2 and attach to euc1-az3
netbox.netbox.netbox_config_context:
@@ -152,7 +152,7 @@ EXAMPLES = r"""
data:
name: "dns_nameservers-quadnine"
data: "{ \"dns\": { \"nameservers\": [ \"9.9.9.9\" ] } }"
- sites: [ euc1-az3 ]
+ sites: [euc1-az3]
- name: Delete config context
netbox.netbox.netbox_config_context:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_config_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_config_template.py
index 719c14abf..a39b46d02 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_config_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_config_template.py
@@ -50,7 +50,7 @@ options:
- Any additional parameters to pass when constructing the Jinja2 environment
required: false
type: dict
- template_code:
+ template_code:
description:
- The template code to be rendered.
required: false
@@ -61,7 +61,7 @@ EXAMPLES = r"""
- name: "Test config template creation/deletion"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create config template
netbox.netbox.netbox_config_template:
@@ -73,7 +73,7 @@ EXAMPLES = r"""
- Cloud
template_code: |
#cloud-config
- packages:
+ packages:
- ansible
- name: Delete config template
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port.py
index f995ecd13..f308d1152 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port.py
@@ -115,7 +115,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port_template.py
index 5bc80c162..5e544c37a 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_port_template.py
@@ -66,7 +66,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port.py
index 3ca3c1d07..84059569a 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port.py
@@ -109,7 +109,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console server port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port_template.py
index d624d139b..4ecc2085a 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_console_server_port_template.py
@@ -66,7 +66,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create console server port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_contact.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_contact.py
index 074cc0267..a89cd59c1 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_contact.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_contact.py
@@ -75,7 +75,7 @@ options:
- URL associated with the contact
required: false
type: str
- version_added: "3.7.0"
+ version_added: "3.7.0"
tags:
description:
- Any tags that the contact may need to be associated with
@@ -94,7 +94,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create contact within NetBox with only required information
netbox.netbox.netbox_contact:
@@ -117,10 +117,10 @@ EXAMPLES = r"""
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- name: contact ABC
+ name: contact ABC
title: Mr Contact
phone: 123456789
- email: contact@contact.com
+ email: contact@contact.com
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_group.py
index 419b746f0..85f04b792 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_group.py
@@ -67,7 +67,7 @@ EXAMPLES = r"""
- name: "Test NetBox contact group module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create contact group within NetBox with only required information
netbox.netbox.netbox_contact_group:
@@ -85,7 +85,6 @@ EXAMPLES = r"""
data:
name: Contact Group ABC
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_role.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_role.py
index ef3666cf5..ae11962cd 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_role.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_contact_role.py
@@ -63,7 +63,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create contact role within NetBox with only required information
netbox.netbox.netbox_contact_role:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field.py
index ed9c8ed0a..725ac5fe8 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field.py
@@ -13,7 +13,7 @@ module: netbox_custom_field
short_description: Creates, updates or deletes custom fields within NetBox
description:
- Creates, updates or removes custom fields from NetBox
-notes:
+notes:
- This should be run with connection C(local) and hosts C(localhost)
author:
- Martin Rødvand (@rodvand)
@@ -34,11 +34,18 @@ options:
required: false
type: list
elements: raw
- type:
- description:
+ object_types:
+ description:
+ - The content type(s) to apply this custom field to (NetBox 4.0+)
+ required: false
+ type: list
+ elements: raw
+ version_added: "3.19.0"
+ type:
+ description:
- The type of custom field
required: false
- choices:
+ choices:
- text
- longtext
- integer
@@ -53,8 +60,8 @@ options:
- object
- multiobject
type: str
- object_type:
- description:
+ object_type:
+ description:
- The object type of the custom field (if any)
required: false
type: str
@@ -104,18 +111,18 @@ options:
description:
- The group to associate the custom field with
required: false
- type: str
+ type: str
version_added: "3.10.0"
ui_visibility:
description:
- The UI visibility of the custom field
required: false
- choices:
+ choices:
- read-write
- read-only
- hidden
- hidden-ifunset
- type: str
+ type: str
version_added: "3.10.0"
validation_minimum:
description:
@@ -131,10 +138,10 @@ options:
description:
- The regular expression to enforce on text fields
required: false
- type: str
+ type: str
choice_set:
description:
- - The name of the choice set to use (for selection fields)
+ - The name of the choice set to use (for selection fields)
required: false
type: str
required: true
@@ -143,7 +150,7 @@ options:
EXAMPLES = r"""
- name: "Test NetBox custom_fields module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a custom field on device and virtual machine
netbox.netbox.netbox_custom_field:
@@ -174,7 +181,7 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
name: A Custom Field
- required: yes
+ required: true
- name: Update the custom field to make it read only
netbox.netbox.netbox_custom_field:
@@ -182,7 +189,7 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
name: A Custom Field
- ui_visibility: read-only
+ ui_visibility: read-only
- name: Delete the custom field
netbox.netbox.netbox_custom_field:
@@ -227,6 +234,7 @@ def main():
required=True,
options=dict(
content_types=dict(required=False, type="list", elements="raw"),
+ object_types=dict(required=False, type="list", elements="raw"),
type=dict(
required=False,
choices=[
@@ -279,7 +287,7 @@ def main():
)
required_if = [
- ("state", "present", ["content_types", "name"]),
+ ("state", "present", ["name"]),
("state", "absent", ["name"]),
]
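For reference, a minimal playbook sketch of the object_types parameter added above for NetBox 4.0+; the URL, token, field name, and content-type values mirror the illustrative values already used in this module's examples, and content_types remains the equivalent option on earlier NetBox releases:

- name: "Create a custom field via object_types (NetBox 4.0+)"
  connection: local
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Create a text custom field on devices and virtual machines
      netbox.netbox.netbox_custom_field:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: A Custom Field
          type: text
          object_types:
            - dcim.device
            - virtualization.virtualmachine
        state: present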
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field_choice_set.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field_choice_set.py
index 89badcbd3..fdeeb77c5 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field_choice_set.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_field_choice_set.py
@@ -13,7 +13,7 @@ module: netbox_custom_field_choice_set
short_description: Creates, updates or deletes custom field choice sets within Netbox
description:
- Creates, updates or removes custom fields choice sets from Netbox
-notes:
+notes:
- This should be run with connection C(local) and hosts C(localhost)
author:
- Philipp Rintz (@p-rintz)
@@ -40,11 +40,11 @@ options:
type: str
extra_choices:
description:
- - List of available choices in the choice set
+ - List of available choices in the choice set
required: false
default: []
type: list
- elements: list
+ elements: list
base_choices:
description:
- Selection of base choice to use in the choice set
@@ -53,7 +53,7 @@ options:
choices:
- IATA
- ISO_3166
- - UN_LOCODE
+ - UN_LOCODE
order_alphabetically:
description:
- Order the choices alphabetically
@@ -65,7 +65,7 @@ options:
EXAMPLES = r"""
- name: "Test Netbox custom_field_choice_set module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a choice set with choices
netbox.netbox.netbox_custom_field_choice_set:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_link.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_link.py
index 382b9e9bb..6d11f9440 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_link.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_custom_link.py
@@ -13,7 +13,7 @@ module: netbox_custom_link
short_description: Creates, updates or deletes custom links within NetBox
description:
- Creates, updates or removes custom links from NetBox
-notes:
+notes:
- This should be run with connection C(local) and hosts C(localhost)
- Use the C(!unsafe) data type if you want jinja2 code in link_text or link_url
author:
@@ -33,16 +33,23 @@ options:
description:
- The content type to apply this custom link to
required: false
- type: raw
+ type: raw
content_types:
description:
- The content type to apply this custom link to (NetBox 3.4+)
required: false
type: list
- elements: raw
+ elements: raw
version_added: "3.10.0"
- name:
- description:
+ object_types:
+ description:
+ - The object type(s) to apply this custom link to (NetBox 4.0+)
+ required: false
+ type: list
+ elements: raw
+ version_added: "3.19.0"
+ name:
+ description:
- The name of the custom link
required: true
type: str
@@ -55,57 +62,57 @@ options:
description:
- Link URL of the custom link
required: true
- type: raw
+ type: raw
weight:
description:
- Fields with higher weights appear lower in a form
required: false
- type: int
+ type: int
group_name:
description:
- The group to associate the custom link with
required: false
- type: str
+ type: str
button_class:
description:
- - Button class for the custom link
+ - Button class for the custom link
required: false
type: raw
new_window:
description:
- - Open link in new window
+ - Open link in new window
required: false
- type: bool
+ type: bool
enabled:
description:
- - Enable/disable custom link
+ - Enable/disable custom link
required: false
type: bool
- version_added: "3.7.0"
+ version_added: "3.7.0"
required: true
"""
EXAMPLES = r"""
- name: "Test NetBox custom_link module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a custom link on device
netbox.netbox.netbox_custom_link:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- content_type: "dcim.device"
+ content_type: "dcim.device"
name: Custom Link
link_text: "Open Web Management"
- link_url: !unsafe https://{{ obj.name }}.domain.local
+ link_url: !unsafe https://{{ obj.name }}.domain.local
- name: Delete the custom link
netbox.netbox.netbox_custom_link:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- content_type: "dcim.device"
+ content_type: "dcim.device"
name: Custom Link
link_text: "Open Web Management"
link_url: !unsafe https://{{ obj.name }}.domain.local
@@ -147,6 +154,7 @@ def main():
options=dict(
content_type=dict(required=False, type="raw"),
content_types=dict(required=False, type="list", elements="raw"),
+ object_types=dict(required=False, type="list", elements="raw"),
name=dict(required=True, type="str"),
link_text=dict(required=True, type="raw"),
link_url=dict(required=True, type="raw"),
@@ -165,7 +173,7 @@ def main():
("state", "absent", ["name"]),
]
- required_one_of = [("content_type", "content_types")]
+ required_one_of = [("content_type", "content_types", "object_types")]
module = NetboxAnsibleModule(
argument_spec=argument_spec,
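Similarly, a hedged sketch of passing the new object_types option to netbox_custom_link on NetBox 4.0+; the values are the same placeholders used in the module's own examples, and !unsafe is kept for the Jinja2 link_url as the notes above recommend:

- name: "Create a custom link via object_types (NetBox 4.0+)"
  connection: local
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Create a custom link on devices
      netbox.netbox.netbox_custom_link:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          object_types: ["dcim.device"]
          name: Custom Link
          link_text: "Open Web Management"
          link_url: !unsafe https://{{ obj.name }}.domain.local
        state: present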
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device.py
index 30c8d003c..59d7e2cc7 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device.py
@@ -100,13 +100,13 @@ options:
description:
- Airflow of the device
choices:
- - front-to-rear
- - rear-to-front
- - left-to-right
- - right-to-left
+ - front-to-rear
+ - rear-to-front
+ - left-to-right
+ - right-to-left
- side-to-rear
- passive
- - mixed
+ - mixed
required: false
type: str
version_added: "3.10.0"
@@ -192,7 +192,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay.py
index 8af28db77..74bc327a4 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay.py
@@ -68,7 +68,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device bay within NetBox with only required information
@@ -98,7 +98,6 @@ EXAMPLES = r"""
data:
name: Device Bay One
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay_template.py
index 04dab428c..ca2561724 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_bay_template.py
@@ -47,7 +47,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device bay template within NetBox with only required information
@@ -67,7 +67,6 @@ EXAMPLES = r"""
name: device bay template One
device_type: Device Type One
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface.py
index 49b08250e..63980f922 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface.py
@@ -75,7 +75,7 @@ options:
- Bridge the interface will connected to
required: false
type: raw
- version_added: "3.6.0"
+ version_added: "3.6.0"
mtu:
description:
- The MTU of the interface
@@ -183,7 +183,7 @@ options:
type: dict
update_vc_child:
type: bool
- default: False
+ default: false
description:
- |
Use when master device is specified for C(device) and the specified interface exists on a child device
@@ -194,7 +194,7 @@ EXAMPLES = r"""
- name: "Test NetBox interface module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create interface within NetBox with only required information
netbox.netbox.netbox_device_interface:
@@ -273,7 +273,7 @@ EXAMPLES = r"""
device: test100
name: GigabitEthernet2/0/1
enabled: false
- update_vc_child: True
+ update_vc_child: true
- name: Mark interface as connected without a cable (netbox >= 2.11 required)
netbox.netbox.netbox_device_interface:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface_template.py
index d2157d276..8fd749c8b 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_interface_template.py
@@ -72,7 +72,7 @@ EXAMPLES = r"""
- name: "Test NetBox interface template module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create interface template within NetBox with only required information
netbox.netbox.netbox_device_interface_template:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_role.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_role.py
index cd7c3209c..91bf1a0f4 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_role.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_role.py
@@ -74,7 +74,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device role within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_type.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_type.py
index 118fbfe77..65c82e6ef 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_device_type.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_device_type.py
@@ -81,13 +81,13 @@ options:
description:
- Airflow of the device
choices:
- - front-to-rear
- - rear-to-front
- - left-to-right
- - right-to-left
+ - front-to-rear
+ - rear-to-front
+ - left-to-right
+ - right-to-left
- side-to-rear
- passive
- - mixed
+ - mixed
required: false
type: str
version_added: "3.10.0"
@@ -113,7 +113,7 @@ options:
required: false
type: str
default_platform:
- description:
+ description:
- Set the default platform used by the device
required: false
type: raw
@@ -137,7 +137,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create device type within NetBox with only required information
@@ -160,7 +160,7 @@ EXAMPLES = r"""
manufacturer: Test Manufacturer
part_number: ws-3750g-v2
u_height: 1.5
- is_full_depth: False
+ is_full_depth: false
subdevice_role: parent
state: present
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_export_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_export_template.py
index bd3056db4..3e51fc259 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_export_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_export_template.py
@@ -13,7 +13,7 @@ module: netbox_export_template
short_description: Creates, updates or deletes export templates within NetBox
description:
- Creates, updates or removes export templates from NetBox
-notes:
+notes:
- This should be run with connection C(local) and hosts C(localhost)
- Use the C(!unsafe) data type if you want jinja2 code in template_code
author:
@@ -39,10 +39,17 @@ options:
- The content type to apply this export template to (NetBox 3.4+)
required: false
type: list
- elements: raw
- version_added: "3.10.0"
- name:
- description:
+ elements: raw
+ version_added: "3.10.0"
+ object_types:
+ description:
+ - The object type to apply this export template to (NetBox 4.0+)
+ required: false
+ type: list
+ elements: raw
+ version_added: "3.19.0"
+ name:
+ description:
- The name of the export template
required: true
type: str
@@ -55,7 +62,7 @@ options:
description:
- Template code of the export template
required: true
- type: raw
+ type: raw
mime_type:
description:
- MIME type of the export template
@@ -65,26 +72,26 @@ options:
description:
- The file extension of the export template
required: false
- type: str
+ type: str
as_attachment:
description:
- - Download file as attachment
+ - Download file as attachment
required: false
- type: bool
+ type: bool
required: true
"""
EXAMPLES = r"""
- name: "Test NetBox export_templates module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: "Ensure export template for /etc/hosts entries exists"
netbox.netbox.netbox_export_template:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- content_types: ["dcim.device", "virtualization.virtualmachine"]
+ object_types: ["dcim.device", "virtualization.virtualmachine"]
name: /etc/hosts
description: "Generate entries for /etc/hosts"
as_attachment: true
@@ -144,6 +151,7 @@ def main():
options=dict(
content_type=dict(required=False, type="raw"),
content_types=dict(required=False, type="list", elements="raw"),
+ object_types=dict(required=False, type="list", elements="raw"),
name=dict(required=True, type="str"),
description=dict(required=False, type="str"),
template_code=dict(required=True, type="raw"),
@@ -160,7 +168,7 @@ def main():
("state", "absent", ["name"]),
]
- required_one_of = [["content_type", "content_types"]]
+ required_one_of = [["content_type", "content_types", "object_types"]]
module = NetboxAnsibleModule(
argument_spec=argument_spec,
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group.py
index 97471497d..c142924be 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group.py
@@ -36,7 +36,7 @@ options:
protocol:
description:
- Protocol
- required: False
+ required: false
type: str
choices:
- vrrp2
@@ -82,7 +82,8 @@ options:
"""
EXAMPLES = r"""
-- hosts: localhost
+- name: "Test NetBox modules"
+ hosts: localhost
connection: local
module_defaults:
group/netbox.netbox.netbox:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group_assignment.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group_assignment.py
index c9952d2b3..13ac2f6a8 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group_assignment.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_fhrp_group_assignment.py
@@ -35,12 +35,12 @@ options:
fhrp_group:
description:
- FHRP Group ID
- required: True
+ required: true
type: int
interface_type:
description:
- Interface type
- required: True
+ required: true
choices:
- dcim.interface
- virtualization.vminterface
@@ -49,16 +49,17 @@ options:
description:
- Interface ID
type: int
- required: True
+ required: true
priority:
description:
- Priority (0 .. 255)
type: int
- required: True
+ required: true
"""
EXAMPLES = r"""
-- hosts: localhost
+- name: "Test NetBox modules"
+ hosts: localhost
connection: local
module_defaults:
group/netbox.netbox.netbox:
@@ -82,7 +83,6 @@ EXAMPLES = r"""
interface_type: dcim.interface
interface_id: 5
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port.py
index 1f8a2b581..afd9ca1b2 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port.py
@@ -94,7 +94,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create front port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port_template.py
index 94e2181fe..6d1969200 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_front_port_template.py
@@ -89,7 +89,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create front port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item.py
index 7be84b32a..69900ffab 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item.py
@@ -43,8 +43,8 @@ options:
description:
- The parent inventory item the inventory item will be associated with
required: false
- type: raw
- version_added: "3.5.0"
+ type: raw
+ version_added: "3.5.0"
label:
description:
- The physical label of the inventory item
@@ -106,12 +106,12 @@ options:
description:
- The name of the component
type: str
- required: False
+ required: false
device:
description:
- The device the component is attached to.
type: str
- required: False
+ required: false
version_added: "3.15.0"
tags:
description:
@@ -138,7 +138,7 @@ EXAMPLES = r"""
- name: "Test NetBox inventory_item module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create inventory item within NetBox with only required information
netbox.netbox.netbox_inventory_item:
@@ -163,7 +163,7 @@ EXAMPLES = r"""
description: "New SFP"
inventory_item_role: NIC
state: present
-
+
- name: Create inventory item with parent
netbox.netbox.netbox_inventory_item:
netbox_url: http://netbox.local
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item_role.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item_role.py
index 62faa8749..0fdee8340 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item_role.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_inventory_item_role.py
@@ -48,18 +48,18 @@ options:
- The slugified version of the name or custom slug.
- This is auto-generated following NetBox rules if not provided
required: false
- type: str
+ type: str
tags:
description:
- The tags to add/update
required: false
type: list
- elements: raw
+ elements: raw
custom_fields:
description:
- Must exist in NetBox
required: false
- type: dict
+ type: dict
required: true
type: dict
"""
@@ -68,7 +68,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create inventory item role within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_ip_address.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_ip_address.py
index ceb6a0202..961bab7b7 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_ip_address.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_ip_address.py
@@ -120,17 +120,17 @@ options:
description:
- The name of the interface
type: str
- required: False
+ required: false
device:
description:
- The device the interface is attached to.
type: str
- required: False
+ required: false
virtual_machine:
description:
- The virtual machine the interface is attached to.
type: str
- required: False
+ required: false
comments:
description:
- Comments that may include additional information in regards to the IP Address
@@ -165,7 +165,7 @@ EXAMPLES = r"""
- name: "Test NetBox IP address module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create IP address within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_ipam_role.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_ipam_role.py
index 9a5e00609..6863b94bd 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_ipam_role.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_ipam_role.py
@@ -70,7 +70,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create ipam role within NetBox with only required information
netbox.netbox.netbox_ipam_role:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_journal_entry.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_journal_entry.py
index 87624a19b..238292674 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_journal_entry.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_journal_entry.py
@@ -70,7 +70,7 @@ options:
state:
description:
- |
- Use C(new) for adding a journal entry.
+ Use C(new) for adding a journal entry.
choices: [new]
default: new
type: str
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn.py
index 8002660f9..97f2e9b77 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn.py
@@ -33,7 +33,7 @@ options:
description:
- The name of the L2VPN
required: true
- type: str
+ type: str
type:
description:
- The type of L2VPN
@@ -43,7 +43,7 @@ options:
description:
- The identifier of the L2VPN
required: false
- type: int
+ type: int
import_targets:
description:
- Route targets to import
@@ -55,7 +55,7 @@ options:
- Route targets to export
required: false
type: list
- elements: raw
+ elements: raw
description:
description:
- The description of the L2VPN
@@ -90,7 +90,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create L2VPN within NetBox with only required information
@@ -122,8 +122,8 @@ EXAMPLES = r"""
import_targets:
- "65000:1"
export_targets:
- - "65000:2"
- tenant: Test Tenant
+ - "65000:2"
+ tenant: Test Tenant
description: Just a test
tags:
- Schnozzberry
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn_termination.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn_termination.py
index 965a38785..de2e14bfb 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn_termination.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_l2vpn_termination.py
@@ -67,7 +67,8 @@ options:
"""
EXAMPLES = r"""
-- hosts: localhost
+- name: "Test NetBox modules"
+ hosts: localhost
connection: local
module_defaults:
group/netbox.netbox.netbox:
@@ -89,7 +90,6 @@ EXAMPLES = r"""
assigned_object_type: dcim.interface
assigned_object_id: 32
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_location.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_location.py
index 4c124ae13..880cc4020 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_location.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_location.py
@@ -81,7 +81,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create location within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_manufacturer.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_manufacturer.py
index 3a6df1273..04e47b109 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_manufacturer.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_manufacturer.py
@@ -65,7 +65,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create manufacturer within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_module.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_module.py
index 3fb75a031..c9f1e9e97 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_module.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_module.py
@@ -31,7 +31,7 @@ options:
device:
description:
- The device of the module
- required: True
+ required: true
type: raw
module_bay:
description:
@@ -42,7 +42,7 @@ options:
description:
- The module type of the module
required: true
- type: raw
+ type: raw
status:
description:
- The status of the module
@@ -65,7 +65,7 @@ options:
description:
- The description of the module
required: false
- type: str
+ type: str
asset_tag:
description:
- The asset tag of the module
@@ -95,14 +95,14 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create module type within NetBox with only required information
netbox.netbox.netbox_module:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
device: C9300-DEMO
module_bay: Network Module
module_type: C9300-NM-8X
@@ -112,7 +112,7 @@ EXAMPLES = r"""
netbox.netbox.netbox_module:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
device:
name: C9300-DEMO
site: EUPARIS
@@ -132,7 +132,7 @@ EXAMPLES = r"""
device: C9300-DEMO
module_bay: Network Module
module_type: C9300-NM-8X
- asset_tag: 00001
+ asset_tag: "00001"
serial: XXXNNNNXXXX
state: absent
"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_module_bay.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_module_bay.py
index df8021a7f..56806e4fd 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_module_bay.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_module_bay.py
@@ -46,7 +46,7 @@ options:
position:
description:
- The position of the module bay
- required: true
+ required: false
type: str
description:
description:
@@ -72,7 +72,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create module bay within NetBox with only required information
@@ -130,7 +130,7 @@ def main():
device=dict(required=True, type="raw"),
name=dict(required=True, type="raw"),
label=dict(required=False, type="str"),
- position=dict(required=True, type="str"),
+ position=dict(required=False, type="str"),
description=dict(required=False, type="str"),
tags=dict(required=False, type="list", elements="raw"),
custom_fields=dict(required=False, type="dict"),
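Because position is now optional, a minimal sketch of creating a module bay without it; the device and bay names are illustrative only, borrowed from the module examples earlier in this changeset:

- name: "Create a module bay without a position"
  connection: local
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Create module bay with only required information
      netbox.netbox.netbox_module_bay:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          device: C9300-DEMO
          name: Network Module
        state: present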
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_module_type.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_module_type.py
index cff26593a..ee3e3f7ee 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_module_type.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_module_type.py
@@ -37,17 +37,17 @@ options:
description:
- The model of the module type
required: true
- type: raw
+ type: raw
part_number:
description:
- The part number of the module type
required: false
- type: str
+ type: str
weight:
description:
- The weight of the device type
required: false
- type: float
+ type: float
weight_unit:
description:
- The weight unit
@@ -58,7 +58,7 @@ options:
- oz
required: false
type: str
- version_added: "3.10.0"
+ version_added: "3.10.0"
comments:
description:
- Comments that may include additional information in regards to the module type
@@ -83,14 +83,14 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create module type within NetBox with only required information
netbox.netbox.netbox_module_type:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
model: ws-test-3750
manufacturer: Test Manufacturer
state: present
@@ -99,10 +99,10 @@ EXAMPLES = r"""
netbox.netbox.netbox_module_type:
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
- data:
+ data:
model: ws-test-3750
manufacturer: Test Manufacturer
- part_number: ws-3750g-v2
+ part_number: ws-3750g-v2
state: present
- name: Delete module type within netbox
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_platform.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_platform.py
index 5b73ff52f..61e37d6b6 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_platform.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_platform.py
@@ -86,7 +86,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create platform within NetBox with only required information
@@ -96,7 +96,7 @@ EXAMPLES = r"""
data:
name: Test Platform
state: present
-
+
- name: Create platform within NetBox with a config template
netbox.netbox.netbox_platform:
netbox_url: http://netbox.local
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_feed.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_feed.py
index 2070028c8..3ab35ad47 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_feed.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_feed.py
@@ -123,7 +123,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power feed within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet.py
index 8943a01f6..e5e183636 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet.py
@@ -41,6 +41,12 @@ options:
- The name of the power outlet
required: true
type: str
+ label:
+ description:
+ - The label of the power outlet
+ required: false
+ type: str
+ version_added: '3.19.0'
type:
description:
- The type of the power outlet
@@ -50,6 +56,7 @@ options:
- iec-60320-c13
- iec-60320-c15
- iec-60320-c19
+ - iec-60320-c21
- iec-60309-p-n-e-4h
- iec-60309-p-n-e-6h
- iec-60309-p-n-e-9h
@@ -132,7 +139,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power port within NetBox with only required information
@@ -202,6 +209,7 @@ def main():
options=dict(
device=dict(required=True, type="raw"),
name=dict(required=True, type="str"),
+ label=dict(required=False, type="str"),
type=dict(
required=False,
choices=[
@@ -210,6 +218,7 @@ def main():
"iec-60320-c13",
"iec-60320-c15",
"iec-60320-c19",
+ "iec-60320-c21",
"iec-60309-p-n-e-4h",
"iec-60309-p-n-e-6h",
"iec-60309-p-n-e-9h",
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet_template.py
index 4f973e3b7..055d359a7 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_outlet_template.py
@@ -50,6 +50,7 @@ options:
- iec-60320-c13
- iec-60320-c15
- iec-60320-c19
+ - iec-60320-c21
- iec-60309-p-n-e-4h
- iec-60309-p-n-e-6h
- iec-60309-p-n-e-9h
@@ -121,7 +122,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power outlet template within NetBox with only required information
@@ -198,6 +199,7 @@ def main():
"iec-60320-c13",
"iec-60320-c15",
"iec-60320-c19",
+ "iec-60320-c21",
"iec-60309-p-n-e-4h",
"iec-60309-p-n-e-6h",
"iec-60309-p-n-e-9h",
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_panel.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_panel.py
index e2e0bd8d0..cc574b3fb 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_panel.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_panel.py
@@ -84,7 +84,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power panel within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port.py
index 579242a67..dd0f128ae 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port.py
@@ -180,7 +180,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port_template.py
index ee32a3cc3..7683021cc 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_power_port_template.py
@@ -169,7 +169,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create power port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_prefix.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_prefix.py
index 0807351a1..bfb9bba48 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_prefix.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_prefix.py
@@ -131,7 +131,7 @@ EXAMPLES = r"""
- name: "Test NetBox prefix module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create prefix within NetBox with only required information
@@ -181,7 +181,7 @@ EXAMPLES = r"""
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
- name: Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
@@ -199,7 +199,7 @@ EXAMPLES = r"""
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
- name: Get a new /24 inside 10.157.0.0/19 within NetBox with additional values
netbox.netbox.netbox_prefix:
@@ -211,7 +211,7 @@ EXAMPLES = r"""
vrf: Test VRF
site: Test Site
state: present
- first_available: yes
+ first_available: true
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_provider.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_provider.py
index 0fbd1e217..367c23ae6 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_provider.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_provider.py
@@ -89,7 +89,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create provider within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_provider_network.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_provider_network.py
index 5346b461a..730187212 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_provider_network.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_provider_network.py
@@ -74,7 +74,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create provider network within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_rack.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_rack.py
index 843568955..b89ee42c0 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_rack.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_rack.py
@@ -188,7 +188,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rack within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_group.py
index b608673a4..2755a337a 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_group.py
@@ -52,7 +52,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rack group within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_role.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_role.py
index 8cbd8431e..1b77ce853 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_role.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_rack_role.py
@@ -71,7 +71,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rack role within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port.py
index 226e0ac3a..c54aa3d5c 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port.py
@@ -89,7 +89,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rear port within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port_template.py
index c8ceb9cd1..87e91b50b 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_rear_port_template.py
@@ -84,7 +84,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create rear port template within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_region.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_region.py
index f67f2e4f1..897363b5a 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_region.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_region.py
@@ -71,7 +71,7 @@ EXAMPLES = r"""
- name: "Test NetBox region module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create region within NetBox with only required information
netbox.netbox.netbox_region:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_rir.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_rir.py
index e7d1de316..377941565 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_rir.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_rir.py
@@ -70,7 +70,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create RIR within NetBox with only required information
@@ -87,7 +87,7 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
name: Test RIR One
- is_private: True
+ is_private: true
state: present
- name: Delete RIR within netbox
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_route_target.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_route_target.py
index 5fc3c92b2..a6b24b28c 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_route_target.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_route_target.py
@@ -68,7 +68,7 @@ EXAMPLES = r"""
- name: "Test route target creation/deletion"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create Route Targets
netbox.netbox.netbox_route_target:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_service.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_service.py
index ba90b1c50..972127127 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_service.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_service.py
@@ -92,7 +92,7 @@ EXAMPLES = r"""
- name: "Create netbox service"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create service
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_service_template.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_service_template.py
index ae87d07b8..9230199a1 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_service_template.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_service_template.py
@@ -47,19 +47,19 @@ options:
choices:
- tcp
- udp
- - sctp
+ - sctp
required: false
- type: str
+ type: str
description:
description:
- Description of the service template
required: false
- type: str
+ type: str
comments:
description:
- Comments
required: false
- type: str
+ type: str
tags:
description:
- Any tags that the service template may need to be associated with
@@ -77,7 +77,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create service template within NetBox with only required information
@@ -108,7 +108,7 @@ EXAMPLES = r"""
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- name: SSH
+ name: SSH
state: absent
"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_site.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_site.py
index 32f05d007..135db711c 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_site.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_site.py
@@ -139,7 +139,7 @@ EXAMPLES = r"""
- name: "Test NetBox site module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create site within NetBox with only required information
netbox.netbox.netbox_site:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_site_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_site_group.py
index 19d368f36..066f25b02 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_site_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_site_group.py
@@ -70,7 +70,7 @@ EXAMPLES = r"""
- name: "Test NetBox site group module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create site group within NetBox with only required information
netbox.netbox.netbox_site_group:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_tag.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_tag.py
index acf3b8562..d429e16a9 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_tag.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_tag.py
@@ -57,7 +57,7 @@ EXAMPLES = r"""
- name: "Test tags creation/deletion"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create tags
netbox.netbox.netbox_tag:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant.py
index 6a5b40f1d..a1bbc548d 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant.py
@@ -72,7 +72,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create tenant within NetBox with only required information
netbox_tenant:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant_group.py
index 8eb1f1a10..3a2f7c741 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_tenant_group.py
@@ -69,7 +69,7 @@ EXAMPLES = r"""
- name: "Test NetBox tenant group module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create tenant within NetBox with only required information
netbox_tenant_group:
@@ -87,7 +87,6 @@ EXAMPLES = r"""
data:
name: Tenant ABC
state: absent
-
"""
RETURN = r"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_chassis.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_chassis.py
index 18fbca2ee..fb54cba53 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_chassis.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_chassis.py
@@ -75,7 +75,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create virtual chassis within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_disk.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_disk.py
index fc330c640..41310bb05 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_disk.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_disk.py
@@ -37,7 +37,7 @@ options:
description:
- Name of the disk to be created
required: true
- type: str
+ type: str
description:
description:
- The description of the disk
@@ -47,7 +47,7 @@ options:
description:
- The size (in GB) of the disk
required: false
- type: int
+ type: int
tags:
description:
- Any tags that the virtual disk may need to be associated with
@@ -58,7 +58,7 @@ options:
description:
- Must exist in NetBox
required: false
- type: dict
+ type: dict
required: true
type: dict
"""
@@ -67,10 +67,10 @@ EXAMPLES = r"""
- name: "Test NetBox virtual disk module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create virtual disk
- netbox_virtual_disk:
+ netbox_virtual_disk:
data:
virtual_machine: test100
name: disk0
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_machine.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_machine.py
index ec5010274..44db31d51 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_machine.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_virtual_machine.py
@@ -135,7 +135,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create virtual machine within NetBox with only required information
netbox_virtual_machine:
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan.py
index 254278463..ca8ecca78 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan.py
@@ -93,7 +93,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create vlan within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan_group.py
index ac5967c70..07d60d1d6 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_vlan_group.py
@@ -104,7 +104,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create vlan group within NetBox with only required information - Pre 2.11
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_vm_interface.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_vm_interface.py
index 686b0f6d4..d4bf739f2 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_vm_interface.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_vm_interface.py
@@ -111,7 +111,7 @@ EXAMPLES = r"""
- name: "Test NetBox interface module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create interface within NetBox with only required information
netbox_vm_interface:
@@ -150,7 +150,7 @@ EXAMPLES = r"""
mtu: 1600
mode: Tagged
state: present
-
+
- name: Create bridge interface within NetBox
netbox_vm_interface:
netbox_url: http://netbox.local
@@ -159,7 +159,7 @@ EXAMPLES = r"""
virtual_machine: test100
name: br1000
state: present
-
+
- name: Connect bridge interface within NetBox
netbox_vm_interface:
netbox_url: http://netbox.local
@@ -167,7 +167,7 @@ EXAMPLES = r"""
data:
virtual_machine: test100
name: br1001
- vm_bridge: br1000
+ vm_bridge: br1000
state: present
"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_vrf.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_vrf.py
index 64d636e55..433c47b93 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_vrf.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_vrf.py
@@ -92,7 +92,7 @@ EXAMPLES = r"""
- name: "Test NetBox modules"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create vrf within NetBox with only required information
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_webhook.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_webhook.py
index ae9e42ba5..02566fe5c 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_webhook.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_webhook.py
@@ -13,7 +13,7 @@ module: netbox_webhook
short_description: Creates, updates or deletes webhook configuration within NetBox
description:
- Creates, updates or removes webhook configuration within NetBox
-notes:
+notes:
- This should be ran with connection C(local) and hosts C(localhost)
- Use C(!unsafe) when adding jinja2 code to C(additional_headers) or C(body_template)
author:
@@ -35,12 +35,12 @@ options:
- Required when I(state=present)
required: false
type: list
- elements: raw
+ elements: raw
name:
description:
- Name of the webhook
required: true
- type: str
+ type: str
type_create:
description:
- Call this webhook when a matching object is created
@@ -91,29 +91,29 @@ options:
description:
- Secret key to generate X-Hook-Signature to include in the payload.
required: false
- type: str
+ type: str
conditions:
description:
- A set of conditions which determine whether the webhook will be generated.
required: false
- type: dict
+ type: dict
ssl_verification:
description:
- - Enable ssl verification.
+ - Enable ssl verification.
required: false
type: bool
ca_file_path:
description:
- CA certificate file to use for SSL verification
required: false
- type: str
+ type: str
required: true
"""
EXAMPLES = r"""
- name: "Test NetBox webhook module"
connection: local
- hosts: localhost
+ hosts: localhost
tasks:
- name: Create a webhook
netbox_webhook:
@@ -121,9 +121,9 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
content_types:
- - dcim.device
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url/
body_template: !unsafe >-
{{ data }}
@@ -134,11 +134,11 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
name: Example Webhook
- type_create: yes
- type_delete: yes
+ type_create: true
+ type_delete: true
payload_url: https://payload.url/
body_template: !unsafe >-
- {{ data }}
+ {{ data }}
- name: Delete the webhook
netbox_webhook:
@@ -146,11 +146,11 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
name: Example Webhook
- type_create: yes
- type_delete: yes
+ type_create: true
+ type_delete: true
payload_url: https://payload.url/
body_template: !unsafe >-
- {{ data }}
+ {{ data }}
state: absent
"""
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan.py
index 52db2c050..794ad152c 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan.py
@@ -13,7 +13,7 @@ module: netbox_wireless_lan
short_description: Creates or removes Wireless LANs from NetBox
description:
- Creates or removes wireless LANs from NetBox
-notes:
+notes:
- This should be ran with connection C(local) and hosts C(localhost)
author:
- Martin Rødvand (@rodvand)
@@ -76,7 +76,7 @@ options:
description:
- The PSK of the Wireless LAN
required: false
- type: str
+ type: str
tags:
description:
- Any tags that the Wireless LAN may need to be associated with
@@ -87,7 +87,7 @@ options:
description:
- must exist in NetBox
required: false
- type: dict
+ type: dict
comments:
description:
- Comments of the wireless LAN
@@ -101,7 +101,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create Wireless LAN within NetBox with only required information
netbox_wireless_lan:
@@ -124,11 +124,11 @@ EXAMPLES = r"""
netbox_url: http://netbox.local
netbox_token: thisIsMyToken
data:
- ssid: Wireless Network One
+ ssid: Wireless Network One
description: Cool Wireless Network
auth_type: wpa-enterprise
auth_cipher: aes
- auth_psk: psk123456
+ auth_psk: psk123456
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan_group.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan_group.py
index 2779be21d..e15650210 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan_group.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_lan_group.py
@@ -13,7 +13,7 @@ module: netbox_wireless_lan_group
short_description: Creates or removes Wireless LAN Groups from NetBox
description:
- Creates or removes Wireless LAN Groups from NetBox
-notes:
+notes:
- This should be ran with connection C(local) and hosts C(localhost)
author:
- Martin Rødvand (@rodvand)
@@ -47,7 +47,7 @@ options:
description:
- Description of the Wireless LAN Group
required: false
- type: str
+ type: str
tags:
description:
- Any tags that the Wireless LAN Group may need to be associated with
@@ -58,7 +58,7 @@ options:
description:
- must exist in NetBox
required: false
- type: dict
+ type: dict
required: true
"""
@@ -66,7 +66,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create Wireless LAN Group within NetBox with only required information
netbox_wireless_lan_group:
@@ -90,7 +90,7 @@ EXAMPLES = r"""
netbox_token: thisIsMyToken
data:
name: Wireless LAN Group One
- description: Wireless LAN Group description
+ description: Wireless LAN Group description
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_link.py b/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_link.py
index 11b55a630..6f52917b7 100644
--- a/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_link.py
+++ b/ansible_collections/netbox/netbox/plugins/modules/netbox_wireless_link.py
@@ -13,7 +13,7 @@ module: netbox_wireless_link
short_description: Creates or removes Wireless links from NetBox
description:
- Creates or removes wireless links from NetBox
-notes:
+notes:
- This should be ran with connection C(local) and hosts C(localhost)
author:
- Martin Rødvand (@rodvand)
@@ -47,7 +47,7 @@ options:
description:
- Description of the wireless link
required: false
- type: str
+ type: str
status:
description:
- The status of the wireless link
@@ -56,7 +56,7 @@ options:
- planned
- decommissioning
required: false
- type: str
+ type: str
auth_type:
description:
- The authentication type of the wireless link
@@ -80,7 +80,7 @@ options:
description:
- The PSK of the wireless link
required: false
- type: str
+ type: str
comments:
description:
- Comments of the wireless link
@@ -97,7 +97,7 @@ options:
description:
- must exist in NetBox
required: false
- type: dict
+ type: dict
required: true
"""
@@ -105,7 +105,7 @@ EXAMPLES = r"""
- name: "Test NetBox module"
connection: local
hosts: localhost
- gather_facts: False
+ gather_facts: false
tasks:
- name: Create wireless link within NetBox with only required information
netbox_wireless_link:
@@ -144,11 +144,11 @@ EXAMPLES = r"""
interface_b:
device: Device Two
name: wireless_link_0
- ssid: Wireless Network One
+ ssid: Wireless Network One
description: Cool Wireless Network
auth_type: wpa-enterprise
auth_cipher: aes
- auth_psk: psk123456
+ auth_psk: psk123456
tags:
- tagA
- tagB
diff --git a/ansible_collections/netbox/netbox/poetry.lock b/ansible_collections/netbox/netbox/poetry.lock
index e1a9ff24f..a2392f3b8 100644
--- a/ansible_collections/netbox/netbox/poetry.lock
+++ b/ansible_collections/netbox/netbox/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "aiofiles"
@@ -13,87 +13,87 @@ files = [
[[package]]
name = "aiohttp"
-version = "3.9.4"
+version = "3.9.5"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
files = [
- {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:76d32588ef7e4a3f3adff1956a0ba96faabbdee58f2407c122dd45aa6e34f372"},
- {file = "aiohttp-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56181093c10dbc6ceb8a29dfeea1e815e1dfdc020169203d87fd8d37616f73f9"},
- {file = "aiohttp-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7a5b676d3c65e88b3aca41816bf72831898fcd73f0cbb2680e9d88e819d1e4d"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1df528a85fb404899d4207a8d9934cfd6be626e30e5d3a5544a83dbae6d8a7e"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f595db1bceabd71c82e92df212dd9525a8a2c6947d39e3c994c4f27d2fe15b11"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0b09d76e5a4caac3d27752027fbd43dc987b95f3748fad2b924a03fe8632ad"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689eb4356649ec9535b3686200b231876fb4cab4aca54e3bece71d37f50c1d13"},
- {file = "aiohttp-3.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3666cf4182efdb44d73602379a66f5fdfd5da0db5e4520f0ac0dcca644a3497"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b65b0f8747b013570eea2f75726046fa54fa8e0c5db60f3b98dd5d161052004a"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1885d2470955f70dfdd33a02e1749613c5a9c5ab855f6db38e0b9389453dce7"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0593822dcdb9483d41f12041ff7c90d4d1033ec0e880bcfaf102919b715f47f1"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:47f6eb74e1ecb5e19a78f4a4228aa24df7fbab3b62d4a625d3f41194a08bd54f"},
- {file = "aiohttp-3.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c8b04a3dbd54de6ccb7604242fe3ad67f2f3ca558f2d33fe19d4b08d90701a89"},
- {file = "aiohttp-3.9.4-cp310-cp310-win32.whl", hash = "sha256:8a78dfb198a328bfb38e4308ca8167028920fb747ddcf086ce706fbdd23b2926"},
- {file = "aiohttp-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:e78da6b55275987cbc89141a1d8e75f5070e577c482dd48bd9123a76a96f0bbb"},
- {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c111b3c69060d2bafc446917534150fd049e7aedd6cbf21ba526a5a97b4402a5"},
- {file = "aiohttp-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:efbdd51872cf170093998c87ccdf3cb5993add3559341a8e5708bcb311934c94"},
- {file = "aiohttp-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7bfdb41dc6e85d8535b00d73947548a748e9534e8e4fddd2638109ff3fb081df"},
- {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd9d334412961125e9f68d5b73c1d0ab9ea3f74a58a475e6b119f5293eee7ba"},
- {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35d78076736f4a668d57ade00c65d30a8ce28719d8a42471b2a06ccd1a2e3063"},
- {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:824dff4f9f4d0f59d0fa3577932ee9a20e09edec8a2f813e1d6b9f89ced8293f"},
- {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52b8b4e06fc15519019e128abedaeb56412b106ab88b3c452188ca47a25c4093"},
- {file = "aiohttp-3.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eae569fb1e7559d4f3919965617bb39f9e753967fae55ce13454bec2d1c54f09"},
- {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:69b97aa5792428f321f72aeb2f118e56893371f27e0b7d05750bcad06fc42ca1"},
- {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d79aad0ad4b980663316f26d9a492e8fab2af77c69c0f33780a56843ad2f89e"},
- {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6577140cd7db19e430661e4b2653680194ea8c22c994bc65b7a19d8ec834403"},
- {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9860d455847cd98eb67897f5957b7cd69fbcb436dd3f06099230f16a66e66f79"},
- {file = "aiohttp-3.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:69ff36d3f8f5652994e08bd22f093e11cfd0444cea310f92e01b45a4e46b624e"},
- {file = "aiohttp-3.9.4-cp311-cp311-win32.whl", hash = "sha256:e27d3b5ed2c2013bce66ad67ee57cbf614288bda8cdf426c8d8fe548316f1b5f"},
- {file = "aiohttp-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d6a67e26daa686a6fbdb600a9af8619c80a332556245fa8e86c747d226ab1a1e"},
- {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c5ff8ff44825736a4065d8544b43b43ee4c6dd1530f3a08e6c0578a813b0aa35"},
- {file = "aiohttp-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d12a244627eba4e9dc52cbf924edef905ddd6cafc6513849b4876076a6f38b0e"},
- {file = "aiohttp-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dcad56c8d8348e7e468899d2fb3b309b9bc59d94e6db08710555f7436156097f"},
- {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7e69a7fd4b5ce419238388e55abd220336bd32212c673ceabc57ccf3d05b55"},
- {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4870cb049f10d7680c239b55428916d84158798eb8f353e74fa2c98980dcc0b"},
- {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2feaf1b7031ede1bc0880cec4b0776fd347259a723d625357bb4b82f62687b"},
- {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:939393e8c3f0a5bcd33ef7ace67680c318dc2ae406f15e381c0054dd658397de"},
- {file = "aiohttp-3.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d2334e387b2adcc944680bebcf412743f2caf4eeebd550f67249c1c3696be04"},
- {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e0198ea897680e480845ec0ffc5a14e8b694e25b3f104f63676d55bf76a82f1a"},
- {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e40d2cd22914d67c84824045861a5bb0fb46586b15dfe4f046c7495bf08306b2"},
- {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:aba80e77c227f4234aa34a5ff2b6ff30c5d6a827a91d22ff6b999de9175d71bd"},
- {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:fb68dc73bc8ac322d2e392a59a9e396c4f35cb6fdbdd749e139d1d6c985f2527"},
- {file = "aiohttp-3.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f3460a92638dce7e47062cf088d6e7663adb135e936cb117be88d5e6c48c9d53"},
- {file = "aiohttp-3.9.4-cp312-cp312-win32.whl", hash = "sha256:32dc814ddbb254f6170bca198fe307920f6c1308a5492f049f7f63554b88ef36"},
- {file = "aiohttp-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:63f41a909d182d2b78fe3abef557fcc14da50c7852f70ae3be60e83ff64edba5"},
- {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c3770365675f6be220032f6609a8fbad994d6dcf3ef7dbcf295c7ee70884c9af"},
- {file = "aiohttp-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:305edae1dea368ce09bcb858cf5a63a064f3bff4767dec6fa60a0cc0e805a1d3"},
- {file = "aiohttp-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6f121900131d116e4a93b55ab0d12ad72573f967b100e49086e496a9b24523ea"},
- {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b71e614c1ae35c3d62a293b19eface83d5e4d194e3eb2fabb10059d33e6e8cbf"},
- {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419f009fa4cfde4d16a7fc070d64f36d70a8d35a90d71aa27670bba2be4fd039"},
- {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b39476ee69cfe64061fd77a73bf692c40021f8547cda617a3466530ef63f947"},
- {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b33f34c9c7decdb2ab99c74be6443942b730b56d9c5ee48fb7df2c86492f293c"},
- {file = "aiohttp-3.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c78700130ce2dcebb1a8103202ae795be2fa8c9351d0dd22338fe3dac74847d9"},
- {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:268ba22d917655d1259af2d5659072b7dc11b4e1dc2cb9662fdd867d75afc6a4"},
- {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:17e7c051f53a0d2ebf33013a9cbf020bb4e098c4bc5bce6f7b0c962108d97eab"},
- {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7be99f4abb008cb38e144f85f515598f4c2c8932bf11b65add0ff59c9c876d99"},
- {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d58a54d6ff08d2547656356eea8572b224e6f9bbc0cf55fa9966bcaac4ddfb10"},
- {file = "aiohttp-3.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7673a76772bda15d0d10d1aa881b7911d0580c980dbd16e59d7ba1422b2d83cd"},
- {file = "aiohttp-3.9.4-cp38-cp38-win32.whl", hash = "sha256:e4370dda04dc8951012f30e1ce7956a0a226ac0714a7b6c389fb2f43f22a250e"},
- {file = "aiohttp-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb30c4510a691bb87081192a394fb661860e75ca3896c01c6d186febe7c88530"},
- {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:84e90494db7df3be5e056f91412f9fa9e611fbe8ce4aaef70647297f5943b276"},
- {file = "aiohttp-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d4845f8501ab28ebfdbeab980a50a273b415cf69e96e4e674d43d86a464df9d"},
- {file = "aiohttp-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69046cd9a2a17245c4ce3c1f1a4ff8c70c7701ef222fce3d1d8435f09042bba1"},
- {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b73a06bafc8dcc508420db43b4dd5850e41e69de99009d0351c4f3007960019"},
- {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:418bb0038dfafeac923823c2e63226179976c76f981a2aaad0ad5d51f2229bca"},
- {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71a8f241456b6c2668374d5d28398f8e8cdae4cce568aaea54e0f39359cd928d"},
- {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:935c369bf8acc2dc26f6eeb5222768aa7c62917c3554f7215f2ead7386b33748"},
- {file = "aiohttp-3.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4e48c8752d14ecfb36d2ebb3d76d614320570e14de0a3aa7a726ff150a03c"},
- {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:916b0417aeddf2c8c61291238ce25286f391a6acb6f28005dd9ce282bd6311b6"},
- {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9b6787b6d0b3518b2ee4cbeadd24a507756ee703adbac1ab6dc7c4434b8c572a"},
- {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:221204dbda5ef350e8db6287937621cf75e85778b296c9c52260b522231940ed"},
- {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:10afd99b8251022ddf81eaed1d90f5a988e349ee7d779eb429fb07b670751e8c"},
- {file = "aiohttp-3.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2506d9f7a9b91033201be9ffe7d89c6a54150b0578803cce5cb84a943d075bc3"},
- {file = "aiohttp-3.9.4-cp39-cp39-win32.whl", hash = "sha256:e571fdd9efd65e86c6af2f332e0e95dad259bfe6beb5d15b3c3eca3a6eb5d87b"},
- {file = "aiohttp-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:7d29dd5319d20aa3b7749719ac9685fbd926f71ac8c77b2477272725f882072d"},
- {file = "aiohttp-3.9.4.tar.gz", hash = "sha256:6ff71ede6d9a5a58cfb7b6fffc83ab5d4a63138276c771ac91ceaaddf5459644"},
+ {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
+ {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
+ {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
+ {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
+ {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
+ {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
+ {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
+ {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
+ {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
+ {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
+ {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
+ {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
+ {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
+ {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
+ {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
+ {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
+ {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
+ {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
+ {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
+ {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
+ {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
+ {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
+ {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
+ {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
+ {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
+ {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
+ {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
+ {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
+ {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
+ {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
+ {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
+ {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
+ {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
+ {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
+ {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
+ {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
+ {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
+ {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
+ {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
+ {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
+ {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
+ {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
+ {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
+ {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
+ {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
+ {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
+ {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
+ {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
+ {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
+ {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
+ {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
+ {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
+ {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
+ {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
+ {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
+ {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
+ {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
+ {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
+ {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
+ {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
+ {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
+ {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
+ {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
+ {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
+ {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
+ {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
+ {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
+ {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
+ {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
+ {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
+ {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
+ {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
+ {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
+ {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
+ {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
+ {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
]
[package.dependencies]
@@ -122,6 +122,28 @@ files = [
frozenlist = ">=1.1.0"
[[package]]
+name = "alabaster"
+version = "0.7.16"
+description = "A light, configurable Sphinx theme"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"},
+ {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"},
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.6.0"
+description = "Reusable constraint types to use with typing.Annotated"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
+ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
+]
+
+[[package]]
name = "ansible-core"
version = "2.15.9"
description = "Radically simple IT automation"
@@ -134,52 +156,69 @@ files = [
[package.dependencies]
cryptography = "*"
-importlib-resources = {version = ">=5.0,<5.1", markers = "python_version < \"3.10\""}
jinja2 = ">=3.0.0"
packaging = "*"
PyYAML = ">=5.1"
resolvelib = ">=0.5.3,<1.1.0"
[[package]]
+name = "ansible-pygments"
+version = "0.1.1"
+description = "Tools for building the Ansible Distribution"
+optional = false
+python-versions = ">=3.6.0,<4.0.0"
+files = [
+ {file = "ansible-pygments-0.1.1.tar.gz", hash = "sha256:0d0a23cc562f94f4b464f931059ad1688635aac9642962bc68ae3acdb6efbcd0"},
+ {file = "ansible_pygments-0.1.1-py3-none-any.whl", hash = "sha256:85aa7412a46c83efcfe460c8016da289600c171d2edfa0f474e0dc30a398b002"},
+]
+
+[package.dependencies]
+pygments = ">=2.4.0"
+
+[[package]]
name = "antsibull"
-version = "0.58.0"
+version = "0.62.0"
description = "Tools for building the Ansible Distribution"
optional = false
python-versions = ">=3.9"
files = [
- {file = "antsibull-0.58.0-py3-none-any.whl", hash = "sha256:f84c812243c67328b77835e0a22fde1a784edf2e28c5beabee95a51c39160ca3"},
- {file = "antsibull-0.58.0.tar.gz", hash = "sha256:a9c46144dc811cd6ca066bbeab4fea15b226fbed81dad370ef46ae62cc38cdfb"},
+ {file = "antsibull-0.62.0-py3-none-any.whl", hash = "sha256:23f5ccdbc2fb15bc5a60b129b5a9026ae2f3578f50673cd2ea32e80a1c20b891"},
+ {file = "antsibull-0.62.0.tar.gz", hash = "sha256:c1f7b34d5f5266b97c88a73d7c2b14afb21666eefb0d21705c7b0e97b4c06382"},
]
[package.dependencies]
aiofiles = "*"
aiohttp = ">=3.0.0"
-antsibull-changelog = ">=0.14.0"
-antsibull-core = ">=2.0.0,<3.0.0"
+antsibull-changelog = ">=0.24.0"
+antsibull-core = ">=2.0.0,<4.0.0"
asyncio-pool = "*"
build = "*"
jinja2 = "*"
packaging = ">=20.0"
+pydantic = "<3"
semantic-version = "*"
twiggy = "*"
+typing-extensions = "*"
[package.extras]
+all = ["pyperclip"]
+clipboard = ["pyperclip"]
codeqa = ["flake8 (>=3.8.0)", "pylint", "reuse"]
coverage = ["coverage[toml]"]
-dev = ["antsibull[codeqa]", "antsibull[coverage]", "antsibull[test]", "antsibull[typing]", "nox"]
-formatters = ["black", "isort"]
+dev = ["asynctest", "coverage[toml]", "cryptography", "flake8 (>=3.8.0)", "mypy", "nox", "pylint", "pyperclip", "pyre-check (>=0.9.15)", "pytest", "pytest-asyncio (>=0.12)", "pytest-cov", "pytest-error-for-skips", "reuse", "types-aiofiles", "types-docutils", "types-pyyaml", "types-setuptools"]
+formatters = ["black (>=24)", "isort"]
test = ["asynctest", "cryptography", "pytest", "pytest-asyncio (>=0.12)", "pytest-cov", "pytest-error-for-skips"]
typing = ["mypy", "pyre-check (>=0.9.15)", "types-aiofiles", "types-docutils", "types-pyyaml", "types-setuptools"]
[[package]]
name = "antsibull-changelog"
-version = "0.23.0"
-description = "Changelog tool for Ansible-base and Ansible collections"
+version = "0.26.0"
+description = "Changelog tool for Ansible-core and Ansible collections"
optional = false
python-versions = ">=3.9.0"
files = [
- {file = "antsibull_changelog-0.23.0-py3-none-any.whl", hash = "sha256:e9c3425fe6ef8e495aec19705be4af826612f921bdc8dd6dc54d15f70532065f"},
- {file = "antsibull_changelog-0.23.0.tar.gz", hash = "sha256:521985407e1aa2aef1dbfb2b87275cd6f03ea9a96c0c704e9f9cb04141f90e0b"},
+ {file = "antsibull_changelog-0.26.0-py3-none-any.whl", hash = "sha256:0a6aa5327790e89a686872fd452a9c31be288a3cb2deb9cc23eb3fc5ea7f0a19"},
+ {file = "antsibull_changelog-0.26.0.tar.gz", hash = "sha256:8060c438d9fb5a1025a1e98d4de0ce8d897be6e0c21014216151b93f4e4891ff"},
]
[package.dependencies]
@@ -193,20 +232,20 @@ semantic-version = "*"
codeqa = ["flake8 (>=3.8.0)", "pylint", "reuse"]
coverage = ["coverage[toml]"]
dev = ["antsibull-changelog[codeqa]", "antsibull-changelog[coverage]", "antsibull-changelog[formatters]", "antsibull-changelog[test]", "antsibull-changelog[typing]", "nox"]
-formatters = ["black", "isort"]
+formatters = ["black (>=24)", "isort"]
test = ["pytest", "pytest-cov", "pytest-error-for-skips"]
toml = ["tomli"]
typing = ["mypy", "pyre-check (>=0.9.17)", "types-docutils", "types-pyyaml", "types-toml"]
[[package]]
name = "antsibull-core"
-version = "2.1.0"
+version = "3.0.1"
description = "Tools for building the Ansible Distribution"
optional = false
python-versions = ">=3.9"
files = [
- {file = "antsibull_core-2.1.0-py3-none-any.whl", hash = "sha256:acf403ee1e02b303a701c913869bd0be2e2bd4d9212ed380ad05bd5cbddea730"},
- {file = "antsibull_core-2.1.0.tar.gz", hash = "sha256:c16c601802d35419120a7da33f4d562768a5ac3fa9c5ed4ee7d7b68c71d8aeee"},
+ {file = "antsibull_core-3.0.1-py3-none-any.whl", hash = "sha256:653f44b010c85b6bcd37aacde64b35e56adec8f62119d2764aa56add7299c2f8"},
+ {file = "antsibull_core-3.0.1.tar.gz", hash = "sha256:d7fddfb539757849725f8ae88ff049cef360c088a67251de0d1d8cb041243a9c"},
]
[package.dependencies]
@@ -215,29 +254,83 @@ aiohttp = ">=3.0.0"
build = "*"
packaging = ">=20.0"
perky = "*"
-pydantic = ">=1.0.0,<2.0.0"
+pydantic = ">=2.0,<3.0"
pyyaml = "*"
semantic-version = "*"
-sh = ">=1.0.0,<2.0.0"
twiggy = ">=0.5.0"
[package.extras]
codeqa = ["antsibull-changelog", "flake8 (>=6.0.0)", "pylint (>=2.15.7)", "reuse"]
coverage = ["coverage[toml]"]
-dev = ["antsibull-core[codeqa]", "antsibull-core[coverage]", "antsibull-core[formatters]", "antsibull-core[test]", "antsibull-core[typing]", "nox"]
-formatters = ["black", "isort"]
+dev = ["antsibull-changelog", "asynctest", "black (>=24)", "coverage[toml]", "cryptography", "flake8 (>=6.0.0)", "isort", "mypy", "nox", "pylint (>=2.15.7)", "pyre-check (>=0.9.17)", "pytest", "pytest-asyncio (>=0.20)", "pytest-cov", "pytest-error-for-skips", "reuse", "types-aiofiles", "types-pyyaml", "typing-extensions"]
+formatters = ["black (>=24)", "isort"]
test = ["asynctest", "cryptography", "pytest", "pytest-asyncio (>=0.20)", "pytest-cov", "pytest-error-for-skips"]
-typing = ["mypy", "pyre-check (>=0.9.17)", "types-aiofiles", "types-pyyaml"]
+typing = ["mypy", "pyre-check (>=0.9.17)", "types-aiofiles", "types-pyyaml", "typing-extensions"]
+
+[[package]]
+name = "antsibull-docs"
+version = "2.11.0"
+description = "Tools for building Ansible documentation"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "antsibull_docs-2.11.0-py3-none-any.whl", hash = "sha256:110cbe8c22c8728f341e4cd914e3e200a9d37ed04412a7c059447a63e44166df"},
+ {file = "antsibull_docs-2.11.0.tar.gz", hash = "sha256:2eb252e05f33024210ac564435cb3ccd482285d21d42ac48241561db4c2240b3"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.0.0"
+ansible-pygments = "*"
+antsibull-changelog = ">=0.24.0"
+antsibull-core = ">=2.1.0,<4.0.0"
+antsibull-docs-parser = ">=1.0.0,<2.0.0"
+asyncio-pool = "*"
+docutils = "*"
+jinja2 = ">=3.0"
+packaging = ">=20.0"
+pydantic = ">=1.0.0,<3.0.0"
+pyyaml = "*"
+rstcheck = ">=3.0.0,<7.0.0"
+semantic-version = "*"
+sphinx = "*"
+twiggy = "*"
+
+[package.extras]
+codeqa = ["flake8 (>=3.8.0)", "pylint (>=2.17.2)", "reuse"]
+coverage = ["coverage[toml]"]
+dev = ["ansible-core (>=2.14.0)", "asynctest", "black (>=24)", "cryptography", "flake8 (>=3.8.0)", "isort", "mypy", "nox", "pylint (>=2.17.2)", "pyre-check (>=0.9.17)", "pytest", "pytest-asyncio (>=0.12)", "pytest-cov", "pytest-error-for-skips", "reuse", "types-aiofiles", "types-docutils", "types-pyyaml"]
+formatters = ["black (>=24)", "isort"]
+test = ["ansible-core (>=2.14.0)", "asynctest", "cryptography", "pytest", "pytest-asyncio (>=0.12)", "pytest-cov", "pytest-error-for-skips"]
+typing = ["mypy", "pyre-check (>=0.9.17)", "types-aiofiles", "types-docutils", "types-pyyaml"]
+
+[[package]]
+name = "antsibull-docs-parser"
+version = "1.0.1"
+description = "Python library for processing Ansible documentation markup"
+optional = false
+python-versions = ">=3.6.1"
+files = [
+ {file = "antsibull_docs_parser-1.0.1-py3-none-any.whl", hash = "sha256:2c92e60315ee8635537242226cfdb6c43d8036fe6d771f7c52fd037667ff0b4f"},
+ {file = "antsibull_docs_parser-1.0.1.tar.gz", hash = "sha256:38c49a9cc9362c70c3024ea6ad1bd44cad8006eb80086ad2f59bd2ead7b16bb3"},
+]
+
+[package.extras]
+codeqa = ["antsibull-changelog", "flake8", "pylint", "reuse"]
+coverage = ["coverage[toml]"]
+dev = ["antsibull-docs-parser[codeqa]", "antsibull-docs-parser[coverage]", "antsibull-docs-parser[formatters]", "antsibull-docs-parser[test]", "antsibull-docs-parser[typing]", "nox"]
+formatters = ["black", "isort"]
+test = ["pytest", "pytest-cov", "pytest-error-for-skips", "pyyaml"]
+typing = ["mypy", "pyre-check (>=0.9.17)"]
[[package]]
name = "astroid"
-version = "3.0.1"
+version = "3.1.0"
description = "An abstract syntax tree for Python with inference support."
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "astroid-3.0.1-py3-none-any.whl", hash = "sha256:7d5895c9825e18079c5aeac0572bc2e4c83205c95d416e0b4fee8bc361d2d9ca"},
- {file = "astroid-3.0.1.tar.gz", hash = "sha256:86b0bb7d7da0be1a7c4aedb7974e391b32d4ed89e33de6ed6902b4b15c97577e"},
+ {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"},
+ {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"},
]
[package.dependencies]
@@ -267,51 +360,66 @@ files = [
[[package]]
name = "attrs"
-version = "23.1.0"
+version = "23.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
- {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
- {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
+ {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
+ {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
]
[package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[docs,tests]", "pre-commit"]
+dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
+tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+
+[[package]]
+name = "babel"
+version = "2.15.0"
+description = "Internationalization utilities"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"},
+ {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"},
+]
+
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "black"
-version = "24.3.0"
+version = "24.4.2"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
files = [
- {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
- {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
- {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
- {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
- {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
- {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
- {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
- {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
- {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
- {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
- {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
- {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
- {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
- {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
- {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
- {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
- {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
- {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
- {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
- {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
- {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
- {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
+ {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"},
+ {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"},
+ {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"},
+ {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"},
+ {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"},
+ {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"},
+ {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"},
+ {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"},
+ {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"},
+ {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"},
+ {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"},
+ {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"},
+ {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"},
+ {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"},
+ {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"},
+ {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"},
+ {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"},
+ {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"},
+ {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"},
+ {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"},
+ {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"},
+ {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"},
]
[package.dependencies]
@@ -331,37 +439,38 @@ uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "build"
-version = "1.0.3"
+version = "1.2.1"
description = "A simple, correct Python build frontend"
optional = false
-python-versions = ">= 3.7"
+python-versions = ">=3.8"
files = [
- {file = "build-1.0.3-py3-none-any.whl", hash = "sha256:589bf99a67df7c9cf07ec0ac0e5e2ea5d4b37ac63301c4986d1acb126aa83f8f"},
- {file = "build-1.0.3.tar.gz", hash = "sha256:538aab1b64f9828977f84bc63ae570b060a8ed1be419e7870b8b4fc5e6ea553b"},
+ {file = "build-1.2.1-py3-none-any.whl", hash = "sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4"},
+ {file = "build-1.2.1.tar.gz", hash = "sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d"},
]
[package.dependencies]
colorama = {version = "*", markers = "os_name == \"nt\""}
-importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
-packaging = ">=19.0"
+importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""}
+packaging = ">=19.1"
pyproject_hooks = "*"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
[package.extras]
docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"]
-test = ["filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"]
-typing = ["importlib-metadata (>=5.1)", "mypy (>=1.5.0,<1.6.0)", "tomli", "typing-extensions (>=3.7.4.3)"]
+test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"]
+typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"]
+uv = ["uv (>=0.1.18)"]
virtualenv = ["virtualenv (>=20.0.35)"]
[[package]]
name = "certifi"
-version = "2023.7.22"
+version = "2024.2.2"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
- {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
+ {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
+ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
]
[[package]]
@@ -568,20 +677,6 @@ files = [
]
[[package]]
-name = "commonmark"
-version = "0.9.1"
-description = "Python parser for the CommonMark Markdown spec"
-optional = false
-python-versions = "*"
-files = [
- {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"},
- {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"},
-]
-
-[package.extras]
-test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"]
-
-[[package]]
name = "coverage"
version = "6.5.0"
description = "Code coverage measurement for Python"
@@ -645,43 +740,43 @@ toml = ["tomli"]
[[package]]
name = "cryptography"
-version = "42.0.4"
+version = "42.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = ">=3.7"
files = [
- {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449"},
- {file = "cryptography-42.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18"},
- {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2"},
- {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1"},
- {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b"},
- {file = "cryptography-42.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1"},
- {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992"},
- {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885"},
- {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824"},
- {file = "cryptography-42.0.4-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b"},
- {file = "cryptography-42.0.4-cp37-abi3-win32.whl", hash = "sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925"},
- {file = "cryptography-42.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923"},
- {file = "cryptography-42.0.4-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7"},
- {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52"},
- {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a"},
- {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9"},
- {file = "cryptography-42.0.4-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764"},
- {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff"},
- {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257"},
- {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929"},
- {file = "cryptography-42.0.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0"},
- {file = "cryptography-42.0.4-cp39-abi3-win32.whl", hash = "sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129"},
- {file = "cryptography-42.0.4-cp39-abi3-win_amd64.whl", hash = "sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854"},
- {file = "cryptography-42.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298"},
- {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88"},
- {file = "cryptography-42.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20"},
- {file = "cryptography-42.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce"},
- {file = "cryptography-42.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74"},
- {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd"},
- {file = "cryptography-42.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b"},
- {file = "cryptography-42.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660"},
- {file = "cryptography-42.0.4.tar.gz", hash = "sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb"},
+ {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"},
+ {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"},
+ {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"},
+ {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"},
+ {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"},
+ {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"},
+ {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"},
+ {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"},
+ {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"},
+ {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"},
+ {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"},
+ {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"},
+ {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"},
+ {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"},
+ {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"},
+ {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"},
+ {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"},
+ {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"},
+ {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"},
+ {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"},
+ {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"},
+ {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"},
+ {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"},
+ {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"},
+ {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"},
+ {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"},
+ {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"},
+ {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"},
+ {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"},
+ {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"},
+ {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"},
+ {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"},
]
[package.dependencies]
@@ -699,56 +794,57 @@ test-randomorder = ["pytest-randomly"]
[[package]]
name = "deepdiff"
-version = "6.6.1"
+version = "7.0.1"
description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "deepdiff-6.6.1-py3-none-any.whl", hash = "sha256:891b3cb12837e5d376ac0b58f4c8a2764e3a8bbceabb7108ff82235f1f2c4460"},
- {file = "deepdiff-6.6.1.tar.gz", hash = "sha256:75c75b1511f0e48edef2b70d785a9c32b2631666b465fa8c32270a77a7b950b5"},
+ {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"},
+ {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"},
]
[package.dependencies]
-ordered-set = ">=4.0.2,<4.2.0"
+ordered-set = ">=4.1.0,<4.2.0"
[package.extras]
-cli = ["click (==8.1.3)", "pyyaml (==6.0.1)"]
+cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"]
optimize = ["orjson"]
[[package]]
name = "dill"
-version = "0.3.7"
+version = "0.3.8"
description = "serialize all of Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"},
- {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"},
+ {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
+ {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
]
[package.extras]
graph = ["objgraph (>=1.7.2)"]
+profile = ["gprof2dot (>=2022.7.29)"]
[[package]]
name = "docutils"
-version = "0.19"
+version = "0.21.2"
description = "Docutils -- Python Documentation Utilities"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
files = [
- {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"},
- {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"},
+ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"},
+ {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"},
]
[[package]]
name = "exceptiongroup"
-version = "1.1.3"
+version = "1.2.1"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
- {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
- {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+ {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"},
+ {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"},
]
[package.extras]
@@ -756,13 +852,13 @@ test = ["pytest (>=6)"]
[[package]]
name = "execnet"
-version = "2.0.2"
+version = "2.1.1"
description = "execnet: rapid multi-Python deployment"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "execnet-2.0.2-py3-none-any.whl", hash = "sha256:88256416ae766bc9e8895c76a87928c0012183da3cc4fc18016e6f050e025f41"},
- {file = "execnet-2.0.2.tar.gz", hash = "sha256:cc59bc4423742fd71ad227122eb0dd44db51efb3dc4095b45ac9a08c770096af"},
+ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"},
+ {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"},
]
[package.extras]
@@ -770,72 +866,88 @@ testing = ["hatch", "pre-commit", "pytest", "tox"]
[[package]]
name = "frozenlist"
-version = "1.4.0"
+version = "1.4.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
optional = false
python-versions = ">=3.8"
files = [
- {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"},
- {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"},
- {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"},
- {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"},
- {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"},
- {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"},
- {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"},
- {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"},
- {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"},
- {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"},
- {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"},
- {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"},
- {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"},
- {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"},
- {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"},
- {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"},
- {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"},
- {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"},
- {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"},
- {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"},
- {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"},
- {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"},
- {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"},
- {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"},
- {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"},
- {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"},
- {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"},
- {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"},
- {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"},
- {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"},
- {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"},
- {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"},
- {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"},
- {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"},
- {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"},
- {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"},
- {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"},
- {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"},
- {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"},
- {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"},
- {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"},
- {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"},
- {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"},
- {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"},
- {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"},
- {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"},
- {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"},
- {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"},
- {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"},
- {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"},
- {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"},
- {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"},
- {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"},
- {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"},
- {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"},
- {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"},
- {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"},
- {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"},
- {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"},
- {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"},
- {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
+ {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
+ {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
+ {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
+ {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
+ {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
+ {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
+ {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
+ {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
+ {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
+ {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
+ {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
+ {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
+ {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
+ {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
+ {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
+ {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"},
+ {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"},
+ {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"},
+ {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"},
+ {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"},
+ {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"},
+ {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"},
+ {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"},
+ {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"},
+ {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"},
+ {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"},
+ {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"},
+ {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"},
+ {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"},
+ {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
+ {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
]
[[package]]
@@ -850,38 +962,49 @@ files = [
]
[[package]]
+name = "imagesize"
+version = "1.4.1"
+description = "Getting image size from png/jpeg/jpeg2000/gif file"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"},
+ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"},
+]
+
+[[package]]
name = "importlib-metadata"
-version = "6.8.0"
+version = "7.1.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
- {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
+ {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"},
+ {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"},
]
[package.dependencies]
zipp = ">=0.5"
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
[[package]]
name = "importlib-resources"
-version = "5.0.7"
+version = "6.4.0"
description = "Read resources from Python packages"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
files = [
- {file = "importlib_resources-5.0.7-py3-none-any.whl", hash = "sha256:2238159eb743bd85304a16e0536048b3e991c531d1cd51c4a834d1ccf2829057"},
- {file = "importlib_resources-5.0.7.tar.gz", hash = "sha256:4df460394562b4581bb4e4087ad9447bd433148fba44241754ec3152499f1d1b"},
+ {file = "importlib_resources-6.4.0-py3-none-any.whl", hash = "sha256:50d10f043df931902d4194ea07ec57960f66a80449ff867bfe782b4c486ba78c"},
+ {file = "importlib_resources-6.4.0.tar.gz", hash = "sha256:cdb2b453b8046ca4e3798eb1d84f3cce1446a0e8e7b5ef4efb600f19fc398145"},
]
[package.extras]
-docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
-testing = ["pytest (>=3.5,!=3.7.3)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler", "pytest-flake8", "pytest-mypy"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"]
[[package]]
name = "iniconfig"
@@ -896,30 +1019,27 @@ files = [
[[package]]
name = "isort"
-version = "5.12.0"
+version = "5.13.2"
description = "A Python utility / library to sort Python imports."
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"},
- {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"},
+ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
+ {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
]
[package.extras]
-colors = ["colorama (>=0.4.3)"]
-pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"]
-plugins = ["setuptools"]
-requirements-deprecated-finder = ["pip-api", "pipreqs"]
+colors = ["colorama (>=0.4.6)"]
[[package]]
name = "jinja2"
-version = "3.1.2"
+version = "3.1.4"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
- {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
- {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
+ {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
]
[package.dependencies]
@@ -940,72 +1060,96 @@ files = [
]
[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
+[[package]]
name = "markupsafe"
-version = "2.1.3"
+version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
files = [
- {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"},
- {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
- {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
- {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"},
- {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"},
- {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"},
- {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"},
- {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
+ {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
+ {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
+ {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
+ {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
+ {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
+ {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
+ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
[[package]]
@@ -1020,6 +1164,17 @@ files = [
]
[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
name = "mock"
version = "4.0.3"
description = "Rolling backport of unittest.mock for all Pythons"
@@ -1037,85 +1192,101 @@ test = ["pytest (<5.4)", "pytest-cov"]
[[package]]
name = "multidict"
-version = "6.0.4"
+version = "6.0.5"
description = "multidict implementation"
optional = false
python-versions = ">=3.7"
files = [
- {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"},
- {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"},
- {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"},
- {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"},
- {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"},
- {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"},
- {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"},
- {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"},
- {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"},
- {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"},
- {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"},
- {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"},
- {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"},
- {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"},
- {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"},
- {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"},
- {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"},
- {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"},
- {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"},
- {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"},
- {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"},
- {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"},
- {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"},
- {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"},
- {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"},
- {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"},
- {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"},
- {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"},
- {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"},
- {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"},
- {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"},
- {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"},
- {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"},
- {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"},
- {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"},
- {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"},
- {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"},
- {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"},
- {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"},
- {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"},
- {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"},
- {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"},
- {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"},
- {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"},
- {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"},
- {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"},
- {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"},
- {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"},
- {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"},
- {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"},
- {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"},
- {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"},
- {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"},
- {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"},
- {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"},
- {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"},
- {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"},
- {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"},
- {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"},
- {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"},
- {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"},
- {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"},
- {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"},
- {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"},
- {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"},
- {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
+ {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
+ {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
+ {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
+ {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
+ {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
+ {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
+ {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
+ {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
+ {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
+ {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
+ {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
+ {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
+ {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
+ {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
+ {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
+ {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
+ {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
+ {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
+ {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
+ {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
+ {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
+ {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
+ {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
+ {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
+ {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
+ {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
+ {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
+ {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
+ {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
+ {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
+ {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
+ {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
+ {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
+ {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
+ {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
+ {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
]
[[package]]
@@ -1156,13 +1327,13 @@ files = [
[[package]]
name = "pathspec"
-version = "0.11.2"
+version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"},
- {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"},
+ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
+ {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
@@ -1178,28 +1349,29 @@ files = [
[[package]]
name = "platformdirs"
-version = "3.11.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
+version = "4.2.1"
+description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
- {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
+ {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"},
+ {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"},
]
[package.extras]
-docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
+docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+type = ["mypy (>=1.8)"]
[[package]]
name = "pluggy"
-version = "1.3.0"
+version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
- {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
+ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+ {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[package.extras]
@@ -1219,106 +1391,163 @@ files = [
[[package]]
name = "pycparser"
-version = "2.21"
+version = "2.22"
description = "C parser in Python"
optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.8"
files = [
- {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
- {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
[[package]]
name = "pydantic"
-version = "1.10.13"
-description = "Data validation and settings management using python type hints"
+version = "2.7.1"
+description = "Data validation using Python type hints"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"},
- {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"},
- {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"},
- {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"},
- {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"},
- {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"},
- {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"},
- {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"},
- {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"},
- {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"},
- {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"},
- {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"},
- {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"},
- {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"},
- {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"},
- {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"},
- {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"},
- {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"},
- {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"},
- {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"},
- {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"},
- {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"},
- {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"},
- {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"},
- {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"},
- {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"},
- {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"},
- {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"},
- {file = "pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"},
- {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"},
- {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"},
- {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"},
- {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"},
- {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"},
- {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"},
- {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"},
+ {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"},
+ {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"},
]
[package.dependencies]
-typing-extensions = ">=4.2.0"
+annotated-types = ">=0.4.0"
+pydantic-core = "2.18.2"
+typing-extensions = ">=4.6.1"
[package.extras]
-dotenv = ["python-dotenv (>=0.10.4)"]
-email = ["email-validator (>=1.0.3)"]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.18.2"
+description = "Core functionality for Pydantic validation and serialization"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"},
+ {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"},
+ {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"},
+ {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"},
+ {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"},
+ {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"},
+ {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"},
+ {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"},
+ {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"},
+ {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"},
+ {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"},
+ {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"},
+ {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"},
+ {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pygments"
-version = "2.16.1"
+version = "2.18.0"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
- {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
+ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+ {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
[package.extras]
-plugins = ["importlib-metadata"]
+windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pylint"
-version = "3.0.2"
+version = "3.1.1"
description = "python code static checker"
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "pylint-3.0.2-py3-none-any.whl", hash = "sha256:60ed5f3a9ff8b61839ff0348b3624ceeb9e6c2a92c514d81c9cc273da3b6bcda"},
- {file = "pylint-3.0.2.tar.gz", hash = "sha256:0d4c286ef6d2f66c8bfb527a7f8a629009e42c99707dec821a03e1b51a4c1496"},
+ {file = "pylint-3.1.1-py3-none-any.whl", hash = "sha256:862eddf25dab42704c5f06d3688b8bc19ef4c99ad8a836b6ff260a3b2fbafee1"},
+ {file = "pylint-3.1.1.tar.gz", hash = "sha256:c7c2652bf8099c7fb7a63bc6af5c5f8f7b9d7b392fa1d320cb020e222aff28c2"},
]
[package.dependencies]
-astroid = ">=3.0.1,<=3.1.0-dev0"
+astroid = ">=3.1.0,<=3.2.0-dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
{version = ">=0.3.7", markers = "python_version >= \"3.12\""},
{version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
]
-isort = ">=4.2.5,<6"
+isort = ">=4.2.5,<5.13.0 || >5.13.0,<6"
mccabe = ">=0.6,<0.8"
platformdirs = ">=2.2.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
tomlkit = ">=0.10.1"
-typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
[package.extras]
spelling = ["pyenchant (>=3.2,<4.0)"]
@@ -1326,13 +1555,13 @@ testutils = ["gitpython (>3)"]
[[package]]
name = "pynetbox"
-version = "7.2.0"
+version = "7.3.3"
description = "NetBox API client library"
optional = false
python-versions = "*"
files = [
- {file = "pynetbox-7.2.0-py3-none-any.whl", hash = "sha256:73ac66250f8899152180b5e1fca63402d4aef8ed361b291f19d7ba3730f87a6c"},
- {file = "pynetbox-7.2.0.tar.gz", hash = "sha256:3735f4d44e81da4b9af3cc1fdf18e708752ac5d863442100d987886c099df5df"},
+ {file = "pynetbox-7.3.3-py3-none-any.whl", hash = "sha256:9bc25f83ff126f12d26976e544a2f0f6c7c898c261e6d47f8a4144ab47686d81"},
+ {file = "pynetbox-7.3.3.tar.gz", hash = "sha256:b0bcef4c3a019516d2425ae0ce5509eae7f4f11ad4510c9fe9527d468f22529b"},
]
[package.dependencies]
@@ -1341,27 +1570,24 @@ requests = ">=2.20.0,<3.0"
[[package]]
name = "pyproject-hooks"
-version = "1.0.0"
+version = "1.1.0"
description = "Wrappers to call pyproject.toml-based build backend hooks."
optional = false
python-versions = ">=3.7"
files = [
- {file = "pyproject_hooks-1.0.0-py3-none-any.whl", hash = "sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8"},
- {file = "pyproject_hooks-1.0.0.tar.gz", hash = "sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5"},
+ {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"},
+ {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"},
]
-[package.dependencies]
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-
[[package]]
name = "pytest"
-version = "7.4.3"
+version = "8.2.0"
description = "pytest: simple powerful testing with Python"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
- {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
+ {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"},
+ {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"},
]
[package.dependencies]
@@ -1369,11 +1595,11 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
-pluggy = ">=0.12,<2.0"
-tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
+pluggy = ">=1.5,<2.0"
+tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
+dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-forked"
@@ -1392,35 +1618,35 @@ pytest = ">=3.10"
[[package]]
name = "pytest-mock"
-version = "3.12.0"
+version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"},
- {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"},
+ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+ {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
]
[package.dependencies]
-pytest = ">=5.0"
+pytest = ">=6.2.5"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "pytest-xdist"
-version = "3.3.1"
+version = "3.6.1"
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "pytest-xdist-3.3.1.tar.gz", hash = "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93"},
- {file = "pytest_xdist-3.3.1-py3-none-any.whl", hash = "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2"},
+ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"},
+ {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"},
]
[package.dependencies]
-execnet = ">=1.1"
-pytest = ">=6.2.0"
+execnet = ">=2.1"
+pytest = ">=7.0.0"
[package.extras]
psutil = ["psutil (>=3.0)"]
@@ -1429,13 +1655,13 @@ testing = ["filelock"]
[[package]]
name = "pytz"
-version = "2023.3.post1"
+version = "2024.1"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
- {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"},
- {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"},
+ {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
+ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
]
[[package]]
@@ -1500,13 +1726,13 @@ files = [
[[package]]
name = "requests"
-version = "2.31.0"
+version = "2.32.0"
description = "Python HTTP for Humans."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
- {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
+ {file = "requests-2.32.0-py3-none-any.whl", hash = "sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5"},
+ {file = "requests-2.32.0.tar.gz", hash = "sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8"},
]
[package.dependencies]
@@ -1521,13 +1747,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "resolvelib"
-version = "0.8.1"
+version = "1.0.1"
description = "Resolve abstract dependencies into concrete ones"
optional = false
python-versions = "*"
files = [
- {file = "resolvelib-0.8.1-py2.py3-none-any.whl", hash = "sha256:d9b7907f055c3b3a2cfc56c914ffd940122915826ff5fb5b1de0c99778f4de98"},
- {file = "resolvelib-0.8.1.tar.gz", hash = "sha256:c6ea56732e9fb6fca1b2acc2ccc68a0b6b8c566d8f3e78e0443310ede61dbd37"},
+ {file = "resolvelib-1.0.1-py2.py3-none-any.whl", hash = "sha256:d2da45d1a8dfee81bdd591647783e340ef3bcb104b54c383f70d422ef5cc7dbf"},
+ {file = "resolvelib-1.0.1.tar.gz", hash = "sha256:04ce76cbd63fded2078ce224785da6ecd42b9564b1390793f64ddecbe997b309"},
]
[package.extras]
@@ -1538,64 +1764,68 @@ test = ["commentjson", "packaging", "pytest"]
[[package]]
name = "rich"
-version = "12.6.0"
+version = "13.7.1"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
-python-versions = ">=3.6.3,<4.0.0"
+python-versions = ">=3.7.0"
files = [
- {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"},
- {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"},
+ {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
+ {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"},
]
[package.dependencies]
-commonmark = ">=0.9.0,<0.10.0"
-pygments = ">=2.6.0,<3.0.0"
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "rstcheck"
-version = "6.1.2"
+version = "6.2.1"
description = "Checks syntax of reStructuredText and code blocks nested within it"
optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.8"
files = [
- {file = "rstcheck-6.1.2-py3-none-any.whl", hash = "sha256:4aaa46e0debc179f849807c453fa384fd2b75167faf5b1274115730805fab529"},
- {file = "rstcheck-6.1.2.tar.gz", hash = "sha256:f9cb07a72ef9a81d1e32187eae29b00a89421ccba1bde0b1652a08ed0923f61b"},
+ {file = "rstcheck-6.2.1-py3-none-any.whl", hash = "sha256:b450943707d8ca053f5c6b9f103ee595f4926a064203e5e579172aefb3fe2c12"},
+ {file = "rstcheck-6.2.1.tar.gz", hash = "sha256:e4d173950b023eb12c2b9d2348a8c62bef46612bbc7b29e1e57d37320ed0a891"},
]
[package.dependencies]
-rstcheck-core = ">=1.0.2,<2.0.0"
-typer = {version = ">=0.4.1,<0.8", extras = ["all"]}
+rstcheck-core = ">=1.1"
+typer = {version = ">=0.4.1", extras = ["all"]}
[package.extras]
-docs = ["m2r2 (>=0.3.2)", "sphinx", "sphinx-autobuild (==2021.3.14)", "sphinx-click (>=4.0.3,<5.0.0)", "sphinx-rtd-dark-mode (>=1.2.4,<2.0.0)", "sphinx-rtd-theme (<1)", "sphinxcontrib-spelling (>=7.3)"]
-sphinx = ["sphinx"]
+dev = ["rstcheck[docs,sphinx,testing,toml,type-check]", "tox (>=3.15)"]
+docs = ["m2r2 (>=0.3.2)", "sphinx (>=5.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-click (>=4.0.3)", "sphinx-rtd-theme (>=1.2)", "sphinxcontrib-spelling (>=7.3)"]
+sphinx = ["sphinx (>=5.0)"]
testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=7.2)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"]
-toml = ["tomli"]
+toml = ["tomli (>=2.0)"]
+type-check = ["mypy (>=1.0)"]
[[package]]
name = "rstcheck-core"
-version = "1.0.3"
+version = "1.2.1"
description = "Checks syntax of reStructuredText and code blocks nested within it"
optional = false
-python-versions = ">=3.7,<4.0"
+python-versions = ">=3.8"
files = [
- {file = "rstcheck_core-1.0.3-py3-none-any.whl", hash = "sha256:d75d7df8f15b58e8aafe322d6fb6ef1ac8d12bb563089b0696948a00ee7f601a"},
- {file = "rstcheck_core-1.0.3.tar.gz", hash = "sha256:add19c9a1b97d9087f4b463b49c12cd8a9c03689a255e99089c70a2692f16369"},
+ {file = "rstcheck-core-1.2.1.tar.gz", hash = "sha256:9b330020d912e2864f23f332c1a0569463ca3b06b8fee7b7bdd201b055f7f831"},
+ {file = "rstcheck_core-1.2.1-py3-none-any.whl", hash = "sha256:1c100de418b6c9e14d9cf6558644d0ab103fdc447f891313882d02df3a3c52ba"},
]
[package.dependencies]
-docutils = ">=0.7,<0.20"
-pydantic = ">=1.2,<2.0"
-types-docutils = ">=0.18,<0.20"
+docutils = ">=0.7"
+pydantic = ">=2"
[package.extras]
-docs = ["m2r2 (>=0.3.2)", "sphinx (>=4.0,<6.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-autodoc-typehints (>=1.15)", "sphinx-rtd-dark-mode (>=1.2.4,<2.0.0)", "sphinx-rtd-theme (<1)", "sphinxcontrib-apidoc (>=0.3)", "sphinxcontrib-spelling (>=7.3)"]
-sphinx = ["sphinx (>=4.0,<6.0)"]
-testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=6.0)", "pytest-cov (>=3.0)", "pytest-mock (>=3.7)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"]
-toml = ["tomli (>=2.0,<3.0)"]
+dev = ["rstcheck-core[docs,sphinx,testing,toml,type-check,yaml]", "tox (>=3.15)"]
+docs = ["m2r2 (>=0.3.2)", "sphinx (>=5.0,!=7.2.5)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.15)", "sphinx-rtd-theme (>=1.2)", "sphinxcontrib-apidoc (>=0.3)", "sphinxcontrib-spelling (>=7.3)"]
+sphinx = ["sphinx (>=5.0)"]
+testing = ["coverage-conditional-plugin (>=0.5)", "coverage[toml] (>=6.0)", "pytest (>=7.2)", "pytest-cov (>=3.0)", "pytest-mock (>=3.7)", "pytest-randomly (>=3.0)", "pytest-sugar (>=0.9.5)"]
+toml = ["tomli (>=2.0)"]
+type-check = ["mypy (>=1.0)", "types-PyYAML (>=6.0.0)", "types-docutils (>=0.18)"]
+yaml = ["pyyaml (>=6.0.0)"]
[[package]]
name = "semantic-version"
@@ -1613,16 +1843,6 @@ dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "f
doc = ["Sphinx", "sphinx-rtd-theme"]
[[package]]
-name = "sh"
-version = "1.14.3"
-description = "Python subprocess replacement"
-optional = false
-python-versions = "*"
-files = [
- {file = "sh-1.14.3.tar.gz", hash = "sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7"},
-]
-
-[[package]]
name = "shellingham"
version = "1.5.4"
description = "Tool to Detect Surrounding Shell"
@@ -1645,6 +1865,146 @@ files = [
]
[[package]]
+name = "snowballstemmer"
+version = "2.2.0"
+description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms."
+optional = false
+python-versions = "*"
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
+
+[[package]]
+name = "sphinx"
+version = "7.3.7"
+description = "Python documentation generator"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"},
+ {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"},
+]
+
+[package.dependencies]
+alabaster = ">=0.7.14,<0.8.0"
+babel = ">=2.9"
+colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
+docutils = ">=0.18.1,<0.22"
+imagesize = ">=1.3"
+Jinja2 = ">=3.0"
+packaging = ">=21.0"
+Pygments = ">=2.14"
+requests = ">=2.25.0"
+snowballstemmer = ">=2.0"
+sphinxcontrib-applehelp = "*"
+sphinxcontrib-devhelp = "*"
+sphinxcontrib-htmlhelp = ">=2.0.0"
+sphinxcontrib-jsmath = "*"
+sphinxcontrib-qthelp = "*"
+sphinxcontrib-serializinghtml = ">=1.1.9"
+tomli = {version = ">=2", markers = "python_version < \"3.11\""}
+
+[package.extras]
+docs = ["sphinxcontrib-websupport"]
+lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"]
+test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"]
+
+[[package]]
+name = "sphinxcontrib-applehelp"
+version = "1.0.8"
+description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"},
+ {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-devhelp"
+version = "1.0.6"
+description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"},
+ {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-htmlhelp"
+version = "2.0.5"
+description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"},
+ {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["html5lib", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-jsmath"
+version = "1.0.1"
+description = "A sphinx extension which renders display math in HTML via JavaScript"
+optional = false
+python-versions = ">=3.5"
+files = [
+ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
+ {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
+]
+
+[package.extras]
+test = ["flake8", "mypy", "pytest"]
+
+[[package]]
+name = "sphinxcontrib-qthelp"
+version = "1.0.7"
+description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"},
+ {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
+name = "sphinxcontrib-serializinghtml"
+version = "1.1.10"
+description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
+optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"},
+ {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"},
+]
+
+[package.extras]
+lint = ["docutils-stubs", "flake8", "mypy"]
+standalone = ["Sphinx (>=5)"]
+test = ["pytest"]
+
+[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
@@ -1657,13 +2017,13 @@ files = [
[[package]]
name = "tomlkit"
-version = "0.12.2"
+version = "0.12.5"
description = "Style preserving TOML library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "tomlkit-0.12.2-py3-none-any.whl", hash = "sha256:eeea7ac7563faeab0a1ed8fe12c2e5a51c61f933f2502f7e9db0241a65163ad0"},
- {file = "tomlkit-0.12.2.tar.gz", hash = "sha256:df32fab589a81f0d7dc525a4267b6d7a64ee99619cbd1eeb0fae32c1dd426977"},
+ {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"},
+ {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"},
]
[[package]]
@@ -1682,147 +2042,146 @@ six = "*"
[[package]]
name = "typer"
-version = "0.7.0"
+version = "0.12.3"
description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
files = [
- {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"},
- {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"},
+ {file = "typer-0.12.3-py3-none-any.whl", hash = "sha256:070d7ca53f785acbccba8e7d28b08dcd88f79f1fbda035ade0aecec71ca5c914"},
+ {file = "typer-0.12.3.tar.gz", hash = "sha256:49e73131481d804288ef62598d97a1ceef3058905aa536a1134f90891ba35482"},
]
[package.dependencies]
-click = ">=7.1.1,<9.0.0"
-colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""}
-rich = {version = ">=10.11.0,<13.0.0", optional = true, markers = "extra == \"all\""}
-shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""}
-
-[package.extras]
-all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
-dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
-doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
-test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
-
-[[package]]
-name = "types-docutils"
-version = "0.19.1.9"
-description = "Typing stubs for docutils"
-optional = false
-python-versions = "*"
-files = [
- {file = "types-docutils-0.19.1.9.tar.gz", hash = "sha256:1d029567e67c52992fd42aa968778bc10a5e445c8450fc751d672d6f50330a4a"},
- {file = "types_docutils-0.19.1.9-py3-none-any.whl", hash = "sha256:556fb7ee19248aa482caa142a830c940b776b0f8c7577a98abe0977574546a1d"},
-]
+click = ">=8.0.0"
+rich = ">=10.11.0"
+shellingham = ">=1.3.0"
+typing-extensions = ">=3.7.4.3"
[[package]]
name = "typing-extensions"
-version = "4.8.0"
+version = "4.11.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
- {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
- {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
+ {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
+ {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
]
[[package]]
name = "urllib3"
-version = "2.0.7"
+version = "2.2.1"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"},
- {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"},
+ {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"},
+ {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "yarl"
-version = "1.9.2"
+version = "1.9.4"
description = "Yet another URL library"
optional = false
python-versions = ">=3.7"
files = [
- {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"},
- {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"},
- {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"},
- {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"},
- {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"},
- {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"},
- {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"},
- {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"},
- {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"},
- {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"},
- {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"},
- {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"},
- {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"},
- {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"},
- {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"},
- {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"},
- {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"},
- {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"},
- {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"},
- {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"},
- {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"},
- {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"},
- {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"},
- {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"},
- {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"},
- {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"},
- {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"},
- {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"},
- {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"},
- {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"},
- {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"},
- {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"},
- {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"},
- {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
+ {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
+ {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
+ {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
+ {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
+ {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
+ {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
+ {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
+ {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
+ {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
+ {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
+ {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
+ {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
+ {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
+ {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
+ {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
+ {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
+ {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
+ {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
+ {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
+ {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
+ {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
+ {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
+ {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
+ {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
+ {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
+ {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
+ {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
+ {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
+ {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
+ {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
+ {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
+ {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
+ {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
+ {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
+ {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
+ {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
]
[package.dependencies]
@@ -1831,20 +2190,20 @@ multidict = ">=4.0"
[[package]]
name = "zipp"
-version = "3.17.0"
+version = "3.18.1"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
- {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+ {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
+ {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
]
[package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
[metadata]
lock-version = "2.0"
-python-versions = "^3.9"
-content-hash = "7dc51b7cbbd935048b1b613952815663128eb15b811439eb79f56feccda18ad2"
+python-versions = "^3.10"
+content-hash = "6ba79f41a3da51a0bb9223846b122a582421de50bd82d785fa3d67cf224f3a67"
diff --git a/ansible_collections/netbox/netbox/pyproject.toml b/ansible_collections/netbox/netbox/pyproject.toml
index 1eb6c8ff8..ccfb556a9 100644
--- a/ansible_collections/netbox/netbox/pyproject.toml
+++ b/ansible_collections/netbox/netbox/pyproject.toml
@@ -1,12 +1,12 @@
[tool.poetry]
name = "netbox_ansible_modules"
-version = "3.17.0"
+version = "3.19.1"
description = "Ansible collection to interact with NetBox's API"
authors = ["Mikhail Yohman <mikhail.yohman@gmail.com>"]
license = "GPLv3"
[tool.poetry.dependencies]
-python = "^3.9"
+python = "^3.10"
ansible-core = "2.15.9"
black = "*"
codecov = "*"
@@ -15,7 +15,7 @@ deepdiff = "*"
cryptography = "*"
jinja2 = "*"
jmespath = "*"
-pynetbox = "^7"
+pynetbox = "^7.3"
pytest = "*"
pytest-mock = "*"
pytest-xdist = "*"
@@ -27,6 +27,8 @@ importlib-metadata = "*"
pylint = "*"
pytz = "*"
packaging = "*"
+importlib-resources = "*"
+antsibull-docs = "^2.11.0"
[tool.poetry.dev-dependencies]
diff --git a/ansible_collections/netbox/netbox/requirements.yml b/ansible_collections/netbox/netbox/requirements.yml
new file mode 100644
index 000000000..46c178c76
--- /dev/null
+++ b/ansible_collections/netbox/netbox/requirements.yml
@@ -0,0 +1,2 @@
+collections:
+ - name: .
diff --git a/ansible_collections/netbox/netbox/test-requirements.txt b/ansible_collections/netbox/netbox/test-requirements.txt
new file mode 100644
index 000000000..a82e06e5e
--- /dev/null
+++ b/ansible_collections/netbox/netbox/test-requirements.txt
@@ -0,0 +1,2 @@
+pytest-mock
+pytest-ansible
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/config.yml b/ansible_collections/netbox/netbox/tests/config.yml
new file mode 100644
index 000000000..6fe2c43ad
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/config.yml
@@ -0,0 +1,3 @@
+---
+modules:
+ python_requires: ">=3.10"
diff --git a/ansible_collections/netbox/netbox/tests/integration/netbox-deploy.py b/ansible_collections/netbox/netbox/tests/integration/netbox-deploy.py
index de5e080e4..68f28cb2e 100755
--- a/ansible_collections/netbox/netbox/tests/integration/netbox-deploy.py
+++ b/ansible_collections/netbox/netbox/tests/integration/netbox-deploy.py
@@ -59,49 +59,49 @@ create_tags = make_netbox_calls(
# ORDER OF OPERATIONS FOR THE MOST PART
-## Create TENANTS
+# Create TENANTS
tenants = [{"name": "Test Tenant", "slug": "test-tenant"}]
created_tenants = make_netbox_calls(nb.tenancy.tenants, tenants)
-### Test Tenant to be used later on
+# Test Tenant to be used later on
test_tenant = nb.tenancy.tenants.get(slug="test-tenant")
-## Create TENANT GROUPS
+# Create TENANT GROUPS
tenant_groups = [{"name": "Test Tenant Group", "slug": "test-tenant-group"}]
created_tenant_groups = make_netbox_calls(nb.tenancy.tenant_groups, tenant_groups)
-## Create Regions
+# Create Regions
regions = [
{"name": "Test Region", "slug": "test-region"},
{"name": "Parent Region", "slug": "parent-region"},
{"name": "Other Region", "slug": "other-region"},
]
created_regions = make_netbox_calls(nb.dcim.regions, regions)
-### Region variables to be used later on
+# Region variables to be used later on
parent_region = nb.dcim.regions.get(slug="parent-region")
test_region = nb.dcim.regions.get(slug="test-region")
-### Create relationship between regions
+# Create relationship between regions
test_region.parent = parent_region
test_region.save()
-## Create site_groups
+# Create site_groups
site_groups = [
{"name": "Test site_group", "slug": "test-site_group"},
{"name": "Parent site_group", "slug": "parent-site_group"},
{"name": "Other site_group", "slug": "other-site_group"},
]
created_site_groups = make_netbox_calls(nb.dcim.site_groups, site_groups)
-### site_group variables to be used later on
+# site_group variables to be used later on
parent_site_group = nb.dcim.site_groups.get(slug="parent-site_group")
test_site_group = nb.dcim.site_groups.get(slug="test-site_group")
-### Create relationship between site_groups
+# Create relationship between site_groups
test_site_group.parent = parent_site_group
test_site_group.save()
-## Create SITES and register variables
+# Create SITES and register variables
sites = [
{
"name": "Test Site",
@@ -113,20 +113,20 @@ sites = [
{"name": "Test Site2", "slug": "test-site2"},
]
created_sites = make_netbox_calls(nb.dcim.sites, sites)
-### Site variables to be used later on
+# Site variables to be used later on
test_site = nb.dcim.sites.get(slug="test-site")
test_site2 = nb.dcim.sites.get(slug="test-site2")
-## Create Site Groups
+# Create Site Groups
site_groups = [{"name": "Test Site Group", "slug": "test-site-group"}]
created_site_groups = make_netbox_calls(nb.dcim.site_groups, site_groups)
-## Create VRFs
+# Create VRFs
vrfs = [{"name": "Test VRF", "rd": "1:1"}]
created_vrfs = make_netbox_calls(nb.ipam.vrfs, vrfs)
-## Create PREFIXES
+# Create PREFIXES
prefixes = [
{"prefix": "192.168.100.0/24", "site": test_site2.id},
{"prefix": "10.10.0.0/16"},
@@ -134,7 +134,7 @@ prefixes = [
created_prefixes = make_netbox_calls(nb.ipam.prefixes, prefixes)
-## Create VLAN GROUPS
+# Create VLAN GROUPS
vlan_groups = [
{
"name": "Test Vlan Group",
@@ -155,11 +155,11 @@ if nb_version >= version.parse("2.11"):
vg["scope_type"] = "dcim.site"
vg["scope_id"] = vg.pop("site")
created_vlan_groups = make_netbox_calls(nb.ipam.vlan_groups, vlan_groups)
-## VLAN Group variables to be used later on
+# VLAN Group variables to be used later on
test_vlan_group = nb.ipam.vlan_groups.get(slug="test-vlan-group")
-## Create VLANS
+# Create VLANS
vlans = [
{"name": "Wireless", "vid": 100, "site": test_site.id},
{"name": "Data", "vid": 200, "site": test_site.id},
@@ -174,7 +174,7 @@ vlans = [
]
created_vlans = make_netbox_calls(nb.ipam.vlans, vlans)
-## Create FHRP GROUPS
+# Create FHRP GROUPS
fhrp_groups = [
{
"protocol": "other",
@@ -189,24 +189,24 @@ fhrp_groups = [
]
created_fhrp_groups = make_netbox_calls(nb.ipam.fhrp_groups, fhrp_groups)
-## Create IPAM Roles
+# Create IPAM Roles
ipam_roles = [{"name": "Network of care", "slug": "network-of-care"}]
create_ipam_roles = make_netbox_calls(nb.ipam.roles, ipam_roles)
-## Create Manufacturers
+# Create Manufacturers
manufacturers = [
{"name": "Cisco", "slug": "cisco"},
{"name": "Arista", "slug": "arista"},
{"name": "Test Manufactuer", "slug": "test-manufacturer"},
]
created_manufacturers = make_netbox_calls(nb.dcim.manufacturers, manufacturers)
-### Manufacturer variables to be used later on
+# Manufacturer variables to be used later on
cisco_manu = nb.dcim.manufacturers.get(slug="cisco")
arista_manu = nb.dcim.manufacturers.get(slug="arista")
-## Create Device Types
+# Create Device Types
device_types = [
{"model": "Cisco Test", "slug": "cisco-test", "manufacturer": cisco_manu.id},
{"model": "Arista Test", "slug": "arista-test", "manufacturer": arista_manu.id},
@@ -232,13 +232,13 @@ device_types = [
]
created_device_types = make_netbox_calls(nb.dcim.device_types, device_types)
-### Device type variables to be used later on
+# Device type variables to be used later on
cisco_test = nb.dcim.device_types.get(slug="cisco-test")
arista_test = nb.dcim.device_types.get(slug="arista-test")
nexus_parent = nb.dcim.device_types.get(slug="nexus-parent")
nexus_child = nb.dcim.device_types.get(slug="nexus-child")
-## Create Device Roles
+# Create Device Roles
device_roles = [
{"name": "Core Switch", "slug": "core-switch", "color": "aa1409", "vm_role": False},
{
@@ -255,11 +255,11 @@ device_roles = [
},
]
created_device_roles = make_netbox_calls(nb.dcim.device_roles, device_roles)
-### Device role variables to be used later on
+# Device role variables to be used later on
core_switch = nb.dcim.device_roles.get(slug="core-switch")
-## Create Rack Groups
+# Create Rack Groups
rack_groups = [
{"name": "Test Rack Group", "slug": "test-rack-group", "site": test_site.id},
{"name": "Parent Rack Group", "slug": "parent-rack-group", "site": test_site.id},
@@ -269,15 +269,15 @@ if nb_version >= version.parse("2.11"):
else:
created_rack_groups = make_netbox_calls(nb.dcim.rack_groups, rack_groups)
-### Create Rack Group Parent relationship
+# Create Rack Group Parent relationship
created_rack_groups[0].parent = created_rack_groups[1]
created_rack_groups[0].save()
-## Create Rack Roles
+# Create Rack Roles
rack_roles = [{"name": "Test Rack Role", "slug": "test-rack-role", "color": "4287f5"}]
created_rack_roles = make_netbox_calls(nb.dcim.rack_roles, rack_roles)
-## Create Racks
+# Create Racks
racks = [
{
"name": "Test Rack Site 2",
@@ -287,7 +287,7 @@ racks = [
{"name": "Test Rack", "site": test_site.id, "group": created_rack_groups[0].id},
]
-## Use location instead of group for 2.11+
+# Use location instead of group for 2.11+
if nb_version >= version.parse("2.11"):
racks[1]["location"] = created_rack_groups[0].id
del racks[1]["group"]
@@ -297,7 +297,7 @@ test_rack = nb.dcim.racks.get(name="Test Rack") # racks don't have slugs
test_rack_site2 = nb.dcim.racks.get(name="Test Rack Site 2")
-## Create Devices
+# Create Devices
devices = [
{
"name": "test100",
@@ -338,7 +338,7 @@ devices = [
},
]
-## Add some locations for 2.11+
+# Add some locations for 2.11+
if nb_version >= version.parse("2.11"):
devices[0]["location"] = created_rack_groups[0].id
devices[1]["location"] = created_rack_groups[0].id
@@ -350,7 +350,7 @@ if nb_version >= version.parse("3.6"):
device["role"] = device.pop("device_role")
created_devices = make_netbox_calls(nb.dcim.devices, devices)
-### Device variables to be used later on
+# Device variables to be used later on
test100 = nb.dcim.devices.get(name="test100")
# Create VC, assign member, create initial interface
@@ -365,14 +365,14 @@ nexus_interfaces = [
]
created_nexus_interfaces = make_netbox_calls(nb.dcim.interfaces, nexus_interfaces)
-## Create Interfaces
+# Create Interfaces
dev_interfaces = [
{"name": "GigabitEthernet1", "device": test100.id, "type": "1000base-t"},
{"name": "GigabitEthernet2", "device": test100.id, "type": "1000base-t"},
]
created_interfaces = make_netbox_calls(nb.dcim.interfaces, dev_interfaces)
-## Wireless Interfaces
+# Wireless Interfaces
if nb_version >= version.parse("3.1"):
wlink_interfaces = [
{"name": "wlink1", "device": test100.id, "type": "ieee802.11a"},
@@ -380,12 +380,12 @@ if nb_version >= version.parse("3.1"):
]
wireless_interfaces = make_netbox_calls(nb.dcim.interfaces, wlink_interfaces)
-## Interface variables to be used later on
+# Interface variables to be used later on
test100_gi1 = nb.dcim.interfaces.get(name="GigabitEthernet1", device_id=1)
test100_gi2 = nb.dcim.interfaces.get(name="GigabitEthernet2", device_id=1)
-## Create IP Addresses
+# Create IP Addresses
ip_addresses = [
{
"address": "172.16.180.1/24",
@@ -415,25 +415,25 @@ created_ip_addresses = make_netbox_calls(nb.ipam.ip_addresses, ip_addresses)
# Assign Primary IP
nexus.update({"primary_ip4": 4})
-## Create RIRs
+# Create RIRs
rirs = [{"name": "Example RIR", "slug": "example-rir"}]
created_rirs = make_netbox_calls(nb.ipam.rirs, rirs)
-## Create Cluster Group
+# Create Cluster Group
cluster_groups = [{"name": "Test Cluster Group", "slug": "test-cluster-group"}]
created_cluster_groups = make_netbox_calls(
nb.virtualization.cluster_groups, cluster_groups
)
test_cluster_group = nb.virtualization.cluster_groups.get(slug="test-cluster-group")
-## Create Cluster Type
+# Create Cluster Type
cluster_types = [{"name": "Test Cluster Type", "slug": "test-cluster-type"}]
created_cluster_types = make_netbox_calls(
nb.virtualization.cluster_types, cluster_types
)
test_cluster_type = nb.virtualization.cluster_types.get(slug="test-cluster-type")
-## Create Cluster
+# Create Cluster
clusters = [
{
"name": "Test Cluster",
@@ -450,7 +450,7 @@ created_clusters = make_netbox_calls(nb.virtualization.clusters, clusters)
test_cluster = nb.virtualization.clusters.get(name="Test Cluster")
test_cluster2 = nb.virtualization.clusters.get(name="Test Cluster 2")
-## Create Virtual Machine
+# Create Virtual Machine
virtual_machines = [
{"name": "test100-vm", "cluster": test_cluster.id},
{"name": "test101-vm", "cluster": test_cluster.id},
@@ -475,7 +475,7 @@ created_virtual_disks = make_netbox_calls(
nb.virtualization.virtual_disks, virtual_disks
)
-## Create Virtual Machine Interfaces
+# Create Virtual Machine Interfaces
virtual_machines_intfs = [
# Create test100-vm intfs
{"name": "Eth0", "virtual_machine": test100_vm.id},
@@ -498,7 +498,7 @@ created_virtual_machines_intfs = make_netbox_calls(
)
-## Create Services
+# Create Services
services = [
{"device": test100.id, "name": "ssh", "ports": [22], "protocol": "tcp"},
{
@@ -519,17 +519,17 @@ services = [
created_services = make_netbox_calls(nb.ipam.services, services)
-## Create Circuit Provider
+# Create Circuit Provider
providers = [{"name": "Test Provider", "slug": "test-provider"}]
created_providers = make_netbox_calls(nb.circuits.providers, providers)
test_provider = nb.circuits.providers.get(slug="test-provider")
-## Create Circuit Type
+# Create Circuit Type
circuit_types = [{"name": "Test Circuit Type", "slug": "test-circuit-type"}]
created_circuit_types = make_netbox_calls(nb.circuits.circuit_types, circuit_types)
test_circuit_type = nb.circuits.circuit_types.get(slug="test-circuit-type")
-## Create Circuit
+# Create Circuit
circuits = [
{"cid": "Test Circuit", "provider": test_provider.id, "type": test_circuit_type.id},
{
@@ -541,7 +541,7 @@ circuits = [
created_circuits = make_netbox_calls(nb.circuits.circuits, circuits)
test_circuit_two = nb.circuits.circuits.get(cid="Test Circuit Two")
-## Create Circuit Termination
+# Create Circuit Termination
circuit_terms = [
{
"circuit": test_circuit_two.id,
@@ -561,7 +561,7 @@ route_targets = [
]
created_route_targets = make_netbox_calls(nb.ipam.route_targets, route_targets)
-## Create L2VPNs
+# Create L2VPNs
l2vpns = [
{
"identifier": 111111,
@@ -576,9 +576,13 @@ l2vpns = [
"type": "vxlan",
},
]
-created_l2vpns = make_netbox_calls(nb.ipam.l2vpns, l2vpns)
+if nb_version >= version.parse("3.7"):
+ created_l2vpns = make_netbox_calls(nb.vpn.l2vpns, l2vpns)
+else:
+ created_l2vpns = make_netbox_calls(nb.ipam.l2vpns, l2vpns)
if ERRORS:
sys.exit(
- "Errors have occurred when creating objects, and should have been printed out. Check previous output."
+ "Errors have occurred when creating objects, and should have been printed out."
+ " Check previous output."
)
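NetBox 3.7 moved L2VPN objects from the ipam app into the new vpn app, which is why the hunk above selects the endpoint by parsed version. A minimal sketch of the same pattern, assuming the nb pynetbox client, nb_version, and the make_netbox_calls helper already defined in netbox-deploy.py:

# Pick the L2VPN endpoint based on the NetBox version under test.
l2vpn_endpoint = (
    nb.vpn.l2vpns             # NetBox 3.7+ serves L2VPNs from the vpn app
    if nb_version >= version.parse("3.7")
    else nb.ipam.l2vpns       # older releases keep them under ipam
)
created_l2vpns = make_netbox_calls(l2vpn_endpoint, l2vpns)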
diff --git a/ansible_collections/netbox/netbox/tests/integration/render_config.sh b/ansible_collections/netbox/netbox/tests/integration/render_config.sh
index 3d07c6638..16f7ed250 100755
--- a/ansible_collections/netbox/netbox/tests/integration/render_config.sh
+++ b/ansible_collections/netbox/netbox/tests/integration/render_config.sh
@@ -15,6 +15,7 @@ set -o pipefail # don't hide errors within pipes
function main()
{
readonly template="$1"
+ # shellcheck disable=SC2155
readonly content="$(cat "${template}")"
eval "echo \"$content\""
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/compare_inventory_json.py b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/compare_inventory_json.py
index 40a032ca0..ccdf8bd52 100755
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/compare_inventory_json.py
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/compare_inventory_json.py
@@ -62,7 +62,7 @@ def sort_hostvar_arrays(obj):
if not hostvars:
return
- for _, host in hostvars.items():
+ for _, host in hostvars.items(): # pylint: disable=disallowed-name
if interfaces := host.get("interfaces"):
host["interfaces"] = sorted(interfaces, key=itemgetter("id"))
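The pylint disable in the hunk above silences the disallowed-name warning for the throwaway "_" loop variable. A hedged alternative sketch (not what the patch does) that avoids the unused key entirely would iterate the values directly:

for host in hostvars.values():
    if interfaces := host.get("interfaces"):
        host["interfaces"] = sorted(interfaces, key=itemgetter("id"))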
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml
index 6af9a1339..a19b60db2 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-bearer-token.yml
@@ -1,15 +1,15 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token:
type: Token
value: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml
index 1dbc1993f..49f306a78 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2-filter.yml
@@ -1,13 +1,13 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml
index 44ad6f16f..67f9c49be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-jinja2.yml
@@ -1,24 +1,24 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Cache is not for performance of tests, but to test the caching option works
# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: False
-plurals: False
-interfaces: False
-services: False
-group_names_raw: True
-virtual_chassis_name: True
-dns_name: True
-ansible_host_dns_name: True
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml
index d10721865..259c3934d 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-legacy.yml
@@ -5,6 +5,6 @@
# Checks that substantial work on the inventory does not diverge from what existing users are using by default.
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml
index 28f08f19d..f21f3ca62 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-noracks.yml
@@ -1,14 +1,14 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
-config_context: True
-plurals: True
-interfaces: True
-services: True
-racks: False
+config_context: true
+plurals: true
+interfaces: true
+services: true
+racks: false
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml
index 989a0fdc8..73d6beec9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options-flatten.yml
@@ -1,25 +1,25 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Use cache on this test to make sure interfaces is tested via the cache
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: True
-flatten_config_context: True
-flatten_custom_fields: True
-flatten_local_context_data: True
-plurals: False
-interfaces: True
-services: True
-fetch_all: False
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+flatten_local_context_data: true
+plurals: false
+interfaces: true
+services: true
+fetch_all: false
max_uri_length: 0
-group_names_raw: True
+group_names_raw: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml
index f278a3aa8..f6597c661 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-options.yml
@@ -1,24 +1,24 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Cache is not for performance of tests, but to test the caching option works
# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: False
-plurals: False
-interfaces: False
-services: False
-group_names_raw: True
-virtual_chassis_name: True
-dns_name: True
-ansible_host_dns_name: True
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml
index 146b6d71c..33213ff5c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals-flatten.yml
@@ -1,17 +1,17 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-flatten_config_context: True
-flatten_custom_fields: True
-plurals: True
-interfaces: False
-services: False
-fetch_all: True
-group_names_raw: True
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+plurals: true
+interfaces: false
+services: false
+fetch_all: true
+group_names_raw: true
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml
index 244231062..1e53fd0bf 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory-plurals.yml
@@ -1,22 +1,22 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: True
-plurals: True
-interfaces: True
-services: True
+config_context: true
+plurals: true
+interfaces: true
+services: true
# Enough to fit only 2 devices, so tests chunking logic
max_uri_length: 80
-fetch_all: False
+fetch_all: false
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory.yml
index c88c96593..3a25c2a73 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.5/files/test-inventory.yml
@@ -1,13 +1,13 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/compare_inventory_json.py b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/compare_inventory_json.py
index 40a032ca0..ccdf8bd52 100755
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/compare_inventory_json.py
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/compare_inventory_json.py
@@ -62,7 +62,7 @@ def sort_hostvar_arrays(obj):
if not hostvars:
return
- for _, host in hostvars.items():
+ for _, host in hostvars.items(): # pylint: disable=disallowed-name
if interfaces := host.get("interfaces"):
host["interfaces"] = sorted(interfaces, key=itemgetter("id"))
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml
index 6af9a1339..a19b60db2 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-bearer-token.yml
@@ -1,15 +1,15 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token:
type: Token
value: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml
index 1dbc1993f..49f306a78 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2-filter.yml
@@ -1,13 +1,13 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml
index 44ad6f16f..67f9c49be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-jinja2.yml
@@ -1,24 +1,24 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Cache is not for performance of tests, but to test the caching option works
# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: False
-plurals: False
-interfaces: False
-services: False
-group_names_raw: True
-virtual_chassis_name: True
-dns_name: True
-ansible_host_dns_name: True
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml
index d10721865..259c3934d 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-legacy.yml
@@ -5,6 +5,6 @@
# Checks that substantial work on the inventory does not diverge from what existing users are using by default.
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml
index 28f08f19d..f21f3ca62 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-noracks.yml
@@ -1,14 +1,14 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
-config_context: True
-plurals: True
-interfaces: True
-services: True
-racks: False
+config_context: true
+plurals: true
+interfaces: true
+services: true
+racks: false
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml
index 989a0fdc8..73d6beec9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options-flatten.yml
@@ -1,25 +1,25 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Use cache on this test to make sure interfaces is tested via the cache
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: True
-flatten_config_context: True
-flatten_custom_fields: True
-flatten_local_context_data: True
-plurals: False
-interfaces: True
-services: True
-fetch_all: False
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+flatten_local_context_data: true
+plurals: false
+interfaces: true
+services: true
+fetch_all: false
max_uri_length: 0
-group_names_raw: True
+group_names_raw: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml
index f278a3aa8..f6597c661 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-options.yml
@@ -1,24 +1,24 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Cache is not for performance of tests, but to test the caching option works
# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: False
-plurals: False
-interfaces: False
-services: False
-group_names_raw: True
-virtual_chassis_name: True
-dns_name: True
-ansible_host_dns_name: True
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml
index 146b6d71c..33213ff5c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals-flatten.yml
@@ -1,17 +1,17 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-flatten_config_context: True
-flatten_custom_fields: True
-plurals: True
-interfaces: False
-services: False
-fetch_all: True
-group_names_raw: True
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+plurals: true
+interfaces: false
+services: false
+fetch_all: true
+group_names_raw: true
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml
index 244231062..1e53fd0bf 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory-plurals.yml
@@ -1,22 +1,22 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: True
-plurals: True
-interfaces: True
-services: True
+config_context: true
+plurals: true
+interfaces: true
+services: true
# Enough to fit only 2 devices, so tests chunking logic
max_uri_length: 80
-fetch_all: False
+fetch_all: false
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory.yml
index c88c96593..3a25c2a73 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.6/files/test-inventory.yml
@@ -1,13 +1,13 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/compare_inventory_json.py b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/compare_inventory_json.py
index 40a032ca0..ccdf8bd52 100755
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/compare_inventory_json.py
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/compare_inventory_json.py
@@ -62,7 +62,7 @@ def sort_hostvar_arrays(obj):
if not hostvars:
return
- for _, host in hostvars.items():
+ for _, host in hostvars.items(): # pylint: disable=disallowed-name
if interfaces := host.get("interfaces"):
host["interfaces"] = sorted(interfaces, key=itemgetter("id"))
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml
index 6af9a1339..a19b60db2 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-bearer-token.yml
@@ -1,15 +1,15 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token:
type: Token
value: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml
index 1dbc1993f..49f306a78 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2-filter.yml
@@ -1,13 +1,13 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml
index 44ad6f16f..67f9c49be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-jinja2.yml
@@ -1,24 +1,24 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Cache is not for performance of tests, but to test the caching option works
# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: False
-plurals: False
-interfaces: False
-services: False
-group_names_raw: True
-virtual_chassis_name: True
-dns_name: True
-ansible_host_dns_name: True
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml
index d10721865..259c3934d 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-legacy.yml
@@ -5,6 +5,6 @@
# Checks that substantial work on the inventory does not diverge from what existing users are using by default.
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml
index 28f08f19d..f21f3ca62 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-noracks.yml
@@ -1,14 +1,14 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
-config_context: True
-plurals: True
-interfaces: True
-services: True
-racks: False
+config_context: true
+plurals: true
+interfaces: true
+services: true
+racks: false
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml
index 989a0fdc8..73d6beec9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options-flatten.yml
@@ -1,25 +1,25 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Use cache on this test to make sure interfaces is tested via the cache
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: True
-flatten_config_context: True
-flatten_custom_fields: True
-flatten_local_context_data: True
-plurals: False
-interfaces: True
-services: True
-fetch_all: False
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+flatten_local_context_data: true
+plurals: false
+interfaces: true
+services: true
+fetch_all: false
max_uri_length: 0
-group_names_raw: True
+group_names_raw: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml
index f278a3aa8..f6597c661 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-options.yml
@@ -1,24 +1,24 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
# Cache is not for performance of tests, but to test the caching option works
# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: False
-plurals: False
-interfaces: False
-services: False
-group_names_raw: True
-virtual_chassis_name: True
-dns_name: True
-ansible_host_dns_name: True
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml
index 146b6d71c..33213ff5c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals-flatten.yml
@@ -1,17 +1,17 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-flatten_config_context: True
-flatten_custom_fields: True
-plurals: True
-interfaces: False
-services: False
-fetch_all: True
-group_names_raw: True
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+plurals: true
+interfaces: false
+services: false
+fetch_all: true
+group_names_raw: true
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml
index 244231062..1e53fd0bf 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory-plurals.yml
@@ -1,22 +1,22 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
validate_certs: false
-cache: True
+cache: true
cache_timeout: 3600
cache_plugin: jsonfile
cache_connection: /tmp/inventory_netbox
-config_context: True
-plurals: True
-interfaces: True
-services: True
+config_context: true
+plurals: true
+interfaces: true
+services: true
# Enough to fit only 2 devices, so tests chunking logic
max_uri_length: 80
-fetch_all: False
+fetch_all: false
group_by:
- sites
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory.yml
index 8ddaac090..a24cd7bbb 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v3.7/files/test-inventory.yml
@@ -1,14 +1,14 @@
---
plugin: netbox.netbox.nb_inventory
-api_endpoint: "http://localhost:32768"
+api_endpoint: http://localhost:32768
token: "0123456789abcdef0123456789abcdef01234567"
-validate_certs: False
+validate_certs: false
-config_context: True
-plurals: False
-interfaces: True
-virtual_disks: True
-services: True
+config_context: true
+plurals: false
+interfaces: true
+virtual_disks: true
+services: true
group_by:
- site
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/.gitignore b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/.gitignore
new file mode 100644
index 000000000..df76504d5
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/.gitignore
@@ -0,0 +1 @@
+runme_config
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/aliases b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/aliases
new file mode 100644
index 000000000..a3a2089b9
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/aliases
@@ -0,0 +1 @@
+# https://docs.ansible.com/ansible/devel/dev_guide/testing/sanity/integration-aliases.html
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/compare_inventory_json.py b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/compare_inventory_json.py
new file mode 100755
index 000000000..ccdf8bd52
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/compare_inventory_json.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+
+# Inspired by community.aws collection script_inventory_ec2 test
+# https://github.com/ansible-collections/community.aws/blob/master/tests/integration/targets/script_inventory_ec2/inventory_diff.py
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import argparse
+import json
+import sys
+from operator import itemgetter
+
+from deepdiff import DeepDiff
+
+# NetBox includes "created" and "last_updated" times on objects. These end up in the interfaces objects that are included verbatim from the NetBox API.
+# "url" may be different if local tests use a different host/port
+# Remove these from files saved in git as test data
+KEYS_REMOVE = frozenset(["created", "last_updated", "url"])
+
+# Ignore these when performing diffs as they will be different for each test run
+# (Was previously keys specific to NetBox 2.6)
+KEYS_IGNORE = frozenset()
+
+# Rack Groups became hierarchical in NetBox 2.8. Don't bother comparing against test data in NetBox 2.7
+KEYS_IGNORE_27 = frozenset(
+ [
+ "rack_groups", # host var
+ "rack_group_parent_rack_group", # group, group_names_raw = False
+ "parent_rack_group", # group, group_names_raw = True
+ ]
+)
+
+
+# Assume the object will not be recursive, as it originally came from JSON
+def remove_keys(obj, keys):
+ if isinstance(obj, dict):
+ keys_to_remove = keys.intersection(obj.keys())
+ for key in keys_to_remove:
+ del obj[key]
+
+ for key, value in obj.items():
+ remove_keys(value, keys)
+
+ elif isinstance(obj, list):
+ # Iterate over temporary copy, as we may remove items
+ for item in obj[:]:
+ if isinstance(item, str) and item in keys:
+ # List contains a string that we want to remove
+ # eg. a group name in list of groups
+ obj.remove(item)
+ remove_keys(item, keys)
+
+
+def sort_hostvar_arrays(obj):
+ meta = obj.get("_meta")
+ if not meta:
+ return
+
+ hostvars = meta.get("hostvars")
+ if not hostvars:
+ return
+
+ for _, host in hostvars.items(): # pylint: disable=disallowed-name
+ if interfaces := host.get("interfaces"):
+ host["interfaces"] = sorted(interfaces, key=itemgetter("id"))
+
+ if services := host.get("services"):
+ host["services"] = sorted(services, key=itemgetter("id"))
+
+
+def read_json(filename):
+ with open(filename, "r", encoding="utf-8") as file:
+ return json.loads(file.read())
+
+
+def write_json(filename, data):
+ with open(filename, "w", encoding="utf-8") as file:
+ json.dump(data, file, indent=4)
+
+
+def main():
+ parser = argparse.ArgumentParser(description="Diff Ansible inventory JSON output")
+ parser.add_argument(
+ "filename_a",
+ metavar="ORIGINAL.json",
+ type=str,
+ help="Original json to test against",
+ )
+ parser.add_argument(
+ "filename_b",
+ metavar="NEW.json",
+ type=str,
+ help="Newly generated json to compare against original",
+ )
+ parser.add_argument(
+ "--write",
+ action="store_true",
+ help=(
+ "When comparing files, various keys are removed. "
+ "This option will not compare the files, and instead writes ORIGINAL.json to NEW.json after removing these keys. "
+ "This is used to clean the test json files before saving to the git repo. "
+ "For example, this removes dates. "
+ ),
+ )
+ parser.add_argument(
+ "--netbox-version",
+ metavar="VERSION",
+ type=str,
+ help=(
+ "Apply comparison specific to NetBox version. "
+ "For example, rack_groups arrays will only contain a single item in v2.7, so are ignored in the comparison."
+ ),
+ )
+
+ args = parser.parse_args()
+
+ data_a = read_json(args.filename_a)
+
+ if args.write:
+ # When writing test data, only remove "remove_keys" that will change on every git commit.
+ # This makes diffs more easily readable to ensure changes to test data look correct.
+ remove_keys(data_a, KEYS_REMOVE)
+ sort_hostvar_arrays(data_a)
+ write_json(args.filename_b, data_a)
+
+ else:
+ data_b = read_json(args.filename_b)
+
+ # Ignore keys that we don't want to diff, in addition to the ones removed that change on every commit
+ keys = KEYS_REMOVE.union(KEYS_IGNORE)
+ remove_keys(data_a, keys)
+ remove_keys(data_b, keys)
+
+ sort_hostvar_arrays(data_a)
+ sort_hostvar_arrays(data_b)
+
+ # Perform the diff
+ result = DeepDiff(data_a, data_b, ignore_order=True)
+
+ if result:
+ # Dictionary is not empty - print differences
+ print(json.dumps(result, sort_keys=True, indent=4))
+ sys.exit(1)
+ else:
+ # Success, no differences
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main()
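For reference, this is what the remove_keys() pass defined above does to a small dict; the sample data here is purely illustrative, not taken from the fixtures:

sample = {
    "name": "test100",
    "created": "2024-05-13T19:51:41Z",
    "interfaces": [{"id": 3, "url": "http://localhost:32768/api/dcim/interfaces/3/"}],
}
remove_keys(sample, frozenset(["created", "last_updated", "url"]))
# sample is now {"name": "test100", "interfaces": [{"id": 3}]}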
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.json
new file mode 100644
index 000000000..27ac908c5
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.json
@@ -0,0 +1,1417 @@
+{
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [],
+ "manufacturer": "cisco",
+ "rack": "Test Rack Site 2",
+ "rack_role": "test-rack-role",
+ "regions": [],
+ "role": "core-switch",
+ "serial": "",
+ "services": [],
+ "site": "test-site2",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "nexus-parent",
+ "dns_name": "nexus.example.com",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.577667Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus Child One",
+ "id": 5,
+ "name": "Test Nexus Child One",
+ "url": "http://localhost:32768/api/dcim/devices/5/"
+ },
+ "display": "Ethernet2/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.12/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.876821Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.12/24",
+ "dns_name": "nexus.example.com",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 4,
+ "last_updated": "2024-05-13T19:51:41.876825Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/4/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.577671Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet2/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/2/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.564397Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "Ethernet1/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.11/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.784351Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.11/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 3,
+ "last_updated": "2024-05-13T19:51:41.784356Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/3/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.564404Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet1/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/1/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.659099Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.659103Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/6/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.463448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.463452Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.385426Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 11,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.385430Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/11/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.392103Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 12,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.392108Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/12/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.468448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": null,
+ "display": "ssh (TCP/22)",
+ "id": 4,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.468451Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ }
+ }
+ ],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "rack": "Test Rack",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB12345678",
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "config_context": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.606846Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet1",
+ "duplex": null,
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.1/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.771940Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.1/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "last_updated": "2024-05-13T19:51:41.771946Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.606851Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/3/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.617680Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet2",
+ "duplex": null,
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [
+ {
+ "address": "2001::1:1/64",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.778277Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "2001::1:1/64",
+ "dns_name": "",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "last_updated": "2024-05-13T19:51:41.778280Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.617684Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet2",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/4/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.648568Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.648573Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/5/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.443759Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.443763Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.449954Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T19:51:42.449957Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "disk": 170,
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.317118Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.317123Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.324758Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.324762Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.331470Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.331474Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/3/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.338401Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.338405Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.345785Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.345789Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/5/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.352485Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.352489Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/6/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.359111Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 7,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.359115Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/7/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.365791Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 8,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.365795Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/8/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.372298Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 9,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.372302Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/9/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.378872Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 10,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.378876Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/10/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "services": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "site_test_site2",
+ "region_other_region",
+ "region_parent_region",
+ "site_group_other_site_group",
+ "site_group_parent_site_group",
+ "rack_Test_Rack_Site_2",
+ "rack_role_test_rack_role",
+ "role_core_switch",
+ "device_type_cisco_test",
+ "manufacturer_cisco",
+ "status_active",
+ "device_type_nexus_parent",
+ "service_telnet",
+ "rack_Test_Rack",
+ "service_ssh",
+ "service_http",
+ "cluster_Test_Cluster_2",
+ "cluster_type_test_cluster_type",
+ "is_virtual",
+ "cluster_Test_Cluster",
+ "cluster_group_test_cluster_group"
+ ]
+ },
+ "cluster_Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "cluster_group_test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_type_test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "device_type_cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_type_nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "location_parent_rack_group": {
+ "children": [
+ "location_test_rack_group"
+ ]
+ },
+ "location_test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "manufacturer_cisco": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "rack_Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "rack_Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "rack_role_test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "region_parent_region": {
+ "children": [
+ "region_test_region"
+ ]
+ },
+ "region_test_region": {
+ "children": [
+ "site_test_site"
+ ]
+ },
+ "role_core_switch": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "service_http": {
+ "hosts": [
+ "test100"
+ ]
+ },
+ "service_ssh": {
+ "hosts": [
+ "test100",
+ "Test VM With Spaces"
+ ]
+ },
+ "service_telnet": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "site_group_parent_site_group": {
+ "children": [
+ "site_group_test_site_group"
+ ]
+ },
+ "site_test_site": {
+ "children": [
+ "location_parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "site_test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "status_active": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.yml
new file mode 100644
index 000000000..a19b60db2
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-bearer-token.yml
@@ -0,0 +1,32 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token:
+ type: Token
+ value: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+config_context: true
+plurals: false
+interfaces: true
+services: true
+
+group_by:
+ - site
+ - tenant
+ - rack
+ - location
+ - rack_role
+ - tag
+ - role
+ - device_type
+ - manufacturer
+ - platform
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - services
+ - status
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.json
new file mode 100644
index 000000000..68e493db8
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.json
@@ -0,0 +1,1233 @@
+{
+ "_meta": {
+ "hostvars": {
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "nexus-parent",
+ "dns_name": "nexus.example.com",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.577667Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus Child One",
+ "id": 5,
+ "name": "Test Nexus Child One",
+ "url": "http://localhost:32768/api/dcim/devices/5/"
+ },
+ "display": "Ethernet2/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.12/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.876821Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.12/24",
+ "dns_name": "nexus.example.com",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 4,
+ "last_updated": "2024-05-13T19:51:41.876825Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/4/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.577671Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet2/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/2/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.564397Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "Ethernet1/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.11/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.784351Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.11/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 3,
+ "last_updated": "2024-05-13T19:51:41.784356Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/3/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.564404Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet1/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/1/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.659099Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.659103Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/6/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.463448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.463452Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "rack": "Test Rack",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB12345678",
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "config_context": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.606846Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet1",
+ "duplex": null,
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.1/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.771940Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.1/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "last_updated": "2024-05-13T19:51:41.771946Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.606851Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/3/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.617680Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet2",
+ "duplex": null,
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [
+ {
+ "address": "2001::1:1/64",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.778277Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "2001::1:1/64",
+ "dns_name": "",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "last_updated": "2024-05-13T19:51:41.778280Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.617684Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet2",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/4/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.648568Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.648573Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/5/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.443759Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.443763Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.449954Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T19:51:42.449957Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "disk": 170,
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.317118Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.317123Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.324758Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.324762Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.331470Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.331474Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/3/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.338401Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.338405Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.345785Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.345789Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/5/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.352485Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.352489Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/6/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.359111Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 7,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.359115Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/7/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.365791Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 8,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.365795Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/8/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.372298Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 9,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.372302Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/9/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.378872Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 10,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.378876Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/10/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "site_test_site2",
+ "region_other_region",
+ "region_parent_region",
+ "site_group_other_site_group",
+ "site_group_parent_site_group",
+ "role_core_switch",
+ "device_type_nexus_parent",
+ "manufacturer_cisco",
+ "service_telnet",
+ "status_active",
+ "rack_Test_Rack",
+ "device_type_cisco_test",
+ "service_ssh",
+ "service_http",
+ "cluster_Test_Cluster",
+ "cluster_group_test_cluster_group",
+ "cluster_type_test_cluster_type",
+ "is_virtual"
+ ]
+ },
+ "cluster_Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_group_test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_type_test_cluster_type": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "device_type_cisco_test": {
+ "hosts": [
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_type_nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "location_parent_rack_group": {
+ "children": [
+ "location_test_rack_group"
+ ]
+ },
+ "location_test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "manufacturer_cisco": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "rack_Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "region_parent_region": {
+ "children": [
+ "region_test_region"
+ ]
+ },
+ "region_test_region": {
+ "children": [
+ "site_test_site"
+ ]
+ },
+ "role_core_switch": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "service_http": {
+ "hosts": [
+ "test100"
+ ]
+ },
+ "service_ssh": {
+ "hosts": [
+ "test100"
+ ]
+ },
+ "service_telnet": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "site_group_parent_site_group": {
+ "children": [
+ "site_group_test_site_group"
+ ]
+ },
+ "site_test_site": {
+ "children": [
+ "location_parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "status_active": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.yml
new file mode 100644
index 000000000..49f306a78
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2-filter.yml
@@ -0,0 +1,33 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+config_context: true
+plurals: false
+interfaces: true
+services: true
+
+group_by:
+ - site
+ - tenant
+ - rack
+ - location
+ - rack_role
+ - tag
+ - role
+ - device_type
+ - manufacturer
+ - platform
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - services
+ - status
+
+query_filters:
+ - site: "{{ 'TEST-SITE' | lower }}"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.json
new file mode 100644
index 000000000..d104c5cea
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.json
@@ -0,0 +1,384 @@
+{
+ "Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "is_virtual": false,
+ "locations": [],
+ "manufacturer": "cisco",
+ "rack": "Test Rack Site 2",
+ "rack_id": "1",
+ "rack_role": "test-rack-role",
+ "regions": [],
+ "role": "core-switch",
+ "serial": "",
+ "site": "test-site2",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "rack": "Test Rack",
+ "rack_id": "2",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB12345678",
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "VC1": {
+ "ansible_host": "nexus.example.com",
+ "custom_fields": {},
+ "device_type": "nexus-parent",
+ "dns_name": "nexus.example.com",
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "",
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "is_virtual": false,
+ "local_context_data": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB01234567",
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "disk": 170,
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "active": {
+ "hosts": [
+ "R1-Device",
+ "VC1",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "test_site2",
+ "other_region",
+ "parent_region",
+ "other_site_group",
+ "parent_site_group",
+ "Test_Rack_Site_2",
+ "test_rack_role",
+ "core_switch",
+ "cisco_test",
+ "cisco",
+ "active",
+ "nexus_parent",
+ "jinja_test_group",
+ "Test_Rack",
+ "Test_Cluster_2",
+ "test_cluster_type",
+ "is_virtual",
+ "Test_Cluster",
+ "test_cluster_group"
+ ]
+ },
+ "cisco": {
+ "hosts": [
+ "R1-Device",
+ "VC1",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "core_switch": {
+ "hosts": [
+ "R1-Device",
+ "VC1",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "jinja_test_group": {
+ "hosts": [
+ "TestDeviceR1",
+ "Test VM With Spaces"
+ ]
+ },
+ "nexus_parent": {
+ "hosts": [
+ "VC1"
+ ]
+ },
+ "parent_rack_group": {
+ "children": [
+ "test_rack_group"
+ ]
+ },
+ "parent_region": {
+ "children": [
+ "test_region"
+ ]
+ },
+ "parent_site_group": {
+ "children": [
+ "test_site_group"
+ ]
+ },
+ "test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "test_rack_group": {
+ "hosts": [
+ "VC1",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "test_region": {
+ "children": [
+ "test_site"
+ ]
+ },
+ "test_site": {
+ "children": [
+ "parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.yml
new file mode 100644
index 000000000..67f9c49be
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-jinja2.yml
@@ -0,0 +1,62 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+# Cache is not for performance of tests, but to test the caching option works
+# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
+cache: true
+cache_timeout: 3600
+cache_plugin: jsonfile
+cache_connection: /tmp/inventory_netbox
+
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
+
+group_by:
+ - site
+ - tenant
+ - rack
+ - location
+ - rack_role
+ - tag
+ - role
+ - device_type
+ - manufacturer
+ - platform
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - status
+
+query_filters: ""
+
+device_query_filters:
+ - role: "{{ 'CORE-SWITCH' | lower }}"
+
+vm_query_filters:
+ - cluster_type: "{{ 'TEST-CLUSTER-TYPE' | lower }}"
+
+# See Constructed for details
+# https://docs.ansible.com/ansible/latest/plugins/inventory/constructed.html
+
+compose:
+ rack_id: rack.id
+ ntp_servers: config_context.ntp_servers
+
+keyed_groups:
+ - prefix: rack
+ key: rack.name
+
+groups:
+ jinja_test_group: inventory_hostname.startswith('Test')
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.json
new file mode 100644
index 000000000..763de9f25
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.json
@@ -0,0 +1,449 @@
+{
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "manufacturers": [
+ "cisco"
+ ],
+ "rack_role": "test-rack-role",
+ "racks": [
+ "Test Rack Site 2"
+ ],
+ "regions": [],
+ "serial": "",
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site2"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "nexus-parent"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.463448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.463452Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [],
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.468448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": null,
+ "display": "ssh (TCP/22)",
+ "id": 4,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.468451Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ }
+ }
+ ],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "racks": [
+ "Test Rack"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB12345678",
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ }
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.443759Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.443763Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.449954Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T19:51:42.449957Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "disk": 170,
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [],
+ "services": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "all": {
+ "children": [
+ "ungrouped"
+ ]
+ },
+ "ungrouped": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.yml
new file mode 100644
index 000000000..259c3934d
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-legacy.yml
@@ -0,0 +1,10 @@
+---
+# To generate the json result, I checked out nb_inventory.py from the v0.2.0 release 2d6894b,
+# and then ran it against this inventory with the latest test data.
+
+# This checks that substantial changes to the inventory plugin do not diverge from the default output that existing users rely on.
+
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
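
As a rough sketch of the comparison the comments above describe (assuming a NetBox instance seeded with the integration test data at http://localhost:32768, the netbox.netbox collection installed, and illustrative file names; not part of this change), the committed JSON fixture can be regenerated and diffed with the real ansible-inventory CLI:

#!/usr/bin/env python3
"""Sketch: regenerate the legacy inventory output and compare it to the committed fixture."""
import json
import subprocess
import sys

INVENTORY_CFG = "test-inventory-legacy.yml"   # the config file added above
EXPECTED_JSON = "test-inventory-legacy.json"  # assumed name of the committed fixture

# Render the inventory exactly as Ansible would see it.
raw = subprocess.check_output(
    ["ansible-inventory", "-i", INVENTORY_CFG, "--list"],
    text=True,
)
actual = json.loads(raw)

with open(EXPECTED_JSON) as fh:
    expected = json.load(fh)

# Note: the fixtures embed created/last_updated timestamps from the data load,
# so an exact match only holds against the same seeded database.
if actual != expected:
    sys.exit("inventory output diverged from the committed fixture")
print("inventory output matches the fixture")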
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.json
new file mode 100644
index 000000000..4e4190074
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.json
@@ -0,0 +1,1447 @@
+{
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "interfaces": [],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "regions": [],
+ "serial": "",
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site2"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "nexus-parent"
+ ],
+ "dns_name": "nexus.example.com",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.577667Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus Child One",
+ "id": 5,
+ "name": "Test Nexus Child One",
+ "url": "http://localhost:32768/api/dcim/devices/5/"
+ },
+ "display": "Ethernet2/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.12/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.876821Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.12/24",
+ "dns_name": "nexus.example.com",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 4,
+ "last_updated": "2024-05-13T19:51:41.876825Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/4/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.577671Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet2/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/2/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.564397Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "Ethernet1/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.11/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.784351Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.11/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 3,
+ "last_updated": "2024-05-13T19:51:41.784356Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/3/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.564404Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet1/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/1/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.659099Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.659103Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/6/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.463448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.463452Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.385426Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 11,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.385430Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/11/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.392103Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 12,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.392108Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/12/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "regions": [],
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.468448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": null,
+ "display": "ssh (TCP/22)",
+ "id": 4,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.468451Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ }
+ }
+ ],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "interfaces": [],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB12345678",
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "config_context": [
+ {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ }
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.606846Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet1",
+ "duplex": null,
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.1/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.771940Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.1/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "last_updated": "2024-05-13T19:51:41.771946Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.606851Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/3/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.617680Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet2",
+ "duplex": null,
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [
+ {
+ "address": "2001::1:1/64",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.778277Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "2001::1:1/64",
+ "dns_name": "",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "last_updated": "2024-05-13T19:51:41.778280Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.617684Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet2",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/4/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.648568Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.648573Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/5/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ }
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.443759Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.443763Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.449954Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T19:51:42.449957Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "disk": 170,
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.317118Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.317123Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.324758Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.324762Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.331470Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.331474Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/3/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.338401Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.338405Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.345785Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.345789Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/5/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.352485Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.352489Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/6/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.359111Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 7,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.359115Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/7/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.365791Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 8,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.365795Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/8/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.372298Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 9,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.372302Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/9/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.378872Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 10,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.378876Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/10/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "regions": [],
+ "services": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "sites_test_site2",
+ "region_other_region",
+ "region_parent_region",
+ "site_group_other_site_group",
+ "site_group_parent_site_group",
+ "device_roles_core_switch",
+ "device_types_cisco_test",
+ "manufacturers_cisco",
+ "status_active",
+ "device_types_nexus_parent",
+ "cluster_Test_Cluster_2",
+ "cluster_type_test_cluster_type",
+ "is_virtual",
+ "cluster_Test_Cluster",
+ "cluster_group_test_cluster_group"
+ ]
+ },
+ "cluster_Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "cluster_group_test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_type_test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "device_roles_core_switch": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_types_cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_types_nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "location_parent_rack_group": {
+ "children": [
+ "location_test_rack_group"
+ ]
+ },
+ "location_test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "manufacturers_cisco": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "region_parent_region": {
+ "children": [
+ "region_test_region"
+ ]
+ },
+ "region_test_region": {
+ "children": [
+ "sites_test_site"
+ ]
+ },
+ "site_group_parent_site_group": {
+ "children": [
+ "site_group_test_site_group"
+ ]
+ },
+ "sites_test_site": {
+ "children": [
+ "location_parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "sites_test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "status_active": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.yml
new file mode 100644
index 000000000..f21f3ca62
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-noracks.yml
@@ -0,0 +1,28 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+config_context: true
+plurals: true
+interfaces: true
+services: true
+racks: false
+
+group_by:
+ - sites
+ - tenants
+ - location
+ - tags
+ - device_roles
+ - device_types
+ - manufacturers
+ - platforms
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - status
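
With racks: false set above (and no rack entry in group_by), the test-inventory-noracks.json fixture added earlier in this change should expose neither rack-derived groups nor per-host rack variables. A minimal, illustrative sanity check (not part of the test target) is:

import json

# Illustrative check: with "racks: false" the committed noracks fixture
# should carry no rack-derived data at all.
with open("test-inventory-noracks.json") as fh:
    inv = json.load(fh)

rack_groups = [g for g in inv if g != "_meta" and g.startswith("rack")]
rack_vars = [
    host
    for host, hv in inv["_meta"]["hostvars"].items()
    if {"rack", "racks", "rack_role"} & hv.keys()
]

assert not rack_groups, f"unexpected rack groups: {rack_groups}"
assert not rack_vars, f"unexpected rack hostvars on: {rack_vars}"
print("fixture contains no rack groups or rack hostvars")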
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.json
new file mode 100644
index 000000000..e709203f4
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.json
@@ -0,0 +1,1372 @@
+{
+ "Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [],
+ "manufacturer": "cisco",
+ "rack": "Test Rack Site 2",
+ "rack_role": "test-rack-role",
+ "regions": [],
+ "role": "core-switch",
+ "serial": "",
+ "services": [],
+ "site": "test-site2",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "device_type": "nexus-parent",
+ "dns_name": "nexus.example.com",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.096176Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus Child One",
+ "id": 5,
+ "name": "Test Nexus Child One",
+ "url": "http://localhost:32768/api/dcim/devices/5/"
+ },
+ "display": "Ethernet2/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.12/24",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.310118Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.12/24",
+ "dns_name": "nexus.example.com",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 4,
+ "last_updated": "2024-05-13T18:42:37.310121Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/4/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.096180Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet2/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/2/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.082569Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "Ethernet1/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.11/24",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.304490Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.11/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 3,
+ "last_updated": "2024-05-13T18:42:37.304494Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/3/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.082575Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet1/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/1/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.177570Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.177574Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/6/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.967206Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T18:42:37.967211Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.888581Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 11,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.888585Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/11/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.895332Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 12,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.895336Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/12/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.972348Z",
+ "custom_fields": {},
+ "description": "",
+ "device": null,
+ "display": "ssh (TCP/22)",
+ "id": 4,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T18:42:37.972351Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ }
+ }
+ ],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "rack": "Test Rack",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB12345678",
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "device_type": "cisco-test",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.126588Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet1",
+ "duplex": null,
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.1/24",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.291898Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.1/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "last_updated": "2024-05-13T18:42:37.291904Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.126594Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/3/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.137551Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet2",
+ "duplex": null,
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [
+ {
+ "address": "2001::1:1/64",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.298655Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "2001::1:1/64",
+ "dns_name": "",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "last_updated": "2024-05-13T18:42:37.298668Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.137554Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet2",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/4/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.167058Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.167063Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/5/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "ntp_servers": [
+ "pool.ntp.org"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.947125Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T18:42:37.947130Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.953341Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T18:42:37.953344Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "disk": 170,
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.819447Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.819452Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.827324Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.827329Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.835002Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.835006Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/3/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.841967Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.841971Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.848584Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.848588Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/5/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.855261Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.855265Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/6/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.862048Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 7,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.862052Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/7/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.868723Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 8,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.868727Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/8/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.875318Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 9,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.875321Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/9/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.881944Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 10,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.881948Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/10/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "services": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "active": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "test_site2",
+ "other_region",
+ "parent_region",
+ "other_site_group",
+ "parent_site_group",
+ "Test_Rack_Site_2",
+ "test_rack_role",
+ "core_switch",
+ "cisco_test",
+ "cisco",
+ "active",
+ "nexus_parent",
+ "Test_Rack",
+ "Test_Cluster_2",
+ "test_cluster_type",
+ "is_virtual",
+ "Test_Cluster",
+ "test_cluster_group"
+ ]
+ },
+ "cisco": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "core_switch": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "parent_rack_group": {
+ "children": [
+ "test_rack_group"
+ ]
+ },
+ "parent_region": {
+ "children": [
+ "test_region"
+ ]
+ },
+ "parent_site_group": {
+ "children": [
+ "test_site_group"
+ ]
+ },
+ "test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "test_region": {
+ "children": [
+ "test_site"
+ ]
+ },
+ "test_site": {
+ "children": [
+ "parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ }
+} \ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.yml
new file mode 100644
index 000000000..73d6beec9
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options-flatten.yml
@@ -0,0 +1,41 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+# Use cache on this test to make sure interfaces are tested via the cache
+cache: true
+cache_timeout: 3600
+cache_plugin: jsonfile
+cache_connection: /tmp/inventory_netbox
+
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+flatten_local_context_data: true
+plurals: false
+interfaces: true
+services: true
+fetch_all: false
+max_uri_length: 0
+group_names_raw: true
+
+group_by:
+ - site
+ - tenant
+ - rack
+ - location
+ - rack_role
+ - tag
+ - role
+ - device_type
+ - manufacturer
+ - platform
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - status
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.json
new file mode 100644
index 000000000..d104c5cea
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.json
@@ -0,0 +1,384 @@
+{
+ "Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "is_virtual": false,
+ "locations": [],
+ "manufacturer": "cisco",
+ "rack": "Test Rack Site 2",
+ "rack_id": "1",
+ "rack_role": "test-rack-role",
+ "regions": [],
+ "role": "core-switch",
+ "serial": "",
+ "site": "test-site2",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "rack": "Test Rack",
+ "rack_id": "2",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB12345678",
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "VC1": {
+ "ansible_host": "nexus.example.com",
+ "custom_fields": {},
+ "device_type": "nexus-parent",
+ "dns_name": "nexus.example.com",
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "",
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "is_virtual": false,
+ "local_context_data": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB01234567",
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "disk": 170,
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "custom_fields": {},
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "active": {
+ "hosts": [
+ "R1-Device",
+ "VC1",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "test_site2",
+ "other_region",
+ "parent_region",
+ "other_site_group",
+ "parent_site_group",
+ "Test_Rack_Site_2",
+ "test_rack_role",
+ "core_switch",
+ "cisco_test",
+ "cisco",
+ "active",
+ "nexus_parent",
+ "jinja_test_group",
+ "Test_Rack",
+ "Test_Cluster_2",
+ "test_cluster_type",
+ "is_virtual",
+ "Test_Cluster",
+ "test_cluster_group"
+ ]
+ },
+ "cisco": {
+ "hosts": [
+ "R1-Device",
+ "VC1",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "core_switch": {
+ "hosts": [
+ "R1-Device",
+ "VC1",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "jinja_test_group": {
+ "hosts": [
+ "TestDeviceR1",
+ "Test VM With Spaces"
+ ]
+ },
+ "nexus_parent": {
+ "hosts": [
+ "VC1"
+ ]
+ },
+ "parent_rack_group": {
+ "children": [
+ "test_rack_group"
+ ]
+ },
+ "parent_region": {
+ "children": [
+ "test_region"
+ ]
+ },
+ "parent_site_group": {
+ "children": [
+ "test_site_group"
+ ]
+ },
+ "test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "test_rack_group": {
+ "hosts": [
+ "VC1",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "test_region": {
+ "children": [
+ "test_site"
+ ]
+ },
+ "test_site": {
+ "children": [
+ "parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ }
+} \ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.yml
new file mode 100644
index 000000000..f6597c661
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-options.yml
@@ -0,0 +1,62 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+# Cache is not used for test performance, but to test that the caching option works
+# Also set on test-inventory-plurals.yml so that we actually hit the cache on one of these runs
+cache: true
+cache_timeout: 3600
+cache_plugin: jsonfile
+cache_connection: /tmp/inventory_netbox
+
+config_context: false
+plurals: false
+interfaces: false
+services: false
+group_names_raw: true
+virtual_chassis_name: true
+dns_name: true
+ansible_host_dns_name: true
+
+group_by:
+ - site
+ - tenant
+ - rack
+ - location
+ - rack_role
+ - tag
+ - role
+ - device_type
+ - manufacturer
+ - platform
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - status
+
+query_filters: ""
+
+device_query_filters:
+ - role: core-switch
+
+vm_query_filters:
+ - cluster_type: test-cluster-type
+
+# See the constructed inventory plugin documentation for details
+# https://docs.ansible.com/ansible/latest/plugins/inventory/constructed.html
+
+compose:
+ rack_id: rack.id
+ ntp_servers: config_context.ntp_servers
+
+keyed_groups:
+ - prefix: rack
+ key: rack.name
+
+groups:
+ jinja_test_group: inventory_hostname.startswith('Test')
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.json
new file mode 100644
index 000000000..12e096f53
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.json
@@ -0,0 +1,440 @@
+{
+ "Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "manufacturers": [
+ "cisco"
+ ],
+ "rack_role": "test-rack-role",
+ "racks": [
+ "Test Rack Site 2"
+ ],
+ "regions": [],
+ "serial": "",
+ "site_groups": [],
+ "sites": [
+ "test-site2"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "nexus-parent"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "",
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "racks": [
+ "Test Rack"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB12345678",
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ }
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "ntp_servers": [
+ "pool.ntp.org"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB01234567",
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "disk": 170,
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "active": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "test_site2",
+ "other_region",
+ "parent_region",
+ "other_site_group",
+ "parent_site_group",
+ "Test_Rack_Site_2",
+ "test_rack_role",
+ "core_switch",
+ "cisco_test",
+ "cisco",
+ "active",
+ "nexus_parent",
+ "Test_Rack",
+ "Test_Cluster_2",
+ "test_cluster_type",
+ "is_virtual",
+ "Test_Cluster",
+ "test_cluster_group"
+ ]
+ },
+ "cisco": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "core_switch": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "parent_rack_group": {
+ "children": [
+ "test_rack_group"
+ ]
+ },
+ "parent_region": {
+ "children": [
+ "test_region"
+ ]
+ },
+ "parent_site_group": {
+ "children": [
+ "test_site_group"
+ ]
+ },
+ "test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "test_region": {
+ "children": [
+ "test_site"
+ ]
+ },
+ "test_site": {
+ "children": [
+ "parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ }
+} \ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.yml
new file mode 100644
index 000000000..33213ff5c
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals-flatten.yml
@@ -0,0 +1,33 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+config_context: true
+flatten_config_context: true
+flatten_custom_fields: true
+plurals: true
+interfaces: false
+services: false
+fetch_all: true
+group_names_raw: true
+
+group_by:
+ - sites
+ - tenants
+ - racks
+ - location
+ - rack_role
+ - tags
+ - device_roles
+ - device_types
+ - manufacturers
+ - platforms
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - status
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.json
new file mode 100644
index 000000000..4ee185daf
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.json
@@ -0,0 +1,1491 @@
+{
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "interfaces": [],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "manufacturers": [
+ "cisco"
+ ],
+ "rack_role": "test-rack-role",
+ "racks": [
+ "Test Rack Site 2"
+ ],
+ "regions": [],
+ "serial": "",
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site2"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "nexus-parent"
+ ],
+ "dns_name": "nexus.example.com",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.096176Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus Child One",
+ "id": 5,
+ "name": "Test Nexus Child One",
+ "url": "http://localhost:32768/api/dcim/devices/5/"
+ },
+ "display": "Ethernet2/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.12/24",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.310118Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.12/24",
+ "dns_name": "nexus.example.com",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 4,
+ "last_updated": "2024-05-13T18:42:37.310121Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/4/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.096180Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet2/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/2/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.082569Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "Ethernet1/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.11/24",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.304490Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.11/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 3,
+ "last_updated": "2024-05-13T18:42:37.304494Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/3/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.082575Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet1/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/1/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.177570Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.177574Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/6/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.967206Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T18:42:37.967211Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.888581Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 11,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.888585Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/11/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.895332Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 12,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.895336Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/12/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [],
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.972348Z",
+ "custom_fields": {},
+ "description": "",
+ "device": null,
+ "display": "ssh (TCP/22)",
+ "id": 4,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T18:42:37.972351Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ }
+ }
+ ],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "TestDeviceR1": {
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "interfaces": [],
+ "is_virtual": false,
+ "local_context_data": [
+ null
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "racks": [
+ "Test Rack"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB12345678",
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "config_context": [
+ {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ }
+ ],
+ "custom_fields": {},
+ "device_roles": [
+ "core-switch"
+ ],
+ "device_types": [
+ "cisco-test"
+ ],
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.126588Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet1",
+ "duplex": null,
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.1/24",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.291898Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.1/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "last_updated": "2024-05-13T18:42:37.291904Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.126594Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/3/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T18:42:37.137551Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet2",
+ "duplex": null,
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [
+ {
+ "address": "2001::1:1/64",
+ "comments": "",
+ "created": "2024-05-13T18:42:37.298655Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "2001::1:1/64",
+ "dns_name": "",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "last_updated": "2024-05-13T18:42:37.298668Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.137554Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet2",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/4/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.167058Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T18:42:37.167063Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/5/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": [
+ {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ }
+ ],
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturers": [
+ "cisco"
+ ],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.947125Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T18:42:37.947130Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T18:42:37.953341Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T18:42:37.953344Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "disk": 170,
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.819447Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.819452Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.827324Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.827329Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.835002Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.835006Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/3/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.841967Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.841971Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.848584Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.848588Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/5/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.855261Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.855265Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/6/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.862048Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 7,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.862052Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/7/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.868723Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 8,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.868727Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/8/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.875318Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 9,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.875321Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/9/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T18:42:37.881944Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 10,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T18:42:37.881948Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/10/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site_groups": [],
+ "sites": [
+ "test-site"
+ ],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": [
+ {}
+ ],
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "local_context_data": [
+ null
+ ],
+ "locations": [],
+ "regions": [],
+ "services": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": []
+ }
+ }
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "sites_test_site2",
+ "region_other_region",
+ "region_parent_region",
+ "site_group_other_site_group",
+ "site_group_parent_site_group",
+ "racks_Test_Rack_Site_2",
+ "rack_role_test_rack_role",
+ "device_roles_core_switch",
+ "device_types_cisco_test",
+ "manufacturers_cisco",
+ "status_active",
+ "device_types_nexus_parent",
+ "racks_Test_Rack",
+ "cluster_Test_Cluster_2",
+ "cluster_type_test_cluster_type",
+ "is_virtual",
+ "cluster_Test_Cluster",
+ "cluster_group_test_cluster_group"
+ ]
+ },
+ "cluster_Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "cluster_group_test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_type_test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "device_roles_core_switch": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_types_cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_types_nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "location_parent_rack_group": {
+ "children": [
+ "location_test_rack_group"
+ ]
+ },
+ "location_test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "manufacturers_cisco": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "rack_role_test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "racks_Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "racks_Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "region_parent_region": {
+ "children": [
+ "region_test_region"
+ ]
+ },
+ "region_test_region": {
+ "children": [
+ "sites_test_site"
+ ]
+ },
+ "site_group_parent_site_group": {
+ "children": [
+ "site_group_test_site_group"
+ ]
+ },
+ "sites_test_site": {
+ "children": [
+ "location_parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "sites_test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "status_active": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ }
+} \ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.yml
new file mode 100644
index 000000000..1e53fd0bf
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory-plurals.yml
@@ -0,0 +1,38 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+cache: true
+cache_timeout: 3600
+cache_plugin: jsonfile
+cache_connection: /tmp/inventory_netbox
+
+config_context: true
+plurals: true
+interfaces: true
+services: true
+
+# Enough to fit only 2 devices, so this tests the chunking logic
+max_uri_length: 80
+fetch_all: false
+
+group_by:
+ - sites
+ - tenants
+ - racks
+ - location
+ - rack_role
+ - tags
+ - device_roles
+ - device_types
+ - manufacturers
+ - platforms
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - status
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.json b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.json
new file mode 100644
index 000000000..4063f3f9c
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.json
@@ -0,0 +1,1505 @@
+{
+ "_meta": {
+ "hostvars": {
+ "R1-Device": {
+ "asset_tag": "345678901",
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [],
+ "manufacturer": "cisco",
+ "rack": "Test Rack Site 2",
+ "rack_role": "test-rack-role",
+ "regions": [],
+ "role": "core-switch",
+ "serial": "",
+ "services": [],
+ "site": "test-site2",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "Test Nexus One": {
+ "ansible_host": "172.16.180.12",
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "nexus-parent",
+ "dns_name": "nexus.example.com",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.577667Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus Child One",
+ "id": 5,
+ "name": "Test Nexus Child One",
+ "url": "http://localhost:32768/api/dcim/devices/5/"
+ },
+ "display": "Ethernet2/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.12/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.876821Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.12/24",
+ "dns_name": "nexus.example.com",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 4,
+ "last_updated": "2024-05-13T19:51:41.876825Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/4/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.577671Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet2/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/2/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.564397Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "Ethernet1/1",
+ "duplex": null,
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.11/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.784351Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.11/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 3,
+ "last_updated": "2024-05-13T19:51:41.784356Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/3/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.564404Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "Ethernet1/1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/1/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.659099Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.659103Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/6/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "primary_ip4": "172.16.180.12",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.463448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "Test Nexus One",
+ "id": 4,
+ "name": "Test Nexus One",
+ "url": "http://localhost:32768/api/dcim/devices/4/"
+ },
+ "display": "telnet (TCP/23)",
+ "id": 3,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.463452Z",
+ "name": "telnet",
+ "ports": [
+ 23
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/3/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "Test VM With Spaces": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.385426Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 11,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.385430Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/11/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.392103Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 12,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.392108Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/12/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.468448Z",
+ "custom_fields": {},
+ "description": "",
+ "device": null,
+ "display": "ssh (TCP/22)",
+ "id": 4,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.468451Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "Test VM With Spaces",
+ "id": 6,
+ "name": "Test VM With Spaces",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/6/"
+ }
+ }
+ ],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "TestDeviceR1": {
+ "config_context": {},
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [],
+ "is_virtual": false,
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "rack": "Test Rack",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB12345678",
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "test100": {
+ "asset_tag": "123456789",
+ "config_context": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "custom_fields": {},
+ "device_type": "cisco-test",
+ "interfaces": [
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.606846Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet1",
+ "duplex": null,
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [
+ {
+ "address": "172.16.180.1/24",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.771940Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "172.16.180.1/24",
+ "dns_name": "",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "last_updated": "2024-05-13T19:51:41.771946Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.606851Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/3/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 1,
+ "created": "2024-05-13T19:51:41.617680Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "GigabitEthernet2",
+ "duplex": null,
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [
+ {
+ "address": "2001::1:1/64",
+ "comments": "",
+ "created": "2024-05-13T19:51:41.778277Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "2001::1:1/64",
+ "dns_name": "",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "last_updated": "2024-05-13T19:51:41.778280Z",
+ "nat_inside": null,
+ "nat_outside": [],
+ "role": null,
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "tenant": null,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/",
+ "vrf": null
+ }
+ ],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.617684Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "GigabitEthernet2",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "1000BASE-T (1GE)",
+ "value": "1000base-t"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/4/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ },
+ {
+ "_occupied": false,
+ "bridge": null,
+ "cable": null,
+ "cable_end": "",
+ "connected_endpoints": null,
+ "connected_endpoints_reachable": null,
+ "connected_endpoints_type": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:41.648568Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "wlink1",
+ "duplex": null,
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "label": "",
+ "lag": null,
+ "last_updated": "2024-05-13T19:51:41.648573Z",
+ "link_peers": [],
+ "link_peers_type": null,
+ "mac_address": null,
+ "mark_connected": false,
+ "mgmt_only": false,
+ "mode": null,
+ "module": null,
+ "mtu": null,
+ "name": "wlink1",
+ "parent": null,
+ "poe_mode": null,
+ "poe_type": null,
+ "rf_channel": null,
+ "rf_channel_frequency": null,
+ "rf_channel_width": null,
+ "rf_role": null,
+ "speed": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "tx_power": null,
+ "type": {
+ "label": "IEEE 802.11a",
+ "value": "ieee802.11a"
+ },
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/dcim/interfaces/5/",
+ "vdcs": [],
+ "vrf": null,
+ "wireless_lans": [],
+ "wireless_link": null,
+ "wwn": null
+ }
+ ],
+ "is_virtual": false,
+ "local_context_data": {
+ "ntp_servers": [
+ "pool.ntp.org"
+ ]
+ },
+ "locations": [
+ "test-rack-group",
+ "parent-rack-group"
+ ],
+ "manufacturer": "cisco",
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "role": "core-switch",
+ "serial": "FAB01234567",
+ "services": [
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.443759Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "ssh (TCP/22)",
+ "id": 1,
+ "ipaddresses": [],
+ "last_updated": "2024-05-13T19:51:42.443763Z",
+ "name": "ssh",
+ "ports": [
+ 22
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/1/",
+ "virtual_machine": null
+ },
+ {
+ "comments": "",
+ "created": "2024-05-13T19:51:42.449954Z",
+ "custom_fields": {},
+ "description": "",
+ "device": {
+ "description": "",
+ "display": "test100 (123456789)",
+ "id": 1,
+ "name": "test100",
+ "url": "http://localhost:32768/api/dcim/devices/1/"
+ },
+ "display": "http (TCP/80)",
+ "id": 2,
+ "ipaddresses": [
+ {
+ "address": "172.16.180.1/24",
+ "description": "",
+ "display": "172.16.180.1/24",
+ "family": {
+ "label": "IPv4",
+ "value": 4
+ },
+ "id": 1,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/1/"
+ },
+ {
+ "address": "2001::1:1/64",
+ "description": "",
+ "display": "2001::1:1/64",
+ "family": {
+ "label": "IPv6",
+ "value": 6
+ },
+ "id": 2,
+ "url": "http://localhost:32768/api/ipam/ip-addresses/2/"
+ }
+ ],
+ "last_updated": "2024-05-13T19:51:42.449957Z",
+ "name": "http",
+ "ports": [
+ 80
+ ],
+ "protocol": {
+ "label": "TCP",
+ "value": "tcp"
+ },
+ "tags": [],
+ "url": "http://localhost:32768/api/ipam/services/2/",
+ "virtual_machine": null
+ }
+ ],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": [
+ {
+ "created": "2024-05-13T19:51:42.274418Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "disk1",
+ "id": 1,
+ "last_updated": "2024-05-13T19:51:42.274425Z",
+ "name": "disk1",
+ "size": 60,
+ "tags": [],
+ "url": "http://localhost:32768/api/virtualization/virtual-disks/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ }
+ },
+ {
+ "created": "2024-05-13T19:51:42.279072Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "disk2",
+ "id": 2,
+ "last_updated": "2024-05-13T19:51:42.279077Z",
+ "name": "disk2",
+ "size": 110,
+ "tags": [],
+ "url": "http://localhost:32768/api/virtualization/virtual-disks/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ }
+ }
+ ]
+ },
+ "test100-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "disk": 170,
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.317118Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 1,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.317123Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.324758Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 2,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.324762Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.331470Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 3,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.331474Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/3/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.338401Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 4,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.338405Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/4/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.345785Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 5,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.345789Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/5/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": [
+ {
+ "created": "2024-05-13T19:51:42.274418Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "disk1",
+ "id": 1,
+ "last_updated": "2024-05-13T19:51:42.274425Z",
+ "name": "disk1",
+ "size": 60,
+ "tags": [],
+ "url": "http://localhost:32768/api/virtualization/virtual-disks/1/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ }
+ },
+ {
+ "created": "2024-05-13T19:51:42.279072Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "disk2",
+ "id": 2,
+ "last_updated": "2024-05-13T19:51:42.279077Z",
+ "name": "disk2",
+ "size": 110,
+ "tags": [],
+ "url": "http://localhost:32768/api/virtualization/virtual-disks/2/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test100-vm",
+ "id": 1,
+ "name": "test100-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/1/"
+ }
+ }
+ ]
+ },
+ "test101-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.352485Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth0",
+ "enabled": true,
+ "id": 6,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.352489Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth0",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/6/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.359111Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth1",
+ "enabled": true,
+ "id": 7,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.359115Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth1",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/7/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.365791Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth2",
+ "enabled": true,
+ "id": 8,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.365795Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth2",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/8/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.372298Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth3",
+ "enabled": true,
+ "id": 9,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.372302Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth3",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/9/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ },
+ {
+ "bridge": null,
+ "count_fhrp_groups": 0,
+ "count_ipaddresses": 0,
+ "created": "2024-05-13T19:51:42.378872Z",
+ "custom_fields": {},
+ "description": "",
+ "display": "Eth4",
+ "enabled": true,
+ "id": 10,
+ "ip_addresses": [],
+ "l2vpn_termination": null,
+ "last_updated": "2024-05-13T19:51:42.378876Z",
+ "mac_address": null,
+ "mode": null,
+ "mtu": null,
+ "name": "Eth4",
+ "parent": null,
+ "tagged_vlans": [],
+ "tags": [],
+ "untagged_vlan": null,
+ "url": "http://localhost:32768/api/virtualization/interfaces/10/",
+ "virtual_machine": {
+ "description": "",
+ "display": "test101-vm",
+ "id": 2,
+ "name": "test101-vm",
+ "url": "http://localhost:32768/api/virtualization/virtual-machines/2/"
+ },
+ "vrf": null
+ }
+ ],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "test102-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "test103-vm": {
+ "cluster": "Test Cluster",
+ "cluster_group": "test-cluster-group",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [
+ "test-region",
+ "parent-region"
+ ],
+ "services": [],
+ "site": "test-site",
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ },
+ "test104-vm": {
+ "cluster": "Test Cluster 2",
+ "cluster_type": "test-cluster-type",
+ "config_context": {},
+ "custom_fields": {},
+ "interfaces": [],
+ "is_virtual": true,
+ "locations": [],
+ "regions": [],
+ "services": [],
+ "site_groups": [],
+ "status": {
+ "label": "Active",
+ "value": "active"
+ },
+ "tags": [],
+ "virtual_disks": []
+ }
+ }
+ },
+ "all": {
+ "children": [
+ "ungrouped",
+ "site_test_site2",
+ "region_other_region",
+ "region_parent_region",
+ "site_group_other_site_group",
+ "site_group_parent_site_group",
+ "rack_Test_Rack_Site_2",
+ "rack_role_test_rack_role",
+ "role_core_switch",
+ "device_type_cisco_test",
+ "manufacturer_cisco",
+ "status_active",
+ "device_type_nexus_parent",
+ "service_telnet",
+ "rack_Test_Rack",
+ "service_ssh",
+ "service_http",
+ "cluster_Test_Cluster_2",
+ "cluster_type_test_cluster_type",
+ "is_virtual",
+ "cluster_Test_Cluster",
+ "cluster_group_test_cluster_group"
+ ]
+ },
+ "cluster_Test_Cluster": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_Test_Cluster_2": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test104-vm"
+ ]
+ },
+ "cluster_group_test_cluster_group": {
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "cluster_type_test_cluster_type": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "device_type_cisco_test": {
+ "hosts": [
+ "R1-Device",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "device_type_nexus_parent": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "is_virtual": {
+ "hosts": [
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ },
+ "location_parent_rack_group": {
+ "children": [
+ "location_test_rack_group"
+ ]
+ },
+ "location_test_rack_group": {
+ "hosts": [
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "manufacturer_cisco": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "rack_Test_Rack": {
+ "hosts": [
+ "TestDeviceR1"
+ ]
+ },
+ "rack_Test_Rack_Site_2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "rack_role_test_rack_role": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "region_parent_region": {
+ "children": [
+ "region_test_region"
+ ]
+ },
+ "region_test_region": {
+ "children": [
+ "site_test_site"
+ ]
+ },
+ "role_core_switch": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100"
+ ]
+ },
+ "service_http": {
+ "hosts": [
+ "test100"
+ ]
+ },
+ "service_ssh": {
+ "hosts": [
+ "test100",
+ "Test VM With Spaces"
+ ]
+ },
+ "service_telnet": {
+ "hosts": [
+ "Test Nexus One"
+ ]
+ },
+ "site_group_parent_site_group": {
+ "children": [
+ "site_group_test_site_group"
+ ]
+ },
+ "site_test_site": {
+ "children": [
+ "location_parent_rack_group"
+ ],
+ "hosts": [
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm"
+ ]
+ },
+ "site_test_site2": {
+ "hosts": [
+ "R1-Device"
+ ]
+ },
+ "status_active": {
+ "hosts": [
+ "R1-Device",
+ "Test Nexus One",
+ "TestDeviceR1",
+ "test100",
+ "Test VM With Spaces",
+ "test100-vm",
+ "test101-vm",
+ "test102-vm",
+ "test103-vm",
+ "test104-vm"
+ ]
+ }
+}
\ No newline at end of file
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.yml b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.yml
new file mode 100644
index 000000000..a24cd7bbb
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/files/test-inventory.yml
@@ -0,0 +1,31 @@
+---
+plugin: netbox.netbox.nb_inventory
+api_endpoint: http://localhost:32768
+token: "0123456789abcdef0123456789abcdef01234567"
+validate_certs: false
+
+config_context: true
+plurals: false
+interfaces: true
+virtual_disks: true
+services: true
+
+group_by:
+ - site
+ - tenant
+ - rack
+ - location
+ - rack_role
+ - tag
+ - role
+ - device_type
+ - manufacturer
+ - platform
+ - region
+ - site_group
+ - cluster
+ - cluster_group
+ - cluster_type
+ - is_virtual
+ - services
+ - status
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme.sh b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme.sh
new file mode 100755
index 000000000..18579c472
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env bash
+
+set -o xtrace # Print commands as they're run
+set -o errexit # abort on nonzero exit status
+set -o nounset # abort on unbound variable
+set -o pipefail # don't hide errors within pipes
+
+# Directory of this script
+SCRIPT_DIR="$( dirname "${BASH_SOURCE[0]}" )"
+RUNME_CONFIG="$SCRIPT_DIR/runme_config"
+INVENTORIES_DIR="$SCRIPT_DIR/files"
+
+# Load runme_config, if it exists - the only way to pass environment variables when run through ansible-test
+if [[ -f "$RUNME_CONFIG" ]]
+then
+ source "$RUNME_CONFIG"
+fi
+
+declare -a COMPARE_OPTIONS # empty array
+
+# Check if NETBOX_VERSION has been set by runme_config, and if so, pass to compare_inventory_json.py
+if [[ "${NETBOX_VERSION:-}" == "v3.5" ]]
+then
+ COMPARE_OPTIONS+=(--netbox-version "${NETBOX_VERSION}")
+fi
+
+# OUTPUT_DIR is set by ansible-test
+# OUTPUT_INVENTORY_JSON is only set if running hacking/update_test_inventories.sh to update the test diff data
+if [[ -n "${OUTPUT_INVENTORY_JSON:-}" ]]
+then
+ OUTPUT_DIR="$OUTPUT_INVENTORY_JSON"
+
+ # Clean up JSON fields we don't want to store and compare against in tests (creation times, etc.)
+ COMPARE_OPTIONS+=(--write)
+fi
+
+echo OUTPUT_DIR="$OUTPUT_DIR"
+
+inventory () {
+ if [[ -n "${OUTPUT_INVENTORY_JSON:-}" ]]
+ then
+ # Running for the purpose of updating test data
+ ansible-inventory "$@"
+ else
+ # Running inside ansible-test
+ # Run through python.py just to make sure we've definitely got the coverage environment set up
+ # Just running ansible-inventory directly may not actually find the right one in PATH
+ python.py "$(command -v ansible-inventory)" "$@"
+ fi
+}
+
+
+RESULT=0
+
+for INVENTORY in "$INVENTORIES_DIR"/*.yml
+do
+ NAME="$(basename "$INVENTORY")"
+ NAME_WITHOUT_EXTENSION="${NAME%.yml}"
+
+ OUTPUT_JSON="$OUTPUT_DIR/$NAME_WITHOUT_EXTENSION.json"
+ inventory -vvvv --list --inventory "$INVENTORY" --output="$OUTPUT_JSON"
+
+ # Compare the output
+ if ! "$SCRIPT_DIR/compare_inventory_json.py" "${COMPARE_OPTIONS[@]}" "$INVENTORIES_DIR/$NAME_WITHOUT_EXTENSION.json" "$OUTPUT_JSON"
+ then
+ # Returned non-zero status
+ RESULT=1
+ fi
+
+done
+
+exit $RESULT
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme_config.template b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme_config.template
new file mode 100644
index 000000000..80b42a326
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/inventory-v4.0/runme_config.template
@@ -0,0 +1,6 @@
+# runme_config is sourced by runme.sh to set environment variables used to modify the test against different versions of NetBox.
+# .travis.yml uses render_config.sh to generate it from runme_config.template
+# There is no other way to pass environment variables to a runme.sh integration test.
+# (integration_config.yml files are only helpful to ansible yaml-based tests)
+
+export NETBOX_VERSION=${VERSION}
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.5/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.5/tasks/main.yml
index 817546d47..5bc0bd708 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.5/tasks/main.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.5/tasks/main.yml
@@ -4,102 +4,102 @@
### TEST NETBOX CONNECTION FAILURE
##
##
-- name: "TEST NETBOX CONNECTION FAILURE"
+- name: TEST NETBOX CONNECTION FAILURE
connection: local
block:
- - name: "1 - Device with required information"
+ - name: 1 - Device with required information
netbox.netbox.netbox_device:
- netbox_url: "http://some-random-invalid-URL"
+ netbox_url: http://some-random-invalid-URL
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_one
- ignore_errors: yes
+ ignore_errors: true
- - name: "1 - ASSERT"
- assert:
+ - name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is failed
- test_one['msg'] == "Failed to establish connection to NetBox API"
- - name: "2 - Check to see if netbox_prefix with parent defined will pass via check-mode"
+ - name: 2 - Check to see if netbox_prefix with parent defined will pass via check-mode
netbox.netbox.netbox_prefix:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- parent: "10.10.0.0/16"
+ parent: 10.10.0.0/16
prefix_length: 24
- first_available: yes
+ first_available: true
state: present
register: test_two
- check_mode: yes
+ check_mode: true
- - name: "2 - ASSERT"
- assert:
+ - name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "New prefix created within 10.10.0.0/16"
- name: "3 - Add device with tags - Setup device to test #242"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- slug: first
- slug: second
- name: "4 - Add device with tags out of order - shouldn't change - Tests #242 is fixed"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- slug: second
- slug: first
register: test_four
- diff: yes
+ diff: true
- name: "4 - Assert not changed - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- not test_four["changed"]
- name: "5 - Add device with extra tag - Tests #242 is fixed"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
asset_tag: "1234"
tags:
- slug: second
- slug: third
- slug: first
register: test_five
- diff: yes
+ diff: true
- name: "5 - Assert added tag - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five["diff"]["after"]["tags"] is defined
@@ -107,21 +107,21 @@
- name: "6 - Loop through and add interface templates to different device interface templates - Fixes #282"
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "SFP+ (10GE)"
- type: "SFP+ (10GE)"
+ name: SFP+ (10GE)
+ type: SFP+ (10GE)
device_type: "{{ item }}"
register: test_six
loop:
- - "Cisco Test"
- - "Arista Test"
- - "Nexus Child"
- - "Nexus Parent"
+ - Cisco Test
+ - Arista Test
+ - Nexus Child
+ - Nexus Parent
- name: "6 - Assert device type is correct - Fixes #282"
- assert:
+ ansible.builtin.assert:
that:
- test_six.results[0]["diff"]["before"]["state"] == "absent"
- test_six.results[0]["diff"]["after"]["state"] == "present"
@@ -136,16 +136,16 @@
- test_six.results[3]["diff"]["after"]["state"] == "present"
- test_six.results[3]["interface_template"]["device_type"] == 3
- - name: "7 - Don't prevent updates to other params if tags are specified"
+ - name: 7 - Don't prevent updates to other params if tags are specified
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
asset_tag: "Null"
tags:
# Changed these for issue #407 to be IDs
@@ -155,7 +155,7 @@
register: test_seven
- name: "5 - Assert added tag - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven["diff"]["after"]["asset_tag"] == "Null"
@@ -163,7 +163,7 @@
- name: Add ip address to netbox and don't assign it to a device (Issue 372)
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.255.255.1/24
@@ -174,7 +174,7 @@
- name: Update same ip address to attach to a device interface (Issue 372)
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.255.255.1/24
@@ -188,7 +188,7 @@
register: query_params_372
- name: Assert ip address was updated and added to device interface
- assert:
+ ansible.builtin.assert:
that:
- query_params_372 is changed
- query_params_372['msg'] == 'ip_address 10.255.255.1/24 updated'
@@ -196,78 +196,79 @@
- query_params_372['diff']['after']['assigned_object_id'] == 3
- query_params_372['diff']['after']['assigned_object_type'] == 'dcim.interface'
- - name: "Validate failure due to invalid child params provided by user"
+ - name: Validate failure due to invalid child params provided by user
netbox.netbox.netbox_cable:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- termination_a_type: "dcim.interface"
+ termination_a_type: dcim.interface
termination_a:
- device: "test100"
- name: "GigabitEthernet1"
- termination_b_type: "circuits.circuittermination"
+ device: test100
+ name: GigabitEthernet1
+ termination_b_type: circuits.circuittermination
termination_b:
- name: "XYZ987"
- ignore_errors: "yes"
- register: "test_results"
+ name: XYZ987
+ ignore_errors: true
+ register: test_results
- name: "Issue #415 - Assert failure message shows the allowed params and what the user provided"
- assert:
+ ansible.builtin.assert:
that:
- test_results is failed
- - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit, term_side"'
+ - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit,
+ term_side"'
- name: "Issue #432 - Make sure same IPs get assigned to different device interfaces"
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data: "{{ item }}"
loop: "{{ data }}"
- register: "test_results"
+ register: test_results
vars:
data:
- address: 121.121.121.121/32
assigned_object:
- device: "Test Nexus One"
- name: "Ethernet1/1"
+ device: Test Nexus One
+ name: Ethernet1/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 121.121.121.121/32
assigned_object:
- device: "Test Nexus Child One"
- name: "Ethernet2/1"
+ device: Test Nexus Child One
+ name: Ethernet2/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 1.121.121.121/32
assigned_object:
- device: "Test Nexus One"
- name: "Ethernet1/1"
+ device: Test Nexus One
+ name: Ethernet1/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 1.121.121.121/32
assigned_object:
- device: "Test Nexus Child One"
- name: "Ethernet2/1"
+ device: Test Nexus Child One
+ name: Ethernet2/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- name: "ASSERT Issue #432 changes reflect correct device"
- assert:
+ ansible.builtin.assert:
that:
- test_results | community.general.json_query('results[?ip_address.address==`1.121.121.121/32`]') | length == 2
- test_results | community.general.json_query('results[?ip_address.address==`121.121.121.121/32`]') | length == 2
- name: "Issue #958 - Make sure we can add same location with different sites"
netbox.netbox.netbox_location:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Office Building
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.6/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.6/tasks/main.yml
index 817546d47..5bc0bd708 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.6/tasks/main.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.6/tasks/main.yml
@@ -4,102 +4,102 @@
### TEST NETBOX CONNECTION FAILURE
##
##
-- name: "TEST NETBOX CONNECTION FAILURE"
+- name: TEST NETBOX CONNECTION FAILURE
connection: local
block:
- - name: "1 - Device with required information"
+ - name: 1 - Device with required information
netbox.netbox.netbox_device:
- netbox_url: "http://some-random-invalid-URL"
+ netbox_url: http://some-random-invalid-URL
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_one
- ignore_errors: yes
+ ignore_errors: true
- - name: "1 - ASSERT"
- assert:
+ - name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is failed
- test_one['msg'] == "Failed to establish connection to NetBox API"
- - name: "2 - Check to see if netbox_prefix with parent defined will pass via check-mode"
+ - name: 2 - Check to see if netbox_prefix with parent defined will pass via check-mode
netbox.netbox.netbox_prefix:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- parent: "10.10.0.0/16"
+ parent: 10.10.0.0/16
prefix_length: 24
- first_available: yes
+ first_available: true
state: present
register: test_two
- check_mode: yes
+ check_mode: true
- - name: "2 - ASSERT"
- assert:
+ - name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "New prefix created within 10.10.0.0/16"
- name: "3 - Add device with tags - Setup device to test #242"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- slug: first
- slug: second
- name: "4 - Add device with tags out of order - shouldn't change - Tests #242 is fixed"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- slug: second
- slug: first
register: test_four
- diff: yes
+ diff: true
- name: "4 - Assert not changed - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- not test_four["changed"]
- name: "5 - Add device with extra tag - Tests #242 is fixed"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
asset_tag: "1234"
tags:
- slug: second
- slug: third
- slug: first
register: test_five
- diff: yes
+ diff: true
- name: "5 - Assert added tag - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five["diff"]["after"]["tags"] is defined
@@ -107,21 +107,21 @@
- name: "6 - Loop through and add interface templates to different device interface templates - Fixes #282"
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "SFP+ (10GE)"
- type: "SFP+ (10GE)"
+ name: SFP+ (10GE)
+ type: SFP+ (10GE)
device_type: "{{ item }}"
register: test_six
loop:
- - "Cisco Test"
- - "Arista Test"
- - "Nexus Child"
- - "Nexus Parent"
+ - Cisco Test
+ - Arista Test
+ - Nexus Child
+ - Nexus Parent
- name: "6 - Assert device type is correct - Fixes #282"
- assert:
+ ansible.builtin.assert:
that:
- test_six.results[0]["diff"]["before"]["state"] == "absent"
- test_six.results[0]["diff"]["after"]["state"] == "present"
@@ -136,16 +136,16 @@
- test_six.results[3]["diff"]["after"]["state"] == "present"
- test_six.results[3]["interface_template"]["device_type"] == 3
- - name: "7 - Don't prevent updates to other params if tags are specified"
+ - name: 7 - Don't prevent updates to other params if tags are specified
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
asset_tag: "Null"
tags:
# Changed these for issue #407 to be IDs
@@ -155,7 +155,7 @@
register: test_seven
- name: "5 - Assert added tag - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven["diff"]["after"]["asset_tag"] == "Null"
@@ -163,7 +163,7 @@
- name: Add ip address to netbox and don't assign it to a device (Issue 372)
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.255.255.1/24
@@ -174,7 +174,7 @@
- name: Update same ip address to attach to a device interface (Issue 372)
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.255.255.1/24
@@ -188,7 +188,7 @@
register: query_params_372
- name: Assert ip address was updated and added to device interface
- assert:
+ ansible.builtin.assert:
that:
- query_params_372 is changed
- query_params_372['msg'] == 'ip_address 10.255.255.1/24 updated'
@@ -196,78 +196,79 @@
- query_params_372['diff']['after']['assigned_object_id'] == 3
- query_params_372['diff']['after']['assigned_object_type'] == 'dcim.interface'
- - name: "Validate failure due to invalid child params provided by user"
+ - name: Validate failure due to invalid child params provided by user
netbox.netbox.netbox_cable:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- termination_a_type: "dcim.interface"
+ termination_a_type: dcim.interface
termination_a:
- device: "test100"
- name: "GigabitEthernet1"
- termination_b_type: "circuits.circuittermination"
+ device: test100
+ name: GigabitEthernet1
+ termination_b_type: circuits.circuittermination
termination_b:
- name: "XYZ987"
- ignore_errors: "yes"
- register: "test_results"
+ name: XYZ987
+ ignore_errors: true
+ register: test_results
- name: "Issue #415 - Assert failure message shows the allowed params and what the user provided"
- assert:
+ ansible.builtin.assert:
that:
- test_results is failed
- - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit, term_side"'
+ - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit,
+ term_side"'
- name: "Issue #432 - Make sure same IPs get assigned to different device interfaces"
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data: "{{ item }}"
loop: "{{ data }}"
- register: "test_results"
+ register: test_results
vars:
data:
- address: 121.121.121.121/32
assigned_object:
- device: "Test Nexus One"
- name: "Ethernet1/1"
+ device: Test Nexus One
+ name: Ethernet1/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 121.121.121.121/32
assigned_object:
- device: "Test Nexus Child One"
- name: "Ethernet2/1"
+ device: Test Nexus Child One
+ name: Ethernet2/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 1.121.121.121/32
assigned_object:
- device: "Test Nexus One"
- name: "Ethernet1/1"
+ device: Test Nexus One
+ name: Ethernet1/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 1.121.121.121/32
assigned_object:
- device: "Test Nexus Child One"
- name: "Ethernet2/1"
+ device: Test Nexus Child One
+ name: Ethernet2/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- name: "ASSERT Issue #432 changes reflect correct device"
- assert:
+ ansible.builtin.assert:
that:
- test_results | community.general.json_query('results[?ip_address.address==`1.121.121.121/32`]') | length == 2
- test_results | community.general.json_query('results[?ip_address.address==`121.121.121.121/32`]') | length == 2
- name: "Issue #958 - Make sure we can add same location with different sites"
netbox.netbox.netbox_location:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Office Building
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.7/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.7/tasks/main.yml
index 817546d47..5bc0bd708 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.7/tasks/main.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v3.7/tasks/main.yml
@@ -4,102 +4,102 @@
### TEST NETBOX CONNECTION FAILURE
##
##
-- name: "TEST NETBOX CONNECTION FAILURE"
+- name: TEST NETBOX CONNECTION FAILURE
connection: local
block:
- - name: "1 - Device with required information"
+ - name: 1 - Device with required information
netbox.netbox.netbox_device:
- netbox_url: "http://some-random-invalid-URL"
+ netbox_url: http://some-random-invalid-URL
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_one
- ignore_errors: yes
+ ignore_errors: true
- - name: "1 - ASSERT"
- assert:
+ - name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is failed
- test_one['msg'] == "Failed to establish connection to NetBox API"
- - name: "2 - Check to see if netbox_prefix with parent defined will pass via check-mode"
+ - name: 2 - Check to see if netbox_prefix with parent defined will pass via check-mode
netbox.netbox.netbox_prefix:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- parent: "10.10.0.0/16"
+ parent: 10.10.0.0/16
prefix_length: 24
- first_available: yes
+ first_available: true
state: present
register: test_two
- check_mode: yes
+ check_mode: true
- - name: "2 - ASSERT"
- assert:
+ - name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "New prefix created within 10.10.0.0/16"
- name: "3 - Add device with tags - Setup device to test #242"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- slug: first
- slug: second
- name: "4 - Add device with tags out of order - shouldn't change - Tests #242 is fixed"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- slug: second
- slug: first
register: test_four
- diff: yes
+ diff: true
- name: "4 - Assert not changed - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- not test_four["changed"]
- name: "5 - Add device with extra tag - Tests #242 is fixed"
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
asset_tag: "1234"
tags:
- slug: second
- slug: third
- slug: first
register: test_five
- diff: yes
+ diff: true
- name: "5 - Assert added tag - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five["diff"]["after"]["tags"] is defined
@@ -107,21 +107,21 @@
- name: "6 - Loop through and add interface templates to different device interface templates - Fixes #282"
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "SFP+ (10GE)"
- type: "SFP+ (10GE)"
+ name: SFP+ (10GE)
+ type: SFP+ (10GE)
device_type: "{{ item }}"
register: test_six
loop:
- - "Cisco Test"
- - "Arista Test"
- - "Nexus Child"
- - "Nexus Parent"
+ - Cisco Test
+ - Arista Test
+ - Nexus Child
+ - Nexus Parent
- name: "6 - Assert device type is correct - Fixes #282"
- assert:
+ ansible.builtin.assert:
that:
- test_six.results[0]["diff"]["before"]["state"] == "absent"
- test_six.results[0]["diff"]["after"]["state"] == "present"
@@ -136,16 +136,16 @@
- test_six.results[3]["diff"]["after"]["state"] == "present"
- test_six.results[3]["interface_template"]["device_type"] == 3
- - name: "7 - Don't prevent updates to other params if tags are specified"
+ - name: 7 - Don't prevent updates to other params if tags are specified
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "issue-242"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
asset_tag: "Null"
tags:
# Changed these for issue #407 to be IDs
@@ -155,7 +155,7 @@
register: test_seven
- name: "5 - Assert added tag - Tests #242 is fixed"
- assert:
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven["diff"]["after"]["asset_tag"] == "Null"
@@ -163,7 +163,7 @@
- name: Add ip address to netbox and don't assign it to a device (Issue 372)
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.255.255.1/24
@@ -174,7 +174,7 @@
- name: Update same ip address to attach to a device interface (Issue 372)
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.255.255.1/24
@@ -188,7 +188,7 @@
register: query_params_372
- name: Assert ip address was updated and added to device interface
- assert:
+ ansible.builtin.assert:
that:
- query_params_372 is changed
- query_params_372['msg'] == 'ip_address 10.255.255.1/24 updated'
@@ -196,78 +196,79 @@
- query_params_372['diff']['after']['assigned_object_id'] == 3
- query_params_372['diff']['after']['assigned_object_type'] == 'dcim.interface'
- - name: "Validate failure due to invalid child params provided by user"
+ - name: Validate failure due to invalid child params provided by user
netbox.netbox.netbox_cable:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- termination_a_type: "dcim.interface"
+ termination_a_type: dcim.interface
termination_a:
- device: "test100"
- name: "GigabitEthernet1"
- termination_b_type: "circuits.circuittermination"
+ device: test100
+ name: GigabitEthernet1
+ termination_b_type: circuits.circuittermination
termination_b:
- name: "XYZ987"
- ignore_errors: "yes"
- register: "test_results"
+ name: XYZ987
+ ignore_errors: true
+ register: test_results
- name: "Issue #415 - Assert failure message shows the allowed params and what the user provided"
- assert:
+ ansible.builtin.assert:
that:
- test_results is failed
- - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit, term_side"'
+ - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit,
+ term_side"'
- name: "Issue #432 - Make sure same IPs get assigned to different device interfaces"
netbox.netbox.netbox_ip_address:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data: "{{ item }}"
loop: "{{ data }}"
- register: "test_results"
+ register: test_results
vars:
data:
- address: 121.121.121.121/32
assigned_object:
- device: "Test Nexus One"
- name: "Ethernet1/1"
+ device: Test Nexus One
+ name: Ethernet1/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 121.121.121.121/32
assigned_object:
- device: "Test Nexus Child One"
- name: "Ethernet2/1"
+ device: Test Nexus Child One
+ name: Ethernet2/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 1.121.121.121/32
assigned_object:
- device: "Test Nexus One"
- name: "Ethernet1/1"
+ device: Test Nexus One
+ name: Ethernet1/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- address: 1.121.121.121/32
assigned_object:
- device: "Test Nexus Child One"
- name: "Ethernet2/1"
+ device: Test Nexus Child One
+ name: Ethernet2/1
description: ansible-netbox-1.2.1
dns_name: ansible-netbox-1.2.1
role: HSRP
status: Active
- name: "ASSERT Issue #432 changes reflect correct device"
- assert:
+ ansible.builtin.assert:
that:
- test_results | community.general.json_query('results[?ip_address.address==`1.121.121.121/32`]') | length == 2
- test_results | community.general.json_query('results[?ip_address.address==`121.121.121.121/32`]') | length == 2
- name: "Issue #958 - Make sure we can add same location with different sites"
netbox.netbox.netbox_location:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Office Building
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/regression-v4.0/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v4.0/tasks/main.yml
new file mode 100644
index 000000000..5bc0bd708
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/regression-v4.0/tasks/main.yml
@@ -0,0 +1,284 @@
+---
+##
+##
+### TEST NETBOX CONNECTION FAILURE
+##
+##
+- name: TEST NETBOX CONNECTION FAILURE
+ connection: local
+ block:
+ - name: 1 - Device with required information
+ netbox.netbox.netbox_device:
+ netbox_url: http://some-random-invalid-URL
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ state: present
+ register: test_one
+ ignore_errors: true
+
+ - name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is failed
+ - test_one['msg'] == "Failed to establish connection to NetBox API"
+
+ - name: 2 - Check to see if netbox_prefix with parent defined will pass via check-mode
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ parent: 10.10.0.0/16
+ prefix_length: 24
+ first_available: true
+ state: present
+ register: test_two
+ check_mode: true
+
+ - name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_two is changed
+ - test_two['msg'] == "New prefix created within 10.10.0.0/16"
+
+ - name: "3 - Add device with tags - Setup device to test #242"
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ tags:
+ - slug: first
+ - slug: second
+
+ - name: "4 - Add device with tags out of order - shouldn't change - Tests #242 is fixed"
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ tags:
+ - slug: second
+ - slug: first
+ register: test_four
+ diff: true
+
+ - name: "4 - Assert not changed - Tests #242 is fixed"
+ ansible.builtin.assert:
+ that:
+ - not test_four["changed"]
+
+ - name: "5 - Add device with extra tag - Tests #242 is fixed"
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ asset_tag: "1234"
+ tags:
+ - slug: second
+ - slug: third
+ - slug: first
+ register: test_five
+ diff: true
+
+ - name: "5 - Assert added tag - Tests #242 is fixed"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five["diff"]["after"]["tags"] is defined
+ - test_five["device"]["tags"] is defined
+
+ - name: "6 - Loop through and add interface templates to different device interface templates - Fixes #282"
+ netbox.netbox.netbox_device_interface_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: SFP+ (10GE)
+ type: SFP+ (10GE)
+ device_type: "{{ item }}"
+ register: test_six
+ loop:
+ - Cisco Test
+ - Arista Test
+ - Nexus Child
+ - Nexus Parent
+
+ - name: "6 - Assert device type is correct - Fixes #282"
+ ansible.builtin.assert:
+ that:
+ - test_six.results[0]["diff"]["before"]["state"] == "absent"
+ - test_six.results[0]["diff"]["after"]["state"] == "present"
+ - test_six.results[0]["interface_template"]["device_type"] == 1
+ - test_six.results[1]["diff"]["before"]["state"] == "absent"
+ - test_six.results[1]["diff"]["after"]["state"] == "present"
+ - test_six.results[1]["interface_template"]["device_type"] == 2
+ - test_six.results[2]["diff"]["before"]["state"] == "absent"
+ - test_six.results[2]["diff"]["after"]["state"] == "present"
+ - test_six.results[2]["interface_template"]["device_type"] == 4
+ - test_six.results[3]["diff"]["before"]["state"] == "absent"
+ - test_six.results[3]["diff"]["after"]["state"] == "present"
+ - test_six.results[3]["interface_template"]["device_type"] == 3
+
+ - name: 7 - Don't prevent updates to other params if tags are specified
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: issue-242
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ asset_tag: "Null"
+ tags:
+ # Changed these for issue #407 to be IDs
+ - 2
+ - 3
+ - 1
+ register: test_seven
+
+ - name: "5 - Assert added tag - Tests #242 is fixed"
+ ansible.builtin.assert:
+ that:
+ - test_seven is changed
+ - test_seven["diff"]["after"]["asset_tag"] == "Null"
+ - test_seven["device"]["asset_tag"] == "Null"
+
+ - name: Add ip address to netbox and don't assign it to a device (Issue 372)
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 10.255.255.1/24
+ query_params:
+ - address
+ - vrf
+ state: present
+
+ - name: Update same ip address to attach to a device interface (Issue 372)
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 10.255.255.1/24
+ assigned_object:
+ device: test100
+ name: GigabitEthernet1
+ query_params:
+ - address
+ - vrf
+ state: present
+ register: query_params_372
+
+ - name: Assert ip address was updated and added to device interface
+ ansible.builtin.assert:
+ that:
+ - query_params_372 is changed
+ - query_params_372['msg'] == 'ip_address 10.255.255.1/24 updated'
+ - query_params_372['diff']['after']['assigned_object'] == 3
+ - query_params_372['diff']['after']['assigned_object_id'] == 3
+ - query_params_372['diff']['after']['assigned_object_type'] == 'dcim.interface'
+
+ - name: Validate failure due to invalid child params provided by user
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: dcim.interface
+ termination_a:
+ device: test100
+ name: GigabitEthernet1
+ termination_b_type: circuits.circuittermination
+ termination_b:
+ name: XYZ987
+ ignore_errors: true
+ register: test_results
+
+ - name: "Issue #415 - Assert failure message shows the allowed params and what the user provided"
+ ansible.builtin.assert:
+ that:
+ - test_results is failed
+ - 'test_results["msg"] == "One or more of the kwargs provided are invalid for circuits.circuittermination, provided kwargs: name. Acceptable kwargs: circuit,
+ term_side"'
+
+ - name: "Issue #432 - Make sure same IPs get assigned to different device interfaces"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data: "{{ item }}"
+ loop: "{{ data }}"
+ register: test_results
+ vars:
+ data:
+ - address: 121.121.121.121/32
+ assigned_object:
+ device: Test Nexus One
+ name: Ethernet1/1
+ description: ansible-netbox-1.2.1
+ dns_name: ansible-netbox-1.2.1
+ role: HSRP
+ status: Active
+ - address: 121.121.121.121/32
+ assigned_object:
+ device: Test Nexus Child One
+ name: Ethernet2/1
+ description: ansible-netbox-1.2.1
+ dns_name: ansible-netbox-1.2.1
+ role: HSRP
+ status: Active
+ - address: 1.121.121.121/32
+ assigned_object:
+ device: Test Nexus One
+ name: Ethernet1/1
+ description: ansible-netbox-1.2.1
+ dns_name: ansible-netbox-1.2.1
+ role: HSRP
+ status: Active
+ - address: 1.121.121.121/32
+ assigned_object:
+ device: Test Nexus Child One
+ name: Ethernet2/1
+ description: ansible-netbox-1.2.1
+ dns_name: ansible-netbox-1.2.1
+ role: HSRP
+ status: Active
+
+ - name: "ASSERT Issue #432 changes reflect correct device"
+ ansible.builtin.assert:
+ that:
+ - test_results | community.general.json_query('results[?ip_address.address==`1.121.121.121/32`]') | length == 2
+ - test_results | community.general.json_query('results[?ip_address.address==`121.121.121.121/32`]') | length == 2
+
+ - name: "Issue #958 - Make sure we can add same location with different sites"
+ netbox.netbox.netbox_location:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Office Building
+ site: "{{ item }}"
+ loop:
+ - Test Site
+ - Test Site2
+ register: test_results
+
+ - name: "ASSERT ISSUE #957 - Location has different IDs"
+ ansible.builtin.assert:
+ that:
+ - test_results.results.0.location.id != test_results.results.1.location.id
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/main.yml
index 38aa7d0ff..5861669cb 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/main.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/main.yml
@@ -1,306 +1,305 @@
---
-- name: "NETBOX_DEVICE TESTS"
-  include_tasks: "netbox_device.yml"
+- name: NETBOX_DEVICE TESTS
+  ansible.builtin.include_tasks: netbox_device.yml
-- name: "NETBOX_DEVICE_INTERFACE TESTS"
-  include_tasks: "netbox_device_interface.yml"
+- name: NETBOX_DEVICE_INTERFACE TESTS
+  ansible.builtin.include_tasks: netbox_device_interface.yml
-- name: "NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS"
-  include_tasks: "netbox_device_interface_template.yml"
+- name: NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_device_interface_template.yml
-- name: "NETBOX_IP_ADDRESS TESTS"
-  include_tasks: "netbox_ip_address.yml"
+- name: NETBOX_IP_ADDRESS TESTS
+  ansible.builtin.include_tasks: netbox_ip_address.yml
-- name: "NETBOX_PREFIX TESTS"
-  include_tasks: "netbox_prefix.yml"
+- name: NETBOX_PREFIX TESTS
+  ansible.builtin.include_tasks: netbox_prefix.yml
-- name: "NETBOX_SITE TESTS"
-  include_tasks: "netbox_site.yml"
+- name: NETBOX_SITE TESTS
+  ansible.builtin.include_tasks: netbox_site.yml
-- name: "NETBOX_SITE_GROUP TESTS"
-  include_tasks: "netbox_site_group.yml"
+- name: NETBOX_SITE_GROUP TESTS
+  ansible.builtin.include_tasks: netbox_site_group.yml
-- name: "NETBOX_CONTACT TESTS"
-  include_tasks: "netbox_contact.yml"
+- name: NETBOX_CONTACT TESTS
+  ansible.builtin.include_tasks: netbox_contact.yml
-- name: "NETBOX_CONTACT_ROLE TESTS"
-  include_tasks: "netbox_contact_role.yml"
+- name: NETBOX_CONTACT_ROLE TESTS
+  ansible.builtin.include_tasks: netbox_contact_role.yml
-- name: "NETBOX_TENTANT TESTS"
-  include_tasks: "netbox_tenant.yml"
+- name: NETBOX_TENTANT TESTS
+  ansible.builtin.include_tasks: netbox_tenant.yml
-- name: "NETBOX_TENTANT_GROUP TESTS"
-  include_tasks: "netbox_tenant_group.yml"
+- name: NETBOX_TENTANT_GROUP TESTS
+  ansible.builtin.include_tasks: netbox_tenant_group.yml
-- name: "NETBOX_RACK TESTS"
-  include_tasks: "netbox_rack.yml"
+- name: NETBOX_RACK TESTS
+  ansible.builtin.include_tasks: netbox_rack.yml
-- name: "NETBOX_RACK_ROLE TESTS"
-  include_tasks: "netbox_rack_role.yml"
+- name: NETBOX_RACK_ROLE TESTS
+  ansible.builtin.include_tasks: netbox_rack_role.yml
-- name: "NETBOX_LOCATION TESTS"
-  include_tasks: "netbox_location.yml"
+- name: NETBOX_LOCATION TESTS
+  ansible.builtin.include_tasks: netbox_location.yml
-- name: "NETBOX_MANUFACTURER TESTS"
-  include_tasks: "netbox_manufacturer.yml"
+- name: NETBOX_MANUFACTURER TESTS
+  ansible.builtin.include_tasks: netbox_manufacturer.yml
-- name: "NETBOX_PLATFORM TESTS"
-  include_tasks: "netbox_platform.yml"
+- name: NETBOX_PLATFORM TESTS
+  ansible.builtin.include_tasks: netbox_platform.yml
-- name: "NETBOX_DEVICE_TYPE TESTS"
-  include_tasks: "netbox_device_type.yml"
+- name: NETBOX_DEVICE_TYPE TESTS
+  ansible.builtin.include_tasks: netbox_device_type.yml
-- name: "NETBOX_DEVICE_ROLE TESTS"
-  include_tasks: "netbox_device_role.yml"
+- name: NETBOX_DEVICE_ROLE TESTS
+  ansible.builtin.include_tasks: netbox_device_role.yml
-- name: "NETBOX_IPAM_ROLE TESTS"
-  include_tasks: "netbox_ipam_role.yml"
+- name: NETBOX_IPAM_ROLE TESTS
+  ansible.builtin.include_tasks: netbox_ipam_role.yml
-- name: "NETBOX_VLAN_GROUP TESTS"
-  include_tasks: "netbox_vlan_group.yml"
+- name: NETBOX_VLAN_GROUP TESTS
+  ansible.builtin.include_tasks: netbox_vlan_group.yml
-- name: "NETBOX_VLAN TESTS"
-  include_tasks: "netbox_vlan.yml"
+- name: NETBOX_VLAN TESTS
+  ansible.builtin.include_tasks: netbox_vlan.yml
-- name: "NETBOX_VRF TESTS"
-  include_tasks: "netbox_vrf.yml"
+- name: NETBOX_VRF TESTS
+  ansible.builtin.include_tasks: netbox_vrf.yml
-- name: "NETBOX_RIR TESTS"
-  include_tasks: "netbox_rir.yml"
+- name: NETBOX_RIR TESTS
+  ansible.builtin.include_tasks: netbox_rir.yml
-- name: "NETBOX_AGGREGATE TESTS"
-  include_tasks: "netbox_aggregate.yml"
+- name: NETBOX_AGGREGATE TESTS
+  ansible.builtin.include_tasks: netbox_aggregate.yml
-- name: "NETBOX_REGION TESTS"
-  include_tasks: "netbox_region.yml"
+- name: NETBOX_REGION TESTS
+  ansible.builtin.include_tasks: netbox_region.yml
-- name: "NETBOX_DEVICE_BAY TESTS"
-  include_tasks: "netbox_device_bay.yml"
+- name: NETBOX_DEVICE_BAY TESTS
+  ansible.builtin.include_tasks: netbox_device_bay.yml
-- name: "NETBOX_DEVICE_BAY_TEMPLATE TESTS"
-  include_tasks: "netbox_device_bay_template.yml"
+- name: NETBOX_DEVICE_BAY_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_device_bay_template.yml
-- name: "NETBOX_INVENTORY_ITEM TESTS"
-  include_tasks: "netbox_inventory_item.yml"
+- name: NETBOX_INVENTORY_ITEM TESTS
+  ansible.builtin.include_tasks: netbox_inventory_item.yml
-- name: "NETBOX_VIRTUAL_MACHINE TESTS"
-  include_tasks: "netbox_virtual_machine.yml"
+- name: NETBOX_VIRTUAL_MACHINE TESTS
+  ansible.builtin.include_tasks: netbox_virtual_machine.yml
-- name: "NETBOX_CLUSTER TESTS"
-  include_tasks: "netbox_cluster.yml"
+- name: NETBOX_CLUSTER TESTS
+  ansible.builtin.include_tasks: netbox_cluster.yml
-- name: "NETBOX_CLUSTER_GROUP TESTS"
-  include_tasks: "netbox_cluster_group.yml"
+- name: NETBOX_CLUSTER_GROUP TESTS
+  ansible.builtin.include_tasks: netbox_cluster_group.yml
-- name: "NETBOX_CLUSTER_TYPE TESTS"
-  include_tasks: "netbox_cluster_type.yml"
+- name: NETBOX_CLUSTER_TYPE TESTS
+  ansible.builtin.include_tasks: netbox_cluster_type.yml
-- name: "NETBOX_VM_INTERFACE TESTS"
-  include_tasks: "netbox_vm_interface.yml"
+- name: NETBOX_VM_INTERFACE TESTS
+  ansible.builtin.include_tasks: netbox_vm_interface.yml
-- name: "NETBOX_PROVIDER TESTS"
-  include_tasks: "netbox_provider.yml"
+- name: NETBOX_PROVIDER TESTS
+  ansible.builtin.include_tasks: netbox_provider.yml
-- name: "NETBOX_PROVIDER_NETWORK TESTS"
-  include_tasks: "netbox_provider_network.yml"
+- name: NETBOX_PROVIDER_NETWORK TESTS
+  ansible.builtin.include_tasks: netbox_provider_network.yml
-- name: "NETBOX_CIRCUIT_TYPE TESTS"
-  include_tasks: "netbox_circuit_type.yml"
+- name: NETBOX_CIRCUIT_TYPE TESTS
+  ansible.builtin.include_tasks: netbox_circuit_type.yml
-- name: "NETBOX_CIRCUIT TESTS"
-  include_tasks: "netbox_circuit.yml"
+- name: NETBOX_CIRCUIT TESTS
+  ansible.builtin.include_tasks: netbox_circuit.yml
-- name: "NETBOX_CIRCUIT_TERMINATION TESTS"
-  include_tasks: "netbox_circuit_termination.yml"
+- name: NETBOX_CIRCUIT_TERMINATION TESTS
+  ansible.builtin.include_tasks: netbox_circuit_termination.yml
-- name: "NETBOX_REAR_PORT TESTS"
-  include_tasks: "netbox_rear_port.yml"
+- name: NETBOX_REAR_PORT TESTS
+  ansible.builtin.include_tasks: netbox_rear_port.yml
-- name: "NETBOX_REAR_PORT_TEMPLATE TESTS"
-  include_tasks: "netbox_rear_port_template.yml"
+- name: NETBOX_REAR_PORT_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_rear_port_template.yml
-- name: "NETBOX_FRONT_PORT TESTS"
-  include_tasks: "netbox_front_port.yml"
+- name: NETBOX_FRONT_PORT TESTS
+  ansible.builtin.include_tasks: netbox_front_port.yml
-- name: "NETBOX_FRONT_PORT_TEMPLATE TESTS"
-  include_tasks: "netbox_front_port_template.yml"
+- name: NETBOX_FRONT_PORT_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_front_port_template.yml
-- name: "NETBOX_CONSOLE_PORT TESTS"
-  include_tasks: "netbox_console_port.yml"
+- name: NETBOX_CONSOLE_PORT TESTS
+  ansible.builtin.include_tasks: netbox_console_port.yml
-- name: "NETBOX_CONSOLE_PORT_TEMPLATE TESTS"
-  include_tasks: "netbox_console_port_template.yml"
+- name: NETBOX_CONSOLE_PORT_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_console_port_template.yml
-- name: "NETBOX_CONSOLE_SERVER_PORT TESTS"
-  include_tasks: "netbox_console_server_port.yml"
+- name: NETBOX_CONSOLE_SERVER_PORT TESTS
+  ansible.builtin.include_tasks: netbox_console_server_port.yml
-- name: "NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS"
-  include_tasks: "netbox_console_server_port_template.yml"
+- name: NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_console_server_port_template.yml
-- name: "NETBOX_POWER_PANEL TESTS"
-  include_tasks: "netbox_power_panel.yml"
+- name: NETBOX_POWER_PANEL TESTS
+  ansible.builtin.include_tasks: netbox_power_panel.yml
-- name: "NETBOX_POWER_FEED TESTS"
-  include_tasks: "netbox_power_feed.yml"
+- name: NETBOX_POWER_FEED TESTS
+  ansible.builtin.include_tasks: netbox_power_feed.yml
-- name: "NETBOX_POWER_PORT TESTS"
-  include_tasks: "netbox_power_port.yml"
+- name: NETBOX_POWER_PORT TESTS
+  ansible.builtin.include_tasks: netbox_power_port.yml
-- name: "NETBOX_POWER_PORT_TEMPLATE TESTS"
-  include_tasks: "netbox_power_port_template.yml"
+- name: NETBOX_POWER_PORT_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_power_port_template.yml
-- name: "NETBOX_POWER_OUTLET TESTS"
-  include_tasks: "netbox_power_outlet.yml"
+- name: NETBOX_POWER_OUTLET TESTS
+  ansible.builtin.include_tasks: netbox_power_outlet.yml
-- name: "NETBOX_POWER_OUTLET_TEMPLATE TESTS"
-  include_tasks: "netbox_power_outlet_template.yml"
+- name: NETBOX_POWER_OUTLET_TEMPLATE TESTS
+  ansible.builtin.include_tasks: netbox_power_outlet_template.yml
-
-- name: "NETBOX_VIRTUAL_CHASSIS TESTS"
- include_tasks: "netbox_virtual_chassis.yml"
+- name: NETBOX_VIRTUAL_CHASSIS TESTS
+ ansible.builtin.include_tasks: netbox_virtual_chassis.yml
# Module has to be updated for 3.3
-#- name: "NETBOX_CABLE TESTS"
+# - name: "NETBOX_CABLE TESTS"
# include_tasks: "netbox_cable.yml"
-- name: "NETBOX_SERVICE TESTS"
- include_tasks: "netbox_service.yml"
+- name: NETBOX_SERVICE TESTS
+ ansible.builtin.include_tasks: netbox_service.yml
-- name: "NETBOX_LOOKUP TESTS"
- include_tasks: "netbox_lookup.yml"
+- name: NETBOX_LOOKUP TESTS
+ ansible.builtin.include_tasks: netbox_lookup.yml
-- name: "NETBOX_TAG_TESTS"
- include_tasks: "netbox_tag.yml"
+- name: NETBOX_TAG_TESTS
+ ansible.builtin.include_tasks: netbox_tag.yml
-- name: "NETBOX_ROUTE_TARGET_TESTS"
- include_tasks: "netbox_route_target.yml"
+- name: NETBOX_ROUTE_TARGET_TESTS
+ ansible.builtin.include_tasks: netbox_route_target.yml
-- name: "NETBOX_WIRELESS_LAN TESTS"
- include_tasks:
- file: "netbox_wireless_lan.yml"
+- name: NETBOX_WIRELESS_LAN TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan.yml
apply:
tags:
- netbox_wireless_lan
tags:
- netbox_wireless_lan
-- name: "NETBOX_WIRELESS_LAN_GROUP TESTS"
- include_tasks:
- file: "netbox_wireless_lan_group.yml"
+- name: NETBOX_WIRELESS_LAN_GROUP TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan_group.yml
apply:
tags:
- netbox_wireless_lan_group
tags:
- netbox_wireless_lan_group
-- name: "NETBOX_WIRELESS_LINK TESTS"
- include_tasks:
- file: "netbox_wireless_link.yml"
+- name: NETBOX_WIRELESS_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_link.yml
apply:
tags:
- netbox_wireless_link
tags:
- netbox_wireless_link
-- name: "NETBOX_CUSTOM_FIELD TESTS"
- include_tasks:
- file: "netbox_custom_field.yml"
+- name: NETBOX_CUSTOM_FIELD TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_field.yml
apply:
tags:
- netbox_custom_field
tags:
- netbox_custom_field
-- name: "NETBOX_CUSTOM_LINK TESTS"
- include_tasks:
- file: "netbox_custom_link.yml"
+- name: NETBOX_CUSTOM_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_link.yml
apply:
tags:
- netbox_custom_link
tags:
- netbox_custom_link
-- name: "NETBOX_EXPORT_TEMPLATE TESTS"
- include_tasks:
- file: "netbox_export_template.yml"
+- name: NETBOX_EXPORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_export_template.yml
apply:
tags:
- netbox_export_template
tags:
- netbox_export_template
-- name: "NETBOX_WEBHOOK TESTS"
- include_tasks:
- file: "netbox_webhook.yml"
+- name: NETBOX_WEBHOOK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_webhook.yml
apply:
tags:
- netbox_webhook
tags:
- netbox_webhook
-- name: "NETBOX_L2VPN TESTS"
- include_tasks:
- file: "netbox_l2vpn.yml"
+- name: NETBOX_L2VPN TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_l2vpn.yml
apply:
tags:
- netbox_l2vpn
tags:
- netbox_l2vpn
-- name: "NETBOX_L2VPN_TERMINATION TESTS"
- include_tasks:
- file: "netbox_l2vpn_termination.yml"
+- name: NETBOX_L2VPN_TERMINATION TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_l2vpn_termination.yml
apply:
tags:
- netbox_l2vpn_termination
tags:
- netbox_l2vpn_termination
-- name: "NETBOX_INVENTORY_ITEM_ROLE TESTS"
- include_tasks:
- file: "netbox_inventory_item_role.yml"
+- name: NETBOX_INVENTORY_ITEM_ROLE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_inventory_item_role.yml
apply:
tags:
- netbox_inventory_item_role
tags:
- netbox_inventory_item_role
-- name: "NETBOX_MODULE_TYPE TESTS"
- include_tasks:
- file: "netbox_module_type.yml"
+- name: NETBOX_MODULE_TYPE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_module_type.yml
apply:
tags:
- netbox_module_type
tags:
- netbox_module_type
-- name: "NETBOX_SERVICE_TEMPLATE TESTS"
- include_tasks:
- file: "netbox_service_template.yml"
+- name: NETBOX_SERVICE_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_service_template.yml
apply:
tags:
- netbox_service_template
tags:
- netbox_service_template
-- name: "NETBOX_ASN TESTS"
- include_tasks: "netbox_asn.yml"
+- name: NETBOX_ASN TESTS
+ ansible.builtin.include_tasks: netbox_asn.yml
-- name: "NETBOX_FHRP_GROUP TESTS"
- include_tasks: "netbox_fhrp_group.yml"
+- name: NETBOX_FHRP_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_fhrp_group.yml
-- name: "NETBOX_JOURNAL_ENTRY TESTS"
- include_tasks:
- file: "netbox_journal_entry.yml"
+- name: NETBOX_JOURNAL_ENTRY TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_journal_entry.yml
apply:
tags:
- netbox_journal_entry
tags:
- netbox_journal_entry
-- name: "NETBOX_FHRP_GROUP_ASSIGNMENT TESTS"
- include_tasks:
- file: "netbox_fhrp_group_assignment.yml"
+- name: NETBOX_FHRP_GROUP_ASSIGNMENT TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_fhrp_group_assignment.yml
apply:
tags:
- netbox_fhrp_group_assignmen
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_aggregate.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_aggregate.yml
index ac02641dc..d45b9c835 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_aggregate.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_aggregate.yml
@@ -7,35 +7,35 @@
- name: "AGGREGATE 1: Necessary info creation"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
- rir: "Example RIR"
+ prefix: 10.0.0.0/8
+ rir: Example RIR
state: present
register: test_one
- name: "AGGREGATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
- test_one['diff']['after']['state'] == "present"
- test_one['aggregate']['prefix'] == "10.0.0.0/8"
- #- test_one['aggregate']['family'] == 4
+ # - test_one['aggregate']['family'] == 4
- test_one['aggregate']['rir'] == 1
- test_one['msg'] == "aggregate 10.0.0.0/8 created"
- name: "AGGREGATE 2: Create duplicate"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
+ prefix: 10.0.0.0/8
state: present
register: test_two
- name: "AGGREGATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['aggregate']['prefix'] == "10.0.0.0/8"
@@ -46,19 +46,19 @@
- name: "AGGREGATE 3: ASSERT - Update"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
- rir: "Example RIR"
+ prefix: 10.0.0.0/8
+ rir: Example RIR
date_added: "1989-01-18"
- description: "Test Description"
+ description: Test Description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "AGGREGATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['date_added'] == "1989-01-18"
@@ -75,14 +75,14 @@
- name: "AGGREGATE 4: ASSERT - Delete"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
+ prefix: 10.0.0.0/8
state: absent
register: test_four
- name: "AGGREGATE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['aggregate']['prefix'] == "10.0.0.0/8"
@@ -96,20 +96,20 @@
- name: "AGGREGATE 5: Necessary info creation"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "2001::/32"
- rir: "Example RIR"
+ prefix: 2001::/32
+ rir: Example RIR
state: present
register: test_five
- name: "AGGREGATE 5: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
- test_five['diff']['after']['state'] == "present"
- test_five['aggregate']['prefix'] == "2001::/32"
- #- test_five['aggregate']['family'] == 6
+ # - test_five['aggregate']['family'] == 6
- test_five['aggregate']['rir'] == 1
- test_five['msg'] == "aggregate 2001::/32 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_asn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_asn.yml
index 346bafda9..fab4270a9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_asn.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_asn.yml
@@ -7,7 +7,7 @@
- name: "ASN 1: Test ASN creation"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
@@ -27,7 +27,7 @@
- name: "ASN 2: Create duplicate"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
@@ -45,14 +45,14 @@
- name: "ASN 3: Update ASN with other fields"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
tenant: Test Tenant
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -73,7 +73,7 @@
- name: "ASN 4: ASSERT - Delete"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
state: absent
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cable.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cable.yml
index 5728986c0..d6f0ab02e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cable.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cable.yml
@@ -10,7 +10,7 @@
- name: "CABLE 1: Necessary info creation"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -24,7 +24,7 @@
register: test_one
- name: "CABLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -38,7 +38,7 @@
- name: "CABLE 2: Create duplicate"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -52,7 +52,7 @@
register: test_two
- name: "CABLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cable']['termination_a_type'] == "dcim.interface"
@@ -64,7 +64,7 @@
- name: "CABLE 3: Update Cable with other fields"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -81,13 +81,13 @@
length: 30
length_unit: m
tags:
- - "Schnozzberry"
- tenant: "Test Tenant"
+ - Schnozzberry
+ tenant: Test Tenant
state: present
register: test_three
- name: "CABLE 3: ASSERT - Update Cable with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "mmf-om4"
@@ -115,7 +115,7 @@
- name: "CABLE 4: ASSERT - Delete"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -129,7 +129,7 @@
register: test_four
- name: "CABLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -139,7 +139,7 @@
- name: "CABLE 5: Connect Console Port and Console Server Port"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.consoleserverport
termination_a:
@@ -153,7 +153,7 @@
register: test_five
- name: "CABLE 5: ASSERT - Connect Console Port and Console Server Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -167,21 +167,21 @@
- name: "CABLE 6: Circuits Termination as side A"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: circuits.circuittermination
termination_a:
- circuit: "Test Circuit Two"
- term_side: "A"
+ circuit: Test Circuit Two
+ term_side: A
termination_b_type: dcim.interface
termination_b:
- device: "test100"
- name: "GigabitEthernet2"
+ device: test100
+ name: GigabitEthernet2
state: present
register: test_six
- name: "CABLE 6: ASSERT - Circuits Termination as side A"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit.yml
index 1d8f1ec47..8bfd265ac 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit.yml
@@ -7,7 +7,7 @@
- name: "NETBOX_CIRCUIT 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -16,7 +16,7 @@
register: test_one
- name: "NETBOX_CIRCUIT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "NETBOX_CIRCUIT 2: Duplicate"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -38,7 +38,7 @@
register: test_two
- name: "NETBOX_CIRCUIT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit']['cid'] == "Test Circuit One"
@@ -49,7 +49,7 @@
- name: "NETBOX_CIRCUIT 3: Update provider with other fields"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -59,12 +59,12 @@
install_date: "2018-12-25"
commit_rate: 10000
description: "Test circuit "
- comments: "FAST CIRCUIT"
+ comments: FAST CIRCUIT
state: present
register: test_three
- name: "NETBOX_CIRCUIT 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
@@ -87,14 +87,14 @@
- name: "NETBOX_CIRCUIT 4: Delete provider within netbox"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
state: absent
register: test_four
- name: "NETBOX_CIRCUIT 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['circuit']['cid'] == "Test Circuit One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml
index a37cdc680..cbc749f63 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_termination.yml
@@ -7,17 +7,17 @@
- name: "NETBOX_CIRCUIT_TERMINATION 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
- site: "Test Site"
+ site: Test Site
port_speed: 10000
state: present
register: test_one
- name: "NETBOX_CIRCUIT_TERMINATION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,7 +31,7 @@
- name: "NETBOX_CIRCUIT_TERMINATION 2: Duplicate"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
@@ -39,7 +39,7 @@
register: test_two
- name: "NETBOX_CIRCUIT_TERMINATION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit_termination']['circuit'] == 1
@@ -51,19 +51,19 @@
- name: "NETBOX_CIRCUIT_TERMINATION 3: Update provider with other fields"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
upstream_speed: 1000
xconnect_id: 10X100
pp_info: PP10-24
- description: "Test description"
+ description: Test description
state: present
register: test_three
- name: "NETBOX_CIRCUIT_TERMINATION 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['upstream_speed'] == 1000
@@ -83,17 +83,17 @@
- name: "NETBOX_CIRCUIT_TERMINATION 4: Create Z Side"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: Z
- site: "Test Site"
+ site: Test Site
port_speed: 10000
state: present
register: test_four
- name: "NETBOX_CIRCUIT_TERMINATION 4: ASSERT - Create Z Side"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,7 +107,7 @@
- name: "NETBOX_CIRCUIT_TERMINATION 5: Delete provider within netbox"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
@@ -115,7 +115,7 @@
register: test_five
- name: "NETBOX_CIRCUIT_TERMINATION 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['circuit_termination']['circuit'] == 1
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml
index 45f956e34..08dc67985 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_circuit_type.yml
@@ -7,14 +7,14 @@
- name: "CIRCUIT_TYPE 1: Necessary info creation"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: present
register: test_one
- name: "CIRCUIT_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CIRCUIT_TYPE 2: Create duplicate"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: present
register: test_two
- name: "CIRCUIT_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit_type']['name'] == "Test Circuit Type One"
@@ -43,15 +43,15 @@
- name: "CIRCUIT_TYPE 3: User specified slug"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type Two"
- slug: "test-circuit-type-2"
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
state: present
register: test_three
- name: "CIRCUIT_TYPE 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: absent
register: test_four
- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['circuit_type']['name'] == "Test Circuit Type One"
@@ -80,15 +80,15 @@
- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type Two"
- slug: "test-circuit-type-2"
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
state: absent
register: test_five
- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['circuit_type']['name'] == "Test Circuit Type Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster.yml
index 11ba5ef82..b2810a1be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster.yml
@@ -7,15 +7,15 @@
- name: "CLUSTER 1: Necessary info creation"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
state: present
register: test_one
- name: "CLUSTER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "CLUSTER 2: Create duplicate"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
state: present
register: test_two
- name: "CLUSTER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster']['name'] == "Test Cluster One"
@@ -45,21 +45,21 @@
- name: "CLUSTER 3: Update"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
- cluster_group: "Test Cluster Group"
- site: "Test Site"
- comments: "Updated cluster"
- tenant: "Test Tenant"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
+ cluster_group: Test Cluster Group
+ site: Test Site
+ comments: Updated cluster
+ tenant: Test Tenant
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "CLUSTER 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['group'] == 1
@@ -79,14 +79,14 @@
- name: "CLUSTER 4: ASSERT - Delete"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
+ name: Test Cluster One
state: absent
register: test_four
- name: "CLUSTER 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster']['name'] == "Test Cluster One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml
index 9f84518ce..64dda6c44 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_group.yml
@@ -7,14 +7,14 @@
- name: "CLUSTER_GROUP 1: Necessary info creation"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: present
register: test_one
- name: "CLUSTER_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CLUSTER_GROUP 2: Create duplicate"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: present
register: test_two
- name: "CLUSTER_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster_group']['name'] == "Test Cluster Group One"
@@ -43,15 +43,15 @@
- name: "CLUSTER_GROUP 3: User specified slug"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group Two"
- slug: "test-cluster-group-2"
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
state: present
register: test_three
- name: "CLUSTER_GROUP 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CLUSTER_GROUP 4: ASSERT - Delete"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: absent
register: test_four
- name: "CLUSTER_GROUP 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster_group']['name'] == "Test Cluster Group One"
@@ -80,15 +80,15 @@
- name: "CLUSTER_GROUP 5: ASSERT - Delete"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group Two"
- slug: "test-cluster-group-2"
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
state: absent
register: test_five
- name: "CLUSTER_GROUP 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['cluster_group']['name'] == "Test Cluster Group Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml
index 11c88206f..bc09ac8a6 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_cluster_type.yml
@@ -7,14 +7,14 @@
- name: "CLUSTER_TYPE 1: Necessary info creation"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: present
register: test_one
- name: "CLUSTER_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CLUSTER_TYPE 2: Create duplicate"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: present
register: test_two
- name: "CLUSTER_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster_type']['name'] == "Test Cluster Type One"
@@ -43,15 +43,15 @@
- name: "CLUSTER_TYPE 3: User specified slug"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type Two"
- slug: "test-cluster-type-2"
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
state: present
register: test_three
- name: "CLUSTER_TYPE 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CLUSTER_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: absent
register: test_four
- name: "CLUSTER_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster_type']['name'] == "Test Cluster Type One"
@@ -80,15 +80,15 @@
- name: "CLUSTER_TYPE 5: ASSERT - Delete"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type Two"
- slug: "test-cluster-type-2"
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
state: absent
register: test_five
- name: "CLUSTER_TYPE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['cluster_type']['name'] == "Test Cluster Type Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_config_context.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_config_context.yml
index 2353dfbfa..910ec82ca 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_config_context.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_config_context.yml
@@ -7,16 +7,16 @@
- name: "CONFIG_CONTEXT 1: Necessary info creation"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"testvaule\" ] } }"
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
state: present
register: test_one
- name: "CONFIG_CONTEXT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,16 +31,16 @@
- name: "CONFIG_CONTEXT 2: Create duplicate"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"testvaule\" ] } }"
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
state: present
register: test_two
- name: "CONFIG_CONTEXT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['config_context']['name'] == "test_context"
@@ -49,18 +49,18 @@
- name: "CONFIG_CONTEXT 3: Update data and attach to site"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Updated test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"updatedvaule\" ] } }"
+ name: test_context
+ description: Updated test context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
weight: 100
sites: [test-site]
state: present
register: test_three
- name: "CONFIG_CONTEXT 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['data'].testkey.testsubkey[0] == "updatedvaule"
@@ -73,16 +73,16 @@
- name: "CONFIG_CONTEXT 4: Detach from site"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"updatedvaule\" ] } }"
+ name: test_context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
sites: []
state: present
register: test_four
- name: "CONFIG_CONTEXT 4: ASSERT - Detached"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['sites']|length == 0
@@ -92,14 +92,14 @@
- name: "CONFIG_CONTEXT 5: Delete"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
+ name: test_context
state: absent
register: test_five
- name: "CONFIG_CONTEXT 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port.yml
index 1c97cb8ad..6be028bec 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_PORT 1: Necessary info creation"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_PORT 2: Create duplicate"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_port']['name'] == "Console Port"
@@ -48,7 +48,7 @@
- name: "CONSOLE_PORT 3: Update Console Port with other fields"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -58,7 +58,7 @@
register: test_three
- name: "CONSOLE_PORT 3: ASSERT - Update Console Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -72,7 +72,7 @@
- name: "CONSOLE_PORT 4: Create Console Port for Delete Test"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port 2
device: test100
@@ -80,7 +80,7 @@
register: test_four
- name: "CONSOLE_PORT 4: ASSERT - Create Console Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -92,7 +92,7 @@
- name: "CONSOLE_PORT 5: Delete Console Port"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port 2
device: test100
@@ -100,7 +100,7 @@
register: test_five
- name: "CONSOLE_PORT 5: ASSERT - Delete Console Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml
index b1bd1d1e4..ef6149214 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_port_template.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_port_template']['name'] == "Console Port Template"
@@ -48,7 +48,7 @@
- name: "CONSOLE_PORT_TEMPLATE 3: Update Console Port Template with other fields"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -57,7 +57,7 @@
register: test_three
- name: "CONSOLE_PORT_TEMPLATE 3: ASSERT - Update Console Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -69,7 +69,7 @@
- name: "CONSOLE_PORT_TEMPLATE 4: Create Console Port Template for Delete Test"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template 2
device_type: Cisco Test
@@ -77,7 +77,7 @@
register: test_four
- name: "CONSOLE_PORT_TEMPLATE 4: ASSERT - Create Console Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "CONSOLE_PORT_TEMPLATE 5: Delete Console Port Template"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template 2
device_type: Cisco Test
@@ -97,7 +97,7 @@
register: test_five
- name: "CONSOLE_PORT_TEMPLATE 5: ASSERT - Delete Console Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml
index f70345329..0ac7b65d0 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_SERVER_PORT 1: Necessary info creation"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_SERVER_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_SERVER_PORT 2: Create duplicate"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_SERVER_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_server_port']['name'] == "Console Server Port"
@@ -48,7 +48,7 @@
- name: "CONSOLE_SERVER_PORT 3: Update Console Server Port with other fields"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -58,7 +58,7 @@
register: test_three
- name: "CONSOLE_SERVER_PORT 3: ASSERT - Update Console Server Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -72,7 +72,7 @@
- name: "CONSOLE_SERVER_PORT 4: Create Console Server Port for Delete Test"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port 2
device: test100
@@ -80,7 +80,7 @@
register: test_four
- name: "CONSOLE_SERVER_PORT 4: ASSERT - Create Console Server Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -92,7 +92,7 @@
- name: "CONSOLE_SERVER_PORT 5: Delete Console Server Port"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port 2
device: test100
@@ -100,7 +100,7 @@
register: test_five
- name: "CONSOLE_SERVER_PORT 5: ASSERT - Delete Console Server Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml
index 4ad47d40a..4c7dfc846 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_console_server_port_template.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_server_port_template']['name'] == "Console Server Port Template"
@@ -48,7 +48,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: Update Console Server Port Template with other fields"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -57,7 +57,7 @@
register: test_three
- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: ASSERT - Update Console Server Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -69,7 +69,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: Create Console Server Port Template for Delete Test"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template 2
device_type: Cisco Test
@@ -77,7 +77,7 @@
register: test_four
- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: ASSERT - Create Console Server Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: Delete Console Server Port Template"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template 2
device_type: Cisco Test
@@ -97,7 +97,7 @@
register: test_five
- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: ASSERT - Delete Console Server Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact.yml
index feb9f7e9f..f5ede9c1a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact.yml
@@ -4,16 +4,16 @@
### NETBOX_CONTACT
##
##
-- name: "1 - Test contact creation"
+- name: 1 - Test contact creation
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
+ name: Contact ABC
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['contact']['name'] == "Contact ABC"
- test_one['msg'] == "contact Contact ABC created"
-- name: "Test duplicate contact"
+- name: Test duplicate contact
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
+ name: Contact ABC
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['contact']['name'] == "Contact ABC"
- test_two['msg'] == "contact Contact ABC already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- title: "New Title"
+ name: Contact ABC
+ title: New Title
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['title'] == "New Title"
@@ -54,41 +54,41 @@
- test_three['contact']['title'] == "New Title"
- test_three['msg'] == "contact Contact ABC updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- state: "absent"
+ name: Contact ABC
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "contact Contact ABC deleted"
-- name: "5 - Create contact with all parameters"
+- name: 5 - Create contact with all parameters
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- title: "Fancy title"
+ name: Contact ABC
+ title: Fancy title
phone: "12345678"
- email: "contact@contact.com"
+ email: contact@contact.com
tags:
- - "tagA"
- - "tagB"
- - "tagC"
+ - tagA
+ - tagB
+ - tagC
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact_role.yml
index fb875748e..c84703da9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_contact_role.yml
@@ -7,14 +7,14 @@
- name: "CONTACT_ROLE 1: Necessary info creation"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
+ name: Test Contact Role
state: present
register: test_one
- name: "CONTACT_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CONTACT_ROLE 2: Create duplicate"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
+ name: Test Contact Role
state: present
register: test_two
- name: "CONTACT ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['contact_role']['name'] == "Test Contact Role"
@@ -43,15 +43,15 @@
- name: "CONTACT_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
- description: "Update description"
+ name: Test Contact Role
+ description: Update description
state: present
register: test_three
- name: "IPAM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Update description"
@@ -63,14 +63,14 @@
- name: "CONTACT_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Contact Role
state: absent
register: test_four
- name: "CONTACT_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -80,14 +80,14 @@
- name: "CONTACT_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Contact Role
state: absent
register: test_five
- name: "CONTACT_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['contact_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_field.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_field.yml
index 0342c111f..a4ec68866 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_field.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_field.yml
@@ -7,17 +7,17 @@
- name: "CUSTOM_FIELD 1: Necessary info creation"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
type: text
state: present
register: test_one
- name: "CUSTOM_FIELD 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,16 +32,16 @@
- name: "CUSTOM_FIELD 2: Create duplicate"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
state: present
register: test_two
- name: "CUSTOM_FIELD 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['custom_field']['name'] == "A_CustomField"
@@ -50,18 +50,18 @@
- name: "CUSTOM_FIELD 3: Update data and make it required"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
- name: "A_CustomField"
- description: "Added a description"
- required: yes
+ - dcim.device
+ name: A_CustomField
+ description: Added a description
+ required: true
state: present
register: test_three
- name: "CUSTOM_FIELD 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Added a description"
@@ -72,18 +72,18 @@
- name: "CUSTOM_FIELD 4: Change content type"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
- name: "A_CustomField"
- description: "Added a description"
- required: yes
+ - virtualization.virtualmachine
+ name: A_CustomField
+ description: Added a description
+ required: true
state: present
register: test_four
- name: "CUSTOM_FIELD 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -93,14 +93,14 @@
- name: "CUSTOM_FIELD 5: Delete"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_CustomField"
+ name: A_CustomField
state: absent
register: test_five
- name: "CUSTOM_FIELD 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
@@ -110,10 +110,10 @@
- name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
type: text
ui_visibility: hidden-ifunset
@@ -121,7 +121,7 @@
register: test_six
- name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['custom_field']['name'] == "A_CustomField"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_link.yml
index 8376d357b..b9bbcc8a7 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_link.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_custom_link.yml
@@ -7,10 +7,10 @@
- name: "CUSTOM_LINK 1: Necessary info creation"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -18,7 +18,7 @@
register: test_one
- name: "CUSTOM_LINK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,10 +31,10 @@
- name: "CUSTOM_LINK 2: Create duplicate"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -42,7 +42,7 @@
register: test_two
- name: "CUSTOM_LINK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['custom_link']['name'] == "Custom Link"
@@ -51,10 +51,10 @@
- name: "CUSTOM_FIELD 3: Update data and add weight"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -63,7 +63,7 @@
register: test_three
- name: "CUSTOM_FIELD 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['weight'] == 50
@@ -73,10 +73,10 @@
- name: "CUSTOM_LINK 4: Change content type"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -84,7 +84,7 @@
register: test_four
- name: "CUSTOM_LINK 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -94,10 +94,10 @@
- name: "CUSTOM_LINK 5: Delete"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -105,7 +105,7 @@
register: test_five
- name: "CUSTOM_LINK 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device.yml
index 5a791b242..a3a5d8308 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device.yml
@@ -4,22 +4,22 @@
### NETBOX_DEVICE
##
##
-- name: "1 - Device with required information"
+- name: 1 - Device with required information
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
+ name: R1
device_type:
id: "1"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == 'absent'
@@ -32,21 +32,21 @@
- test_one['device']['name'] == "R1"
- test_one['msg'] == "device R1 created"
-- name: "2 - Duplicate device"
+- name: 2 - Duplicate device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device']['name'] == "R1"
@@ -56,24 +56,24 @@
- test_two['device']['status'] == "staged"
- test_two['msg'] == "device R1 already exists"
-- name: "3 - Update device"
+- name: 3 - Update device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- serial: "FXS1001"
+ name: R1
+ serial: FXS1001
local_context_data:
bgp_as: "65412"
- virtual_chassis: "VC1"
+ virtual_chassis: VC1
vc_position: 3
vc_priority: 15
- location: "Test Rack Group"
+ location: Test Rack Group
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['serial'] == "FXS1001"
@@ -94,20 +94,20 @@
- test_three['device']['location'] == 1
- test_three['msg'] == "device R1 updated"
-- name: "3.1 - Update device name using query_params"
+- name: 3.1 - Update device name using query_params
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1-changed-name"
- serial: "FXS1001"
+ name: R1-changed-name
+ serial: FXS1001
query_params:
- serial
state: present
register: test_three_dot_one
-- name: "3.1 - ASSERT"
- assert:
+- name: 3.1 - ASSERT
+ ansible.builtin.assert:
that:
- test_three_dot_one is changed
- test_three_dot_one['diff']['after']['name'] == "R1-changed-name"
@@ -119,27 +119,27 @@
- test_three_dot_one['device']['local_context_data']["bgp_as"] == "65412"
- test_three_dot_one['msg'] == "device R1-changed-name updated"
-- name: "4 - Create device with tags and assign to rack"
+- name: 4 - Create device with tags and assign to rack
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "TestR1"
+ name: TestR1
device_type: "1841"
- device_role: "Core Switch"
- site: "Test Site2"
- rack: "Test Rack Site 2"
+ device_role: Core Switch
+ site: Test Site2
+ rack: Test Rack Site 2
position: 35.5
- face: "Front"
+ face: Front
tags:
- - "schnozzberry"
- tenant: "Test Tenant"
+ - schnozzberry
+ tenant: Test Tenant
asset_tag: "1234"
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -155,53 +155,53 @@
- test_four['device']['asset_tag'] == '1234'
- test_four['msg'] == "device TestR1 created"
-- name: "5 - Delete previous device"
+- name: 5 - Delete previous device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "TestR1"
+ name: TestR1
state: absent
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
- test_five['diff']['after']['state'] == "absent"
- test_five['msg'] == "device TestR1 deleted"
-- name: "6 - Delete R1"
+- name: 6 - Delete R1
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1-changed-name"
+ name: R1-changed-name
state: absent
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "present"
- test_six['diff']['after']['state'] == "absent"
- test_six['msg'] == "device R1-changed-name deleted"
-- name: "7 - Add primary_ip4/6 to test100"
+- name: 7 - Add primary_ip4/6 to test100
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test100"
- primary_ip4: "172.16.180.1/24"
- primary_ip6: "2001::1:1/64"
+ name: test100
+ primary_ip4: 172.16.180.1/24
+ primary_ip6: 2001::1:1/64
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['after']['primary_ip4'] == 1
@@ -215,22 +215,22 @@
- test_seven['device']['primary_ip6'] == 2
- test_seven['msg'] == "device test100 updated"
-- name: "8 - Device with empty string name"
+- name: 8 - Device with empty string name
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: ""
device_type:
id: 1
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == 'absent'
@@ -240,4 +240,4 @@
- test_eight['device']['site'] == 1
- test_eight['device']['status'] == "staged"
- "'-' in test_eight['device']['name']"
- - "test_eight['device']['name'] | length == 36"
+ - test_eight['device']['name'] | length == 36
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay.yml
index 18a804354..b892d2160 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay.yml
@@ -7,15 +7,15 @@
- name: "DEVICE_BAY 1: Necessary info creation"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
+ device: Test Nexus One
+ name: Device Bay One
state: present
register: test_one
- name: "DEVICE_BAY 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "DEVICE_BAY 2: Create duplicate"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
+ device: Test Nexus One
+ name: Device Bay One
state: present
register: test_two
- name: "DEVICE_BAY 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_bay']['name'] == "Device Bay One"
@@ -45,18 +45,18 @@
- name: "DEVICE_BAY 3: ASSERT - Update"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
- installed_device: "Test Nexus Child One"
+ device: Test Nexus One
+ name: Device Bay One
+ installed_device: Test Nexus Child One
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "DEVICE_BAY 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['installed_device'] == 5
@@ -70,14 +70,14 @@
- name: "DEVICE_BAY 4: ASSERT - Delete"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Device Bay One"
+ name: Device Bay One
state: absent
register: test_four
- name: "DEVICE_BAY 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['device_bay']['name'] == "Device Bay One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml
index 4b6965033..c2ba1e32c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_bay_template.yml
@@ -7,7 +7,7 @@
- name: "DEVICE_BAY_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template One
@@ -15,7 +15,7 @@
register: test_one
- name: "DEVICE_BAY_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "DEVICE_BAY_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template One
@@ -35,7 +35,7 @@
register: test_two
- name: "DEVICE_BAY_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_bay_template']['name'] == "Device Bay Template One"
@@ -45,7 +45,7 @@
- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Create Device Bay Template for Delete Test"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template Two
@@ -53,7 +53,7 @@
register: test_three
- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -65,7 +65,7 @@
- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Bay Template Two
device_type: WS Test 3850
@@ -73,7 +73,7 @@
register: test_four
- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['device_bay_template']['name'] == "Device Bay Template Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface.yml
index d8e187d65..57732ef4f 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface.yml
@@ -4,18 +4,18 @@
### NETBOX_DEVICE_INTERFACE
##
##
-- name: "1 - Interface with required information"
+- name: 1 - Interface with required information
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet3
- type: "1000Base-T (1GE)"
+ type: 1000Base-T (1GE)
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['msg'] == "interface GigabitEthernet3 created"
@@ -24,9 +24,9 @@
- test_one['interface']['name'] == "GigabitEthernet3"
- test_one['interface']['device'] == 1
-- name: "2 - Update test100 - GigabitEthernet3"
+- name: 2 - Update test100 - GigabitEthernet3
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -35,8 +35,8 @@
enabled: false
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "interface GigabitEthernet3 updated"
@@ -47,9 +47,9 @@
- test_two['interface']['enabled'] == false
- test_two['interface']['mtu'] == 1600
-- name: "3 - Delete interface test100 - GigabitEthernet3"
+- name: 3 - Delete interface test100 - GigabitEthernet3
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -57,17 +57,17 @@
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['msg'] == "interface GigabitEthernet3 deleted"
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
-- name: "4 - Create LAG with several specified options"
+- name: 4 - Create LAG with several specified options
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -79,8 +79,8 @@
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['msg'] == "interface port-channel1 created"
@@ -94,9 +94,9 @@
- test_four['interface']['mode'] == "access"
- test_four['interface']['mtu'] == 1600
-- name: "5 - Create interface and assign it to parent LAG"
+- name: 5 - Create interface and assign it to parent LAG
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -111,8 +111,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['msg'] == "interface GigabitEthernet3 created"
@@ -127,9 +127,9 @@
- test_five['interface']['mode'] == "access"
- test_five['interface']['mtu'] == 1600
-- name: "6 - Create interface as trunk port"
+- name: 6 - Create interface as trunk port
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -150,8 +150,8 @@
state: present
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['msg'] == "interface GigabitEthernet21 created"
@@ -167,50 +167,50 @@
- test_six['interface']['tagged_vlans'] == [2, 3]
- test_six['interface']['untagged_vlan'] == 1
-- name: "7 - Duplicate Interface"
+- name: 7 - Duplicate Interface
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet1
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- not test_seven['changed']
- test_seven['msg'] == "interface GigabitEthernet1 already exists"
- test_seven['interface']['name'] == "GigabitEthernet1"
- test_seven['interface']['device'] == 1
-- name: "Add port-channel1 to R1 to test finding proper port-channel1"
+- name: Add port-channel1 to R1 to test finding proper port-channel1
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "R1-Device"
- name: "port-channel1"
- type: "Link Aggregation Group (LAG)"
+ device: R1-Device
+ name: port-channel1
+ type: Link Aggregation Group (LAG)
-- name: "8 - Create interface and assign it to parent LAG - non dict"
+- name: 8 - Create interface and assign it to parent LAG - non dict
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet4
enabled: false
type: 1000Base-T (1GE)
- lag: "port-channel1"
+ lag: port-channel1
mtu: 1600
mgmt_only: false
mode: Access
state: present
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['msg'] == "interface GigabitEthernet4 created"
@@ -225,9 +225,9 @@
- test_eight['interface']['mode'] == "access"
- test_eight['interface']['mtu'] == 1600
-- name: "9 - Create interface on VC child"
+- name: 9 - Create interface on VC child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus Child One
@@ -236,8 +236,8 @@
state: present
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['msg'] == "interface Ethernet2/2 created"
@@ -248,20 +248,20 @@
- test_nine['interface']['enabled'] == true
- test_nine['interface']['type'] == "1000base-t"
-- name: "10 - Update interface on VC child"
+- name: 10 - Update interface on VC child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus One
name: Ethernet2/2
- description: "Updated child interface from parent device"
- update_vc_child: True
+ description: Updated child interface from parent device
+ update_vc_child: true
state: present
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['msg'] == "interface Ethernet2/2 updated"
@@ -272,27 +272,27 @@
- test_ten['interface']['type'] == "1000base-t"
- test_ten['interface']['description'] == 'Updated child interface from parent device'
-- name: "11 - Update interface on VC child w/o update_vc_child"
+- name: 11 - Update interface on VC child w/o update_vc_child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus One
name: Ethernet2/2
- description: "Updated child interface from parent device - test"
+ description: Updated child interface from parent device - test
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is failed
- test_eleven['msg'] == "Must set update_vc_child to True to allow child device interface modification"
-- name: "12 - Create interface and mark it as connected"
+- name: 12 - Create interface and mark it as connected
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -301,8 +301,8 @@
mark_connected: true
register: test_twelve
-- name: "12- ASSERT"
- assert:
+- name: 12- ASSERT
+ ansible.builtin.assert:
that:
- test_twelve is changed
- test_twelve['msg'] == "interface GigabitEthernet5 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml
index b6e98914f..abe8f3405 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_interface_template.yml
@@ -4,9 +4,9 @@
### NETBOX_DEVICE_INTERFACE_TEMPLATE
##
##
-- name: "1 - Interface with required information"
+- name: 1 - Interface with required information
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -14,8 +14,8 @@
type: 10gbase-t
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['msg'] == "interface_template 10GBASE-T (10GE) created"
@@ -25,9 +25,9 @@
- test_one['interface_template']['device_type'] == 2
- test_one['interface_template']['type'] == '10gbase-t'
-- name: "2 - Update 10GBASE-T (10GE)"
+- name: 2 - Update 10GBASE-T (10GE)
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -36,8 +36,8 @@
mgmt_only: true
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "interface_template 10GBASE-T (10GE) updated"
@@ -46,9 +46,9 @@
- test_two['interface_template']['device_type'] == 2
- test_two['interface_template']['mgmt_only'] == true
-- name: "3 - Delete interface template 10GBASE-T (10GE)"
+- name: 3 - Delete interface template 10GBASE-T (10GE)
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -57,17 +57,17 @@
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['msg'] == "interface_template 10GBASE-T (10GE) deleted"
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
-- name: "4 - Create LAG with several specified options"
+- name: 4 - Create LAG with several specified options
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -77,8 +77,8 @@
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['msg'] == "interface_template port channel template created"
@@ -89,9 +89,9 @@
- test_four['interface_template']['type'] == "lag"
- test_four['interface_template']['mgmt_only'] == false
-- name: "5 - Duplicate Interface Template port channel template"
+- name: 5 - Duplicate Interface Template port channel template
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -99,8 +99,8 @@
type: lag
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['msg'] == "interface_template port channel template already exists"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_role.yml
index 64ec9840f..b85ee2c86 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_role.yml
@@ -7,15 +7,15 @@
- name: "DEVICE_ROLE 1: Necessary info creation"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
- color: "FFFFFF"
+ name: Test Device Role
+ color: FFFFFF
state: present
register: test_one
- name: "DEVICE_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,15 +28,15 @@
- name: "DEVICE_ROLE 2: Create duplicate"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
- color: "FFFFFF"
+ name: Test Device Role
+ color: FFFFFF
state: present
register: test_two
- name: "DEVICE_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_role']['name'] == "Test Device Role"
@@ -47,16 +47,16 @@
- name: "DEVICE_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
+ name: Test Device Role
color: "003EFF"
vm_role: false
state: present
register: test_three
- name: "DEVICE_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -70,14 +70,14 @@
- name: "DEVICE_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Device Role
state: absent
register: test_four
- name: "DEVICE_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -87,14 +87,14 @@
- name: "DEVICE_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Device Role
state: absent
register: test_five
- name: "DEVICE_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['device_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_type.yml
index 5587119f3..3027f6fd1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_device_type.yml
@@ -7,7 +7,7 @@
- name: "DEVICE_TYPE 1: Necessary info creation"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
model: ws-test-3750
@@ -16,7 +16,7 @@
register: test_one
- name: "DEVICE_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,16 +29,16 @@
- name: "DEVICE_TYPE 2: Create duplicate"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
- model: "ws-test-3750"
+ model: ws-test-3750
manufacturer: Test Manufacturer
state: present
register: test_two
- name: "DEVICE_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_one['device_type']['slug'] == "test-device-type"
@@ -49,7 +49,7 @@
- name: "DEVICE_TYPE 3: ASSERT - Update"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
model: ws-test-3750
@@ -62,7 +62,7 @@
register: test_three
- name: "DEVICE_TYPE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['is_full_depth'] == false
@@ -79,14 +79,14 @@
- name: "DEVICE_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: test-device-type
state: absent
register: test_four
- name: "DEVICE_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -96,14 +96,14 @@
- name: "DEVICE_TYPE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "Test Device Type"
+ model: Test Device Type
state: absent
register: test_five
- name: "DEVICE_TYPE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['device_type'] == None
@@ -112,16 +112,16 @@
- name: "DEVICE_TYPE 6: Without Slug"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "WS Test 3850"
- manufacturer: "Test Manufacturer"
- subdevice_role: "parent"
+ model: WS Test 3850
+ manufacturer: Test Manufacturer
+ subdevice_role: parent
state: present
register: test_six
- name: "DEVICE_TYPE 6: ASSERT - Without Slug"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_export_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_export_template.yml
index b3dcea015..ffdc0424f 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_export_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_export_template.yml
@@ -7,10 +7,10 @@
- name: "EXPORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -19,7 +19,7 @@
register: test_one
- name: "EXPORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,10 +32,10 @@
- name: "EXPORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -44,7 +44,7 @@
register: test_two
- name: "EXPORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['export_template']['name'] == "Example Export Template"
@@ -53,20 +53,20 @@
- name: "EXPORT_TEMPLATE 3: Update data and remove as_attachment"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
{% for obj in queryset %}{{ obj.name }}{% endfor %}
- as_attachment: no
+ as_attachment: false
state: present
register: test_three
- name: "EXPORT_TEMPLATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['as_attachment'] == false
@@ -76,10 +76,10 @@
- name: "EXPORT_TEMPLATE 4: Change content type"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -88,7 +88,7 @@
register: test_four
- name: "EXPORT_TEMPLATE 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -98,10 +98,10 @@
- name: "EXPORT_TEMPLATE 5: Delete"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -110,7 +110,7 @@
register: test_five
- name: "EXPORT_TEMPLATE 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
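One detail worth flagging in this file: the Jinja payload stays behind an !unsafe folded scalar so Ansible does not template it before NetBox sees it, while the truthy "as_attachment: no" is normalized to false and list items lose their redundant quotes. A trimmed sketch of the updated task, using the same fixture URL and token as above:

  - name: "EXPORT_TEMPLATE 3: Update data and remove as_attachment"
    netbox.netbox.netbox_export_template:
      netbox_url: http://localhost:32768
      netbox_token: "0123456789abcdef0123456789abcdef01234567"
      data:
        content_types:
          - dcim.device
        name: Example Export Template
        description: Export Devices
        template_code: !unsafe >-
          {% for obj in queryset %}{{ obj.name }}{% endfor %}
        as_attachment: false
      state: present
    register: test_three
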
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml
index 91788ef02..eed3f2f88 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group.yml
@@ -7,9 +7,9 @@
- name: "FHRP group 1: Test FHRP group creation"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
state: present
register: test_one
@@ -27,9 +27,9 @@
- name: "FHRP group 2: Create duplicate"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
state: present
register: test_two
@@ -45,15 +45,15 @@
- name: "FHRP group 3: Update FHRP group with other fields"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
auth_type: md5
auth_key: 11111
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -76,7 +76,7 @@
- name: "FHRP group 4: ASSERT - Delete"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
group_id: 111
state: absent
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml
index 27f20c293..2dd2f6d61 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_fhrp_group_assignment.yml
@@ -7,7 +7,7 @@
- name: "FHRP group assignment 1: Test FHRP group assignment creation"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -31,7 +31,7 @@
- name: "FHRP group assignment 2: Create duplicate"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -53,7 +53,7 @@
- name: "FHRP group assignment 3: Update FHRP group assignment"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -75,7 +75,7 @@
- name: "FHRP group assignment 4: Delete FHRP group assignment"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port.yml
index 1f4c60a4d..7088bbeef 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port.yml
@@ -10,7 +10,7 @@
- name: "FRONT_PORT 1: Necessary info creation"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -20,7 +20,7 @@
register: test_one
- name: "FRONT_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,7 +34,7 @@
- name: "FRONT_PORT 2: Create duplicate"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -44,7 +44,7 @@
register: test_two
- name: "FRONT_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['front_port']['name'] == "Front Port"
@@ -56,7 +56,7 @@
- name: "FRONT_PORT 3: Update Front Port with other fields"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -68,7 +68,7 @@
register: test_three
- name: "FRONT_PORT 3: ASSERT - Update Front Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['rear_port_position'] == 5
@@ -84,7 +84,7 @@
- name: "FRONT_PORT 4: Create Front Port for Delete Test"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port 2
device: test100
@@ -94,7 +94,7 @@
register: test_four
- name: "FRONT_PORT 4: ASSERT - Create Front Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -108,7 +108,7 @@
- name: "FRONT_PORT 5: Delete Front Port"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port 2
device: test100
@@ -118,7 +118,7 @@
register: test_five
- name: "FRONT_PORT 5: ASSERT - Delete Front Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -128,7 +128,7 @@
- name: "FRONT_PORT 6: Create duplicate with rear_port dictionary"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -140,7 +140,7 @@
register: test_six
- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port dictionary"
- assert:
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['front_port']['name'] == "Front Port"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml
index 30e907100..966279fa8 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_front_port_template.yml
@@ -10,7 +10,7 @@
- name: "FRONT_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -20,7 +20,7 @@
register: test_one
- name: "FRONT_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,7 +34,7 @@
- name: "FRONT_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -44,7 +44,7 @@
register: test_two
- name: "FRONT_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['front_port_template']['name'] == "Front Port Template"
@@ -56,7 +56,7 @@
- name: "FRONT_PORT_TEMPLATE 3: Update Front Port Template with other fields"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -67,7 +67,7 @@
register: test_three
- name: "FRONT_PORT_TEMPLATE 3: ASSERT - Update Front Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['rear_port_position'] == 5
@@ -81,7 +81,7 @@
- name: "FRONT_PORT_TEMPLATE 4: Create Front Port Template for Delete Test"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template 2
device_type: Cisco Test
@@ -91,7 +91,7 @@
register: test_four
- name: "FRONT_PORT_TEMPLATE 4: ASSERT - Create Front Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -105,7 +105,7 @@
- name: "FRONT_PORT_TEMPLATE 5: Delete Front Port Template"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template 2
device_type: Cisco Test
@@ -115,7 +115,7 @@
register: test_five
- name: "FRONT_PORT_TEMPLATE 5: ASSERT - Delete Front Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -125,7 +125,7 @@
- name: "FRONT_PORT 6: Create duplicate with rear_port_template dictionary"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -137,7 +137,7 @@
register: test_six
- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port_template dictionary"
- assert:
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['front_port_template']['name'] == "Front Port Template"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml
index cc038ac94..5b330c3a5 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item.yml
@@ -7,10 +7,10 @@
- name: "INVENTORY_ITEM 1: Necessary info creation"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: present
register: test_one
@@ -27,10 +27,10 @@
- name: "INVENTORY_ITEM 2: Create duplicate"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: present
register: test_two
@@ -45,18 +45,18 @@
- name: "INVENTORY_ITEM 3: Update properties"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
- manufacturer: "Cisco"
- part_id: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
+ manufacturer: Cisco
+ part_id: 10G-SFP+
serial: "1234"
asset_tag: "1234"
- description: "New SFP"
- discovered: True
+ description: New SFP
+ discovered: true
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -85,10 +85,10 @@
- name: "INVENTORY_ITEM 4: Delete inventory item"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: absent
register: test_four
@@ -109,7 +109,7 @@
- name: "INVENTORY_ITEM 5: PREWORK - Create inventory item role"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Processor
color: FFFFFF
@@ -119,7 +119,7 @@
- name: "INVENTORY_ITEM 5: Create inventory item with role"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_processor
@@ -141,7 +141,7 @@
- name: "INVENTORY_ITEM 6: Create inventory item with missing role"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_processor
@@ -159,14 +159,14 @@
- name: "INVENTORY_ITEM 7: Create inventory item with component"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_component
- component_type: "dcim.interface"
+ component_type: dcim.interface
component:
name: GigabitEthernet2
- device: "test100"
+ device: test100
state: present
register: test_seven
@@ -185,13 +185,13 @@
- name: "INVENTORY_ITEM 8: Create inventory item with missing component_type"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_component
component:
name: GigabitEthernet2
- device: "test100"
+ device: test100
state: present
ignore_errors: true
register: test_eight
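For readers following the component hunks: component_type carries the NetBox content-type string and component is a lookup dict the module resolves to that object, so the updated task reads roughly as:

  - name: "INVENTORY_ITEM 7: Create inventory item with component"
    netbox.netbox.netbox_inventory_item:
      netbox_url: http://localhost:32768
      netbox_token: "0123456789abcdef0123456789abcdef01234567"
      data:
        device: test100
        name: test_component
        component_type: dcim.interface
        component:
          name: GigabitEthernet2
          device: test100
      state: present
    register: test_seven
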
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml
index 1c2de3f03..71d86d747 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_inventory_item_role.yml
@@ -7,15 +7,15 @@
- name: "INVENTORY_ITEM_ROLE 1: Necessary info creation"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
- color: "FFFFFF"
+ name: Test Inventory Item Role
+ color: FFFFFF
state: present
register: test_one
- name: "INVENTORY_ITEM_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,15 +28,15 @@
- name: "INVENTORY_ITEM_ROLE 2: Create duplicate"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
- color: "FFFFFF"
+ name: Test Inventory Item Role
+ color: FFFFFF
state: present
register: test_two
- name: "INVENTORY_ITEM_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['inventory_item_role']['name'] == "Test Inventory Item Role"
@@ -47,15 +47,15 @@
- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
+ name: Test Inventory Item Role
color: "003EFF"
state: present
register: test_three
- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -67,14 +67,14 @@
- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Inventory Item Role
state: absent
register: test_four
- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -84,14 +84,14 @@
- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Inventory Item Role
state: absent
register: test_five
- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['inventory_item_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ip_address.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ip_address.yml
index 3cda56d3c..ce2b125a4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ip_address.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ip_address.yml
@@ -7,14 +7,14 @@
- name: "1 - Create IP address within NetBox with only required information - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,20 +22,20 @@
- test_one['msg'] == "ip_address 192.168.1.10/30 created"
- test_one['ip_address']['address'] == "192.168.1.10/30"
-- name: "2 - Update 192.168.1.10/30"
+- name: 2 - Update 192.168.1.10/30
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
- description: "Updated ip address"
+ description: Updated ip address
tags:
- - "Updated"
+ - Updated
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['diff']['after']['description'] == "Updated ip address"
@@ -48,14 +48,14 @@
- name: "3 - Delete IP - 192.168.1.10 - State: Absent"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
@@ -65,14 +65,14 @@
- name: "4 - Create IP in global VRF - 192.168.1.20/30 - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.20/30
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -83,14 +83,14 @@
- name: "5 - Create IP in global VRF - 192.168.1.20/30 - State: New"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.20/30
state: new
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -101,14 +101,14 @@
- name: "6 - Create new address with only prefix specified - State: new"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 192.168.100.0/24
state: new
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
@@ -116,10 +116,10 @@
- test_six['msg'] == "ip_address 192.168.100.1/24 created"
- test_six['ip_address']['address'] == "192.168.100.1/24"
-- name: "7 - Create IP address with several specified"
+- name: 7 - Create IP address with several specified
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 172.16.1.20/24
@@ -129,12 +129,12 @@
role: Loopback
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "absent"
@@ -149,10 +149,10 @@
- test_seven['ip_address']['tenant'] == 1
- test_seven['ip_address']['vrf'] == 1
-- name: "8 - Create IP address and assign a nat_inside IP"
+- name: 8 - Create IP address and assign a nat_inside IP
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.10.1.30/16
@@ -162,8 +162,8 @@
vrf: Test VRF
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "absent"
@@ -177,7 +177,7 @@
- name: "9 - Create IP address on GigabitEthernet2 - test100 - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.10.200.30/16
@@ -186,8 +186,8 @@
device: test100
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "absent"
@@ -201,7 +201,7 @@
- name: "10 - Create IP address on GigabitEthernet2 - test100 - State: new"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 10.10.0.0/16
@@ -211,8 +211,8 @@
state: new
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['diff']['before']['state'] == "absent"
@@ -226,7 +226,7 @@
- name: "11 - Create IP address on GigabitEthernet2 - test100 - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 192.168.100.0/24
@@ -236,8 +236,8 @@
state: present
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is changed
- test_eleven['diff']['before']['state'] == "absent"
@@ -245,10 +245,10 @@
- test_eleven['msg'] == "ip_address 192.168.100.2/24 created"
- test_eleven['ip_address']['address'] == "192.168.100.2/24"
-- name: "12 - Duplicate - 192.168.100.2/24 on interface"
+- name: 12 - Duplicate - 192.168.100.2/24 on interface
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.100.2/24
assigned_object:
@@ -257,8 +257,8 @@
state: present
register: test_twelve
-- name: "12 - ASSERT"
- assert:
+- name: 12 - ASSERT
+ ansible.builtin.assert:
that:
- not test_twelve['changed']
- test_twelve['msg'] == "ip_address 192.168.100.2/24 already exists"
@@ -266,17 +266,17 @@
- test_twelve['ip_address']['assigned_object_type'] == "dcim.interface"
- test_twelve['ip_address']['assigned_object_id'] == 4
-- name: "13 - Duplicate - 192.168.100.2/24"
+- name: 13 - Duplicate - 192.168.100.2/24
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.100.2/24
state: present
register: test_thirteen
-- name: "13 - ASSERT"
- assert:
+- name: 13 - ASSERT
+ ansible.builtin.assert:
that:
- not test_thirteen['changed']
- test_thirteen['msg'] == "ip_address 192.168.100.2/24 already exists"
@@ -285,7 +285,7 @@
- name: "14 - Create IP address on Eth0 - test100-vm - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.188.1.100/24
@@ -294,8 +294,8 @@
virtual_machine: test100-vm
register: test_fourteen
-- name: "14 - ASSERT"
- assert:
+- name: 14 - ASSERT
+ ansible.builtin.assert:
that:
- test_fourteen is changed
- test_fourteen['diff']['before']['state'] == "absent"
@@ -309,18 +309,18 @@
- name: "15 - Create same IP address on Eth0 - test101-vm - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.188.1.100/24
assigned_object:
name: Eth0
virtual_machine: test101-vm
- state: "present"
+ state: present
register: test_fifteen
-- name: "15 - ASSERT"
- assert:
+- name: 15 - ASSERT
+ ansible.builtin.assert:
that:
- test_fifteen is changed
- test_fifteen['diff']['before']['state'] == "absent"
@@ -334,14 +334,14 @@
- name: "16 - Create IP address with no mask - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.120.10.1
state: present
register: test_sixteen
-- name: "16 - ASSERT"
- assert:
+- name: 16 - ASSERT
+ ansible.builtin.assert:
that:
- test_sixteen is changed
- test_sixteen['diff']['before']['state'] == "absent"
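A reading aid for the renamed tasks above: with state new the module allocates the next free address from the given prefix (test six ends up with 192.168.100.1/24), while state present is idempotent against an explicit address (the later duplicate tasks report "already exists"). A sketch of the two shapes after this cleanup, same fixtures as above:

  - name: "6 - Create new address with only prefix specified - State: new"
    netbox.netbox.netbox_ip_address:
      netbox_url: http://localhost:32768
      netbox_token: "0123456789abcdef0123456789abcdef01234567"
      data:
        prefix: 192.168.100.0/24
      state: new
    register: test_six

  - name: 13 - Duplicate - 192.168.100.2/24
    netbox.netbox.netbox_ip_address:
      netbox_url: http://localhost:32768
      netbox_token: "0123456789abcdef0123456789abcdef01234567"
      data:
        address: 192.168.100.2/24
      state: present
    register: test_thirteen
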
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml
index 1f1ff0f36..9f376c2ed 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_ipam_role.yml
@@ -7,14 +7,14 @@
- name: "IPAM_ROLE 1: Necessary info creation"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
state: present
register: test_one
- name: "IPAM_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "IPAM_ROLE 2: Create duplicate"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
state: present
register: test_two
- name: "IPAM_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['role']['name'] == "Test IPAM Role"
@@ -43,15 +43,15 @@
- name: "IPAM_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
weight: 4096
state: present
register: test_three
- name: "IPAM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['weight'] == 4096
@@ -63,14 +63,14 @@
- name: "IPAM_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test IPAM Role
state: absent
register: test_four
- name: "IPAM_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -80,14 +80,14 @@
- name: "IPAM_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test IPAM Role
state: absent
register: test_five
- name: "IPAM_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml
index d65e64cb8..7eead8db1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_journal_entry.yml
@@ -7,7 +7,7 @@
- name: "JOURNAL ENTRY 1: Creation"
netbox.netbox.netbox_journal_entry:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
assigned_object_type: dcim.device
assigned_object_id: 1
@@ -17,7 +17,7 @@
register: test_one
- name: "JOURNAL_ENTRY 1: ASSERT - Creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml
index 292badb93..655be9455 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn.yml
@@ -7,7 +7,7 @@
- name: "L2VPN 1: Necessary info creation"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test L2VPN
type: vxlan
@@ -15,7 +15,7 @@
register: test_one
- name: "L2VPN 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "L2VPN 2: Create duplicate"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test L2VPN
type: vxlan
@@ -35,7 +35,7 @@
register: test_two
- name: "L2VPN 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['l2vpn']['name'] == "Test L2VPN"
@@ -45,24 +45,24 @@
- name: "L2VPN 4: ASSERT - Update"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test L2VPN"
+ name: Test L2VPN
type: vxlan
- tenant: "Test Tenant"
+ tenant: Test Tenant
description: Updated description
import_targets:
- - "4000:4000"
- - "5000:5000"
+ - 4000:4000
+ - 5000:5000
export_targets:
- - "6000:6000"
+ - 6000:6000
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "L2VPN: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['description'] == "Updated description"
@@ -80,15 +80,15 @@
- name: "L2VPN: ASSERT - Delete"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test L2VPN"
+ name: Test L2VPN
type: vxlan
state: absent
register: test_six
- name: "L2VPN 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['l2vpn']['name'] == "Test L2VPN"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml
index a4560f407..0dd539943 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_l2vpn_termination.yml
@@ -7,7 +7,7 @@
- name: "L2VPN_TERMINATION 1: Necessary info creation"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
@@ -29,7 +29,7 @@
- name: "L2VPN_TERMINATION 2: Create duplicate"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
@@ -49,13 +49,13 @@
- name: "L2VPN_TERMINATION 3: Update"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
assigned_object_id: 1
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -73,7 +73,7 @@
- name: "L2VPN_TERMINATION 4: Delete"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_location.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_location.yml
index f2f7bb9db..2e37c7682 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_location.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_location.yml
@@ -7,7 +7,7 @@
- name: "LOCATION 1: Necessary info creation"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
site: Test Site
@@ -15,7 +15,7 @@
register: test_one
- name: "LOCATION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,7 +28,7 @@
- name: "LOCATION 2: Create duplicate"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
site: Test Site
@@ -36,7 +36,7 @@
register: test_two
- name: "LOCATION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['location']['name'] == "Location"
@@ -47,7 +47,7 @@
- name: "LOCATION 3: Update"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
parent_location: Parent Rack Group
@@ -56,7 +56,7 @@
register: test_three
- name: "LOCATION 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 2
@@ -70,14 +70,14 @@
- name: "LOCATION 4: Delete"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
state: absent
register: test_four
- name: "LOCATION 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_lookup.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_lookup.yml
index 277454fcd..3d54e02b1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_lookup.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_lookup.yml
@@ -5,79 +5,86 @@
##
##
- name: "NETBOX_LOOKUP 1: Lookup returns exactly two sites"
- assert:
+ ansible.builtin.assert:
that: query_result == "3"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | count }}"
- name: "NETBOX_LOOKUP 2: Query doesn't return Wibble (sanity check json_query)"
- assert:
+ ansible.builtin.assert:
that: query_result == "0"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`Wibble`]')|count}}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`Wibble`]')
+ | count }}"
- name: "NETBOX_LOOKUP 3: Device query returns exactly one TestDeviceR1"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`TestDeviceR1`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`TestDeviceR1`]')
+ | count }}"
- name: "NETBOX_LOOKUP 4: VLAN ID 400 can be queried and is named 'Test VLAN'"
- assert:
+ ansible.builtin.assert:
that: query_result == 'Test VLAN'
vars:
- query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.vid==`400`].value.name'))[0] }}"
+ query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.vid==`400`].value.name'))[0]
+ }}"
- name: "NETBOX_LOOKUP 5: Add one of two devices for lookup filter test."
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "L1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: L1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- - "nolookup"
+ - nolookup
state: present
- name: "NETBOX_LOOKUP 6: Add two of two devices for lookup filter test."
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "L2"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site2"
- status: "Staged"
+ name: L2
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site2
+ status: Staged
tags:
- - "lookup"
+ - lookup
state: present
- name: "NETBOX_LOOKUP 7: Device query returns exactly the L2 device"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`L2`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ | community.general.json_query('[?value.display==`L2`]') | count }}"
- name: "NETBOX_LOOKUP 8: Device query specifying raw data returns payload without key/value dict"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567', raw_data=True)|community.general.json_query('[?display==`L2`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567',
+ raw_data=True) | community.general.json_query('[?display==`L2`]') | count }}"
- name: "NETBOX_LOOKUP 9: Device query specifying multiple sites, Make sure L1 and L2 are in the results"
- assert:
+ ansible.builtin.assert:
that:
- "'L1' in query_result"
- "'L2' in query_result"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567', raw_data=True)|community.general.json_query('[*].display') }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768',
+ token='0123456789abcdef0123456789abcdef01234567', raw_data=True) | community.general.json_query('[*].display') }}"
- name: "NETBOX_LOOKUP 10: Device query by ID"
- assert:
+ ansible.builtin.assert:
that: query_result
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ }}"
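The long nb_lookup expressions are now wrapped inside their double-quoted scalars purely for line length; YAML folds the continuation lines back into a single string, so the rendered Jinja is unchanged. If the mid-expression wrapping reads poorly, a hypothetical alternative (not what this patch does) is to split the pipeline across task vars, which template lazily:

  - name: "NETBOX_LOOKUP 7: Device query returns exactly the L2 device"
    ansible.builtin.assert:
      that: query_result == "1"
    vars:
      nb_devices: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup',
        api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') }}"
      query_result: "{{ nb_devices | community.general.json_query('[?value.display==`L2`]') | count }}"

Here nb_devices is an illustrative variable name, not one used by the tests.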
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml
index 4dd1907eb..39804659b 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_manufacturer.yml
@@ -7,14 +7,14 @@
- name: "MANUFACTURER 1: Necessary info creation"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: present
register: test_one
- name: "MANUFACTURER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "MANUFACTURER 2: Create duplicate"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: present
register: test_two
- name: "MANUFACTURER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['manufacturer']['name'] == "Test Manufacturer Two"
@@ -43,14 +43,14 @@
- name: "MANUFACTURER 3: Update"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: test manufacturer two
state: present
register: test_three
- name: "MANUFACTURER 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three['changed']
- test_three['manufacturer']['name'] == "test manufacturer two"
@@ -60,14 +60,14 @@
- name: "MANUFACTURER 4: ASSERT - Delete"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: test manufacturer two
state: absent
register: test_four
- name: "MANUFACTURER 3: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -77,14 +77,14 @@
- name: "MANUFACTURER 5: ASSERT - Delete non existing"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: absent
register: test_five
- name: "MANUFACTURER 5: ASSERT - Delete non existing"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['manufacturer'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module.yml
index e8f090d8c..0a18f3894 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module.yml
@@ -7,7 +7,7 @@
- name: "MODULE 1: Necessary info creation"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -16,7 +16,7 @@
register: test_one
- name: "MODULE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "MODULE 2: Create duplicate"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -38,7 +38,7 @@
register: test_two
- name: "MODULE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['module']['device'] == "C9300-DEMO"
@@ -49,7 +49,7 @@
- name: "MODULE 3: ASSERT - Update"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -59,7 +59,7 @@
register: test_three
- name: "MODULE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['serial'] == "XXXNNNNXXXX"
@@ -72,7 +72,7 @@
- name: "MODULE 4: ASSERT - Delete"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -81,7 +81,7 @@
register: test_four
- name: "MODULE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -91,7 +91,7 @@
- name: "MODULE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -100,7 +100,7 @@
register: test_five
- name: "MODULE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_bay.yml
index 44bb76269..843a4767e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_bay.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_bay.yml
@@ -7,7 +7,7 @@
- name: "MODULE 1: Necessary info creation"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -16,7 +16,7 @@
register: test_one
- name: "MODULE BAY 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "MODULE BAY 2: Create duplicate"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -38,7 +38,7 @@
register: test_two
- name: "MODULE BAY 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['module_bay']['device'] == "C9300-DEMO"
@@ -49,7 +49,7 @@
- name: "MODULE BAY 3: ASSERT - Update"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -59,7 +59,7 @@
register: test_three
- name: "MODULE BAY 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['label'] == "TEST"
@@ -72,7 +72,7 @@
- name: "MODULE BAY 4: ASSERT - Delete"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -80,7 +80,7 @@
register: test_four
- name: "MODULE BAY 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -90,7 +90,7 @@
- name: "MODULE BAY 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -98,7 +98,7 @@
register: test_five
- name: "MODULE BAY 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module_bay'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_type.yml
index e1d51c4cb..8ab077803 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_module_type.yml
@@ -7,7 +7,7 @@
- name: "MODULE_TYPE 1: Necessary info creation"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
manufacturer: Test Manufacturer
@@ -15,7 +15,7 @@
register: test_one
- name: "MODULE_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "MODULE_TYPE 2: Create duplicate"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "ws-test-3750"
+ model: ws-test-3750
manufacturer: Test Manufacturer
state: present
register: test_two
- name: "MODULE_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_one['module_type']['model'] == "ws-test-3750"
@@ -45,7 +45,7 @@
- name: "MODULE_TYPE 3: ASSERT - Update"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
manufacturer: Test Manufacturer
@@ -54,7 +54,7 @@
register: test_three
- name: "MODULE_TYPE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['part_number'] == "ws-3750g-v2"
@@ -66,14 +66,14 @@
- name: "MODULE_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
state: absent
register: test_four
- name: "MODULE_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -83,14 +83,14 @@
- name: "MODULE_TYPE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "Test Module Type"
+ model: Test Module Type
state: absent
register: test_five
- name: "MODULE_TYPE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module_type'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_platform.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_platform.yml
index 95ccdb4cf..fd4988724 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_platform.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_platform.yml
@@ -7,14 +7,14 @@
- name: "PLATFORM 1: Necessary info creation"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: present
register: test_one
- name: "PLATFORM 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "PLATFORM 2: Create duplicate"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: present
register: test_two
- name: "PLATFORM 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['platform']['name'] == "Test Platform"
@@ -43,7 +43,7 @@
- name: "PLATFORM 3: ASSERT - Update"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
manufacturer: Test Manufacturer
@@ -54,7 +54,7 @@
register: test_three
- name: "PLATFORM 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['manufacturer'] == 3
@@ -68,14 +68,14 @@
- name: "PLATFORM 4: ASSERT - Delete"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: absent
register: test_four
- name: "PLATFORM 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -85,14 +85,14 @@
- name: "PLATFORM 5: ASSERT - Delete non existing"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: absent
register: test_five
- name: "PLATFORM 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['platform'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_feed.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_feed.yml
index a02fd9129..f4ac71e01 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_feed.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_feed.yml
@@ -10,7 +10,7 @@
- name: "POWER_FEED 1: Necessary info creation"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_FEED 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_FEED 2: Create duplicate"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_FEED 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_feed']['name'] == "Power Feed"
@@ -48,7 +48,7 @@
- name: "POWER_FEED 3: Update power_feed with other fields"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -64,7 +64,7 @@
register: test_three
- name: "POWER_FEED 3: ASSERT - Update power_feed with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "offline"
@@ -90,7 +90,7 @@
- name: "POWER_FEED 4: Create Power Feed for Delete Test"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed 2
power_panel: Power Panel
@@ -98,7 +98,7 @@
register: test_four
- name: "POWER_FEED 4: ASSERT - Create Power Feed for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,11 +107,10 @@
- test_four['power_feed']['power_panel'] == 1
- test_four['msg'] == "power_feed Power Feed 2 created"
-
- name: "POWER_FEED 5: Delete Power Feed"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed 2
power_panel: Power Panel
@@ -119,7 +118,7 @@
register: test_five
- name: "POWER_FEED 5: ASSERT - Delete Power Feed"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml
index c7b5f7d3d..84f9d3255 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet.yml
@@ -10,7 +10,7 @@
- name: "POWER_OUTLET 1: Necessary info creation"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_OUTLET 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_OUTLET 2: Create duplicate"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_OUTLET 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_outlet']['name'] == "Power Outlet"
@@ -48,7 +48,7 @@
- name: "POWER_OUTLET 3: Update power_outlet with other fields"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -60,7 +60,7 @@
register: test_three
- name: "POWER_OUTLET 3: ASSERT - Update power_outlet with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -78,7 +78,7 @@
- name: "POWER_OUTLET 4: Create Power Outlet for Delete Test"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet 2
device: Device Power Tests
@@ -86,7 +86,7 @@
register: test_four
- name: "POWER_OUTLET 4: ASSERT - Create Power Outlet for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -98,7 +98,7 @@
- name: "POWER_OUTLET 5: Delete Power Outlet"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet 2
device: Device Power Tests
@@ -106,7 +106,7 @@
register: test_five
- name: "POWER_OUTLET 5: ASSERT - Delete Power Outlet"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml
index 96f283486..14ff49d0c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_outlet_template.yml
@@ -10,7 +10,7 @@
- name: "POWER_OUTLET_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_OUTLET_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_OUTLET_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_OUTLET_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_outlet_template']['name'] == "Power Outlet Template"
@@ -48,7 +48,7 @@
- name: "POWER_OUTLET_TEMPLATE 3: Update power_outlet_template with other fields"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -59,7 +59,7 @@
register: test_three
- name: "POWER_OUTLET_TEMPLATE 3: ASSERT - Update power_outlet_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -75,7 +75,7 @@
- name: "POWER_OUTLET_TEMPLATE 4: Create Power Outlet Template for Delete Test"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template 2
device_type: Device Type Power Tests
@@ -83,7 +83,7 @@
register: test_four
- name: "POWER_OUTLET_TEMPLATE 4: ASSERT - Create Power Outlet Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -95,7 +95,7 @@
- name: "POWER_OUTLET_TEMPLATE 5: Delete Power Outlet Template"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template 2
device_type: Device Type Power Tests
@@ -103,7 +103,7 @@
register: test_five
- name: "POWER_OUTLET_TEMPLATE 5: ASSERT - Delete Power Outlet Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_panel.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_panel.yml
index 082a6b375..e568a75f4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_panel.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_panel.yml
@@ -10,7 +10,7 @@
- name: "POWER_PANEL 1: Necessary info creation"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_PANEL 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_PANEL 2: Create duplicate"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_PANEL 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_panel']['name'] == "Power Panel"
@@ -48,7 +48,7 @@
- name: "POWER_PANEL 3: Update power_panel with other fields"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -57,7 +57,7 @@
register: test_three
- name: "POWER_PANEL 3: ASSERT - Update power_panel with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['location'] == 1
@@ -69,7 +69,7 @@
- name: "POWER_PANEL 4: Create Power Panel for Delete Test"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel 2
site: Test Site
@@ -77,7 +77,7 @@
register: test_four
- name: "POWER_PANEL 4: ASSERT - Create Power Panel for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "POWER_PANEL 5: Delete Power Panel"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel 2
site: Test Site
@@ -97,7 +97,7 @@
register: test_five
- name: "POWER_PANEL 5: ASSERT - Delete Power Panel"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port.yml
index b6368bd92..311820ccd 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port.yml
@@ -10,7 +10,7 @@
- name: "POWER_PORT 0: Create device for testing power ports"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Power Tests
device_type: Cisco Test
@@ -21,7 +21,7 @@
- name: "POWER_PORT 1: Necessary info creation"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -29,7 +29,7 @@
register: test_one
- name: "POWER_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -41,7 +41,7 @@
- name: "POWER_PORT 2: Create duplicate"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -49,7 +49,7 @@
register: test_two
- name: "POWER_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_port']['name'] == "Power Port"
@@ -59,7 +59,7 @@
- name: "POWER_FEED 3: Update power_port with other fields"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -71,7 +71,7 @@
register: test_three
- name: "POWER_FEED 3: ASSERT - Update power_port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -89,7 +89,7 @@
- name: "POWER_PORT 4: Create Power Port for Delete Test"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port 2
device: Device Power Tests
@@ -97,7 +97,7 @@
register: test_four
- name: "POWER_PORT 4: ASSERT - Create Power Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -109,7 +109,7 @@
- name: "POWER_PORT 5: Delete Power Port"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port 2
device: Device Power Tests
@@ -117,7 +117,7 @@
register: test_five
- name: "POWER_PORT 5: ASSERT - Delete Power Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml
index aaa5eb43c..b413c29ce 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_power_port_template.yml
@@ -10,7 +10,7 @@
- name: "POWER_PORT_TEMPLATE 0.1: Create device type for testing power ports on device types"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: Device Type Power Tests
manufacturer: Test Manufacturer
@@ -19,7 +19,7 @@
- name: "POWER_PORT_TEMPLATE 0.2: Create module type for testing power ports on module types"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: Module Type Power Tests
manufacturer: Test Manufacturer
@@ -28,7 +28,7 @@
- name: "POWER_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -36,7 +36,7 @@
register: test_one
- name: "POWER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -48,7 +48,7 @@
- name: "POWER_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -56,7 +56,7 @@
register: test_two
- name: "POWER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_port_template']['name'] == "Power Port Template"
@@ -66,7 +66,7 @@
- name: "POWER_PORT_TEMPLATE 3: Update power_port_template with other fields"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -77,7 +77,7 @@
register: test_three
- name: "POWER_PORT_TEMPLATE 3: ASSERT - Update power_port_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -93,7 +93,7 @@
- name: "POWER_PORT_TEMPLATE 4: Create Power Port Template for Delete Test"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template 2
device_type: Device Type Power Tests
@@ -101,7 +101,7 @@
register: test_four
- name: "POWER_PORT_TEMPLATE 4: ASSERT - Create Power Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -113,7 +113,7 @@
- name: "POWER_PORT_TEMPLATE 5: Delete Power Port Template"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template 2
device_type: Device Type Power Tests
@@ -121,7 +121,7 @@
register: test_five
- name: "POWER_PORT_TEMPLATE 5: ASSERT - Delete Power Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -131,7 +131,7 @@
- name: "POWER_PORT_TEMPLATE 6: Necessary info creation"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -139,7 +139,7 @@
register: test_six
- name: "POWER_PORT_TEMPLATE 6: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
@@ -151,7 +151,7 @@
- name: "POWER_PORT_TEMPLATE 7: Create duplicate"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -159,7 +159,7 @@
register: test_seven
- name: "POWER_PORT_TEMPLATE 7: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_seven['changed']
- test_seven['power_port_template']['name'] == "Module Power Port Template"
@@ -169,7 +169,7 @@
- name: "POWER_PORT_TEMPLATE 8: Update power_port_template with other fields"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -180,7 +180,7 @@
register: test_eight
- name: "POWER_PORT_TEMPLATE 8: ASSERT - Update power_port_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['after']['type'] == "ita-e"
@@ -196,7 +196,7 @@
- name: "POWER_PORT_TEMPLATE 9: Delete Power Port Template"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -204,7 +204,7 @@
register: test_nine
- name: "POWER_PORT_TEMPLATE 9: ASSERT - Delete Power Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_prefix.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_prefix.yml
index 59ed336ad..64adae855 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_prefix.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_prefix.yml
@@ -4,17 +4,17 @@
### NETBOX_PREFIX
##
##
-- name: "1 - Create prefix within NetBox with only required information"
+- name: 1 - Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,36 +22,36 @@
- test_one['msg'] == "prefix 10.156.0.0/19 created"
- test_one['prefix']['prefix'] == "10.156.0.0/19"
-- name: "2 - Duplicate"
+- name: 2 - Duplicate
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "prefix 10.156.0.0/19 already exists"
- test_two['prefix']['prefix'] == "10.156.0.0/19"
-- name: "3 - Update 10.156.0.0/19"
+- name: 3 - Update 10.156.0.0/19
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
site: Test Site
status: Reserved
- description: "This prefix has been updated"
+ description: This prefix has been updated
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['site'] == 1
@@ -63,27 +63,27 @@
- test_three['prefix']['status'] == "reserved"
- test_three['prefix']['description'] == "This prefix has been updated"
-- name: "4 - Delete prefix within netbox"
+- name: 4 - Delete prefix within netbox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "prefix 10.156.0.0/19 deleted"
-- name: "5 - Create prefix with several specified options"
+- name: 5 - Create prefix with several specified options
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 10.156.32.0/19
@@ -100,12 +100,12 @@
description: Test description
is_pool: true
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -123,34 +123,34 @@
- test_five['prefix']['is_pool'] == true
- test_five['prefix']['tags'][0] == 4
-- name: "6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist"
+- name: 6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['msg'] == "Parent prefix does not exist - 10.156.0.0/19"
-- name: "7 - Create prefix within NetBox with only required information"
+- name: 7 - Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "absent"
@@ -158,19 +158,19 @@
- test_seven['msg'] == "prefix 10.156.0.0/19 created"
- test_seven['prefix']['prefix'] == "10.156.0.0/19"
-- name: "8 - Get a new /24 inside 10.156.0.0/19 within NetBox"
+- name: 8 - Get a new /24 inside 10.156.0.0/19 within NetBox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "absent"
@@ -178,10 +178,10 @@
- test_eight['msg'] == "prefix 10.156.0.0/24 created"
- test_eight['prefix']['prefix'] == "10.156.0.0/24"
-- name: "9 - Create 10.157.0.0/19"
+- name: 9 - Create 10.157.0.0/19
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.157.0.0/19
vrf: Test VRF
@@ -189,8 +189,8 @@
state: present
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "absent"
@@ -200,21 +200,21 @@
- test_nine['prefix']['site'] == 1
- test_nine['prefix']['vrf'] == 1
-- name: "10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values"
+- name: 10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.157.0.0/19
prefix_length: 24
vrf: Test VRF
site: Test Site
state: present
- first_available: yes
+ first_available: true
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['diff']['before']['state'] == "absent"
@@ -224,19 +224,19 @@
- test_ten['prefix']['site'] == 1
- test_ten['prefix']['vrf'] == 1
-- name: "11 - Get a new /24 inside 10.156.0.0/19 within NetBox"
+- name: 11 - Get a new /24 inside 10.156.0.0/19 within NetBox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is changed
- test_eleven['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider.yml
index cba224eb4..ec7b2c30a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider.yml
@@ -7,14 +7,14 @@
- name: "NETBOX_PROVIDER 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: present
register: test_one
- name: "NETBOX_PROVIDER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "NETBOX_PROVIDER 2: Duplicate"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: present
register: test_two
- name: "NETBOX_PROVIDER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['provider']['name'] == "Test Provider One"
@@ -43,15 +43,15 @@
- name: "NETBOX_PROVIDER 3: Update provider with other fields"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
- comments: "BAD PROVIDER"
+ comments: BAD PROVIDER
state: present
register: test_three
- name: "NETBOX_PROVIDER 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "BAD PROVIDER"
@@ -63,14 +63,14 @@
- name: "NETBOX_PROVIDER 4: Delete provider within netbox"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: absent
register: test_four
- name: "NETBOX_PROVIDER 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['provider']['name'] == "Test Provider One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider_network.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider_network.yml
index bf7d4bf97..6f8b2b331 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider_network.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_provider_network.yml
@@ -7,7 +7,7 @@
- name: "NETBOX_PROVIDER_NETWORK 1: Create provider network within NetBox with only required information"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -15,7 +15,7 @@
register: test_one
- name: "NETBOX_PROVIDER_NETWORK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,7 +26,7 @@
- name: "NETBOX_PROVIDER_NETWORK 2: Duplicate"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -34,7 +34,7 @@
register: test_two
- name: "NETBOX_PROVIDER_NETWORK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['provider_network']['name'] == "Test Provider Network One"
@@ -43,17 +43,17 @@
- name: "NETBOX_PROVIDER_NETWORK 3: Update provider network with other fields"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
description: Describe a Provider Network
- comments: "A provider network"
+ comments: A provider network
state: present
register: test_three
- name: "NETBOX_PROVIDER_NETWORK 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "A provider network"
@@ -66,7 +66,7 @@
- name: "NETBOX_PROVIDER_NETWORK 4: Delete provider within netbox"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -74,7 +74,7 @@
register: test_four
- name: "NETBOX_PROVIDER_NETWORK 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['provider_network']['name'] == "Test Provider Network One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack.yml
index 8b96c56c1..b8390df47 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack.yml
@@ -4,18 +4,18 @@
### NETBOX_RACK
##
##
-- name: "1 - Test rack creation"
+- name: 1 - Test rack creation
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
- site: "Test Site"
- location: "Test Rack Group"
+ name: Test rack one
+ site: Test Site
+ location: Test Rack Group
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -23,34 +23,34 @@
- test_one['rack']['name'] == "Test rack one"
- test_one['rack']['site'] == 1
-- name: "Test duplicate rack"
+- name: Test duplicate rack
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
+ name: Test rack one
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack']['name'] == "Test rack one"
- test_two['rack']['site'] == 1
- test_two['msg'] == "rack Test rack one already exists"
-- name: "3 - Create new rack with similar name"
+- name: 3 - Create new rack with similar name
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack - Test Site
site: Test Site
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -59,53 +59,53 @@
- test_three['rack']['site'] == 1
- test_three['msg'] == "rack Test rack - Test Site created"
-- name: "4 - Attempt to create Test rack one again"
+- name: 4 - Attempt to create Test rack one again
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- location: "Test Rack Group"
+ location: Test Rack Group
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- not test_four['changed']
- test_four['rack']['name'] == "Test rack one"
- test_four['rack']['site'] == 1
- test_four['msg'] == "rack Test rack one already exists"
-- name: "5 - Update Test rack one with more options"
+- name: 5 - Update Test rack one with more options
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- rack_role: "Test Rack Role"
- location: "Test Rack Group"
- facility_id: "EQUI10291"
- tenant: "Test Tenant"
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
status: Available
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
width: 23
u_height: 48
- type: "2-post frame"
+ type: 2-post frame
outer_width: 32
outer_depth: 24
- outer_unit: "Inches"
- comments: "Just testing rack module"
+ outer_unit: Inches
+ comments: Just testing rack module
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['asset_tag'] == "1234"
@@ -141,34 +141,34 @@
- test_five['rack']['width'] == 23
- test_five['msg'] == "rack Test rack one updated"
-- name: "6 - Update Test rack one with same options"
+- name: 6 - Update Test rack one with same options
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- rack_role: "Test Rack Role"
- location: "Test Rack Group"
- facility_id: "EQUI10291"
- tenant: "Test Tenant"
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
status: Available
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
width: 23
u_height: 48
- type: "2-post frame"
+ type: 2-post frame
outer_width: 32
outer_depth: 24
- outer_unit: "Inches"
- comments: "Just testing rack module"
+ outer_unit: Inches
+ comments: Just testing rack module
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is not changed
- test_six['rack']['name'] == "Test rack one"
@@ -189,36 +189,36 @@
- test_six['rack']['u_height'] == 48
- test_six['rack']['width'] == 23
-- name: "7 - Create rack with same asset tag and serial number"
+- name: 7 - Create rack with same asset tag and serial number
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack two
site: Test Site
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is failed
- "'Asset tag already exists' in test_seven['msg']"
-- name: "8 - Test delete"
+- name: 8 - Test delete
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
- state: "absent"
+ name: Test rack one
+ state: absent
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_group.yml
index 6a10cbf94..40dba8408 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_group.yml
@@ -7,7 +7,7 @@
- name: "RACK_GROUP 1: Necessary info creation"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
site: Test Site
@@ -15,7 +15,7 @@
register: test_one
- name: "RACK_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,7 +28,7 @@
- name: "RACK_GROUP 2: Create duplicate"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
site: Test Site
@@ -36,7 +36,7 @@
register: test_two
- name: "RACK_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack_group']['name'] == "Rack Group"
@@ -47,14 +47,14 @@
- name: "RACK_GROUP 3: ASSERT - Delete"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
state: absent
register: test_three
- name: "RACK_GROUP 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_role.yml
index 5ca9e44eb..ff0d6b4da 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rack_role.yml
@@ -7,15 +7,15 @@
- name: "RACK_ROLE 1: Necessary info creation"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
- color: "ffffff"
+ color: ffffff
state: present
register: test_one
- name: "RACK_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,14 +28,14 @@
- name: "RACK_ROLE 2: Create duplicate"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
state: present
register: test_two
- name: "RACK_ROLE 1: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack_role']['name'] == "Rack Role"
@@ -46,7 +46,7 @@
- name: "RACK_ROLE 3: Update"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
color: "003EFF"
@@ -54,7 +54,7 @@
register: test_three
- name: "RACK_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -66,14 +66,14 @@
- name: "RACK_ROLE 4: Delete"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
state: absent
register: test_four
- name: "RACK_ROLE 4: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port.yml
index 10eb23098..d186233be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port.yml
@@ -10,7 +10,7 @@
- name: "REAR_PORT 1: Necessary info creation"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -19,7 +19,7 @@
register: test_one
- name: "REAR_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,7 +32,7 @@
- name: "REAR_PORT 2: Create duplicate"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -41,7 +41,7 @@
register: test_two
- name: "REAR_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rear_port']['name'] == "Rear Port"
@@ -52,7 +52,7 @@
- name: "REAR_PORT 3: Update Rear Port with other fields"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -63,7 +63,7 @@
register: test_three
- name: "REAR_PORT 3: ASSERT - Update Rear Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['positions'] == 5
@@ -78,7 +78,7 @@
- name: "REAR_PORT 4: Create Rear Port for Delete Test"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port 2
device: test100
@@ -87,7 +87,7 @@
register: test_four
- name: "REAR_PORT 4: ASSERT - Create Rear Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -100,7 +100,7 @@
- name: "REAR_PORT 5: Delete Rear Port"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port 2
device: test100
@@ -109,7 +109,7 @@
register: test_five
- name: "REAR_PORT 5: ASSERT - Delete Rear Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -119,7 +119,7 @@
- name: "REAR_PORT 6: Create second Rear Port"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: Test Nexus One
@@ -128,7 +128,7 @@
register: test_six
- name: "REAR_PORT 6: ASSERT - Create second Rear Port"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml
index ca867619a..5342627d0 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rear_port_template.yml
@@ -10,7 +10,7 @@
- name: "REAR_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -19,7 +19,7 @@
register: test_one
- name: "REAR_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,7 +32,7 @@
- name: "REAR_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -41,7 +41,7 @@
register: test_two
- name: "REAR_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rear_port_template']['name'] == "Rear Port Template"
@@ -52,7 +52,7 @@
- name: "REAR_PORT_TEMPLATE 3: Update Rear Port Template with other fields"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -62,7 +62,7 @@
register: test_three
- name: "REAR_PORT_TEMPLATE 3: ASSERT - Update Rear Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['positions'] == 5
@@ -75,7 +75,7 @@
- name: "REAR_PORT_TEMPLATE 4: Create Rear Port Template for Delete Test"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Cisco Test
@@ -84,7 +84,7 @@
register: test_four
- name: "REAR_PORT_TEMPLATE 4: ASSERT - Create Rear Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -97,7 +97,7 @@
- name: "REAR_PORT_TEMPLATE 5: Delete Rear Port Template"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Cisco Test
@@ -106,7 +106,7 @@
register: test_five
- name: "REAR_PORT_TEMPLATE 5: ASSERT - Delete Rear Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -116,7 +116,7 @@
- name: "REAR_PORT_TEMPLATE 6: Create second Rear Port Template"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Arista Test
@@ -125,7 +125,7 @@
register: test_six
- name: "REAR_PORT_TEMPLATE 6: ASSERT - Create second Rear Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_region.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_region.yml
index 1359598af..0395db435 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_region.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_region.yml
@@ -7,14 +7,14 @@
- name: "REGION 1: Necessary info creation"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: present
register: test_one
- name: "REGION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "REGION 2: Create duplicate"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: present
register: test_two
- name: "REGION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['region']['name'] == "Test Region One"
@@ -43,15 +43,15 @@
- name: "REGION 3: ASSERT - Update"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
- parent_region: "Test Region"
+ name: Test Region One
+ parent_region: Test Region
state: present
register: test_three
- name: "REGION 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 1
@@ -63,14 +63,14 @@
- name: "REGION 4: ASSERT - Delete"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: absent
register: test_four
- name: "REGION 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['region']['name'] == "Test Region One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rir.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rir.yml
index cc4935938..3a7bf79fa 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rir.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_rir.yml
@@ -7,14 +7,14 @@
- name: "RIR 1: Necessary info creation"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test RIR One
state: present
register: test_one
- name: "RIR 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "RIR 2: Create duplicate"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test RIR One
state: present
register: test_two
- name: "RIR 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rir']['name'] == "Test RIR One"
@@ -43,15 +43,15 @@
- name: "RIR 3: ASSERT - Update"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test RIR One"
+ name: Test RIR One
is_private: true
state: present
register: test_three
- name: "RIR 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['is_private'] == true
@@ -63,14 +63,14 @@
- name: "RIR 4: ASSERT - Delete"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test RIR One"
+ name: Test RIR One
state: absent
register: test_four
- name: "RIR 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['rir']['name'] == "Test RIR One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_route_target.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_route_target.yml
index 354978dc2..c2576c4cc 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_route_target.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_route_target.yml
@@ -4,18 +4,18 @@
- name: "NETBOX_ROUTE_TARGET_ADD: Check Mode - Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Check Mode - Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -29,17 +29,17 @@
- name: "NETBOX_ROUTE_TARGET_ADD: Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -53,17 +53,17 @@
- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['route_target']['name'] == "65000:65001"
@@ -76,19 +76,19 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE: Check Mode - Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- description: "NEW DESCRIPTION"
- tenant: "Test Tenant"
+ name: 65000:65001
+ description: NEW DESCRIPTION
+ tenant: Test Tenant
tags:
- first
- second
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Check Mode - Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -103,18 +103,18 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE: Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
- description: "NEW DESCRIPTION"
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -129,18 +129,18 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
- description: "NEW DESCRIPTION"
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['route_target']['name'] == "65000:65001"
@@ -154,15 +154,15 @@
- name: "NETBOX_ROUTE_TARGET_DELETE: Check Mode - Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Check Mode - Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['diff']['after']['state'] == "absent"
@@ -173,14 +173,14 @@
- name: "NETBOX_ROUTE_TARGET_DELETE: Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['diff']['after']['state'] == "absent"
@@ -191,14 +191,14 @@
- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['msg'] == "route_target 65000:65001 already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service.yml
index 09a993c3d..d937e50a6 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service.yml
@@ -4,32 +4,32 @@
### NETBOX_SERVICE
##
##
-- name: "1 - Device with required information needs to add new service"
+- name: 1 - Device with required information needs to add new service
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "FOR_SERVICE"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: FOR_SERVICE
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
- name: "NETBOX_SERVICE: Create new service"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_create
-- name: "NETBOX_SERVICE ASSERT - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
that:
- test_service_create is changed
- test_service_create['services']['name'] == "node-exporter"
@@ -41,18 +41,18 @@
- name: "NETBOX_SERVICE: Test idempotence"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_idempotence
-- name: "NETBOX_SERVICE ASSERT - Not changed"
- assert:
+- name: NETBOX_SERVICE ASSERT - Not changed
+ ansible.builtin.assert:
that:
- test_service_idempotence['services']['name'] == "node-exporter"
- test_service_idempotence['services']['ports'] == [9100]
@@ -61,22 +61,22 @@
- name: "NETBOX_SERVICE: Test update"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
ports:
- 9100
- 9200
protocol: TCP
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_service_update
-- name: "NETBOX_SERVICE ASSERT - Service has been updated"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been updated
+ ansible.builtin.assert:
that:
- test_service_update is changed
- test_service_update['diff']['after']['tags'][0] == 4
@@ -85,18 +85,18 @@
- name: "NETBOX_SERVICE: Test same details, but different protocol - Create"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: UDP
state: present
register: test_service_protocol
-- name: "NETBOX_SERVICE ASSERT - Different protocol - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Different protocol - Create
+ ansible.builtin.assert:
that:
- test_service_protocol is changed
- test_service_protocol['diff']['after']['state'] == "present"
@@ -108,18 +108,18 @@
- name: "NETBOX_SERVICE: Test service deletion"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: UDP
state: absent
register: test_service_delete
-- name: "NETBOX_SERVICE ASSERT - Service has been deleted"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been deleted
+ ansible.builtin.assert:
that:
- test_service_delete is changed
- test_service_delete['diff']['after']['state'] == "absent"
@@ -128,20 +128,20 @@
- name: "NETBOX_SERVICE: Test service IP addresses"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "node-exporter"
+ device: test100
+ name: node-exporter
port: 9100
protocol: UDP
ipaddresses:
- - address: "172.16.180.1/24"
+ - address: 172.16.180.1/24
state: present
register: test_service_ip_addresses
-- name: "NETBOX_SERVICE ASSERT - Service has been created with IP address"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been created with IP address
+ ansible.builtin.assert:
that:
- test_service_ip_addresses is changed
- test_service_ip_addresses['diff']['after']['state'] == "present"
@@ -154,38 +154,38 @@
- name: "NETBOX_SERVICE: Missing both device & virtual_machine options - Tests required_one_of"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "node-exporter"
+ name: node-exporter
port: 9100
protocol: UDP
ipaddresses:
- - address: "172.16.180.1/24"
+ - address: 172.16.180.1/24
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_service_required_one_of
-- name: "NETBOX_SERVICE ASSERT - Failed due to missing arguments"
- assert:
+- name: NETBOX_SERVICE ASSERT - Failed due to missing arguments
+ ansible.builtin.assert:
that:
- test_service_required_one_of is failed
- 'test_service_required_one_of["msg"] == "one of the following is required: device, virtual_machine"'
- name: "NETBOX_SERVICE: Create new service on virtual_machine"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "node-exporter"
+ virtual_machine: test100-vm
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_create_vm
-- name: "NETBOX_SERVICE ASSERT - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
that:
- test_service_create_vm is changed
- test_service_create_vm['services']['name'] == "node-exporter"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service_template.yml
index f64509acf..507a566a4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_service_template.yml
@@ -7,7 +7,7 @@
- name: "SERVICE_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -17,7 +17,7 @@
register: test_one
- name: "SERVICE_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "SERVICE_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -40,7 +40,7 @@
register: test_two
- name: "SERVICE_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['service_template']['name'] == "Service Template for SSH"
@@ -51,7 +51,7 @@
- name: "SERVICE_TEMPLATE 3: Update Service Template with other fields"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -62,7 +62,7 @@
register: test_three
- name: "SERVICE_TEMPLATE 3: ASSERT - Update Service Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "For SSH service"
@@ -75,7 +75,7 @@
- name: "SERVICE_TEMPLATE 4: Create Service Template for Delete Test"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for DNS
ports:
@@ -86,7 +86,7 @@
register: test_four
- name: "SERVICE_TEMPLATE 4: ASSERT - Create Service Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -100,14 +100,14 @@
- name: "SERVICE_TEMPLATE 5: Delete Service Template"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for DNS
state: absent
register: test_five
- name: "SERVICE_TEMPLATE 5: ASSERT - Delete Service Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site.yml
index 6960eeb3d..a5861522e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site.yml
@@ -4,17 +4,17 @@
### NETBOX_SITE
##
##
-- name: "1 - Create site within NetBox with only required information"
+- name: 1 - Create site within NetBox with only required information
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,26 +22,26 @@
- test_one['site']['name'] == "Test - Colorado"
- test_one['msg'] == "site Test - Colorado created"
-- name: "2 - Duplicate"
+- name: 2 - Duplicate
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "site Test - Colorado already exists"
- test_two['site']['name'] == "Test - Colorado"
-- name: "3 - Update Test - Colorado"
+- name: 3 - Update Test - Colorado
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
status: Planned
@@ -49,8 +49,8 @@
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
@@ -60,10 +60,10 @@
- test_three['site']['status'] == "planned"
- test_three['site']['region'] == 1
-- name: "4 - Create site with all parameters"
+- name: 4 - Create site with all parameters
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - California
status: Planned
@@ -78,12 +78,12 @@
latitude: "22.169141"
longitude: "-100.994041"
comments: "### Placeholder"
- slug: "test_california"
+ slug: test_california
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,7 +107,7 @@
- name: "NETBOX_SITE_IDEM: Idempotency - Create duplicate site with all parameters"
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - California
status: Planned
@@ -122,12 +122,12 @@
latitude: "22.169141"
longitude: "-100.994041"
comments: "### Placeholder"
- slug: "test_california"
+ slug: test_california
state: present
register: test_results
- name: "NETBOX_SITE_IDEM: (ASSERT) Idempotency - Duplicate device site with all parameters"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['site']['name'] == "Test - California"
@@ -146,17 +146,17 @@
- test_results['site']['comments'] == "### Placeholder"
- test_results['site']['slug'] == "test_california"
-- name: "5 - Delete site within netbox"
+- name: 5 - Delete site within netbox
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: absent
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site_group.yml
index 09e2df4c6..11b9cbb24 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_site_group.yml
@@ -7,14 +7,14 @@
- name: "SITE_GROUP 1: Necessary info creation"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: present
register: test_one
- name: "SITE_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "SITE_GROUP 2: Create duplicate"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: present
register: test_two
- name: "SITE_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['site_group']['name'] == "Site Group"
@@ -43,7 +43,7 @@
- name: "SITE_GROUP 3: Update"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
parent_site_group: Test Site Group
@@ -52,7 +52,7 @@
register: test_three
- name: "SITE_GROUP 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 4
@@ -66,14 +66,14 @@
- name: "SITE_GROUP 4: Delete"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: absent
register: test_four
- name: "SITE_GROUP 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tag.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tag.yml
index 38c4157c2..f3fcbac39 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tag.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tag.yml
@@ -7,16 +7,16 @@
- name: "TAG 1: ASSERT - Necessary info creation"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 test"
+ name: Test Tag 1
+ description: Tag 1 test
color: "0000ff"
state: present
register: test_one
- name: "TAG 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,16 +30,16 @@
- name: "TAG 2: Create duplicate"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 test"
+ name: Test Tag 1
+ description: Tag 1 test
color: "0000ff"
state: present
register: test_two
- name: "TAG 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tags']['name'] == "Test Tag 1"
@@ -48,16 +48,16 @@
- name: "TAG 3: ASSERT - Update"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 update test"
+ name: Test Tag 1
+ description: Tag 1 update test
color: "00ff00"
state: present
register: test_three
- name: "TAG 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "00ff00"
@@ -70,14 +70,14 @@
- name: "TAG 4: ASSERT - Delete"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
+ name: Test Tag 1
state: absent
register: test_four
- name: "TAG 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['state'] == "absent"
@@ -88,17 +88,17 @@
- name: "TAG 5: ASSERT - Necessary info creation"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 5"
- slug: "test-tag-five"
- description: "Tag 5 test"
+ name: Test Tag 5
+ slug: test-tag-five
+ description: Tag 5 test
color: "0000ff"
state: present
register: test_five
- name: "TAG 5: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant.yml
index 7c9af8b9f..0731e0268 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant.yml
@@ -4,16 +4,16 @@
### NETBOX_TENANT
##
##
-- name: "1 - Test tenant creation"
+- name: 1 - Test tenant creation
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
+ name: Tenant ABC
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,33 +22,33 @@
- test_one['tenant']['slug'] == "tenant-abc"
- test_one['msg'] == "tenant Tenant ABC created"
-- name: "Test duplicate tenant"
+- name: Test duplicate tenant
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
+ name: Tenant ABC
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tenant']['name'] == "Tenant ABC"
- test_two['tenant']['slug'] == "tenant-abc"
- test_two['msg'] == "tenant Tenant ABC already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- description: "Updated description"
+ name: Tenant ABC
+ description: Updated description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Updated description"
@@ -57,42 +57,42 @@
- test_three['tenant']['description'] == "Updated description"
- test_three['msg'] == "tenant Tenant ABC updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- state: "absent"
+ name: Tenant ABC
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "tenant Tenant ABC deleted"
-- name: "5 - Create tenant with all parameters"
+- name: 5 - Create tenant with all parameters
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- description: "ABC Incorporated"
+ name: Tenant ABC
+ description: ABC Incorporated
comments: "### This tenant is super cool"
- tenant_group: "Test Tenant Group"
- slug: "tenant_abc"
+ tenant_group: Test Tenant Group
+ slug: tenant_abc
tags:
- - "tagA"
- - "tagB"
- - "tagC"
+ - tagA
+ - tagB
+ - tagC
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml
index 4113a2260..f7f57f3c2 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_tenant_group.yml
@@ -4,16 +4,16 @@
### NETBOX_TENANT_GROUP
##
##
-- name: "1 - Test tenant group creation"
+- name: 1 - Test tenant group creation
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
+ name: Test Tenant Group Two
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,50 +22,50 @@
- test_one['tenant_group']['slug'] == "test-tenant-group-two"
- test_one['msg'] == "tenant_group Test Tenant Group Two created"
-- name: "Test duplicate tenant group"
+- name: Test duplicate tenant group
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
+ name: Test Tenant Group Two
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tenant_group']['name'] == "Test Tenant Group Two"
- test_two['tenant_group']['slug'] == "test-tenant-group-two"
- test_two['msg'] == "tenant_group Test Tenant Group Two already exists"
-- name: "3 - Test delete"
+- name: 3 - Test delete
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
- state: "absent"
+ name: Test Tenant Group Two
+ state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
- test_three['msg'] == "tenant_group Test Tenant Group Two deleted"
-- name: "4 - Test tenant group creation with custom slug"
+- name: 4 - Test tenant group creation with custom slug
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group ABC"
- slug: "test_tenant_group_four"
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -74,17 +74,17 @@
- test_four['tenant_group']['slug'] == "test_tenant_group_four"
- test_four['msg'] == "tenant_group Test Tenant Group ABC created"
-- name: "5 - Test child tenant group creation"
+- name: 5 - Test child tenant group creation
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Child Test Tenant Group"
+ name: Child Test Tenant Group
parent_tenant_group: "{{ test_four.tenant_group.slug }}"
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -93,35 +93,35 @@
- test_five['tenant_group']['parent'] == test_four.tenant_group.id
- test_five['msg'] == "tenant_group Child Test Tenant Group created"
-- name: "6 - Test child tenant group deletion"
+- name: 6 - Test child tenant group deletion
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Child Test Tenant Group"
- state: "absent"
+ name: Child Test Tenant Group
+ state: absent
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "present"
- test_six['diff']['after']['state'] == "absent"
- test_six['msg'] == "tenant_group Child Test Tenant Group deleted"
-- name: "7 - Test deletion of the tenant group with custom slug"
+- name: 7 - Test deletion of the tenant group with custom slug
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group ABC"
- slug: "test_tenant_group_four"
- state: "absent"
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
+ state: absent
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml
index 04ada8411..395be8395 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_chassis.yml
@@ -10,7 +10,7 @@
- name: "VIRTUAL_CHASSIS 0: Create device for testing virtual chassis"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Virtual Chassis Tests
device_type: Cisco Test
@@ -23,15 +23,15 @@
- name: "VIRTUAL_CHASSIS 1: Necessary info creation"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
state: present
register: test_one
- name: "VIRTUAL_CHASSIS 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -43,15 +43,15 @@
- name: "VIRTUAL_CHASSIS 2: Create duplicate"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
state: present
register: test_two
- name: "VIRTUAL_CHASSIS 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_chassis']['master'] == 11
@@ -61,16 +61,16 @@
- name: "POWER_FEED 3: Update virtual_chassis with other fields"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
domain: Domain Text
state: present
register: test_three
- name: "POWER_FEED 3: ASSERT - Update virtual_chassis with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['domain'] == "Domain Text"
@@ -82,7 +82,7 @@
- name: "VIRTUAL_CHASSIS 4: Create device for testing virtual chassis deletion"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Virtual Chassis Tests 2
device_type: Cisco Test
@@ -95,15 +95,15 @@
- name: "VIRTUAL_CHASSIS 4: Create Virtual Chassis for Delete Test"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Second VC"
+ name: Second VC
master: Device Virtual Chassis Tests 2
state: present
register: test_four
- name: "VIRTUAL_CHASSIS 4: ASSERT - Create Virtual Chassis for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -115,15 +115,15 @@
- name: "VIRTUAL_CHASSIS 5: Delete Virtual Chassis"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Second VC"
+ name: Second VC
master: Device Virtual Chassis Tests 2
state: absent
register: test_five
- name: "VIRTUAL_CHASSIS 5: ASSERT - Delete Virtual Chassis"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml
index 79d3882bc..3fb0e4319 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_virtual_machine.yml
@@ -7,15 +7,15 @@
- name: "VIRTUAL_MACHINE 1: Necessary info creation"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
state: present
register: test_one
- name: "VIRTUAL_MACHINE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "VIRTUAL_MACHINE 2: Create duplicate"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
state: present
register: test_two
- name: "VIRTUAL_MACHINE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_machine']['name'] == "Test VM One"
@@ -45,21 +45,21 @@
- name: "VIRTUAL_MACHINE 3: Update"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
vcpus: 8.5
memory: 8
- status: "Planned"
- virtual_machine_role: "Test VM Role"
+ status: Planned
+ virtual_machine_role: Test VM Role
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "VIRTUAL_MACHINE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['vcpus'] == 8.5
@@ -79,21 +79,21 @@
- name: "VIRTUAL_MACHINE 4: Test idempotence"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
vcpus: 8.5
memory: 8
- status: "Planned"
- virtual_machine_role: "Test VM Role"
+ status: Planned
+ virtual_machine_role: Test VM Role
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four_idempotence
- name: "VIRTUAL_MACHINE 4: ASSERT - Not changed"
- assert:
+ ansible.builtin.assert:
that:
- test_four_idempotence is not changed
- test_four_idempotence['virtual_machine']['name'] == "Test VM One"
@@ -108,14 +108,14 @@
- name: "VIRTUAL_MACHINE 5: Delete"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
+ name: Test VM One
state: absent
register: test_five
- name: "VIRTUAL_MACHINE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['virtual_machine']['name'] == "Test VM One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan.yml
index d95abbeca..463873ca5 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan.yml
@@ -7,7 +7,7 @@
- name: "VLAN 1: Necessary info creation"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
@@ -15,7 +15,7 @@
register: test_one
- name: "VLAN 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "VLAN 2: Create duplicate"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
@@ -35,7 +35,7 @@
register: test_two
- name: "VLAN 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['vlan']['name'] == "Test VLAN 500"
@@ -45,18 +45,18 @@
- name: "VLAN 3: Create VLAN with same name, but different site"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
site: Test Site
tenant: Test Tenant
- vlan_group: "Test VLAN Group"
+ vlan_group: Test VLAN Group
state: present
register: test_three
- name: "VLAN 3: ASSERT - Create VLAN with same name, but different site"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -70,23 +70,23 @@
- name: "VLAN 4: ASSERT - Update"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
vid: 500
- tenant: "Test Tenant"
- vlan_group: "Test VLAN Group"
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
status: Reserved
vlan_role: Network of care
description: Updated description
- site: "Test Site"
+ site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "VLAN 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['status'] == "reserved"
@@ -106,41 +106,41 @@
- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
vid: 500
- tenant: "Test Tenant"
- vlan_group: "Test VLAN Group"
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
status: Reserved
vlan_role: Network of care
description: Updated description
- site: "Test Site"
+ site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: idempotent_vlan_group
- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
- assert:
+ ansible.builtin.assert:
that:
- idempotent_vlan_group is not changed
- name: "VLAN: Create VLAN with same name, but different vlan_group"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
site: Test Site
tenant: Test Tenant
- vlan_group: "Test VLAN Group 2"
+ vlan_group: Test VLAN Group 2
state: present
register: new_vlan_group
- name: "VLAN: ASSERT - Create VLAN with same name, but different vlan_group"
- assert:
+ ansible.builtin.assert:
that:
- new_vlan_group is changed
- new_vlan_group['diff']['before']['state'] == "absent"
@@ -154,15 +154,15 @@
- name: "VLAN 5: ASSERT - Delete more than one result"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
state: absent
- ignore_errors: yes
+ ignore_errors: true
register: test_five
- name: "VLAN 5: ASSERT - Delete more than one result"
- assert:
+ ansible.builtin.assert:
that:
- test_five is failed
- test_five['msg'] == "More than one result returned for Test VLAN 500"
@@ -170,16 +170,16 @@
- name: "VLAN 6: ASSERT - Delete"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
site: Test Site
- vlan_group: "Test VLAN Group"
+ vlan_group: Test VLAN Group
state: absent
register: test_six
- name: "VLAN 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['vlan']['name'] == "Test VLAN 500"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml
index 2be4fd3df..9dd4d1789 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vlan_group.yml
@@ -7,16 +7,16 @@
- name: "VLAN_GROUP 1: Necessary info creation"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
+ name: VLAN Group One
+ scope_type: dcim.site
scope: Test Site
state: present
register: results
- name: "VLAN_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -30,16 +30,16 @@
- name: "VLAN_GROUP 2: Create duplicate"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
+ name: VLAN Group One
+ scope_type: dcim.site
scope: Test Site
state: present
register: results
- name: "VLAN_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not results['changed']
- results['vlan_group']['name'] == "VLAN Group One"
@@ -51,16 +51,16 @@
- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
- scope: "Test Site2"
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site2
state: present
register: results
- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['vlan_group']['name'] == "VLAN Group One"
@@ -72,15 +72,15 @@
- name: "VLAN_GROUP 4: ASSERT - Create vlan group, no site"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
+ name: VLAN Group One
state: present
- ignore_errors: yes
+ ignore_errors: true
register: results
- name: "VLAN_GROUP 4: ASSERT - Create with same name, different site"
- assert:
+ ansible.builtin.assert:
that:
- results is failed
- results['msg'] == "More than one result returned for VLAN Group One"
@@ -88,16 +88,16 @@
- name: "VLAN_GROUP 5: ASSERT - Delete"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: VLAN Group One
- scope_type: "dcim.site"
+ scope_type: dcim.site
scope: Test Site2
state: absent
register: results
- name: "VLAN_GROUP 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "present"
@@ -111,16 +111,16 @@
- name: "VLAN_GROUP 6: ASSERT - Delete non existing"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: VLAN Group One
- scope_type: "dcim.site"
+ scope_type: dcim.site
scope: Test Site2
state: absent
register: results
- name: "VLAN_GROUP 6: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not results['changed']
- results['vlan_group'] == None
@@ -129,16 +129,16 @@
- name: "VLAN_GROUP 7: Necessary info creation - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
state: present
register: results
- name: "VLAN_GROUP 7: ASSERT - Necessary info creation - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -152,16 +152,16 @@
- name: "VLAN_GROUP 8: Necessary info creation - scope_type: dcim.rack"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Rack"
- scope_type: "dcim.rack"
+ name: VLAN Group Rack
+ scope_type: dcim.rack
scope: Test Rack
state: present
register: results
- name: "VLAN_GROUP 8: ASSERT - Necessary info creation - scope_type: dcim.rack"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -175,16 +175,16 @@
- name: "VLAN_GROUP 9: Necessary info creation - scope_type: dcim.region"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Region"
- scope_type: "dcim.region"
+ name: VLAN Group Region
+ scope_type: dcim.region
scope: Test Region
state: present
register: results
- name: "VLAN_GROUP 9: ASSERT - Necessary info creation - scope_type: dcim.region"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -198,16 +198,16 @@
- name: "VLAN_GROUP 10: Necessary info creation - scope_type: dcim.sitegroup"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Site Group"
- scope_type: "dcim.sitegroup"
+ name: VLAN Group Site Group
+ scope_type: dcim.sitegroup
scope: Test Site Group
state: present
register: results
- name: "VLAN_GROUP 10: ASSERT - Necessary info creation - scope_type: dcim.sitegroup"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -219,7 +219,7 @@
- results['msg'] == "vlan_group VLAN Group Site Group created"
# Commented out due to invalid content type being reported back by API
-#- name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
+# - name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
# netbox.netbox.netbox_vlan_group:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -230,7 +230,7 @@
# state: present
# register: results
-#- name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
+# - name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
# assert:
# that:
# - results is changed
@@ -241,8 +241,8 @@
# - results['vlan_group']['scope_type'] == "virtualization.cluster"
# - results['vlan_group']['scope_id'] == 1
# - results['msg'] == "vlan_group VLAN Group Cluster created"
-#
-#- name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
+
+# - name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
# netbox.netbox.netbox_vlan_group:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -252,8 +252,8 @@
# scope: Test Cluster Group
# state: present
# register: results
-#
-#- name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
+
+# - name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
# assert:
# that:
# - results is changed
@@ -268,17 +268,17 @@
- name: "VLAN_GROUP 12: Update Description - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
- description: "Ansible updated description"
+ description: Ansible updated description
state: present
register: results
- name: "VLAN_GROUP 12: ASSERT - Update Description - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['description'] == ""
@@ -293,17 +293,17 @@
- name: "VLAN_GROUP 12: Update Description (IDEM) - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
- description: "Ansible updated description"
+ description: Ansible updated description
state: present
register: results
- name: "VLAN_GROUP 12: ASSERT - Update Description (IDEM) - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is not changed
- results['vlan_group']['name'] == "VLAN Group Location"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml
index 799d04b16..653f31e0e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vm_interface.yml
@@ -7,15 +7,15 @@
- name: "NETBOX_VM_INTERFACE 1: Necessary info creation"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
state: present
register: test_one
- name: "NETBOX_VM_INTERFACE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "NETBOX_VM_INTERFACE 2: Create duplicate"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
state: present
register: test_two
- name: "NETBOX_VM_INTERFACE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['interface']['name'] == "Eth10"
@@ -45,30 +45,30 @@
- name: "NETBOX_VM_INTERFACE 3: Updated"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
enabled: false
mtu: 9000
mac_address: "00:00:00:AA:AA:01"
- description: "Updated test100-vm"
+ description: Updated test100-vm
mode: Tagged
- #untagged_vlan:
+ # untagged_vlan:
# name: Wireless
# site: Test Site
- #tagged_vlans:
+ # tagged_vlans:
# - name: Data
# site: Test Site
# - name: VoIP
# site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['enabled'] == false
@@ -76,8 +76,8 @@
- test_three['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
- test_three['diff']['after']['description'] == "Updated test100-vm"
- test_three['diff']['after']['mode'] == "tagged"
- #- test_three['diff']['after']['untagged_vlan'] == 1
- #- test_three['diff']['after']['tagged_vlans'] == [2, 3]
+ # - test_three['diff']['after']['untagged_vlan'] == 1
+ # - test_three['diff']['after']['tagged_vlans'] == [2, 3]
- test_three['diff']['after']['tags'][0] == 4
- test_three['interface']['name'] == "Eth10"
- test_three['interface']['virtual_machine'] == 1
@@ -86,23 +86,23 @@
- test_three['interface']['mac_address'] == "00:00:00:AA:AA:01"
- test_three['interface']['description'] == "Updated test100-vm"
- test_three['interface']['mode'] == "tagged"
- #- test_three['interface']['untagged_vlan'] == 1
- #- test_three['interface']['tagged_vlans'] == [2, 3]
+ # - test_three['interface']['untagged_vlan'] == 1
+ # - test_three['interface']['tagged_vlans'] == [2, 3]
- test_three['interface']['tags'][0] == 4
- test_three['msg'] == "interface Eth10 updated"
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Eth10"
- virtual_machine: "test100-vm"
+ name: Eth10
+ virtual_machine: test100-vm
state: absent
register: test_four
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['interface']['name'] == "Eth10"
@@ -112,30 +112,30 @@
- name: "NETBOX_VM_INTERFACE 5: Attempt to update interface with same name on other VMs"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth0"
+ virtual_machine: test100-vm
+ name: Eth0
enabled: false
mtu: 9000
mac_address: "00:00:00:AA:AA:01"
- description: "Updated test100-vm Eth0 intf"
+ description: Updated test100-vm Eth0 intf
mode: Tagged
- #untagged_vlan:
+ # untagged_vlan:
# name: Wireless
# site: Test Site
- #tagged_vlans:
+ # tagged_vlans:
# - name: Data
# site: Test Site
# - name: VoIP
# site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
- name: "NETBOX_VM_INTERFACE 5: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['enabled'] == false
@@ -143,8 +143,8 @@
- test_five['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
- test_five['diff']['after']['description'] == "Updated test100-vm Eth0 intf"
- test_five['diff']['after']['mode'] == "tagged"
- #- test_five['diff']['after']['untagged_vlan'] == 1
- #- test_five['diff']['after']['tagged_vlans'] == [2, 3]
+ # - test_five['diff']['after']['untagged_vlan'] == 1
+ # - test_five['diff']['after']['tagged_vlans'] == [2, 3]
- test_five['diff']['after']['tags'][0] == 4
- test_five['interface']['name'] == "Eth0"
- test_five['interface']['virtual_machine'] == 1
@@ -153,7 +153,7 @@
- test_five['interface']['mac_address'] == "00:00:00:AA:AA:01"
- test_five['interface']['description'] == "Updated test100-vm Eth0 intf"
- test_five['interface']['mode'] == "tagged"
- #- test_five['interface']['untagged_vlan'] == 1
- #- test_five['interface']['tagged_vlans'] == [2, 3]
+ # - test_five['interface']['untagged_vlan'] == 1
+ # - test_five['interface']['tagged_vlans'] == [2, 3]
- test_five['interface']['tags'][0] == 4
- test_five['msg'] == "interface Eth0 updated"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vrf.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vrf.yml
index abf9bab0e..1da7ae382 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vrf.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_vrf.yml
@@ -7,14 +7,14 @@
- name: "VRF 1: Necessary info creation"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
state: present
register: test_one
- name: "VRF 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -25,14 +25,14 @@
- name: "VRF 2: Create duplicate"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
state: present
register: test_two
- name: "VRF 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['vrf']['name'] == "Test VRF One"
@@ -41,7 +41,7 @@
- name: "VRF 3: Create VRF with same name, but different tenant"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
tenant: Test Tenant
@@ -49,7 +49,7 @@
register: test_three
- name: "VRF 3: ASSERT - Create VRF with same name, but different site"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -61,25 +61,25 @@
- name: "VRF 4: ASSERT - Update"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
rd: "65001:1"
- enforce_unique: False
- tenant: "Test Tenant"
+ enforce_unique: false
+ tenant: Test Tenant
description: Updated description
import_targets:
- - "4000:4000"
- - "5000:5000"
+ - 4000:4000
+ - 5000:5000
export_targets:
- - "5000:5000"
+ - 5000:5000
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "VRF 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['rd'] == "65001:1"
@@ -101,15 +101,15 @@
- name: "VRF 5: ASSERT - Delete more than one result"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
state: absent
- ignore_errors: yes
+ ignore_errors: true
register: test_five
- name: "VRF 5: ASSERT - Delete more than one result"
- assert:
+ ansible.builtin.assert:
that:
- test_five is failed
- test_five['msg'] == "More than one result returned for Test VRF One"
@@ -117,15 +117,15 @@
- name: "VRF 6: ASSERT - Delete"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
tenant: Test Tenant
state: absent
register: test_six
- name: "VRF 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['vrf']['name'] == "Test VRF One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_webhook.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_webhook.yml
index f2f5d517e..a3d5a3825 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_webhook.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_webhook.yml
@@ -7,12 +7,12 @@
- name: "WEBHOOK 1: Necessary info creation"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url
body_template: !unsafe >-
{{ data }}
@@ -20,7 +20,7 @@
register: test_one
- name: "WEBHOOK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,12 +34,12 @@
- name: "WEBHOOK 2: Create duplicate"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url
body_template: !unsafe >-
{{ data }}
@@ -47,7 +47,7 @@
register: test_two
- name: "WEBHOOK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['webhook']['name'] == "Example Webhook"
@@ -56,12 +56,12 @@
- name: "WEBHOOK 3: Update data and add on delete"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
- type_create: yes
- type_delete: yes
+ - dcim.device
+ type_create: true
+ type_delete: true
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -70,7 +70,7 @@
register: test_three
- name: "WEBHOOK 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['webhook']['name'] == "Example Webhook"
@@ -79,10 +79,10 @@
- name: "WEBHOOK 4: Change content type"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -91,7 +91,7 @@
register: test_four
- name: "WEBHOOK 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -101,10 +101,10 @@
- name: "WEBHOOK 5: Delete"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -113,7 +113,7 @@
register: test_five
- name: "WEBHOOK 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml
index e59af42ee..e70fd3e59 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan.yml
@@ -4,16 +4,16 @@
### NETBOX_WIRELESS_LAN
##
##
-- name: "1 - Test wireless LAN creation"
+- name: 1 - Test wireless LAN creation
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
+ ssid: Wireless LAN One
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['wireless_lan']['ssid'] == "Wireless LAN One"
- test_one['msg'] == "wireless_lan Wireless LAN One created"
-- name: "Test duplicate wireless LAN"
+- name: Test duplicate wireless LAN
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
+ ssid: Wireless LAN One
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['wireless_lan']['ssid'] == "Wireless LAN One"
- test_two['msg'] == "wireless_lan Wireless LAN One already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
- description: "New Description"
+ ssid: Wireless LAN One
+ description: New Description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "New Description"
@@ -54,27 +54,27 @@
- test_three['wireless_lan']['description'] == "New Description"
- test_three['msg'] == "wireless_lan Wireless LAN One updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
- state: "absent"
+ ssid: Wireless LAN One
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_lan Wireless LAN One deleted"
-- name: "5 - Create wireless LAN with all parameters"
+- name: 5 - Create wireless LAN with all parameters
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
ssid: Wireless Network One
description: Cool Wireless Network
@@ -88,8 +88,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -99,5 +99,5 @@
- test_five['wireless_lan']['auth_type'] == "wpa-enterprise"
- test_five['wireless_lan']['auth_cipher'] == "aes"
- test_five['wireless_lan']['auth_psk'] == "psk123456"
- #- test_five['wireless_lan']['tags'] | length == 3
+ # - test_five['wireless_lan']['tags'] | length == 3
- test_five['msg'] == "wireless_lan Wireless Network One created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml
index 51ae3a74f..57daaf62c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_lan_group.yml
@@ -4,16 +4,16 @@
### NETBOX_WIRELESS_LAN_GROUP
##
##
-- name: "1 - Test wireless LAN group creation"
+- name: 1 - Test wireless LAN group creation
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
+ name: Wireless LAN Group One
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['wireless_lan_group']['name'] == "Wireless LAN Group One"
- test_one['msg'] == "wireless_lan_group Wireless LAN Group One created"
-- name: "Test duplicate wireless LAN group"
+- name: Test duplicate wireless LAN group
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
+ name: Wireless LAN Group One
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['wireless_lan_group']['name'] == "Wireless LAN Group One"
- test_two['msg'] == "wireless_lan_group Wireless LAN Group One already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
- description: "New Description"
+ name: Wireless LAN Group One
+ description: New Description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "New Description"
@@ -54,27 +54,27 @@
- test_three['wireless_lan_group']['description'] == "New Description"
- test_three['msg'] == "wireless_lan_group Wireless LAN Group One updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
- state: "absent"
+ name: Wireless LAN Group One
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_lan_group Wireless LAN Group One deleted"
-- name: "5 - Create wireless LAN group with all parameters"
+- name: 5 - Create wireless LAN group with all parameters
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Wireless LAN Group One
description: Cool Wireless LAN Group
@@ -85,8 +85,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml
index baa4574d1..b026db81e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.5/tasks/netbox_wireless_link.yml
@@ -4,10 +4,10 @@
### NETBOX_WIRELESS_LINK
##
##
-- name: "1 - Test wireless link creation"
+- name: 1 - Test wireless link creation
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -17,8 +17,8 @@
name: wlink1
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,10 +27,10 @@
- test_one['wireless_link']['interface_b'] == 5
- test_one['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 created"
-- name: "Test duplicate wireless link"
+- name: Test duplicate wireless link
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -40,16 +40,16 @@
name: wlink1
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -60,18 +60,18 @@
status: planned
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
- test_three['wireless_link']['status'] == "planned"
- test_three['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -79,21 +79,21 @@
interface_b:
device: test100
name: wlink1
- state: "absent"
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 deleted"
-- name: "5 - Create wireless link with all parameters"
+- name: 5 - Create wireless link with all parameters
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -113,8 +113,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/main.yml
index 5329b4917..31e300ef3 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/main.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/main.yml
@@ -1,317 +1,317 @@
---
-- name: "NETBOX_DEVICE TESTS"
- include_tasks: "netbox_device.yml"
+- name: NETBOX_DEVICE TESTS
+ ansible.builtin.include_tasks: netbox_device.yml
-- name: "NETBOX_DEVICE_INTERFACE TESTS"
- include_tasks: "netbox_device_interface.yml"
+- name: NETBOX_DEVICE_INTERFACE TESTS
+ ansible.builtin.include_tasks: netbox_device_interface.yml
-- name: "NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS"
- include_tasks: "netbox_device_interface_template.yml"
+- name: NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_device_interface_template.yml
-- name: "NETBOX_IP_ADDRESS TESTS"
- include_tasks: "netbox_ip_address.yml"
+- name: NETBOX_IP_ADDRESS TESTS
+ ansible.builtin.include_tasks: netbox_ip_address.yml
-- name: "NETBOX_PREFIX TESTS"
- include_tasks: "netbox_prefix.yml"
+- name: NETBOX_PREFIX TESTS
+ ansible.builtin.include_tasks: netbox_prefix.yml
-- name: "NETBOX_SITE TESTS"
- include_tasks: "netbox_site.yml"
+- name: NETBOX_SITE TESTS
+ ansible.builtin.include_tasks: netbox_site.yml
-- name: "NETBOX_SITE_GROUP TESTS"
- include_tasks: "netbox_site_group.yml"
+- name: NETBOX_SITE_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_site_group.yml
-- name: "NETBOX_CONTACT TESTS"
- include_tasks: "netbox_contact.yml"
+- name: NETBOX_CONTACT TESTS
+ ansible.builtin.include_tasks: netbox_contact.yml
-- name: "NETBOX_CONTACT_ROLE TESTS"
- include_tasks: "netbox_contact_role.yml"
+- name: NETBOX_CONTACT_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_contact_role.yml
-- name: "NETBOX_TENTANT TESTS"
- include_tasks: "netbox_tenant.yml"
+- name: NETBOX_TENTANT TESTS
+ ansible.builtin.include_tasks: netbox_tenant.yml
-- name: "NETBOX_TENTANT_GROUP TESTS"
- include_tasks: "netbox_tenant_group.yml"
+- name: NETBOX_TENTANT_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_tenant_group.yml
-- name: "NETBOX_RACK TESTS"
- include_tasks: "netbox_rack.yml"
+- name: NETBOX_RACK TESTS
+ ansible.builtin.include_tasks: netbox_rack.yml
-- name: "NETBOX_RACK_ROLE TESTS"
- include_tasks: "netbox_rack_role.yml"
+- name: NETBOX_RACK_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_rack_role.yml
-- name: "NETBOX_LOCATION TESTS"
- include_tasks: "netbox_location.yml"
+- name: NETBOX_LOCATION TESTS
+ ansible.builtin.include_tasks: netbox_location.yml
-- name: "NETBOX_MANUFACTURER TESTS"
- include_tasks: "netbox_manufacturer.yml"
+- name: NETBOX_MANUFACTURER TESTS
+ ansible.builtin.include_tasks: netbox_manufacturer.yml
-- name: "NETBOX_PLATFORM TESTS"
- include_tasks: "netbox_platform.yml"
+- name: NETBOX_PLATFORM TESTS
+ ansible.builtin.include_tasks: netbox_platform.yml
-- name: "NETBOX_DEVICE_TYPE TESTS"
- include_tasks: "netbox_device_type.yml"
+- name: NETBOX_DEVICE_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_device_type.yml
-- name: "NETBOX_DEVICE_ROLE TESTS"
- include_tasks: "netbox_device_role.yml"
+- name: NETBOX_DEVICE_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_device_role.yml
-- name: "NETBOX_IPAM_ROLE TESTS"
- include_tasks: "netbox_ipam_role.yml"
+- name: NETBOX_IPAM_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_ipam_role.yml
-- name: "NETBOX_VLAN_GROUP TESTS"
- include_tasks: "netbox_vlan_group.yml"
+- name: NETBOX_VLAN_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_vlan_group.yml
-- name: "NETBOX_VLAN TESTS"
- include_tasks: "netbox_vlan.yml"
+- name: NETBOX_VLAN TESTS
+ ansible.builtin.include_tasks: netbox_vlan.yml
-- name: "NETBOX_VRF TESTS"
- include_tasks: "netbox_vrf.yml"
+- name: NETBOX_VRF TESTS
+ ansible.builtin.include_tasks: netbox_vrf.yml
-- name: "NETBOX_RIR TESTS"
- include_tasks: "netbox_rir.yml"
+- name: NETBOX_RIR TESTS
+ ansible.builtin.include_tasks: netbox_rir.yml
-- name: "NETBOX_AGGREGATE TESTS"
- include_tasks: "netbox_aggregate.yml"
+- name: NETBOX_AGGREGATE TESTS
+ ansible.builtin.include_tasks: netbox_aggregate.yml
-- name: "NETBOX_REGION TESTS"
- include_tasks: "netbox_region.yml"
+- name: NETBOX_REGION TESTS
+ ansible.builtin.include_tasks: netbox_region.yml
-- name: "NETBOX_DEVICE_BAY TESTS"
- include_tasks: "netbox_device_bay.yml"
+- name: NETBOX_DEVICE_BAY TESTS
+ ansible.builtin.include_tasks: netbox_device_bay.yml
-- name: "NETBOX_DEVICE_BAY_TEMPLATE TESTS"
- include_tasks: "netbox_device_bay_template.yml"
+- name: NETBOX_DEVICE_BAY_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_device_bay_template.yml
-- name: "NETBOX_INVENTORY_ITEM TESTS"
- include_tasks: "netbox_inventory_item.yml"
+- name: NETBOX_INVENTORY_ITEM TESTS
+ ansible.builtin.include_tasks: netbox_inventory_item.yml
-- name: "NETBOX_VIRTUAL_MACHINE TESTS"
- include_tasks: "netbox_virtual_machine.yml"
+- name: NETBOX_VIRTUAL_MACHINE TESTS
+ ansible.builtin.include_tasks: netbox_virtual_machine.yml
-- name: "NETBOX_CLUSTER TESTS"
- include_tasks: "netbox_cluster.yml"
+- name: NETBOX_CLUSTER TESTS
+ ansible.builtin.include_tasks: netbox_cluster.yml
-- name: "NETBOX_CLUSTER_GROUP TESTS"
- include_tasks: "netbox_cluster_group.yml"
+- name: NETBOX_CLUSTER_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_cluster_group.yml
-- name: "NETBOX_CLUSTER_TYPE TESTS"
- include_tasks: "netbox_cluster_type.yml"
+- name: NETBOX_CLUSTER_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_cluster_type.yml
-- name: "NETBOX_VM_INTERFACE TESTS"
- include_tasks: "netbox_vm_interface.yml"
+- name: NETBOX_VM_INTERFACE TESTS
+ ansible.builtin.include_tasks: netbox_vm_interface.yml
-- name: "NETBOX_PROVIDER TESTS"
- include_tasks: "netbox_provider.yml"
+- name: NETBOX_PROVIDER TESTS
+ ansible.builtin.include_tasks: netbox_provider.yml
-- name: "NETBOX_PROVIDER_NETWORK TESTS"
- include_tasks: "netbox_provider_network.yml"
+- name: NETBOX_PROVIDER_NETWORK TESTS
+ ansible.builtin.include_tasks: netbox_provider_network.yml
-- name: "NETBOX_CIRCUIT_TYPE TESTS"
- include_tasks: "netbox_circuit_type.yml"
+- name: NETBOX_CIRCUIT_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_circuit_type.yml
-- name: "NETBOX_CIRCUIT TESTS"
- include_tasks: "netbox_circuit.yml"
+- name: NETBOX_CIRCUIT TESTS
+ ansible.builtin.include_tasks: netbox_circuit.yml
-- name: "NETBOX_CIRCUIT_TERMINATION TESTS"
- include_tasks: "netbox_circuit_termination.yml"
+- name: NETBOX_CIRCUIT_TERMINATION TESTS
+ ansible.builtin.include_tasks: netbox_circuit_termination.yml
-- name: "NETBOX_REAR_PORT TESTS"
- include_tasks: "netbox_rear_port.yml"
+- name: NETBOX_REAR_PORT TESTS
+ ansible.builtin.include_tasks: netbox_rear_port.yml
-- name: "NETBOX_REAR_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_rear_port_template.yml"
+- name: NETBOX_REAR_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_rear_port_template.yml
-- name: "NETBOX_FRONT_PORT TESTS"
- include_tasks: "netbox_front_port.yml"
+- name: NETBOX_FRONT_PORT TESTS
+ ansible.builtin.include_tasks: netbox_front_port.yml
-- name: "NETBOX_FRONT_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_front_port_template.yml"
+- name: NETBOX_FRONT_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_front_port_template.yml
-- name: "NETBOX_CONSOLE_PORT TESTS"
- include_tasks: "netbox_console_port.yml"
+- name: NETBOX_CONSOLE_PORT TESTS
+ ansible.builtin.include_tasks: netbox_console_port.yml
-- name: "NETBOX_CONSOLE_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_console_port_template.yml"
+- name: NETBOX_CONSOLE_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_console_port_template.yml
-- name: "NETBOX_CONSOLE_SERVER_PORT TESTS"
- include_tasks: "netbox_console_server_port.yml"
+- name: NETBOX_CONSOLE_SERVER_PORT TESTS
+ ansible.builtin.include_tasks: netbox_console_server_port.yml
-- name: "NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_console_server_port_template.yml"
+- name: NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_console_server_port_template.yml
-- name: "NETBOX_POWER_PANEL TESTS"
- include_tasks: "netbox_power_panel.yml"
+- name: NETBOX_POWER_PANEL TESTS
+ ansible.builtin.include_tasks: netbox_power_panel.yml
-- name: "NETBOX_POWER_FEED TESTS"
- include_tasks: "netbox_power_feed.yml"
+- name: NETBOX_POWER_FEED TESTS
+ ansible.builtin.include_tasks: netbox_power_feed.yml
-- name: "NETBOX_POWER_PORT TESTS"
- include_tasks: "netbox_power_port.yml"
+- name: NETBOX_POWER_PORT TESTS
+ ansible.builtin.include_tasks: netbox_power_port.yml
-- name: "NETBOX_POWER_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_power_port_template.yml"
+- name: NETBOX_POWER_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_power_port_template.yml
-- name: "NETBOX_POWER_OUTLET TESTS"
- include_tasks: "netbox_power_outlet.yml"
+- name: NETBOX_POWER_OUTLET TESTS
+ ansible.builtin.include_tasks: netbox_power_outlet.yml
-- name: "NETBOX_POWER_OUTLET_TEMPLATE TESTS"
- include_tasks: "netbox_power_outlet_template.yml"
+- name: NETBOX_POWER_OUTLET_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_power_outlet_template.yml
-- name: "NETBOX_VIRTUAL_CHASSIS TESTS"
- include_tasks: "netbox_virtual_chassis.yml"
+- name: NETBOX_VIRTUAL_CHASSIS TESTS
+ ansible.builtin.include_tasks: netbox_virtual_chassis.yml
# Module has to be updated for 3.3
-#- name: "NETBOX_CABLE TESTS"
-# include_tasks: "netbox_cable.yml"
+# - name: "NETBOX_CABLE TESTS"
+# include_tasks: "netbox_cable.yml"
-- name: "NETBOX_SERVICE TESTS"
- include_tasks: "netbox_service.yml"
+- name: NETBOX_SERVICE TESTS
+ ansible.builtin.include_tasks: netbox_service.yml
-- name: "NETBOX_LOOKUP TESTS"
- include_tasks: "netbox_lookup.yml"
+- name: NETBOX_LOOKUP TESTS
+ ansible.builtin.include_tasks: netbox_lookup.yml
-- name: "NETBOX_TAG_TESTS"
- include_tasks: "netbox_tag.yml"
+- name: NETBOX_TAG_TESTS
+ ansible.builtin.include_tasks: netbox_tag.yml
-- name: "NETBOX_ROUTE_TARGET_TESTS"
- include_tasks: "netbox_route_target.yml"
+- name: NETBOX_ROUTE_TARGET_TESTS
+ ansible.builtin.include_tasks: netbox_route_target.yml
-- name: "NETBOX_WIRELESS_LAN TESTS"
- include_tasks:
- file: "netbox_wireless_lan.yml"
+- name: NETBOX_WIRELESS_LAN TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan.yml
apply:
tags:
- netbox_wireless_lan
tags:
- netbox_wireless_lan
-- name: "NETBOX_WIRELESS_LAN_GROUP TESTS"
- include_tasks:
- file: "netbox_wireless_lan_group.yml"
+- name: NETBOX_WIRELESS_LAN_GROUP TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan_group.yml
apply:
tags:
- netbox_wireless_lan_group
tags:
- netbox_wireless_lan_group
-- name: "NETBOX_WIRELESS_LINK TESTS"
- include_tasks:
- file: "netbox_wireless_link.yml"
+- name: NETBOX_WIRELESS_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_link.yml
apply:
tags:
- netbox_wireless_link
tags:
- netbox_wireless_link
-- name: "NETBOX_CUSTOM_FIELD TESTS"
- include_tasks:
- file: "netbox_custom_field.yml"
+- name: NETBOX_CUSTOM_FIELD TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_field.yml
apply:
tags:
- netbox_custom_field
tags:
- netbox_custom_field
-- name: "NETBOX_CUSTOM_LINK TESTS"
- include_tasks:
- file: "netbox_custom_link.yml"
+- name: NETBOX_CUSTOM_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_link.yml
apply:
tags:
- netbox_custom_link
tags:
- netbox_custom_link
-- name: "NETBOX_EXPORT_TEMPLATE TESTS"
- include_tasks:
- file: "netbox_export_template.yml"
+- name: NETBOX_EXPORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_export_template.yml
apply:
tags:
- netbox_export_template
tags:
- netbox_export_template
-- name: "NETBOX_WEBHOOK TESTS"
- include_tasks:
- file: "netbox_webhook.yml"
+- name: NETBOX_WEBHOOK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_webhook.yml
apply:
tags:
- netbox_webhook
tags:
- netbox_webhook
-- name: "NETBOX_L2VPN TESTS"
- include_tasks:
- file: "netbox_l2vpn.yml"
+- name: NETBOX_L2VPN TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_l2vpn.yml
apply:
tags:
- netbox_l2vpn
tags:
- netbox_l2vpn
-- name: "NETBOX_L2VPN_TERMINATION TESTS"
- include_tasks:
- file: "netbox_l2vpn_termination.yml"
+- name: NETBOX_L2VPN_TERMINATION TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_l2vpn_termination.yml
apply:
tags:
- netbox_l2vpn_termination
tags:
- netbox_l2vpn_termination
-- name: "NETBOX_INVENTORY_ITEM_ROLE TESTS"
- include_tasks:
- file: "netbox_inventory_item_role.yml"
+- name: NETBOX_INVENTORY_ITEM_ROLE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_inventory_item_role.yml
apply:
tags:
- netbox_inventory_item_role
tags:
- netbox_inventory_item_role
-- name: "NETBOX_MODULE_TYPE TESTS"
- include_tasks:
- file: "netbox_module_type.yml"
+- name: NETBOX_MODULE_TYPE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_module_type.yml
apply:
tags:
- netbox_module_type
tags:
- netbox_module_type
-- name: "NETBOX_SERVICE_TEMPLATE TESTS"
- include_tasks:
- file: "netbox_service_template.yml"
+- name: NETBOX_SERVICE_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_service_template.yml
apply:
tags:
- netbox_service_template
tags:
- netbox_service_template
-- name: "NETBOX_ASN TESTS"
- include_tasks: "netbox_asn.yml"
+- name: NETBOX_ASN TESTS
+ ansible.builtin.include_tasks: netbox_asn.yml
-- name: "NETBOX_FHRP_GROUP TESTS"
- include_tasks: "netbox_fhrp_group.yml"
+- name: NETBOX_FHRP_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_fhrp_group.yml
-- name: "NETBOX_JOURNAL_ENTRY TESTS"
- include_tasks:
- file: "netbox_journal_entry.yml"
+- name: NETBOX_JOURNAL_ENTRY TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_journal_entry.yml
apply:
tags:
- netbox_journal_entry
tags:
- netbox_journal_entry
-- name: "NETBOX_FHRP_GROUP_ASSIGNMENT TESTS"
- include_tasks:
- file: "netbox_fhrp_group_assignment.yml"
+- name: NETBOX_FHRP_GROUP_ASSIGNMENT TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_fhrp_group_assignment.yml
apply:
tags:
- netbox_fhrp_group_assignmen
tags:
- netbox_fhrp_group_assignmen
-- name: "NETBOX_CONFIG_TEMPLATE"
- include_tasks: "netbox_config_template.yml"
+- name: NETBOX_CONFIG_TEMPLATE
+ ansible.builtin.include_tasks: netbox_config_template.yml
tags:
- netbox_config_template
-- name: "NETBOX_CUSTOM_FIELD_CHOICE_SET"
- include_tasks: "netbox_custom_field_choice_set.yml"
+- name: NETBOX_CUSTOM_FIELD_CHOICE_SET
+ ansible.builtin.include_tasks: netbox_custom_field_choice_set.yml
tags:
- netbox_custom_field_choice_set
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_aggregate.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_aggregate.yml
index ac02641dc..d45b9c835 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_aggregate.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_aggregate.yml
@@ -7,35 +7,35 @@
- name: "AGGREGATE 1: Necessary info creation"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
- rir: "Example RIR"
+ prefix: 10.0.0.0/8
+ rir: Example RIR
state: present
register: test_one
- name: "AGGREGATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
- test_one['diff']['after']['state'] == "present"
- test_one['aggregate']['prefix'] == "10.0.0.0/8"
- #- test_one['aggregate']['family'] == 4
+ # - test_one['aggregate']['family'] == 4
- test_one['aggregate']['rir'] == 1
- test_one['msg'] == "aggregate 10.0.0.0/8 created"
- name: "AGGREGATE 2: Create duplicate"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
+ prefix: 10.0.0.0/8
state: present
register: test_two
- name: "AGGREGATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['aggregate']['prefix'] == "10.0.0.0/8"
@@ -46,19 +46,19 @@
- name: "AGGREGATE 3: ASSERT - Update"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
- rir: "Example RIR"
+ prefix: 10.0.0.0/8
+ rir: Example RIR
date_added: "1989-01-18"
- description: "Test Description"
+ description: Test Description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "AGGREGATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['date_added'] == "1989-01-18"
@@ -75,14 +75,14 @@
- name: "AGGREGATE 4: ASSERT - Delete"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
+ prefix: 10.0.0.0/8
state: absent
register: test_four
- name: "AGGREGATE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['aggregate']['prefix'] == "10.0.0.0/8"
@@ -96,20 +96,20 @@
- name: "AGGREGATE 5: Necessary info creation"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "2001::/32"
- rir: "Example RIR"
+ prefix: 2001::/32
+ rir: Example RIR
state: present
register: test_five
- name: "AGGREGATE 5: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
- test_five['diff']['after']['state'] == "present"
- test_five['aggregate']['prefix'] == "2001::/32"
- #- test_five['aggregate']['family'] == 6
+ # - test_five['aggregate']['family'] == 6
- test_five['aggregate']['rir'] == 1
- test_five['msg'] == "aggregate 2001::/32 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_asn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_asn.yml
index 346bafda9..fab4270a9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_asn.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_asn.yml
@@ -7,7 +7,7 @@
- name: "ASN 1: Test ASN creation"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
@@ -27,7 +27,7 @@
- name: "ASN 2: Create duplicate"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
@@ -45,14 +45,14 @@
- name: "ASN 3: Update ASN with other fields"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
tenant: Test Tenant
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -73,7 +73,7 @@
- name: "ASN 4: ASSERT - Delete"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
state: absent
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cable.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cable.yml
index 5728986c0..d6f0ab02e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cable.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cable.yml
@@ -10,7 +10,7 @@
- name: "CABLE 1: Necessary info creation"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -24,7 +24,7 @@
register: test_one
- name: "CABLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -38,7 +38,7 @@
- name: "CABLE 2: Create duplicate"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -52,7 +52,7 @@
register: test_two
- name: "CABLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cable']['termination_a_type'] == "dcim.interface"
@@ -64,7 +64,7 @@
- name: "CABLE 3: Update Cable with other fields"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -81,13 +81,13 @@
length: 30
length_unit: m
tags:
- - "Schnozzberry"
- tenant: "Test Tenant"
+ - Schnozzberry
+ tenant: Test Tenant
state: present
register: test_three
- name: "CABLE 3: ASSERT - Update Cable with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "mmf-om4"
@@ -115,7 +115,7 @@
- name: "CABLE 4: ASSERT - Delete"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -129,7 +129,7 @@
register: test_four
- name: "CABLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -139,7 +139,7 @@
- name: "CABLE 5: Connect Console Port and Console Server Port"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.consoleserverport
termination_a:
@@ -153,7 +153,7 @@
register: test_five
- name: "CABLE 5: ASSERT - Connect Console Port and Console Server Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -167,21 +167,21 @@
- name: "CABLE 6: Circuits Termination as side A"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: circuits.circuittermination
termination_a:
- circuit: "Test Circuit Two"
- term_side: "A"
+ circuit: Test Circuit Two
+ term_side: A
termination_b_type: dcim.interface
termination_b:
- device: "test100"
- name: "GigabitEthernet2"
+ device: test100
+ name: GigabitEthernet2
state: present
register: test_six
- name: "CABLE 6: ASSERT - Circuits Termination as side A"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit.yml
index 1d8f1ec47..8bfd265ac 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit.yml
@@ -7,7 +7,7 @@
- name: "NETBOX_CIRCUIT 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -16,7 +16,7 @@
register: test_one
- name: "NETBOX_CIRCUIT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "NETBOX_CIRCUIT 2: Duplicate"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -38,7 +38,7 @@
register: test_two
- name: "NETBOX_CIRCUIT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit']['cid'] == "Test Circuit One"
@@ -49,7 +49,7 @@
- name: "NETBOX_CIRCUIT 3: Update provider with other fields"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -59,12 +59,12 @@
install_date: "2018-12-25"
commit_rate: 10000
description: "Test circuit "
- comments: "FAST CIRCUIT"
+ comments: FAST CIRCUIT
state: present
register: test_three
- name: "NETBOX_CIRCUIT 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
@@ -87,14 +87,14 @@
- name: "NETBOX_CIRCUIT 4: Delete provider within netbox"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
state: absent
register: test_four
- name: "NETBOX_CIRCUIT 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['circuit']['cid'] == "Test Circuit One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml
index a37cdc680..cbc749f63 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_termination.yml
@@ -7,17 +7,17 @@
- name: "NETBOX_CIRCUIT_TERMINATION 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
- site: "Test Site"
+ site: Test Site
port_speed: 10000
state: present
register: test_one
- name: "NETBOX_CIRCUIT_TERMINATION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,7 +31,7 @@
- name: "NETBOX_CIRCUIT_TERMINATION 2: Duplicate"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
@@ -39,7 +39,7 @@
register: test_two
- name: "NETBOX_CIRCUIT_TERMINATION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit_termination']['circuit'] == 1
@@ -51,19 +51,19 @@
- name: "NETBOX_CIRCUIT_TERMINATION 3: Update provider with other fields"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
upstream_speed: 1000
xconnect_id: 10X100
pp_info: PP10-24
- description: "Test description"
+ description: Test description
state: present
register: test_three
- name: "NETBOX_CIRCUIT_TERMINATION 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['upstream_speed'] == 1000
@@ -83,17 +83,17 @@
- name: "NETBOX_CIRCUIT_TERMINATION 4: Create Z Side"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: Z
- site: "Test Site"
+ site: Test Site
port_speed: 10000
state: present
register: test_four
- name: "NETBOX_CIRCUIT_TERMINATION 4: ASSERT - Create Z Side"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,7 +107,7 @@
- name: "NETBOX_CIRCUIT_TERMINATION 5: Delete provider within netbox"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
@@ -115,7 +115,7 @@
register: test_five
- name: "NETBOX_CIRCUIT_TERMINATION 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['circuit_termination']['circuit'] == 1
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml
index 45f956e34..08dc67985 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_circuit_type.yml
@@ -7,14 +7,14 @@
- name: "CIRCUIT_TYPE 1: Necessary info creation"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: present
register: test_one
- name: "CIRCUIT_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CIRCUIT_TYPE 2: Create duplicate"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: present
register: test_two
- name: "CIRCUIT_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit_type']['name'] == "Test Circuit Type One"
@@ -43,15 +43,15 @@
- name: "CIRCUIT_TYPE 3: User specified slug"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type Two"
- slug: "test-circuit-type-2"
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
state: present
register: test_three
- name: "CIRCUIT_TYPE 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: absent
register: test_four
- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['circuit_type']['name'] == "Test Circuit Type One"
@@ -80,15 +80,15 @@
- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type Two"
- slug: "test-circuit-type-2"
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
state: absent
register: test_five
- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['circuit_type']['name'] == "Test Circuit Type Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster.yml
index 11ba5ef82..b2810a1be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster.yml
@@ -7,15 +7,15 @@
- name: "CLUSTER 1: Necessary info creation"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
state: present
register: test_one
- name: "CLUSTER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "CLUSTER 2: Create duplicate"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
state: present
register: test_two
- name: "CLUSTER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster']['name'] == "Test Cluster One"
@@ -45,21 +45,21 @@
- name: "CLUSTER 3: Update"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
- cluster_group: "Test Cluster Group"
- site: "Test Site"
- comments: "Updated cluster"
- tenant: "Test Tenant"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
+ cluster_group: Test Cluster Group
+ site: Test Site
+ comments: Updated cluster
+ tenant: Test Tenant
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "CLUSTER 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['group'] == 1
@@ -79,14 +79,14 @@
- name: "CLUSTER 4: ASSERT - Delete"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
+ name: Test Cluster One
state: absent
register: test_four
- name: "CLUSTER 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster']['name'] == "Test Cluster One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml
index 9f84518ce..64dda6c44 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_group.yml
@@ -7,14 +7,14 @@
- name: "CLUSTER_GROUP 1: Necessary info creation"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: present
register: test_one
- name: "CLUSTER_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CLUSTER_GROUP 2: Create duplicate"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: present
register: test_two
- name: "CLUSTER_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster_group']['name'] == "Test Cluster Group One"
@@ -43,15 +43,15 @@
- name: "CLUSTER_GROUP 3: User specified slug"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group Two"
- slug: "test-cluster-group-2"
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
state: present
register: test_three
- name: "CLUSTER_GROUP 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CLUSTER_GROUP 4: ASSERT - Delete"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: absent
register: test_four
- name: "CLUSTER_GROUP 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster_group']['name'] == "Test Cluster Group One"
@@ -80,15 +80,15 @@
- name: "CLUSTER_GROUP 5: ASSERT - Delete"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group Two"
- slug: "test-cluster-group-2"
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
state: absent
register: test_five
- name: "CLUSTER_GROUP 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['cluster_group']['name'] == "Test Cluster Group Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml
index 11c88206f..bc09ac8a6 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_cluster_type.yml
@@ -7,14 +7,14 @@
- name: "CLUSTER_TYPE 1: Necessary info creation"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: present
register: test_one
- name: "CLUSTER_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CLUSTER_TYPE 2: Create duplicate"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: present
register: test_two
- name: "CLUSTER_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster_type']['name'] == "Test Cluster Type One"
@@ -43,15 +43,15 @@
- name: "CLUSTER_TYPE 3: User specified slug"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type Two"
- slug: "test-cluster-type-2"
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
state: present
register: test_three
- name: "CLUSTER_TYPE 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CLUSTER_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: absent
register: test_four
- name: "CLUSTER_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster_type']['name'] == "Test Cluster Type One"
@@ -80,15 +80,15 @@
- name: "CLUSTER_TYPE 5: ASSERT - Delete"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type Two"
- slug: "test-cluster-type-2"
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
state: absent
register: test_five
- name: "CLUSTER_TYPE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['cluster_type']['name'] == "Test Cluster Type Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_context.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_context.yml
index 2353dfbfa..910ec82ca 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_context.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_context.yml
@@ -7,16 +7,16 @@
- name: "CONFIG_CONTEXT 1: Necessary info creation"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"testvaule\" ] } }"
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
state: present
register: test_one
- name: "CONFIG_CONTEXT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,16 +31,16 @@
- name: "CONFIG_CONTEXT 2: Create duplicate"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"testvaule\" ] } }"
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
state: present
register: test_two
- name: "CONFIG_CONTEXT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['config_context']['name'] == "test_context"
@@ -49,18 +49,18 @@
- name: "CONFIG_CONTEXT 3: Update data and attach to site"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Updated test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"updatedvaule\" ] } }"
+ name: test_context
+ description: Updated test context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
weight: 100
sites: [test-site]
state: present
register: test_three
- name: "CONFIG_CONTEXT 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['data'].testkey.testsubkey[0] == "updatedvaule"
@@ -73,16 +73,16 @@
- name: "CONFIG_CONTEXT 4: Detach from site"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"updatedvaule\" ] } }"
+ name: test_context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
sites: []
state: present
register: test_four
- name: "CONFIG_CONTEXT 4: ASSERT - Detached"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['sites']|length == 0
@@ -92,14 +92,14 @@
- name: "CONFIG_CONTEXT 5: Delete"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
+ name: test_context
state: absent
register: test_five
- name: "CONFIG_CONTEXT 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_template.yml
index 9abf3ea05..af6cfb471 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_config_template.yml
@@ -7,16 +7,16 @@
- name: "CONFIG_TEMPLATES 1: Necessary info creation"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
- description: "Test template"
- template_code: "test template"
+ name: test_template
+ description: Test template
+ template_code: test template
state: present
register: test_one
- name: "CONFIG_TEMPLATES 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,16 +29,16 @@
- name: "CONFIG_TEMPLATES 2: Create duplicate"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
- description: "Test template"
- template_code: "test template"
+ name: test_template
+ description: Test template
+ template_code: test template
state: present
register: test_two
- name: "CONFIG_TEMPLATES 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['config_template']['name'] == "test_template"
@@ -47,16 +47,16 @@
- name: "CONFIG_TEMPLATES 3: Update data"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
- description: "Updated test template"
- template_code: "updated test template"
+ name: test_template
+ description: Updated test template
+ template_code: updated test template
state: present
register: test_three
- name: "CONFIG_TEMPLATES 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['template_code'] == "updated test template"
@@ -67,14 +67,14 @@
- name: "CONFIG_TEMPLATES 4: Delete"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
+ name: test_template
state: absent
register: test_four
- name: "CONFIG_TEMPLATES 4: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port.yml
index 1c97cb8ad..6be028bec 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_PORT 1: Necessary info creation"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_PORT 2: Create duplicate"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_port']['name'] == "Console Port"
@@ -48,7 +48,7 @@
- name: "CONSOLE_PORT 3: Update Console Port with other fields"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -58,7 +58,7 @@
register: test_three
- name: "CONSOLE_PORT 3: ASSERT - Update Console Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -72,7 +72,7 @@
- name: "CONSOLE_PORT 4: Create Console Port for Delete Test"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port 2
device: test100
@@ -80,7 +80,7 @@
register: test_four
- name: "CONSOLE_PORT 4: ASSERT - Create Console Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -92,7 +92,7 @@
- name: "CONSOLE_PORT 5: Delete Console Port"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port 2
device: test100
@@ -100,7 +100,7 @@
register: test_five
- name: "CONSOLE_PORT 5: ASSERT - Delete Console Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml
index b1bd1d1e4..ef6149214 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_port_template.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_port_template']['name'] == "Console Port Template"
@@ -48,7 +48,7 @@
- name: "CONSOLE_PORT_TEMPLATE 3: Update Console Port Template with other fields"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -57,7 +57,7 @@
register: test_three
- name: "CONSOLE_PORT_TEMPLATE 3: ASSERT - Update Console Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -69,7 +69,7 @@
- name: "CONSOLE_PORT_TEMPLATE 4: Create Console Port Template for Delete Test"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template 2
device_type: Cisco Test
@@ -77,7 +77,7 @@
register: test_four
- name: "CONSOLE_PORT_TEMPLATE 4: ASSERT - Create Console Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "CONSOLE_PORT_TEMPLATE 5: Delete Console Port Template"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template 2
device_type: Cisco Test
@@ -97,7 +97,7 @@
register: test_five
- name: "CONSOLE_PORT_TEMPLATE 5: ASSERT - Delete Console Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml
index f70345329..0ac7b65d0 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_SERVER_PORT 1: Necessary info creation"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_SERVER_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_SERVER_PORT 2: Create duplicate"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_SERVER_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_server_port']['name'] == "Console Server Port"
@@ -48,7 +48,7 @@
- name: "CONSOLE_SERVER_PORT 3: Update Console Server Port with other fields"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -58,7 +58,7 @@
register: test_three
- name: "CONSOLE_SERVER_PORT 3: ASSERT - Update Console Server Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -72,7 +72,7 @@
- name: "CONSOLE_SERVER_PORT 4: Create Console Server Port for Delete Test"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port 2
device: test100
@@ -80,7 +80,7 @@
register: test_four
- name: "CONSOLE_SERVER_PORT 4: ASSERT - Create Console Server Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -92,7 +92,7 @@
- name: "CONSOLE_SERVER_PORT 5: Delete Console Server Port"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port 2
device: test100
@@ -100,7 +100,7 @@
register: test_five
- name: "CONSOLE_SERVER_PORT 5: ASSERT - Delete Console Server Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml
index 4ad47d40a..4c7dfc846 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_console_server_port_template.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_server_port_template']['name'] == "Console Server Port Template"
@@ -48,7 +48,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: Update Console Server Port Template with other fields"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -57,7 +57,7 @@
register: test_three
- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: ASSERT - Update Console Server Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -69,7 +69,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: Create Console Server Port Template for Delete Test"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template 2
device_type: Cisco Test
@@ -77,7 +77,7 @@
register: test_four
- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: ASSERT - Create Console Server Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: Delete Console Server Port Template"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template 2
device_type: Cisco Test
@@ -97,7 +97,7 @@
register: test_five
- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: ASSERT - Delete Console Server Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact.yml
index feb9f7e9f..f5ede9c1a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact.yml
@@ -4,16 +4,16 @@
### NETBOX_CONTACT
##
##
-- name: "1 - Test contact creation"
+- name: 1 - Test contact creation
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
+ name: Contact ABC
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['contact']['name'] == "Contact ABC"
- test_one['msg'] == "contact Contact ABC created"
-- name: "Test duplicate contact"
+- name: Test duplicate contact
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
+ name: Contact ABC
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['contact']['name'] == "Contact ABC"
- test_two['msg'] == "contact Contact ABC already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- title: "New Title"
+ name: Contact ABC
+ title: New Title
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['title'] == "New Title"
@@ -54,41 +54,41 @@
- test_three['contact']['title'] == "New Title"
- test_three['msg'] == "contact Contact ABC updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- state: "absent"
+ name: Contact ABC
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "contact Contact ABC deleted"
-- name: "5 - Create contact with all parameters"
+- name: 5 - Create contact with all parameters
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- title: "Fancy title"
+ name: Contact ABC
+ title: Fancy title
phone: "12345678"
- email: "contact@contact.com"
+ email: contact@contact.com
tags:
- - "tagA"
- - "tagB"
- - "tagC"
+ - tagA
+ - tagB
+ - tagC
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact_role.yml
index fb875748e..c84703da9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_contact_role.yml
@@ -7,14 +7,14 @@
- name: "CONTACT_ROLE 1: Necessary info creation"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
+ name: Test Contact Role
state: present
register: test_one
- name: "CONTACT_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CONTACT_ROLE 2: Create duplicate"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
+ name: Test Contact Role
state: present
register: test_two
- name: "CONTACT ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['contact_role']['name'] == "Test Contact Role"
@@ -43,15 +43,15 @@
- name: "CONTACT_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
- description: "Update description"
+ name: Test Contact Role
+ description: Update description
state: present
register: test_three
- name: "IPAM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Update description"
@@ -63,14 +63,14 @@
- name: "CONTACT_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Contact Role
state: absent
register: test_four
- name: "CONTACT_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -80,14 +80,14 @@
- name: "CONTACT_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Contact Role
state: absent
register: test_five
- name: "CONTACT_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['contact_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field.yml
index 0342c111f..a4ec68866 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field.yml
@@ -7,17 +7,17 @@
- name: "CUSTOM_FIELD 1: Necessary info creation"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
type: text
state: present
register: test_one
- name: "CUSTOM_FIELD 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,16 +32,16 @@
- name: "CUSTOM_FIELD 2: Create duplicate"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
state: present
register: test_two
- name: "CUSTOM_FIELD 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['custom_field']['name'] == "A_CustomField"
@@ -50,18 +50,18 @@
- name: "CUSTOM_FIELD 3: Update data and make it required"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
- name: "A_CustomField"
- description: "Added a description"
- required: yes
+ - dcim.device
+ name: A_CustomField
+ description: Added a description
+ required: true
state: present
register: test_three
- name: "CUSTOM_FIELD 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Added a description"
@@ -72,18 +72,18 @@
- name: "CUSTOM_FIELD 4: Change content type"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
- name: "A_CustomField"
- description: "Added a description"
- required: yes
+ - virtualization.virtualmachine
+ name: A_CustomField
+ description: Added a description
+ required: true
state: present
register: test_four
- name: "CUSTOM_FIELD 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -93,14 +93,14 @@
- name: "CUSTOM_FIELD 5: Delete"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_CustomField"
+ name: A_CustomField
state: absent
register: test_five
- name: "CUSTOM_FIELD 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
@@ -110,10 +110,10 @@
- name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
type: text
ui_visibility: hidden-ifunset
@@ -121,7 +121,7 @@
register: test_six
- name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['custom_field']['name'] == "A_CustomField"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml
index f76743271..6d255ebdb 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_field_choice_set.yml
@@ -7,16 +7,16 @@
- name: "CUSTOM_FIELD_CHOICE_SET 1: Base Choice set creation"
netbox.netbox.netbox_custom_field_choice_set:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_ChoiceSet"
- description: "Added a description"
- base_choices: "IATA"
+ name: A_ChoiceSet
+ description: Added a description
+ base_choices: IATA
state: present
register: test_one
- name: "CUSTOM_FIELD_CHOICE_SET 1: ASSERT - Base Choice set creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,16 +31,16 @@
- name: "CUSTOM_FIELD_CHOICE_SET 2: Create duplicate"
netbox.netbox.netbox_custom_field_choice_set:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_ChoiceSet"
- description: "Added a description"
- base_choices: "IATA"
+ name: A_ChoiceSet
+ description: Added a description
+ base_choices: IATA
state: present
register: test_two
- name: "CUSTOM_FIELD_CHOICE_SET 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['choice_set']['name'] == "A_ChoiceSet"
@@ -49,16 +49,16 @@
- name: "CUSTOM_FIELD_CHOICE_SET 3: Update data and change base_choice"
netbox.netbox.netbox_custom_field_choice_set:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_ChoiceSet"
- description: "Added a description"
- base_choices: "ISO_3166"
+ name: A_ChoiceSet
+ description: Added a description
+ base_choices: ISO_3166
state: present
register: test_three
- name: "CUSTOM_FIELD_CHOICE_SET 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['base_choices'] == "ISO_3166"
@@ -68,19 +68,19 @@
- name: "CUSTOM_FIELD_CHOICE_SET 4: Update extra choice and order alphabetically"
netbox.netbox.netbox_custom_field_choice_set:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_ChoiceSet"
- description: "Added a description"
+ name: A_ChoiceSet
+ description: Added a description
order_alphabetically: true
extra_choices:
- - ['test', 'label']
- - ['test2', 'label2']
+ - [test, label]
+ - [test2, label2]
state: present
register: test_four
- name: "CUSTOM_FIELD_CHOICE_SET 4: ASSERT - Change extra choice and order alphabetically"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['extra_choices'] == [["test","label"],["test2","label2"]]
@@ -91,14 +91,14 @@
- name: "CUSTOM_FIELD_CHOICE_SET 5: Delete"
netbox.netbox.netbox_custom_field_choice_set:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_ChoiceSet"
+ name: A_ChoiceSet
state: absent
register: test_five
- name: "CUSTOM_FIELD_CHOICE_SET 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_link.yml
index 8376d357b..b9bbcc8a7 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_link.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_custom_link.yml
@@ -7,10 +7,10 @@
- name: "CUSTOM_LINK 1: Necessary info creation"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -18,7 +18,7 @@
register: test_one
- name: "CUSTOM_LINK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,10 +31,10 @@
- name: "CUSTOM_LINK 2: Create duplicate"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -42,7 +42,7 @@
register: test_two
- name: "CUSTOM_LINK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['custom_link']['name'] == "Custom Link"
@@ -51,10 +51,10 @@
- name: "CUSTOM_FIELD 3: Update data and add weight"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -63,7 +63,7 @@
register: test_three
- name: "CUSTOM_FIELD 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['weight'] == 50
@@ -73,10 +73,10 @@
- name: "CUSTOM_LINK 4: Change content type"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -84,7 +84,7 @@
register: test_four
- name: "CUSTOM_LINK 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -94,10 +94,10 @@
- name: "CUSTOM_LINK 5: Delete"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -105,7 +105,7 @@
register: test_five
- name: "CUSTOM_LINK 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device.yml
index 5a791b242..a3a5d8308 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device.yml
@@ -4,22 +4,22 @@
### NETBOX_DEVICE
##
##
-- name: "1 - Device with required information"
+- name: 1 - Device with required information
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
+ name: R1
device_type:
id: "1"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == 'absent'
@@ -32,21 +32,21 @@
- test_one['device']['name'] == "R1"
- test_one['msg'] == "device R1 created"
-- name: "2 - Duplicate device"
+- name: 2 - Duplicate device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device']['name'] == "R1"
@@ -56,24 +56,24 @@
- test_two['device']['status'] == "staged"
- test_two['msg'] == "device R1 already exists"
-- name: "3 - Update device"
+- name: 3 - Update device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- serial: "FXS1001"
+ name: R1
+ serial: FXS1001
local_context_data:
bgp_as: "65412"
- virtual_chassis: "VC1"
+ virtual_chassis: VC1
vc_position: 3
vc_priority: 15
- location: "Test Rack Group"
+ location: Test Rack Group
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['serial'] == "FXS1001"
@@ -94,20 +94,20 @@
- test_three['device']['location'] == 1
- test_three['msg'] == "device R1 updated"
-- name: "3.1 - Update device name using query_params"
+- name: 3.1 - Update device name using query_params
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1-changed-name"
- serial: "FXS1001"
+ name: R1-changed-name
+ serial: FXS1001
query_params:
- serial
state: present
register: test_three_dot_one
-- name: "3.1 - ASSERT"
- assert:
+- name: 3.1 - ASSERT
+ ansible.builtin.assert:
that:
- test_three_dot_one is changed
- test_three_dot_one['diff']['after']['name'] == "R1-changed-name"
@@ -119,27 +119,27 @@
- test_three_dot_one['device']['local_context_data']["bgp_as"] == "65412"
- test_three_dot_one['msg'] == "device R1-changed-name updated"
-- name: "4 - Create device with tags and assign to rack"
+- name: 4 - Create device with tags and assign to rack
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "TestR1"
+ name: TestR1
device_type: "1841"
- device_role: "Core Switch"
- site: "Test Site2"
- rack: "Test Rack Site 2"
+ device_role: Core Switch
+ site: Test Site2
+ rack: Test Rack Site 2
position: 35.5
- face: "Front"
+ face: Front
tags:
- - "schnozzberry"
- tenant: "Test Tenant"
+ - schnozzberry
+ tenant: Test Tenant
asset_tag: "1234"
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -155,53 +155,53 @@
- test_four['device']['asset_tag'] == '1234'
- test_four['msg'] == "device TestR1 created"
-- name: "5 - Delete previous device"
+- name: 5 - Delete previous device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "TestR1"
+ name: TestR1
state: absent
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
- test_five['diff']['after']['state'] == "absent"
- test_five['msg'] == "device TestR1 deleted"
-- name: "6 - Delete R1"
+- name: 6 - Delete R1
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1-changed-name"
+ name: R1-changed-name
state: absent
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "present"
- test_six['diff']['after']['state'] == "absent"
- test_six['msg'] == "device R1-changed-name deleted"
-- name: "7 - Add primary_ip4/6 to test100"
+- name: 7 - Add primary_ip4/6 to test100
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test100"
- primary_ip4: "172.16.180.1/24"
- primary_ip6: "2001::1:1/64"
+ name: test100
+ primary_ip4: 172.16.180.1/24
+ primary_ip6: 2001::1:1/64
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['after']['primary_ip4'] == 1
@@ -215,22 +215,22 @@
- test_seven['device']['primary_ip6'] == 2
- test_seven['msg'] == "device test100 updated"
-- name: "8 - Device with empty string name"
+- name: 8 - Device with empty string name
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: ""
device_type:
id: 1
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == 'absent'
@@ -240,4 +240,4 @@
- test_eight['device']['site'] == 1
- test_eight['device']['status'] == "staged"
- "'-' in test_eight['device']['name']"
- - "test_eight['device']['name'] | length == 36"
+ - test_eight['device']['name'] | length == 36
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay.yml
index 18a804354..b892d2160 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay.yml
@@ -7,15 +7,15 @@
- name: "DEVICE_BAY 1: Necessary info creation"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
+ device: Test Nexus One
+ name: Device Bay One
state: present
register: test_one
- name: "DEVICE_BAY 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "DEVICE_BAY 2: Create duplicate"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
+ device: Test Nexus One
+ name: Device Bay One
state: present
register: test_two
- name: "DEVICE_BAY 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_bay']['name'] == "Device Bay One"
@@ -45,18 +45,18 @@
- name: "DEVICE_BAY 3: ASSERT - Update"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
- installed_device: "Test Nexus Child One"
+ device: Test Nexus One
+ name: Device Bay One
+ installed_device: Test Nexus Child One
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "DEVICE_BAY 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['installed_device'] == 5
@@ -70,14 +70,14 @@
- name: "DEVICE_BAY 4: ASSERT - Delete"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Device Bay One"
+ name: Device Bay One
state: absent
register: test_four
- name: "DEVICE_BAY 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['device_bay']['name'] == "Device Bay One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml
index 4b6965033..c2ba1e32c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_bay_template.yml
@@ -7,7 +7,7 @@
- name: "DEVICE_BAY_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template One
@@ -15,7 +15,7 @@
register: test_one
- name: "DEVICE_BAY_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "DEVICE_BAY_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template One
@@ -35,7 +35,7 @@
register: test_two
- name: "DEVICE_BAY_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_bay_template']['name'] == "Device Bay Template One"
@@ -45,7 +45,7 @@
- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Create Device Bay Template for Delete Test"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template Two
@@ -53,7 +53,7 @@
register: test_three
- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -65,7 +65,7 @@
- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Bay Template Two
device_type: WS Test 3850
@@ -73,7 +73,7 @@
register: test_four
- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['device_bay_template']['name'] == "Device Bay Template Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface.yml
index fa3f10214..dd3c2383b 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface.yml
@@ -1,18 +1,18 @@
---
# NETBOX_DEVICE_INTERFACE
-- name: "1 - Interface with required information"
+- name: 1 - Interface with required information
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet3
- type: "1000Base-T (1GE)"
+ type: 1000Base-T (1GE)
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['msg'] == "interface GigabitEthernet3 created"
@@ -21,9 +21,9 @@
- test_one['interface']['name'] == "GigabitEthernet3"
- test_one['interface']['device'] == 1
-- name: "2 - Update test100 - GigabitEthernet3"
+- name: 2 - Update test100 - GigabitEthernet3
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -32,8 +32,8 @@
enabled: false
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "interface GigabitEthernet3 updated"
@@ -44,9 +44,9 @@
- test_two['interface']['enabled'] == false
- test_two['interface']['mtu'] == 1600
-- name: "3 - Delete interface test100 - GigabitEthernet3"
+- name: 3 - Delete interface test100 - GigabitEthernet3
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -54,17 +54,17 @@
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['msg'] == "interface GigabitEthernet3 deleted"
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
-- name: "4 - Create LAG with several specified options"
+- name: 4 - Create LAG with several specified options
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -76,8 +76,8 @@
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['msg'] == "interface port-channel1 created"
@@ -91,9 +91,9 @@
- test_four['interface']['mode'] == "access"
- test_four['interface']['mtu'] == 1600
-- name: "5 - Create interface and assign it to parent LAG"
+- name: 5 - Create interface and assign it to parent LAG
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -108,8 +108,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['msg'] == "interface GigabitEthernet3 created"
@@ -124,9 +124,9 @@
- test_five['interface']['mode'] == "access"
- test_five['interface']['mtu'] == 1600
-- name: "6 - Create interface as trunk port"
+- name: 6 - Create interface as trunk port
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -147,8 +147,8 @@
state: present
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['msg'] == "interface GigabitEthernet21 created"
@@ -164,50 +164,50 @@
- test_six['interface']['tagged_vlans'] == [2, 3]
- test_six['interface']['untagged_vlan'] == 1
-- name: "7 - Duplicate Interface"
+- name: 7 - Duplicate Interface
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet1
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- not test_seven['changed']
- test_seven['msg'] == "interface GigabitEthernet1 already exists"
- test_seven['interface']['name'] == "GigabitEthernet1"
- test_seven['interface']['device'] == 1
-- name: "Add port-channel1 to R1 to test finding proper port-channel1"
+- name: Add port-channel1 to R1 to test finding proper port-channel1
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "R1-Device"
- name: "port-channel1"
- type: "Link Aggregation Group (LAG)"
+ device: R1-Device
+ name: port-channel1
+ type: Link Aggregation Group (LAG)
-- name: "8 - Create interface and assign it to parent LAG - non dict"
+- name: 8 - Create interface and assign it to parent LAG - non dict
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet4
enabled: false
type: 1000Base-T (1GE)
- lag: "port-channel1"
+ lag: port-channel1
mtu: 1600
mgmt_only: false
mode: Access
state: present
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['msg'] == "interface GigabitEthernet4 created"
@@ -222,9 +222,9 @@
- test_eight['interface']['mode'] == "access"
- test_eight['interface']['mtu'] == 1600
-- name: "9 - Create interface on VC child"
+- name: 9 - Create interface on VC child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus Child One
@@ -233,8 +233,8 @@
state: present
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['msg'] == "interface Ethernet2/2 created"
@@ -245,21 +245,21 @@
- test_nine['interface']['enabled'] == true
- test_nine['interface']['type'] == "1000base-t"
-- name: "10 - Update interface on VC child"
+- name: 10 - Update interface on VC child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus One
name: Ethernet2/2
- description: "Updated child interface from parent device"
+ description: Updated child interface from parent device
type: 1000Base-T (1GE)
- update_vc_child: True
+ update_vc_child: true
state: present
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['msg'] == "interface Ethernet2/2 updated"
@@ -270,28 +270,28 @@
- test_ten['interface']['type'] == "1000base-t"
- test_ten['interface']['description'] == 'Updated child interface from parent device'
-- name: "11 - Update interface on VC child w/o update_vc_child"
+- name: 11 - Update interface on VC child w/o update_vc_child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus One
name: Ethernet2/2
- description: "Updated child interface from parent device - test"
+ description: Updated child interface from parent device - test
type: 1000Base-T (1GE)
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is failed
- test_eleven['msg'] == "Must set update_vc_child to True to allow child device interface modification"
-- name: "12 - Create interface and mark it as connected"
+- name: 12 - Create interface and mark it as connected
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -300,8 +300,8 @@
mark_connected: true
register: test_twelve
-- name: "12- ASSERT"
- assert:
+- name: 12 - ASSERT
+ ansible.builtin.assert:
that:
- test_twelve is changed
- test_twelve['msg'] == "interface GigabitEthernet5 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml
index b6e98914f..abe8f3405 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_interface_template.yml
@@ -4,9 +4,9 @@
### NETBOX_DEVICE_INTERFACE_TEMPLATE
##
##
-- name: "1 - Interface with required information"
+- name: 1 - Interface with required information
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -14,8 +14,8 @@
type: 10gbase-t
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['msg'] == "interface_template 10GBASE-T (10GE) created"
@@ -25,9 +25,9 @@
- test_one['interface_template']['device_type'] == 2
- test_one['interface_template']['type'] == '10gbase-t'
-- name: "2 - Update 10GBASE-T (10GE)"
+- name: 2 - Update 10GBASE-T (10GE)
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -36,8 +36,8 @@
mgmt_only: true
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "interface_template 10GBASE-T (10GE) updated"
@@ -46,9 +46,9 @@
- test_two['interface_template']['device_type'] == 2
- test_two['interface_template']['mgmt_only'] == true
-- name: "3 - Delete interface template 10GBASE-T (10GE)"
+- name: 3 - Delete interface template 10GBASE-T (10GE)
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -57,17 +57,17 @@
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['msg'] == "interface_template 10GBASE-T (10GE) deleted"
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
-- name: "4 - Create LAG with several specified options"
+- name: 4 - Create LAG with several specified options
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -77,8 +77,8 @@
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['msg'] == "interface_template port channel template created"
@@ -89,9 +89,9 @@
- test_four['interface_template']['type'] == "lag"
- test_four['interface_template']['mgmt_only'] == false
-- name: "5 - Duplicate Interface Template port channel template"
+- name: 5 - Duplicate Interface Template port channel template
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -99,8 +99,8 @@
type: lag
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['msg'] == "interface_template port channel template already exists"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_role.yml
index 64ec9840f..b85ee2c86 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_role.yml
@@ -7,15 +7,15 @@
- name: "DEVICE_ROLE 1: Necessary info creation"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
- color: "FFFFFF"
+ name: Test Device Role
+ color: FFFFFF
state: present
register: test_one
- name: "DEVICE_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,15 +28,15 @@
- name: "DEVICE_ROLE 2: Create duplicate"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
- color: "FFFFFF"
+ name: Test Device Role
+ color: FFFFFF
state: present
register: test_two
- name: "DEVICE_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_role']['name'] == "Test Device Role"
@@ -47,16 +47,16 @@
- name: "DEVICE_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
+ name: Test Device Role
color: "003EFF"
vm_role: false
state: present
register: test_three
- name: "DEVICE_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -70,14 +70,14 @@
- name: "DEVICE_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Device Role
state: absent
register: test_four
- name: "DEVICE_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -87,14 +87,14 @@
- name: "DEVICE_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Device Role
state: absent
register: test_five
- name: "DEVICE_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['device_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_type.yml
index 5587119f3..3027f6fd1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_device_type.yml
@@ -7,7 +7,7 @@
- name: "DEVICE_TYPE 1: Necessary info creation"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
model: ws-test-3750
@@ -16,7 +16,7 @@
register: test_one
- name: "DEVICE_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,16 +29,16 @@
- name: "DEVICE_TYPE 2: Create duplicate"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
- model: "ws-test-3750"
+ model: ws-test-3750
manufacturer: Test Manufacturer
state: present
register: test_two
- name: "DEVICE_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_one['device_type']['slug'] == "test-device-type"
@@ -49,7 +49,7 @@
- name: "DEVICE_TYPE 3: ASSERT - Update"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
model: ws-test-3750
@@ -62,7 +62,7 @@
register: test_three
- name: "DEVICE_TYPE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['is_full_depth'] == false
@@ -79,14 +79,14 @@
- name: "DEVICE_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: test-device-type
state: absent
register: test_four
- name: "DEVICE_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -96,14 +96,14 @@
- name: "DEVICE_TYPE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "Test Device Type"
+ model: Test Device Type
state: absent
register: test_five
- name: "DEVICE_TYPE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['device_type'] == None
@@ -112,16 +112,16 @@
- name: "DEVICE_TYPE 6: Without Slug"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "WS Test 3850"
- manufacturer: "Test Manufacturer"
- subdevice_role: "parent"
+ model: WS Test 3850
+ manufacturer: Test Manufacturer
+ subdevice_role: parent
state: present
register: test_six
- name: "DEVICE_TYPE 6: ASSERT - Without Slug"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_export_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_export_template.yml
index b3dcea015..ffdc0424f 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_export_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_export_template.yml
@@ -7,10 +7,10 @@
- name: "EXPORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -19,7 +19,7 @@
register: test_one
- name: "EXPORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,10 +32,10 @@
- name: "EXPORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -44,7 +44,7 @@
register: test_two
- name: "EXPORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['export_template']['name'] == "Example Export Template"
@@ -53,20 +53,20 @@
- name: "EXPORT_TEMPLATE 3: Update data and remove as_attachment"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
{% for obj in queryset %}{{ obj.name }}{% endfor %}
- as_attachment: no
+ as_attachment: false
state: present
register: test_three
- name: "EXPORT_TEMPLATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['as_attachment'] == false
@@ -76,10 +76,10 @@
- name: "EXPORT_TEMPLATE 4: Change content type"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -88,7 +88,7 @@
register: test_four
- name: "EXPORT_TEMPLATE 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -98,10 +98,10 @@
- name: "EXPORT_TEMPLATE 5: Delete"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -110,7 +110,7 @@
register: test_five
- name: "EXPORT_TEMPLATE 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml
index 91788ef02..eed3f2f88 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group.yml
@@ -7,9 +7,9 @@
- name: "FHRP group 1: Test FHRP group creation"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
state: present
register: test_one
@@ -27,9 +27,9 @@
- name: "FHRP group 2: Create duplicate"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
state: present
register: test_two
@@ -45,15 +45,15 @@
- name: "FHRP group 3: Update FHRP group with other fields"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
auth_type: md5
auth_key: 11111
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -76,7 +76,7 @@
- name: "FHRP group 4: ASSERT - Delete"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
group_id: 111
state: absent
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml
index 27f20c293..2dd2f6d61 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_fhrp_group_assignment.yml
@@ -7,7 +7,7 @@
- name: "FHRP group assignment 1: Test FHRP group assignment creation"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -31,7 +31,7 @@
- name: "FHRP group assignment 2: Create duplicate"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -53,7 +53,7 @@
- name: "FHRP group assignment 3: Update FHRP group assignment"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -75,7 +75,7 @@
- name: "FHRP group assignment 4: Delete FHRP group assignment"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port.yml
index 1f4c60a4d..7088bbeef 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port.yml
@@ -10,7 +10,7 @@
- name: "FRONT_PORT 1: Necessary info creation"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -20,7 +20,7 @@
register: test_one
- name: "FRONT_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,7 +34,7 @@
- name: "FRONT_PORT 2: Create duplicate"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -44,7 +44,7 @@
register: test_two
- name: "FRONT_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['front_port']['name'] == "Front Port"
@@ -56,7 +56,7 @@
- name: "FRONT_PORT 3: Update Front Port with other fields"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -68,7 +68,7 @@
register: test_three
- name: "FRONT_PORT 3: ASSERT - Update Front Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['rear_port_position'] == 5
@@ -84,7 +84,7 @@
- name: "FRONT_PORT 4: Create Front Port for Delete Test"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port 2
device: test100
@@ -94,7 +94,7 @@
register: test_four
- name: "FRONT_PORT 4: ASSERT - Create Front Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -108,7 +108,7 @@
- name: "FRONT_PORT 5: Delete Front Port"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port 2
device: test100
@@ -118,7 +118,7 @@
register: test_five
- name: "FRONT_PORT 5: ASSERT - Delete Front Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -128,7 +128,7 @@
- name: "FRONT_PORT 6: Create duplicate with rear_port dictionary"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -140,7 +140,7 @@
register: test_six
- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port dictionary"
- assert:
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['front_port']['name'] == "Front Port"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml
index 30e907100..966279fa8 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_front_port_template.yml
@@ -10,7 +10,7 @@
- name: "FRONT_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -20,7 +20,7 @@
register: test_one
- name: "FRONT_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,7 +34,7 @@
- name: "FRONT_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -44,7 +44,7 @@
register: test_two
- name: "FRONT_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['front_port_template']['name'] == "Front Port Template"
@@ -56,7 +56,7 @@
- name: "FRONT_PORT_TEMPLATE 3: Update Front Port Template with other fields"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -67,7 +67,7 @@
register: test_three
- name: "FRONT_PORT_TEMPLATE 3: ASSERT - Update Front Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['rear_port_position'] == 5
@@ -81,7 +81,7 @@
- name: "FRONT_PORT_TEMPLATE 4: Create Front Port Template for Delete Test"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template 2
device_type: Cisco Test
@@ -91,7 +91,7 @@
register: test_four
- name: "FRONT_PORT_TEMPLATE 4: ASSERT - Create Front Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -105,7 +105,7 @@
- name: "FRONT_PORT_TEMPLATE 5: Delete Front Port Template"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template 2
device_type: Cisco Test
@@ -115,7 +115,7 @@
register: test_five
- name: "FRONT_PORT_TEMPLATE 5: ASSERT - Delete Front Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -125,7 +125,7 @@
- name: "FRONT_PORT 6: Create duplicate with rear_port_template dictionary"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -137,7 +137,7 @@
register: test_six
- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port_template dictionary"
- assert:
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['front_port_template']['name'] == "Front Port Template"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml
index cc038ac94..5b330c3a5 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item.yml
@@ -7,10 +7,10 @@
- name: "INVENTORY_ITEM 1: Necessary info creation"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: present
register: test_one
@@ -27,10 +27,10 @@
- name: "INVENTORY_ITEM 2: Create duplicate"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: present
register: test_two
@@ -45,18 +45,18 @@
- name: "INVENTORY_ITEM 3: Update properties"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
- manufacturer: "Cisco"
- part_id: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
+ manufacturer: Cisco
+ part_id: 10G-SFP+
serial: "1234"
asset_tag: "1234"
- description: "New SFP"
- discovered: True
+ description: New SFP
+ discovered: true
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -85,10 +85,10 @@
- name: "INVENTORY_ITEM 4: Delete inventory item"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: absent
register: test_four
@@ -109,7 +109,7 @@
- name: "INVENTORY_ITEM 5: PREWORK - Create inventory item role"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Processor
color: FFFFFF
@@ -119,7 +119,7 @@
- name: "INVENTORY_ITEM 5: Create inventory item with role"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_processor
@@ -141,7 +141,7 @@
- name: "INVENTORY_ITEM 6: Create inventory item with missing role"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_processor
@@ -159,14 +159,14 @@
- name: "INVENTORY_ITEM 7: Create inventory item with component"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_component
- component_type: "dcim.interface"
+ component_type: dcim.interface
component:
name: GigabitEthernet2
- device: "test100"
+ device: test100
state: present
register: test_seven
@@ -185,13 +185,13 @@
- name: "INVENTORY_ITEM 8: Create inventory item with missing component_type"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_component
component:
name: GigabitEthernet2
- device: "test100"
+ device: test100
state: present
ignore_errors: true
register: test_eight
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml
index 1c2de3f03..71d86d747 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_inventory_item_role.yml
@@ -7,15 +7,15 @@
- name: "INVENTORY_ITEM_ROLE 1: Necessary info creation"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
- color: "FFFFFF"
+ name: Test Inventory Item Role
+ color: FFFFFF
state: present
register: test_one
- name: "INVENTORY_ITEM_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,15 +28,15 @@
- name: "INVENTORY_ITEM_ROLE 2: Create duplicate"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
- color: "FFFFFF"
+ name: Test Inventory Item Role
+ color: FFFFFF
state: present
register: test_two
- name: "INVENTORY_ITEM_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['inventory_item_role']['name'] == "Test Inventory Item Role"
@@ -47,15 +47,15 @@
- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
+ name: Test Inventory Item Role
color: "003EFF"
state: present
register: test_three
- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -67,14 +67,14 @@
- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Inventory Item Role
state: absent
register: test_four
- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -84,14 +84,14 @@
- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Inventory Item Role
state: absent
register: test_five
- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['inventory_item_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ip_address.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ip_address.yml
index 3cda56d3c..ce2b125a4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ip_address.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ip_address.yml
@@ -7,14 +7,14 @@
- name: "1 - Create IP address within NetBox with only required information - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,20 +22,20 @@
- test_one['msg'] == "ip_address 192.168.1.10/30 created"
- test_one['ip_address']['address'] == "192.168.1.10/30"
-- name: "2 - Update 192.168.1.10/30"
+- name: 2 - Update 192.168.1.10/30
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
- description: "Updated ip address"
+ description: Updated ip address
tags:
- - "Updated"
+ - Updated
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['diff']['after']['description'] == "Updated ip address"
@@ -48,14 +48,14 @@
- name: "3 - Delete IP - 192.168.1.10 - State: Absent"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
@@ -65,14 +65,14 @@
- name: "4 - Create IP in global VRF - 192.168.1.20/30 - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.20/30
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -83,14 +83,14 @@
- name: "5 - Create IP in global VRF - 192.168.1.20/30 - State: New"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.20/30
state: new
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -101,14 +101,14 @@
- name: "6 - Create new address with only prefix specified - State: new"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 192.168.100.0/24
state: new
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
@@ -116,10 +116,10 @@
- test_six['msg'] == "ip_address 192.168.100.1/24 created"
- test_six['ip_address']['address'] == "192.168.100.1/24"
-- name: "7 - Create IP address with several specified"
+- name: 7 - Create IP address with several specified
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 172.16.1.20/24
@@ -129,12 +129,12 @@
role: Loopback
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "absent"
@@ -149,10 +149,10 @@
- test_seven['ip_address']['tenant'] == 1
- test_seven['ip_address']['vrf'] == 1
-- name: "8 - Create IP address and assign a nat_inside IP"
+- name: 8 - Create IP address and assign a nat_inside IP
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.10.1.30/16
@@ -162,8 +162,8 @@
vrf: Test VRF
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "absent"
@@ -177,7 +177,7 @@
- name: "9 - Create IP address on GigabitEthernet2 - test100 - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.10.200.30/16
@@ -186,8 +186,8 @@
device: test100
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "absent"
@@ -201,7 +201,7 @@
- name: "10 - Create IP address on GigabitEthernet2 - test100 - State: new"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 10.10.0.0/16
@@ -211,8 +211,8 @@
state: new
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['diff']['before']['state'] == "absent"
@@ -226,7 +226,7 @@
- name: "11 - Create IP address on GigabitEthernet2 - test100 - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 192.168.100.0/24
@@ -236,8 +236,8 @@
state: present
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is changed
- test_eleven['diff']['before']['state'] == "absent"
@@ -245,10 +245,10 @@
- test_eleven['msg'] == "ip_address 192.168.100.2/24 created"
- test_eleven['ip_address']['address'] == "192.168.100.2/24"
-- name: "12 - Duplicate - 192.168.100.2/24 on interface"
+- name: 12 - Duplicate - 192.168.100.2/24 on interface
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.100.2/24
assigned_object:
@@ -257,8 +257,8 @@
state: present
register: test_twelve
-- name: "12 - ASSERT"
- assert:
+- name: 12 - ASSERT
+ ansible.builtin.assert:
that:
- not test_twelve['changed']
- test_twelve['msg'] == "ip_address 192.168.100.2/24 already exists"
@@ -266,17 +266,17 @@
- test_twelve['ip_address']['assigned_object_type'] == "dcim.interface"
- test_twelve['ip_address']['assigned_object_id'] == 4
-- name: "13 - Duplicate - 192.168.100.2/24"
+- name: 13 - Duplicate - 192.168.100.2/24
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.100.2/24
state: present
register: test_thirteen
-- name: "13 - ASSERT"
- assert:
+- name: 13 - ASSERT
+ ansible.builtin.assert:
that:
- not test_thirteen['changed']
- test_thirteen['msg'] == "ip_address 192.168.100.2/24 already exists"
@@ -285,7 +285,7 @@
- name: "14 - Create IP address on Eth0 - test100-vm - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.188.1.100/24
@@ -294,8 +294,8 @@
virtual_machine: test100-vm
register: test_fourteen
-- name: "14 - ASSERT"
- assert:
+- name: 14 - ASSERT
+ ansible.builtin.assert:
that:
- test_fourteen is changed
- test_fourteen['diff']['before']['state'] == "absent"
@@ -309,18 +309,18 @@
- name: "15 - Create same IP address on Eth0 - test101-vm - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.188.1.100/24
assigned_object:
name: Eth0
virtual_machine: test101-vm
- state: "present"
+ state: present
register: test_fifteen
-- name: "15 - ASSERT"
- assert:
+- name: 15 - ASSERT
+ ansible.builtin.assert:
that:
- test_fifteen is changed
- test_fifteen['diff']['before']['state'] == "absent"
@@ -334,14 +334,14 @@
- name: "16 - Create IP address with no mask - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.120.10.1
state: present
register: test_sixteen
-- name: "16 - ASSERT"
- assert:
+- name: 16 - ASSERT
+ ansible.builtin.assert:
that:
- test_sixteen is changed
- test_sixteen['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml
index 1f1ff0f36..9f376c2ed 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_ipam_role.yml
@@ -7,14 +7,14 @@
- name: "IPAM_ROLE 1: Necessary info creation"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
state: present
register: test_one
- name: "IPAM_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "IPAM_ROLE 2: Create duplicate"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
state: present
register: test_two
- name: "IPAM_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['role']['name'] == "Test IPAM Role"
@@ -43,15 +43,15 @@
- name: "IPAM_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
weight: 4096
state: present
register: test_three
- name: "IPAM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['weight'] == 4096
@@ -63,14 +63,14 @@
- name: "IPAM_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test IPAM Role
state: absent
register: test_four
- name: "IPAM_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -80,14 +80,14 @@
- name: "IPAM_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test IPAM Role
state: absent
register: test_five
- name: "IPAM_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml
index d65e64cb8..7eead8db1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_journal_entry.yml
@@ -7,7 +7,7 @@
- name: "JOURNAL ENTRY 1: Creation"
netbox.netbox.netbox_journal_entry:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
assigned_object_type: dcim.device
assigned_object_id: 1
@@ -17,7 +17,7 @@
register: test_one
- name: "JOURNAL_ENTRY 1: ASSERT - Creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml
index 292badb93..655be9455 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn.yml
@@ -7,7 +7,7 @@
- name: "L2VPN 1: Necessary info creation"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test L2VPN
type: vxlan
@@ -15,7 +15,7 @@
register: test_one
- name: "L2VPN 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "L2VPN 2: Create duplicate"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test L2VPN
type: vxlan
@@ -35,7 +35,7 @@
register: test_two
- name: "L2VPN 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['l2vpn']['name'] == "Test L2VPN"
@@ -45,24 +45,24 @@
- name: "L2VPN 4: ASSERT - Update"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test L2VPN"
+ name: Test L2VPN
type: vxlan
- tenant: "Test Tenant"
+ tenant: Test Tenant
description: Updated description
import_targets:
- - "4000:4000"
- - "5000:5000"
+ - 4000:4000
+ - 5000:5000
export_targets:
- - "6000:6000"
+ - 6000:6000
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "L2VPN: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['description'] == "Updated description"
@@ -80,15 +80,15 @@
- name: "L2VPN: ASSERT - Delete"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test L2VPN"
+ name: Test L2VPN
type: vxlan
state: absent
register: test_six
- name: "L2VPN 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['l2vpn']['name'] == "Test L2VPN"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml
index a4560f407..0dd539943 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_l2vpn_termination.yml
@@ -7,7 +7,7 @@
- name: "L2VPN_TERMINATION 1: Necessary info creation"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
@@ -29,7 +29,7 @@
- name: "L2VPN_TERMINATION 2: Create duplicate"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
@@ -49,13 +49,13 @@
- name: "L2VPN_TERMINATION 3: Update"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
assigned_object_id: 1
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -73,7 +73,7 @@
- name: "L2VPN_TERMINATION 4: Delete"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_location.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_location.yml
index f2f7bb9db..2e37c7682 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_location.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_location.yml
@@ -7,7 +7,7 @@
- name: "LOCATION 1: Necessary info creation"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
site: Test Site
@@ -15,7 +15,7 @@
register: test_one
- name: "LOCATION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,7 +28,7 @@
- name: "LOCATION 2: Create duplicate"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
site: Test Site
@@ -36,7 +36,7 @@
register: test_two
- name: "LOCATION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['location']['name'] == "Location"
@@ -47,7 +47,7 @@
- name: "LOCATION 3: Update"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
parent_location: Parent Rack Group
@@ -56,7 +56,7 @@
register: test_three
- name: "LOCATION 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 2
@@ -70,14 +70,14 @@
- name: "LOCATION 4: Delete"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
state: absent
register: test_four
- name: "LOCATION 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_lookup.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_lookup.yml
index 277454fcd..3d54e02b1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_lookup.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_lookup.yml
@@ -5,79 +5,86 @@
##
##
- name: "NETBOX_LOOKUP 1: Lookup returns exactly two sites"
- assert:
+ ansible.builtin.assert:
that: query_result == "3"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | count }}"
- name: "NETBOX_LOOKUP 2: Query doesn't return Wibble (sanity check json_query)"
- assert:
+ ansible.builtin.assert:
that: query_result == "0"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`Wibble`]')|count}}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`Wibble`]')
+ | count }}"
- name: "NETBOX_LOOKUP 3: Device query returns exactly one TestDeviceR1"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`TestDeviceR1`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`TestDeviceR1`]')
+ | count }}"
- name: "NETBOX_LOOKUP 4: VLAN ID 400 can be queried and is named 'Test VLAN'"
- assert:
+ ansible.builtin.assert:
that: query_result == 'Test VLAN'
vars:
- query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.vid==`400`].value.name'))[0] }}"
+ query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.vid==`400`].value.name'))[0]
+ }}"
- name: "NETBOX_LOOKUP 5: Add one of two devices for lookup filter test."
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "L1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: L1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- - "nolookup"
+ - nolookup
state: present
- name: "NETBOX_LOOKUP 6: Add two of two devices for lookup filter test."
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "L2"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site2"
- status: "Staged"
+ name: L2
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site2
+ status: Staged
tags:
- - "lookup"
+ - lookup
state: present
- name: "NETBOX_LOOKUP 7: Device query returns exactly the L2 device"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`L2`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ | community.general.json_query('[?value.display==`L2`]') | count }}"
- name: "NETBOX_LOOKUP 8: Device query specifying raw data returns payload without key/value dict"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567', raw_data=True)|community.general.json_query('[?display==`L2`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567',
+ raw_data=True) | community.general.json_query('[?display==`L2`]') | count }}"
- name: "NETBOX_LOOKUP 9: Device query specifying multiple sites, Make sure L1 and L2 are in the results"
- assert:
+ ansible.builtin.assert:
that:
- "'L1' in query_result"
- "'L2' in query_result"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567', raw_data=True)|community.general.json_query('[*].display') }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768',
+ token='0123456789abcdef0123456789abcdef01234567', raw_data=True) | community.general.json_query('[*].display') }}"
- name: "NETBOX_LOOKUP 10: Device query by ID"
- assert:
+ ansible.builtin.assert:
that: query_result
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ }}"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml
index 4dd1907eb..39804659b 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_manufacturer.yml
@@ -7,14 +7,14 @@
- name: "MANUFACTURER 1: Necessary info creation"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: present
register: test_one
- name: "MANUFACTURER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "MANUFACTURER 2: Create duplicate"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: present
register: test_two
- name: "MANUFACTURER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['manufacturer']['name'] == "Test Manufacturer Two"
@@ -43,14 +43,14 @@
- name: "MANUFACTURER 3: Update"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: test manufacturer two
state: present
register: test_three
- name: "MANUFACTURER 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three['changed']
- test_three['manufacturer']['name'] == "test manufacturer two"
@@ -60,14 +60,14 @@
- name: "MANUFACTURER 4: ASSERT - Delete"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: test manufacturer two
state: absent
register: test_four
- name: "MANUFACTURER 3: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -77,14 +77,14 @@
- name: "MANUFACTURER 5: ASSERT - Delete non existing"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: absent
register: test_five
- name: "MANUFACTURER 5: ASSERT - Delete non existing"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['manufacturer'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module.yml
index e8f090d8c..0a18f3894 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module.yml
@@ -7,7 +7,7 @@
- name: "MODULE 1: Necessary info creation"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -16,7 +16,7 @@
register: test_one
- name: "MODULE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "MODULE 2: Create duplicate"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -38,7 +38,7 @@
register: test_two
- name: "MODULE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['module']['device'] == "C9300-DEMO"
@@ -49,7 +49,7 @@
- name: "MODULE 3: ASSERT - Update"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -59,7 +59,7 @@
register: test_three
- name: "MODULE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['serial'] == "XXXNNNNXXXX"
@@ -72,7 +72,7 @@
- name: "MODULE 4: ASSERT - Delete"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -81,7 +81,7 @@
register: test_four
- name: "MODULE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -91,7 +91,7 @@
- name: "MODULE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -100,7 +100,7 @@
register: test_five
- name: "MODULE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_bay.yml
index 44bb76269..843a4767e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_bay.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_bay.yml
@@ -7,7 +7,7 @@
- name: "MODULE 1: Necessary info creation"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -16,7 +16,7 @@
register: test_one
- name: "MODULE BAY 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "MODULE BAY 2: Create duplicate"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -38,7 +38,7 @@
register: test_two
- name: "MODULE BAY 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['module_bay']['device'] == "C9300-DEMO"
@@ -49,7 +49,7 @@
- name: "MODULE BAY 3: ASSERT - Update"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -59,7 +59,7 @@
register: test_three
- name: "MODULE BAY 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['label'] == "TEST"
@@ -72,7 +72,7 @@
- name: "MODULE BAY 4: ASSERT - Delete"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -80,7 +80,7 @@
register: test_four
- name: "MODULE BAY 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -90,7 +90,7 @@
- name: "MODULE BAY 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -98,7 +98,7 @@
register: test_five
- name: "MODULE BAY 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module_bay'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_type.yml
index e1d51c4cb..8ab077803 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_module_type.yml
@@ -7,7 +7,7 @@
- name: "MODULE_TYPE 1: Necessary info creation"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
manufacturer: Test Manufacturer
@@ -15,7 +15,7 @@
register: test_one
- name: "MODULE_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "MODULE_TYPE 2: Create duplicate"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "ws-test-3750"
+ model: ws-test-3750
manufacturer: Test Manufacturer
state: present
register: test_two
- name: "MODULE_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_one['module_type']['model'] == "ws-test-3750"
@@ -45,7 +45,7 @@
- name: "MODULE_TYPE 3: ASSERT - Update"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
manufacturer: Test Manufacturer
@@ -54,7 +54,7 @@
register: test_three
- name: "MODULE_TYPE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['part_number'] == "ws-3750g-v2"
@@ -66,14 +66,14 @@
- name: "MODULE_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
state: absent
register: test_four
- name: "MODULE_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -83,14 +83,14 @@
- name: "MODULE_TYPE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "Test Module Type"
+ model: Test Module Type
state: absent
register: test_five
- name: "MODULE_TYPE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module_type'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_platform.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_platform.yml
index 1e3621add..8a36ef2c1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_platform.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_platform.yml
@@ -7,14 +7,14 @@
- name: "PLATFORM 1: Necessary info creation"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: present
register: test_one
- name: "PLATFORM 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "PLATFORM 2: Create duplicate"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: present
register: test_two
- name: "PLATFORM 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['platform']['name'] == "Test Platform"
@@ -43,7 +43,7 @@
- name: "PLATFORM 3: ASSERT - Update"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
manufacturer: Test Manufacturer
@@ -51,7 +51,7 @@
register: test_three
- name: "PLATFORM 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['manufacturer'] == 3
@@ -61,14 +61,14 @@
- name: "PLATFORM 4: ASSERT - Delete"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: absent
register: test_four
- name: "PLATFORM 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -78,14 +78,14 @@
- name: "PLATFORM 5: ASSERT - Delete non existing"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: absent
register: test_five
- name: "PLATFORM 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['platform'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_feed.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_feed.yml
index a02fd9129..f4ac71e01 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_feed.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_feed.yml
@@ -10,7 +10,7 @@
- name: "POWER_FEED 1: Necessary info creation"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_FEED 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_FEED 2: Create duplicate"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_FEED 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_feed']['name'] == "Power Feed"
@@ -48,7 +48,7 @@
- name: "POWER_FEED 3: Update power_feed with other fields"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -64,7 +64,7 @@
register: test_three
- name: "POWER_FEED 3: ASSERT - Update power_feed with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "offline"
@@ -90,7 +90,7 @@
- name: "POWER_FEED 4: Create Power Feed for Delete Test"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed 2
power_panel: Power Panel
@@ -98,7 +98,7 @@
register: test_four
- name: "POWER_FEED 4: ASSERT - Create Power Feed for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,11 +107,10 @@
- test_four['power_feed']['power_panel'] == 1
- test_four['msg'] == "power_feed Power Feed 2 created"
-
- name: "POWER_FEED 5: Delete Power Feed"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed 2
power_panel: Power Panel
@@ -119,7 +118,7 @@
register: test_five
- name: "POWER_FEED 5: ASSERT - Delete Power Feed"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml
index c7b5f7d3d..84f9d3255 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet.yml
@@ -10,7 +10,7 @@
- name: "POWER_OUTLET 1: Necessary info creation"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_OUTLET 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_OUTLET 2: Create duplicate"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_OUTLET 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_outlet']['name'] == "Power Outlet"
@@ -48,7 +48,7 @@
- name: "POWER_OUTLET 3: Update power_outlet with other fields"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -60,7 +60,7 @@
register: test_three
- name: "POWER_OUTLET 3: ASSERT - Update power_outlet with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -78,7 +78,7 @@
- name: "POWER_OUTLET 4: Create Power Outlet for Delete Test"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet 2
device: Device Power Tests
@@ -86,7 +86,7 @@
register: test_four
- name: "POWER_OUTLET 4: ASSERT - Create Power Outlet for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -98,7 +98,7 @@
- name: "POWER_OUTLET 5: Delete Power Outlet"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet 2
device: Device Power Tests
@@ -106,7 +106,7 @@
register: test_five
- name: "POWER_OUTLET 5: ASSERT - Delete Power Outlet"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml
index 96f283486..14ff49d0c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_outlet_template.yml
@@ -10,7 +10,7 @@
- name: "POWER_OUTLET_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_OUTLET_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_OUTLET_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_OUTLET_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_outlet_template']['name'] == "Power Outlet Template"
@@ -48,7 +48,7 @@
- name: "POWER_OUTLET_TEMPLATE 3: Update power_outlet_template with other fields"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -59,7 +59,7 @@
register: test_three
- name: "POWER_OUTLET_TEMPLATE 3: ASSERT - Update power_outlet_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -75,7 +75,7 @@
- name: "POWER_OUTLET_TEMPLATE 4: Create Power Outlet Template for Delete Test"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template 2
device_type: Device Type Power Tests
@@ -83,7 +83,7 @@
register: test_four
- name: "POWER_OUTLET_TEMPLATE 4: ASSERT - Create Power Outlet Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -95,7 +95,7 @@
- name: "POWER_OUTLET_TEMPLATE 5: Delete Power Outlet Template"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template 2
device_type: Device Type Power Tests
@@ -103,7 +103,7 @@
register: test_five
- name: "POWER_OUTLET_TEMPLATE 5: ASSERT - Delete Power Outlet Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_panel.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_panel.yml
index 082a6b375..e568a75f4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_panel.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_panel.yml
@@ -10,7 +10,7 @@
- name: "POWER_PANEL 1: Necessary info creation"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_PANEL 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_PANEL 2: Create duplicate"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_PANEL 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_panel']['name'] == "Power Panel"
@@ -48,7 +48,7 @@
- name: "POWER_PANEL 3: Update power_panel with other fields"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -57,7 +57,7 @@
register: test_three
- name: "POWER_PANEL 3: ASSERT - Update power_panel with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['location'] == 1
@@ -69,7 +69,7 @@
- name: "POWER_PANEL 4: Create Power Panel for Delete Test"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel 2
site: Test Site
@@ -77,7 +77,7 @@
register: test_four
- name: "POWER_PANEL 4: ASSERT - Create Power Panel for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "POWER_PANEL 5: Delete Power Panel"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel 2
site: Test Site
@@ -97,7 +97,7 @@
register: test_five
- name: "POWER_PANEL 5: ASSERT - Delete Power Panel"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port.yml
index b6368bd92..311820ccd 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port.yml
@@ -10,7 +10,7 @@
- name: "POWER_PORT 0: Create device for testing power ports"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Power Tests
device_type: Cisco Test
@@ -21,7 +21,7 @@
- name: "POWER_PORT 1: Necessary info creation"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -29,7 +29,7 @@
register: test_one
- name: "POWER_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -41,7 +41,7 @@
- name: "POWER_PORT 2: Create duplicate"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -49,7 +49,7 @@
register: test_two
- name: "POWER_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_port']['name'] == "Power Port"
@@ -59,7 +59,7 @@
- name: "POWER_FEED 3: Update power_port with other fields"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -71,7 +71,7 @@
register: test_three
- name: "POWER_FEED 3: ASSERT - Update power_port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -89,7 +89,7 @@
- name: "POWER_PORT 4: Create Power Port for Delete Test"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port 2
device: Device Power Tests
@@ -97,7 +97,7 @@
register: test_four
- name: "POWER_PORT 4: ASSERT - Create Power Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -109,7 +109,7 @@
- name: "POWER_PORT 5: Delete Power Port"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port 2
device: Device Power Tests
@@ -117,7 +117,7 @@
register: test_five
- name: "POWER_PORT 5: ASSERT - Delete Power Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml
index aaa5eb43c..b413c29ce 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_power_port_template.yml
@@ -10,7 +10,7 @@
- name: "POWER_PORT_TEMPLATE 0.1: Create device type for testing power ports on device types"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: Device Type Power Tests
manufacturer: Test Manufacturer
@@ -19,7 +19,7 @@
- name: "POWER_PORT_TEMPLATE 0.2: Create module type for testing power ports on module types"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: Module Type Power Tests
manufacturer: Test Manufacturer
@@ -28,7 +28,7 @@
- name: "POWER_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -36,7 +36,7 @@
register: test_one
- name: "POWER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -48,7 +48,7 @@
- name: "POWER_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -56,7 +56,7 @@
register: test_two
- name: "POWER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_port_template']['name'] == "Power Port Template"
@@ -66,7 +66,7 @@
- name: "POWER_PORT_TEMPLATE 3: Update power_port_template with other fields"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -77,7 +77,7 @@
register: test_three
- name: "POWER_PORT_TEMPLATE 3: ASSERT - Update power_port_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -93,7 +93,7 @@
- name: "POWER_PORT_TEMPLATE 4: Create Power Port Template for Delete Test"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template 2
device_type: Device Type Power Tests
@@ -101,7 +101,7 @@
register: test_four
- name: "POWER_PORT_TEMPLATE 4: ASSERT - Create Power Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -113,7 +113,7 @@
- name: "POWER_PORT_TEMPLATE 5: Delete Power Port Template"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template 2
device_type: Device Type Power Tests
@@ -121,7 +121,7 @@
register: test_five
- name: "POWER_PORT_TEMPLATE 5: ASSERT - Delete Power Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -131,7 +131,7 @@
- name: "POWER_PORT_TEMPLATE 6: Necessary info creation"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -139,7 +139,7 @@
register: test_six
- name: "POWER_PORT_TEMPLATE 6: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
@@ -151,7 +151,7 @@
- name: "POWER_PORT_TEMPLATE 7: Create duplicate"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -159,7 +159,7 @@
register: test_seven
- name: "POWER_PORT_TEMPLATE 7: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_seven['changed']
- test_seven['power_port_template']['name'] == "Module Power Port Template"
@@ -169,7 +169,7 @@
- name: "POWER_PORT_TEMPLATE 8: Update power_port_template with other fields"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -180,7 +180,7 @@
register: test_eight
- name: "POWER_PORT_TEMPLATE 8: ASSERT - Update power_port_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['after']['type'] == "ita-e"
@@ -196,7 +196,7 @@
- name: "POWER_PORT_TEMPLATE 9: Delete Power Port Template"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -204,7 +204,7 @@
register: test_nine
- name: "POWER_PORT_TEMPLATE 9: ASSERT - Delete Power Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_prefix.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_prefix.yml
index 59ed336ad..64adae855 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_prefix.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_prefix.yml
@@ -4,17 +4,17 @@
### NETBOX_PREFIX
##
##
-- name: "1 - Create prefix within NetBox with only required information"
+- name: 1 - Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,36 +22,36 @@
- test_one['msg'] == "prefix 10.156.0.0/19 created"
- test_one['prefix']['prefix'] == "10.156.0.0/19"
-- name: "2 - Duplicate"
+- name: 2 - Duplicate
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "prefix 10.156.0.0/19 already exists"
- test_two['prefix']['prefix'] == "10.156.0.0/19"
-- name: "3 - Update 10.156.0.0/19"
+- name: 3 - Update 10.156.0.0/19
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
site: Test Site
status: Reserved
- description: "This prefix has been updated"
+ description: This prefix has been updated
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['site'] == 1
@@ -63,27 +63,27 @@
- test_three['prefix']['status'] == "reserved"
- test_three['prefix']['description'] == "This prefix has been updated"
-- name: "4 - Delete prefix within netbox"
+- name: 4 - Delete prefix within netbox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "prefix 10.156.0.0/19 deleted"
-- name: "5 - Create prefix with several specified options"
+- name: 5 - Create prefix with several specified options
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 10.156.32.0/19
@@ -100,12 +100,12 @@
description: Test description
is_pool: true
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -123,34 +123,34 @@
- test_five['prefix']['is_pool'] == true
- test_five['prefix']['tags'][0] == 4
-- name: "6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist"
+- name: 6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['msg'] == "Parent prefix does not exist - 10.156.0.0/19"
-- name: "7 - Create prefix within NetBox with only required information"
+- name: 7 - Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "absent"
@@ -158,19 +158,19 @@
- test_seven['msg'] == "prefix 10.156.0.0/19 created"
- test_seven['prefix']['prefix'] == "10.156.0.0/19"
-- name: "8 - Get a new /24 inside 10.156.0.0/19 within NetBox"
+- name: 8 - Get a new /24 inside 10.156.0.0/19 within NetBox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "absent"
@@ -178,10 +178,10 @@
- test_eight['msg'] == "prefix 10.156.0.0/24 created"
- test_eight['prefix']['prefix'] == "10.156.0.0/24"
-- name: "9 - Create 10.157.0.0/19"
+- name: 9 - Create 10.157.0.0/19
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.157.0.0/19
vrf: Test VRF
@@ -189,8 +189,8 @@
state: present
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "absent"
@@ -200,21 +200,21 @@
- test_nine['prefix']['site'] == 1
- test_nine['prefix']['vrf'] == 1
-- name: "10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values"
+- name: 10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.157.0.0/19
prefix_length: 24
vrf: Test VRF
site: Test Site
state: present
- first_available: yes
+ first_available: true
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['diff']['before']['state'] == "absent"
@@ -224,19 +224,19 @@
- test_ten['prefix']['site'] == 1
- test_ten['prefix']['vrf'] == 1
-- name: "11 - Get a new /24 inside 10.156.0.0/19 within NetBox"
+- name: 11 - Get a new /24 inside 10.156.0.0/19 within NetBox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is changed
- test_eleven['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider.yml
index cba224eb4..ec7b2c30a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider.yml
@@ -7,14 +7,14 @@
- name: "NETBOX_PROVIDER 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: present
register: test_one
- name: "NETBOX_PROVIDER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "NETBOX_PROVIDER 2: Duplicate"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: present
register: test_two
- name: "NETBOX_PROVIDER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['provider']['name'] == "Test Provider One"
@@ -43,15 +43,15 @@
- name: "NETBOX_PROVIDER 3: Update provider with other fields"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
- comments: "BAD PROVIDER"
+ comments: BAD PROVIDER
state: present
register: test_three
- name: "NETBOX_PROVIDER 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "BAD PROVIDER"
@@ -63,14 +63,14 @@
- name: "NETBOX_PROVIDER 4: Delete provider within netbox"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: absent
register: test_four
- name: "NETBOX_PROVIDER 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['provider']['name'] == "Test Provider One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider_network.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider_network.yml
index bf7d4bf97..6f8b2b331 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider_network.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_provider_network.yml
@@ -7,7 +7,7 @@
- name: "NETBOX_PROVIDER_NETWORK 1: Create provider network within NetBox with only required information"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -15,7 +15,7 @@
register: test_one
- name: "NETBOX_PROVIDER_NETWORK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,7 +26,7 @@
- name: "NETBOX_PROVIDER_NETWORK 2: Duplicate"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -34,7 +34,7 @@
register: test_two
- name: "NETBOX_PROVIDER_NETWORK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['provider_network']['name'] == "Test Provider Network One"
@@ -43,17 +43,17 @@
- name: "NETBOX_PROVIDER_NETWORK 3: Update provider network with other fields"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
description: Describe a Provider Network
- comments: "A provider network"
+ comments: A provider network
state: present
register: test_three
- name: "NETBOX_PROVIDER_NETWORK 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "A provider network"
@@ -66,7 +66,7 @@
- name: "NETBOX_PROVIDER_NETWORK 4: Delete provider within netbox"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -74,7 +74,7 @@
register: test_four
- name: "NETBOX_PROVIDER_NETWORK 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['provider_network']['name'] == "Test Provider Network One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack.yml
index 245dd0c29..5dc05c3a1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack.yml
@@ -4,18 +4,18 @@
### NETBOX_RACK
##
##
-- name: "1 - Test rack creation"
+- name: 1 - Test rack creation
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
- site: "Test Site"
- location: "Test Rack Group"
+ name: Test rack one
+ site: Test Site
+ location: Test Rack Group
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -23,34 +23,34 @@
- test_one['rack']['name'] == "Test rack one"
- test_one['rack']['site'] == 1
-- name: "Test duplicate rack"
+- name: Test duplicate rack
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
+ name: Test rack one
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack']['name'] == "Test rack one"
- test_two['rack']['site'] == 1
- test_two['msg'] == "rack Test rack one already exists"
-- name: "3 - Create new rack with similar name"
+- name: 3 - Create new rack with similar name
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack - Test Site
site: Test Site
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -59,53 +59,53 @@
- test_three['rack']['site'] == 1
- test_three['msg'] == "rack Test rack - Test Site created"
-- name: "4 - Attempt to create Test rack one again"
+- name: 4 - Attempt to create Test rack one again
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- location: "Test Rack Group"
+ location: Test Rack Group
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- not test_four['changed']
- test_four['rack']['name'] == "Test rack one"
- test_four['rack']['site'] == 1
- test_four['msg'] == "rack Test rack one already exists"
-- name: "5 - Update Test rack one with more options"
+- name: 5 - Update Test rack one with more options
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- rack_role: "Test Rack Role"
- location: "Test Rack Group"
- facility_id: "EQUI10291"
- tenant: "Test Tenant"
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
status: Available
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
width: 23
u_height: 48
- type: "2-post frame"
+ type: 2-post frame
outer_width: 32
outer_depth: 24
- outer_unit: "Inches"
- comments: "Just testing rack module"
+ outer_unit: Inches
+ comments: Just testing rack module
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['asset_tag'] == "1234"
@@ -141,34 +141,34 @@
- test_five['rack']['width'] == 23
- test_five['msg'] == "rack Test rack one updated"
-- name: "6 - Update Test rack one with same options"
+- name: 6 - Update Test rack one with same options
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- rack_role: "Test Rack Role"
- location: "Test Rack Group"
- facility_id: "EQUI10291"
- tenant: "Test Tenant"
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
status: Available
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
width: 23
u_height: 48
- type: "2-post frame"
+ type: 2-post frame
outer_width: 32
outer_depth: 24
- outer_unit: "Inches"
- comments: "Just testing rack module"
+ outer_unit: Inches
+ comments: Just testing rack module
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is not changed
- test_six['rack']['name'] == "Test rack one"
@@ -189,36 +189,36 @@
- test_six['rack']['u_height'] == 48
- test_six['rack']['width'] == 23
-- name: "7 - Create rack with same asset tag and serial number"
+- name: 7 - Create rack with same asset tag and serial number
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack two
site: Test Site
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is failed
- "'asset tag already exists' in test_seven['msg']"
-- name: "8 - Test delete"
+- name: 8 - Test delete
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
- state: "absent"
+ name: Test rack one
+ state: absent
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_group.yml
index 6a10cbf94..40dba8408 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_group.yml
@@ -7,7 +7,7 @@
- name: "RACK_GROUP 1: Necessary info creation"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
site: Test Site
@@ -15,7 +15,7 @@
register: test_one
- name: "RACK_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,7 +28,7 @@
- name: "RACK_GROUP 2: Create duplicate"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
site: Test Site
@@ -36,7 +36,7 @@
register: test_two
- name: "RACK_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack_group']['name'] == "Rack Group"
@@ -47,14 +47,14 @@
- name: "RACK_GROUP 3: ASSERT - Delete"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
state: absent
register: test_three
- name: "RACK_GROUP 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_role.yml
index b437a5a78..b60fcab8c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rack_role.yml
@@ -7,15 +7,15 @@
- name: "RACK_ROLE 1: Necessary info creation"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
- color: "ffffff"
+ color: ffffff
state: present
register: test_one
- name: "RACK_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,14 +28,14 @@
- name: "RACK_ROLE 2: Create duplicate"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
state: present
register: test_two
- name: "RACK_ROLE 1: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack_role']['name'] == "Rack Role"
@@ -46,16 +46,16 @@
- name: "RACK_ROLE 3: Update"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
color: "003EFF"
- description: "This is a Rack Role test"
+ description: This is a Rack Role test
state: present
register: test_three
- name: "RACK_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -68,14 +68,14 @@
- name: "RACK_ROLE 4: Delete"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
state: absent
register: test_four
- name: "RACK_ROLE 4: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port.yml
index 10eb23098..d186233be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port.yml
@@ -10,7 +10,7 @@
- name: "REAR_PORT 1: Necessary info creation"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -19,7 +19,7 @@
register: test_one
- name: "REAR_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,7 +32,7 @@
- name: "REAR_PORT 2: Create duplicate"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -41,7 +41,7 @@
register: test_two
- name: "REAR_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rear_port']['name'] == "Rear Port"
@@ -52,7 +52,7 @@
- name: "REAR_PORT 3: Update Rear Port with other fields"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -63,7 +63,7 @@
register: test_three
- name: "REAR_PORT 3: ASSERT - Update Rear Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['positions'] == 5
@@ -78,7 +78,7 @@
- name: "REAR_PORT 4: Create Rear Port for Delete Test"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port 2
device: test100
@@ -87,7 +87,7 @@
register: test_four
- name: "REAR_PORT 4: ASSERT - Create Rear Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -100,7 +100,7 @@
- name: "REAR_PORT 5: Delete Rear Port"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port 2
device: test100
@@ -109,7 +109,7 @@
register: test_five
- name: "REAR_PORT 5: ASSERT - Delete Rear Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -119,7 +119,7 @@
- name: "REAR_PORT 6: Create second Rear Port"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: Test Nexus One
@@ -128,7 +128,7 @@
register: test_six
- name: "REAR_PORT 6: ASSERT - Create second Rear Port"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml
index ca867619a..5342627d0 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rear_port_template.yml
@@ -10,7 +10,7 @@
- name: "REAR_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -19,7 +19,7 @@
register: test_one
- name: "REAR_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,7 +32,7 @@
- name: "REAR_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -41,7 +41,7 @@
register: test_two
- name: "REAR_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rear_port_template']['name'] == "Rear Port Template"
@@ -52,7 +52,7 @@
- name: "REAR_PORT_TEMPLATE 3: Update Rear Port Template with other fields"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -62,7 +62,7 @@
register: test_three
- name: "REAR_PORT_TEMPLATE 3: ASSERT - Update Rear Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['positions'] == 5
@@ -75,7 +75,7 @@
- name: "REAR_PORT_TEMPLATE 4: Create Rear Port Template for Delete Test"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Cisco Test
@@ -84,7 +84,7 @@
register: test_four
- name: "REAR_PORT_TEMPLATE 4: ASSERT - Create Rear Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -97,7 +97,7 @@
- name: "REAR_PORT_TEMPLATE 5: Delete Rear Port Template"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Cisco Test
@@ -106,7 +106,7 @@
register: test_five
- name: "REAR_PORT_TEMPLATE 5: ASSERT - Delete Rear Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -116,7 +116,7 @@
- name: "REAR_PORT_TEMPLATE 6: Create second Rear Port Template"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Arista Test
@@ -125,7 +125,7 @@
register: test_six
- name: "REAR_PORT_TEMPLATE 6: ASSERT - Create second Rear Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_region.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_region.yml
index 1359598af..0395db435 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_region.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_region.yml
@@ -7,14 +7,14 @@
- name: "REGION 1: Necessary info creation"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: present
register: test_one
- name: "REGION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "REGION 2: Create duplicate"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: present
register: test_two
- name: "REGION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['region']['name'] == "Test Region One"
@@ -43,15 +43,15 @@
- name: "REGION 3: ASSERT - Update"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
- parent_region: "Test Region"
+ name: Test Region One
+ parent_region: Test Region
state: present
register: test_three
- name: "REGION 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 1
@@ -63,14 +63,14 @@
- name: "REGION 4: ASSERT - Delete"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: absent
register: test_four
- name: "REGION 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['region']['name'] == "Test Region One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rir.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rir.yml
index cc4935938..3a7bf79fa 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rir.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_rir.yml
@@ -7,14 +7,14 @@
- name: "RIR 1: Necessary info creation"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test RIR One
state: present
register: test_one
- name: "RIR 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "RIR 2: Create duplicate"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test RIR One
state: present
register: test_two
- name: "RIR 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rir']['name'] == "Test RIR One"
@@ -43,15 +43,15 @@
- name: "RIR 3: ASSERT - Update"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test RIR One"
+ name: Test RIR One
is_private: true
state: present
register: test_three
- name: "RIR 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['is_private'] == true
@@ -63,14 +63,14 @@
- name: "RIR 4: ASSERT - Delete"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test RIR One"
+ name: Test RIR One
state: absent
register: test_four
- name: "RIR 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['rir']['name'] == "Test RIR One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_route_target.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_route_target.yml
index 354978dc2..c2576c4cc 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_route_target.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_route_target.yml
@@ -4,18 +4,18 @@
- name: "NETBOX_ROUTE_TARGET_ADD: Check Mode - Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Check Mode - Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -29,17 +29,17 @@
- name: "NETBOX_ROUTE_TARGET_ADD: Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -53,17 +53,17 @@
- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['route_target']['name'] == "65000:65001"
@@ -76,19 +76,19 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE: Check Mode - Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- description: "NEW DESCRIPTION"
- tenant: "Test Tenant"
+ name: 65000:65001
+ description: NEW DESCRIPTION
+ tenant: Test Tenant
tags:
- first
- second
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Check Mode - Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -103,18 +103,18 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE: Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
- description: "NEW DESCRIPTION"
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -129,18 +129,18 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
- description: "NEW DESCRIPTION"
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['route_target']['name'] == "65000:65001"
@@ -154,15 +154,15 @@
- name: "NETBOX_ROUTE_TARGET_DELETE: Check Mode - Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Check Mode - Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['diff']['after']['state'] == "absent"
@@ -173,14 +173,14 @@
- name: "NETBOX_ROUTE_TARGET_DELETE: Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['diff']['after']['state'] == "absent"
@@ -191,14 +191,14 @@
- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['msg'] == "route_target 65000:65001 already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service.yml
index 09a993c3d..d937e50a6 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service.yml
@@ -4,32 +4,32 @@
### NETBOX_SERVICE
##
##
-- name: "1 - Device with required information needs to add new service"
+- name: 1 - Device with required information needs to add new service
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "FOR_SERVICE"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: FOR_SERVICE
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
- name: "NETBOX_SERVICE: Create new service"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_create
-- name: "NETBOX_SERVICE ASSERT - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
that:
- test_service_create is changed
- test_service_create['services']['name'] == "node-exporter"
@@ -41,18 +41,18 @@
- name: "NETBOX_SERVICE: Test idempotence"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_idempotence
-- name: "NETBOX_SERVICE ASSERT - Not changed"
- assert:
+- name: NETBOX_SERVICE ASSERT - Not changed
+ ansible.builtin.assert:
that:
- test_service_idempotence['services']['name'] == "node-exporter"
- test_service_idempotence['services']['ports'] == [9100]
@@ -61,22 +61,22 @@
- name: "NETBOX_SERVICE: Test update"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
ports:
- 9100
- 9200
protocol: TCP
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_service_update
-- name: "NETBOX_SERVICE ASSERT - Service has been updated"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been updated
+ ansible.builtin.assert:
that:
- test_service_update is changed
- test_service_update['diff']['after']['tags'][0] == 4
@@ -85,18 +85,18 @@
- name: "NETBOX_SERVICE: Test same details, but different protocol - Create"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: UDP
state: present
register: test_service_protocol
-- name: "NETBOX_SERVICE ASSERT - Different protocol - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Different protocol - Create
+ ansible.builtin.assert:
that:
- test_service_protocol is changed
- test_service_protocol['diff']['after']['state'] == "present"
@@ -108,18 +108,18 @@
- name: "NETBOX_SERVICE: Test service deletion"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: UDP
state: absent
register: test_service_delete
-- name: "NETBOX_SERVICE ASSERT - Service has been deleted"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been deleted
+ ansible.builtin.assert:
that:
- test_service_delete is changed
- test_service_delete['diff']['after']['state'] == "absent"
@@ -128,20 +128,20 @@
- name: "NETBOX_SERVICE: Test service IP addresses"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "node-exporter"
+ device: test100
+ name: node-exporter
port: 9100
protocol: UDP
ipaddresses:
- - address: "172.16.180.1/24"
+ - address: 172.16.180.1/24
state: present
register: test_service_ip_addresses
-- name: "NETBOX_SERVICE ASSERT - Service has been created with IP address"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been created with IP address
+ ansible.builtin.assert:
that:
- test_service_ip_addresses is changed
- test_service_ip_addresses['diff']['after']['state'] == "present"
@@ -154,38 +154,38 @@
- name: "NETBOX_SERVICE: Missing both device & virtual_machine options - Tests required_one_of"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "node-exporter"
+ name: node-exporter
port: 9100
protocol: UDP
ipaddresses:
- - address: "172.16.180.1/24"
+ - address: 172.16.180.1/24
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_service_required_one_of
-- name: "NETBOX_SERVICE ASSERT - Failed due to missing arguments"
- assert:
+- name: NETBOX_SERVICE ASSERT - Failed due to missing arguments
+ ansible.builtin.assert:
that:
- test_service_required_one_of is failed
- 'test_service_required_one_of["msg"] == "one of the following is required: device, virtual_machine"'
- name: "NETBOX_SERVICE: Create new service on virtual_machine"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "node-exporter"
+ virtual_machine: test100-vm
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_create_vm
-- name: "NETBOX_SERVICE ASSERT - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
that:
- test_service_create_vm is changed
- test_service_create_vm['services']['name'] == "node-exporter"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service_template.yml
index f64509acf..507a566a4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_service_template.yml
@@ -7,7 +7,7 @@
- name: "SERVICE_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -17,7 +17,7 @@
register: test_one
- name: "SERVICE_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "SERVICE_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -40,7 +40,7 @@
register: test_two
- name: "SERVICE_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['service_template']['name'] == "Service Template for SSH"
@@ -51,7 +51,7 @@
- name: "SERVICE_TEMPLATE 3: Update Service Template with other fields"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -62,7 +62,7 @@
register: test_three
- name: "SERVICE_TEMPLATE 3: ASSERT - Update Service Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "For SSH service"
@@ -75,7 +75,7 @@
- name: "SERVICE_TEMPLATE 4: Create Service Template for Delete Test"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for DNS
ports:
@@ -86,7 +86,7 @@
register: test_four
- name: "SERVICE_TEMPLATE 4: ASSERT - Create Service Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -100,14 +100,14 @@
- name: "SERVICE_TEMPLATE 5: Delete Service Template"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for DNS
state: absent
register: test_five
- name: "SERVICE_TEMPLATE 5: ASSERT - Delete Service Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site.yml
index 6960eeb3d..a5861522e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site.yml
@@ -4,17 +4,17 @@
### NETBOX_SITE
##
##
-- name: "1 - Create site within NetBox with only required information"
+- name: 1 - Create site within NetBox with only required information
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,26 +22,26 @@
- test_one['site']['name'] == "Test - Colorado"
- test_one['msg'] == "site Test - Colorado created"
-- name: "2 - Duplicate"
+- name: 2 - Duplicate
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "site Test - Colorado already exists"
- test_two['site']['name'] == "Test - Colorado"
-- name: "3 - Update Test - Colorado"
+- name: 3 - Update Test - Colorado
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
status: Planned
@@ -49,8 +49,8 @@
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
@@ -60,10 +60,10 @@
- test_three['site']['status'] == "planned"
- test_three['site']['region'] == 1
-- name: "4 - Create site with all parameters"
+- name: 4 - Create site with all parameters
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - California
status: Planned
@@ -78,12 +78,12 @@
latitude: "22.169141"
longitude: "-100.994041"
comments: "### Placeholder"
- slug: "test_california"
+ slug: test_california
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,7 +107,7 @@
- name: "NETBOX_SITE_IDEM: Idempotency - Create duplicate site with all parameters"
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - California
status: Planned
@@ -122,12 +122,12 @@
latitude: "22.169141"
longitude: "-100.994041"
comments: "### Placeholder"
- slug: "test_california"
+ slug: test_california
state: present
register: test_results
- name: "NETBOX_SITE_IDEM: (ASSERT) Idempotency - Duplicate device site with all parameters"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['site']['name'] == "Test - California"
@@ -146,17 +146,17 @@
- test_results['site']['comments'] == "### Placeholder"
- test_results['site']['slug'] == "test_california"
-- name: "5 - Delete site within netbox"
+- name: 5 - Delete site within netbox
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: absent
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site_group.yml
index 09e2df4c6..11b9cbb24 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_site_group.yml
@@ -7,14 +7,14 @@
- name: "SITE_GROUP 1: Necessary info creation"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: present
register: test_one
- name: "SITE_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "SITE_GROUP 2: Create duplicate"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: present
register: test_two
- name: "SITE_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['site_group']['name'] == "Site Group"
@@ -43,7 +43,7 @@
- name: "SITE_GROUP 3: Update"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
parent_site_group: Test Site Group
@@ -52,7 +52,7 @@
register: test_three
- name: "SITE_GROUP 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 4
@@ -66,14 +66,14 @@
- name: "SITE_GROUP 4: Delete"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: absent
register: test_four
- name: "SITE_GROUP 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tag.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tag.yml
index 38c4157c2..f3fcbac39 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tag.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tag.yml
@@ -7,16 +7,16 @@
- name: "TAG 1: ASSERT - Necessary info creation"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 test"
+ name: Test Tag 1
+ description: Tag 1 test
color: "0000ff"
state: present
register: test_one
- name: "TAG 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,16 +30,16 @@
- name: "TAG 2: Create duplicate"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 test"
+ name: Test Tag 1
+ description: Tag 1 test
color: "0000ff"
state: present
register: test_two
- name: "TAG 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tags']['name'] == "Test Tag 1"
@@ -48,16 +48,16 @@
- name: "TAG 3: ASSERT - Update"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 update test"
+ name: Test Tag 1
+ description: Tag 1 update test
color: "00ff00"
state: present
register: test_three
- name: "TAG 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "00ff00"
@@ -70,14 +70,14 @@
- name: "TAG 4: ASSERT - Delete"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
+ name: Test Tag 1
state: absent
register: test_four
- name: "TAG 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['state'] == "absent"
@@ -88,17 +88,17 @@
- name: "TAG 5: ASSERT - Necessary info creation"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 5"
- slug: "test-tag-five"
- description: "Tag 5 test"
+ name: Test Tag 5
+ slug: test-tag-five
+ description: Tag 5 test
color: "0000ff"
state: present
register: test_five
- name: "TAG 5: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant.yml
index 7c9af8b9f..0731e0268 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant.yml
@@ -4,16 +4,16 @@
### NETBOX_TENANT
##
##
-- name: "1 - Test tenant creation"
+- name: 1 - Test tenant creation
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
+ name: Tenant ABC
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,33 +22,33 @@
- test_one['tenant']['slug'] == "tenant-abc"
- test_one['msg'] == "tenant Tenant ABC created"
-- name: "Test duplicate tenant"
+- name: Test duplicate tenant
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
+ name: Tenant ABC
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tenant']['name'] == "Tenant ABC"
- test_two['tenant']['slug'] == "tenant-abc"
- test_two['msg'] == "tenant Tenant ABC already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- description: "Updated description"
+ name: Tenant ABC
+ description: Updated description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Updated description"
@@ -57,42 +57,42 @@
- test_three['tenant']['description'] == "Updated description"
- test_three['msg'] == "tenant Tenant ABC updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- state: "absent"
+ name: Tenant ABC
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "tenant Tenant ABC deleted"
-- name: "5 - Create tenant with all parameters"
+- name: 5 - Create tenant with all parameters
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- description: "ABC Incorporated"
+ name: Tenant ABC
+ description: ABC Incorporated
comments: "### This tenant is super cool"
- tenant_group: "Test Tenant Group"
- slug: "tenant_abc"
+ tenant_group: Test Tenant Group
+ slug: tenant_abc
tags:
- - "tagA"
- - "tagB"
- - "tagC"
+ - tagA
+ - tagB
+ - tagC
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml
index 4113a2260..f7f57f3c2 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_tenant_group.yml
@@ -4,16 +4,16 @@
### NETBOX_TENANT_GROUP
##
##
-- name: "1 - Test tenant group creation"
+- name: 1 - Test tenant group creation
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
+ name: Test Tenant Group Two
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,50 +22,50 @@
- test_one['tenant_group']['slug'] == "test-tenant-group-two"
- test_one['msg'] == "tenant_group Test Tenant Group Two created"
-- name: "Test duplicate tenant group"
+- name: Test duplicate tenant group
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
+ name: Test Tenant Group Two
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tenant_group']['name'] == "Test Tenant Group Two"
- test_two['tenant_group']['slug'] == "test-tenant-group-two"
- test_two['msg'] == "tenant_group Test Tenant Group Two already exists"
-- name: "3 - Test delete"
+- name: 3 - Test delete
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
- state: "absent"
+ name: Test Tenant Group Two
+ state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
- test_three['msg'] == "tenant_group Test Tenant Group Two deleted"
-- name: "4 - Test tenant group creation with custom slug"
+- name: 4 - Test tenant group creation with custom slug
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group ABC"
- slug: "test_tenant_group_four"
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -74,17 +74,17 @@
- test_four['tenant_group']['slug'] == "test_tenant_group_four"
- test_four['msg'] == "tenant_group Test Tenant Group ABC created"
-- name: "5 - Test child tenant group creation"
+- name: 5 - Test child tenant group creation
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Child Test Tenant Group"
+ name: Child Test Tenant Group
parent_tenant_group: "{{ test_four.tenant_group.slug }}"
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -93,35 +93,35 @@
- test_five['tenant_group']['parent'] == test_four.tenant_group.id
- test_five['msg'] == "tenant_group Child Test Tenant Group created"
-- name: "6 - Test child tenant group deletion"
+- name: 6 - Test child tenant group deletion
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Child Test Tenant Group"
- state: "absent"
+ name: Child Test Tenant Group
+ state: absent
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "present"
- test_six['diff']['after']['state'] == "absent"
- test_six['msg'] == "tenant_group Child Test Tenant Group deleted"
-- name: "7 - Test deletion of the tenant group with custom slug"
+- name: 7 - Test deletion of the tenant group with custom slug
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group ABC"
- slug: "test_tenant_group_four"
- state: "absent"
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
+ state: absent
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml
index 04ada8411..395be8395 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_chassis.yml
@@ -10,7 +10,7 @@
- name: "VIRTUAL_CHASSIS 0: Create device for testing virtual chassis"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Virtual Chassis Tests
device_type: Cisco Test
@@ -23,15 +23,15 @@
- name: "VIRTUAL_CHASSIS 1: Necessary info creation"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
state: present
register: test_one
- name: "VIRTUAL_CHASSIS 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -43,15 +43,15 @@
- name: "VIRTUAL_CHASSIS 2: Create duplicate"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
state: present
register: test_two
- name: "VIRTUAL_CHASSIS 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_chassis']['master'] == 11
@@ -61,16 +61,16 @@
- name: "POWER_FEED 3: Update virtual_chassis with other fields"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
domain: Domain Text
state: present
register: test_three
- name: "POWER_FEED 3: ASSERT - Update virtual_chassis with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['domain'] == "Domain Text"
@@ -82,7 +82,7 @@
- name: "VIRTUAL_CHASSIS 4: Create device for testing virtual chassis deletion"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Virtual Chassis Tests 2
device_type: Cisco Test
@@ -95,15 +95,15 @@
- name: "VIRTUAL_CHASSIS 4: Create Virtual Chassis for Delete Test"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Second VC"
+ name: Second VC
master: Device Virtual Chassis Tests 2
state: present
register: test_four
- name: "VIRTUAL_CHASSIS 4: ASSERT - Create Virtual Chassis for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -115,15 +115,15 @@
- name: "VIRTUAL_CHASSIS 5: Delete Virtual Chassis"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Second VC"
+ name: Second VC
master: Device Virtual Chassis Tests 2
state: absent
register: test_five
- name: "VIRTUAL_CHASSIS 5: ASSERT - Delete Virtual Chassis"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml
index 79d3882bc..3fb0e4319 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_virtual_machine.yml
@@ -7,15 +7,15 @@
- name: "VIRTUAL_MACHINE 1: Necessary info creation"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
state: present
register: test_one
- name: "VIRTUAL_MACHINE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "VIRTUAL_MACHINE 2: Create duplicate"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
state: present
register: test_two
- name: "VIRTUAL_MACHINE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_machine']['name'] == "Test VM One"
@@ -45,21 +45,21 @@
- name: "VIRTUAL_MACHINE 3: Update"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
vcpus: 8.5
memory: 8
- status: "Planned"
- virtual_machine_role: "Test VM Role"
+ status: Planned
+ virtual_machine_role: Test VM Role
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "VIRTUAL_MACHINE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['vcpus'] == 8.5
@@ -79,21 +79,21 @@
- name: "VIRTUAL_MACHINE 4: Test idempotence"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
vcpus: 8.5
memory: 8
- status: "Planned"
- virtual_machine_role: "Test VM Role"
+ status: Planned
+ virtual_machine_role: Test VM Role
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four_idempotence
- name: "VIRTUAL_MACHINE 4: ASSERT - Not changed"
- assert:
+ ansible.builtin.assert:
that:
- test_four_idempotence is not changed
- test_four_idempotence['virtual_machine']['name'] == "Test VM One"
@@ -108,14 +108,14 @@
- name: "VIRTUAL_MACHINE 5: Delete"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
+ name: Test VM One
state: absent
register: test_five
- name: "VIRTUAL_MACHINE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['virtual_machine']['name'] == "Test VM One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan.yml
index d95abbeca..463873ca5 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan.yml
@@ -7,7 +7,7 @@
- name: "VLAN 1: Necessary info creation"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
@@ -15,7 +15,7 @@
register: test_one
- name: "VLAN 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "VLAN 2: Create duplicate"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
@@ -35,7 +35,7 @@
register: test_two
- name: "VLAN 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['vlan']['name'] == "Test VLAN 500"
@@ -45,18 +45,18 @@
- name: "VLAN 3: Create VLAN with same name, but different site"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
site: Test Site
tenant: Test Tenant
- vlan_group: "Test VLAN Group"
+ vlan_group: Test VLAN Group
state: present
register: test_three
- name: "VLAN 3: ASSERT - Create VLAN with same name, but different site"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -70,23 +70,23 @@
- name: "VLAN 4: ASSERT - Update"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
vid: 500
- tenant: "Test Tenant"
- vlan_group: "Test VLAN Group"
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
status: Reserved
vlan_role: Network of care
description: Updated description
- site: "Test Site"
+ site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "VLAN 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['status'] == "reserved"
@@ -106,41 +106,41 @@
- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
vid: 500
- tenant: "Test Tenant"
- vlan_group: "Test VLAN Group"
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
status: Reserved
vlan_role: Network of care
description: Updated description
- site: "Test Site"
+ site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: idempotent_vlan_group
- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
- assert:
+ ansible.builtin.assert:
that:
- idempotent_vlan_group is not changed
- name: "VLAN: Create VLAN with same name, but different vlan_group"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
site: Test Site
tenant: Test Tenant
- vlan_group: "Test VLAN Group 2"
+ vlan_group: Test VLAN Group 2
state: present
register: new_vlan_group
- name: "VLAN: ASSERT - Create VLAN with same name, but different vlan_group"
- assert:
+ ansible.builtin.assert:
that:
- new_vlan_group is changed
- new_vlan_group['diff']['before']['state'] == "absent"
@@ -154,15 +154,15 @@
- name: "VLAN 5: ASSERT - Delete more than one result"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
state: absent
- ignore_errors: yes
+ ignore_errors: true
register: test_five
- name: "VLAN 5: ASSERT - Delete more than one result"
- assert:
+ ansible.builtin.assert:
that:
- test_five is failed
- test_five['msg'] == "More than one result returned for Test VLAN 500"
@@ -170,16 +170,16 @@
- name: "VLAN 6: ASSERT - Delete"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
site: Test Site
- vlan_group: "Test VLAN Group"
+ vlan_group: Test VLAN Group
state: absent
register: test_six
- name: "VLAN 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['vlan']['name'] == "Test VLAN 500"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml
index 2be4fd3df..9dd4d1789 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vlan_group.yml
@@ -7,16 +7,16 @@
- name: "VLAN_GROUP 1: Necessary info creation"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
+ name: VLAN Group One
+ scope_type: dcim.site
scope: Test Site
state: present
register: results
- name: "VLAN_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -30,16 +30,16 @@
- name: "VLAN_GROUP 2: Create duplicate"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
+ name: VLAN Group One
+ scope_type: dcim.site
scope: Test Site
state: present
register: results
- name: "VLAN_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not results['changed']
- results['vlan_group']['name'] == "VLAN Group One"
@@ -51,16 +51,16 @@
- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
- scope: "Test Site2"
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site2
state: present
register: results
- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['vlan_group']['name'] == "VLAN Group One"
@@ -72,15 +72,15 @@
- name: "VLAN_GROUP 4: ASSERT - Create vlan group, no site"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
+ name: VLAN Group One
state: present
- ignore_errors: yes
+ ignore_errors: true
register: results
- name: "VLAN_GROUP 4: ASSERT - Create with same name, different site"
- assert:
+ ansible.builtin.assert:
that:
- results is failed
- results['msg'] == "More than one result returned for VLAN Group One"
@@ -88,16 +88,16 @@
- name: "VLAN_GROUP 5: ASSERT - Delete"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: VLAN Group One
- scope_type: "dcim.site"
+ scope_type: dcim.site
scope: Test Site2
state: absent
register: results
- name: "VLAN_GROUP 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "present"
@@ -111,16 +111,16 @@
- name: "VLAN_GROUP 6: ASSERT - Delete non existing"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: VLAN Group One
- scope_type: "dcim.site"
+ scope_type: dcim.site
scope: Test Site2
state: absent
register: results
- name: "VLAN_GROUP 6: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not results['changed']
- results['vlan_group'] == None
@@ -129,16 +129,16 @@
- name: "VLAN_GROUP 7: Necessary info creation - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
state: present
register: results
- name: "VLAN_GROUP 7: ASSERT - Necessary info creation - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -152,16 +152,16 @@
- name: "VLAN_GROUP 8: Necessary info creation - scope_type: dcim.rack"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Rack"
- scope_type: "dcim.rack"
+ name: VLAN Group Rack
+ scope_type: dcim.rack
scope: Test Rack
state: present
register: results
- name: "VLAN_GROUP 8: ASSERT - Necessary info creation - scope_type: dcim.rack"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -175,16 +175,16 @@
- name: "VLAN_GROUP 9: Necessary info creation - scope_type: dcim.region"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Region"
- scope_type: "dcim.region"
+ name: VLAN Group Region
+ scope_type: dcim.region
scope: Test Region
state: present
register: results
- name: "VLAN_GROUP 9: ASSERT - Necessary info creation - scope_type: dcim.region"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -198,16 +198,16 @@
- name: "VLAN_GROUP 10: Necessary info creation - scope_type: dcim.sitegroup"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Site Group"
- scope_type: "dcim.sitegroup"
+ name: VLAN Group Site Group
+ scope_type: dcim.sitegroup
scope: Test Site Group
state: present
register: results
- name: "VLAN_GROUP 10: ASSERT - Necessary info creation - scope_type: dcim.sitegroup"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -219,7 +219,7 @@
- results['msg'] == "vlan_group VLAN Group Site Group created"
# Commented out due to invalid content type being reported back by API
-#- name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
+# - name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
# netbox.netbox.netbox_vlan_group:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -230,7 +230,7 @@
# state: present
# register: results
-#- name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
+# - name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
# assert:
# that:
# - results is changed
@@ -241,8 +241,8 @@
# - results['vlan_group']['scope_type'] == "virtualization.cluster"
# - results['vlan_group']['scope_id'] == 1
# - results['msg'] == "vlan_group VLAN Group Cluster created"
-#
-#- name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
+
+# - name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
# netbox.netbox.netbox_vlan_group:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -252,8 +252,8 @@
# scope: Test Cluster Group
# state: present
# register: results
-#
-#- name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
+
+# - name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
# assert:
# that:
# - results is changed
@@ -268,17 +268,17 @@
- name: "VLAN_GROUP 12: Update Description - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
- description: "Ansible updated description"
+ description: Ansible updated description
state: present
register: results
- name: "VLAN_GROUP 12: ASSERT - Update Description - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['description'] == ""
@@ -293,17 +293,17 @@
- name: "VLAN_GROUP 12: Update Description (IDEM) - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
- description: "Ansible updated description"
+ description: Ansible updated description
state: present
register: results
- name: "VLAN_GROUP 12: ASSERT - Update Description (IDEM) - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is not changed
- results['vlan_group']['name'] == "VLAN Group Location"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml
index 799d04b16..653f31e0e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vm_interface.yml
@@ -7,15 +7,15 @@
- name: "NETBOX_VM_INTERFACE 1: Necessary info creation"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
state: present
register: test_one
- name: "NETBOX_VM_INTERFACE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "NETBOX_VM_INTERFACE 2: Create duplicate"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
state: present
register: test_two
- name: "NETBOX_VM_INTERFACE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['interface']['name'] == "Eth10"
@@ -45,30 +45,30 @@
- name: "NETBOX_VM_INTERFACE 3: Updated"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
enabled: false
mtu: 9000
mac_address: "00:00:00:AA:AA:01"
- description: "Updated test100-vm"
+ description: Updated test100-vm
mode: Tagged
- #untagged_vlan:
+ # untagged_vlan:
# name: Wireless
# site: Test Site
- #tagged_vlans:
+ # tagged_vlans:
# - name: Data
# site: Test Site
# - name: VoIP
# site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['enabled'] == false
@@ -76,8 +76,8 @@
- test_three['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
- test_three['diff']['after']['description'] == "Updated test100-vm"
- test_three['diff']['after']['mode'] == "tagged"
- #- test_three['diff']['after']['untagged_vlan'] == 1
- #- test_three['diff']['after']['tagged_vlans'] == [2, 3]
+ # - test_three['diff']['after']['untagged_vlan'] == 1
+ # - test_three['diff']['after']['tagged_vlans'] == [2, 3]
- test_three['diff']['after']['tags'][0] == 4
- test_three['interface']['name'] == "Eth10"
- test_three['interface']['virtual_machine'] == 1
@@ -86,23 +86,23 @@
- test_three['interface']['mac_address'] == "00:00:00:AA:AA:01"
- test_three['interface']['description'] == "Updated test100-vm"
- test_three['interface']['mode'] == "tagged"
- #- test_three['interface']['untagged_vlan'] == 1
- #- test_three['interface']['tagged_vlans'] == [2, 3]
+ # - test_three['interface']['untagged_vlan'] == 1
+ # - test_three['interface']['tagged_vlans'] == [2, 3]
- test_three['interface']['tags'][0] == 4
- test_three['msg'] == "interface Eth10 updated"
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Eth10"
- virtual_machine: "test100-vm"
+ name: Eth10
+ virtual_machine: test100-vm
state: absent
register: test_four
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['interface']['name'] == "Eth10"
@@ -112,30 +112,30 @@
- name: "NETBOX_VM_INTERFACE 5: Attempt to update interface with same name on other VMs"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth0"
+ virtual_machine: test100-vm
+ name: Eth0
enabled: false
mtu: 9000
mac_address: "00:00:00:AA:AA:01"
- description: "Updated test100-vm Eth0 intf"
+ description: Updated test100-vm Eth0 intf
mode: Tagged
- #untagged_vlan:
+ # untagged_vlan:
# name: Wireless
# site: Test Site
- #tagged_vlans:
+ # tagged_vlans:
# - name: Data
# site: Test Site
# - name: VoIP
# site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
- name: "NETBOX_VM_INTERFACE 5: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['enabled'] == false
@@ -143,8 +143,8 @@
- test_five['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
- test_five['diff']['after']['description'] == "Updated test100-vm Eth0 intf"
- test_five['diff']['after']['mode'] == "tagged"
- #- test_five['diff']['after']['untagged_vlan'] == 1
- #- test_five['diff']['after']['tagged_vlans'] == [2, 3]
+ # - test_five['diff']['after']['untagged_vlan'] == 1
+ # - test_five['diff']['after']['tagged_vlans'] == [2, 3]
- test_five['diff']['after']['tags'][0] == 4
- test_five['interface']['name'] == "Eth0"
- test_five['interface']['virtual_machine'] == 1
@@ -153,7 +153,7 @@
- test_five['interface']['mac_address'] == "00:00:00:AA:AA:01"
- test_five['interface']['description'] == "Updated test100-vm Eth0 intf"
- test_five['interface']['mode'] == "tagged"
- #- test_five['interface']['untagged_vlan'] == 1
- #- test_five['interface']['tagged_vlans'] == [2, 3]
+ # - test_five['interface']['untagged_vlan'] == 1
+ # - test_five['interface']['tagged_vlans'] == [2, 3]
- test_five['interface']['tags'][0] == 4
- test_five['msg'] == "interface Eth0 updated"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vrf.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vrf.yml
index abf9bab0e..1da7ae382 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vrf.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_vrf.yml
@@ -7,14 +7,14 @@
- name: "VRF 1: Necessary info creation"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
state: present
register: test_one
- name: "VRF 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -25,14 +25,14 @@
- name: "VRF 2: Create duplicate"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
state: present
register: test_two
- name: "VRF 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['vrf']['name'] == "Test VRF One"
@@ -41,7 +41,7 @@
- name: "VRF 3: Create VRF with same name, but different tenant"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
tenant: Test Tenant
@@ -49,7 +49,7 @@
register: test_three
- name: "VRF 3: ASSERT - Create VRF with same name, but different site"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -61,25 +61,25 @@
- name: "VRF 4: ASSERT - Update"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
rd: "65001:1"
- enforce_unique: False
- tenant: "Test Tenant"
+ enforce_unique: false
+ tenant: Test Tenant
description: Updated description
import_targets:
- - "4000:4000"
- - "5000:5000"
+ - 4000:4000
+ - 5000:5000
export_targets:
- - "5000:5000"
+ - 5000:5000
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "VRF 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['rd'] == "65001:1"
@@ -101,15 +101,15 @@
- name: "VRF 5: ASSERT - Delete more than one result"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
state: absent
- ignore_errors: yes
+ ignore_errors: true
register: test_five
- name: "VRF 5: ASSERT - Delete more than one result"
- assert:
+ ansible.builtin.assert:
that:
- test_five is failed
- test_five['msg'] == "More than one result returned for Test VRF One"
@@ -117,15 +117,15 @@
- name: "VRF 6: ASSERT - Delete"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
tenant: Test Tenant
state: absent
register: test_six
- name: "VRF 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['vrf']['name'] == "Test VRF One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_webhook.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_webhook.yml
index f2f5d517e..a3d5a3825 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_webhook.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_webhook.yml
@@ -7,12 +7,12 @@
- name: "WEBHOOK 1: Necessary info creation"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url
body_template: !unsafe >-
{{ data }}
@@ -20,7 +20,7 @@
register: test_one
- name: "WEBHOOK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,12 +34,12 @@
- name: "WEBHOOK 2: Create duplicate"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url
body_template: !unsafe >-
{{ data }}
@@ -47,7 +47,7 @@
register: test_two
- name: "WEBHOOK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['webhook']['name'] == "Example Webhook"
@@ -56,12 +56,12 @@
- name: "WEBHOOK 3: Update data and add on delete"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
- type_create: yes
- type_delete: yes
+ - dcim.device
+ type_create: true
+ type_delete: true
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -70,7 +70,7 @@
register: test_three
- name: "WEBHOOK 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['webhook']['name'] == "Example Webhook"
@@ -79,10 +79,10 @@
- name: "WEBHOOK 4: Change content type"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -91,7 +91,7 @@
register: test_four
- name: "WEBHOOK 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -101,10 +101,10 @@
- name: "WEBHOOK 5: Delete"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -113,7 +113,7 @@
register: test_five
- name: "WEBHOOK 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml
index e59af42ee..e70fd3e59 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan.yml
@@ -4,16 +4,16 @@
### NETBOX_WIRELESS_LAN
##
##
-- name: "1 - Test wireless LAN creation"
+- name: 1 - Test wireless LAN creation
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
+ ssid: Wireless LAN One
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['wireless_lan']['ssid'] == "Wireless LAN One"
- test_one['msg'] == "wireless_lan Wireless LAN One created"
-- name: "Test duplicate wireless LAN"
+- name: Test duplicate wireless LAN
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
+ ssid: Wireless LAN One
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['wireless_lan']['ssid'] == "Wireless LAN One"
- test_two['msg'] == "wireless_lan Wireless LAN One already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
- description: "New Description"
+ ssid: Wireless LAN One
+ description: New Description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "New Description"
@@ -54,27 +54,27 @@
- test_three['wireless_lan']['description'] == "New Description"
- test_three['msg'] == "wireless_lan Wireless LAN One updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
- state: "absent"
+ ssid: Wireless LAN One
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_lan Wireless LAN One deleted"
-- name: "5 - Create wireless LAN with all parameters"
+- name: 5 - Create wireless LAN with all parameters
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
ssid: Wireless Network One
description: Cool Wireless Network
@@ -88,8 +88,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -99,5 +99,5 @@
- test_five['wireless_lan']['auth_type'] == "wpa-enterprise"
- test_five['wireless_lan']['auth_cipher'] == "aes"
- test_five['wireless_lan']['auth_psk'] == "psk123456"
- #- test_five['wireless_lan']['tags'] | length == 3
+ # - test_five['wireless_lan']['tags'] | length == 3
- test_five['msg'] == "wireless_lan Wireless Network One created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml
index 51ae3a74f..57daaf62c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_lan_group.yml
@@ -4,16 +4,16 @@
### NETBOX_WIRELESS_LAN_GROUP
##
##
-- name: "1 - Test wireless LAN group creation"
+- name: 1 - Test wireless LAN group creation
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
+ name: Wireless LAN Group One
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['wireless_lan_group']['name'] == "Wireless LAN Group One"
- test_one['msg'] == "wireless_lan_group Wireless LAN Group One created"
-- name: "Test duplicate wireless LAN group"
+- name: Test duplicate wireless LAN group
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
+ name: Wireless LAN Group One
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['wireless_lan_group']['name'] == "Wireless LAN Group One"
- test_two['msg'] == "wireless_lan_group Wireless LAN Group One already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
- description: "New Description"
+ name: Wireless LAN Group One
+ description: New Description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "New Description"
@@ -54,27 +54,27 @@
- test_three['wireless_lan_group']['description'] == "New Description"
- test_three['msg'] == "wireless_lan_group Wireless LAN Group One updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
- state: "absent"
+ name: Wireless LAN Group One
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_lan_group Wireless LAN Group One deleted"
-- name: "5 - Create wireless LAN group with all parameters"
+- name: 5 - Create wireless LAN group with all parameters
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Wireless LAN Group One
description: Cool Wireless LAN Group
@@ -85,8 +85,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml
index baa4574d1..b026db81e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.6/tasks/netbox_wireless_link.yml
@@ -4,10 +4,10 @@
### NETBOX_WIRELESS_LINK
##
##
-- name: "1 - Test wireless link creation"
+- name: 1 - Test wireless link creation
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -17,8 +17,8 @@
name: wlink1
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,10 +27,10 @@
- test_one['wireless_link']['interface_b'] == 5
- test_one['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 created"
-- name: "Test duplicate wireless link"
+- name: Test duplicate wireless link
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -40,16 +40,16 @@
name: wlink1
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -60,18 +60,18 @@
status: planned
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
- test_three['wireless_link']['status'] == "planned"
- test_three['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -79,21 +79,21 @@
interface_b:
device: test100
name: wlink1
- state: "absent"
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 deleted"
-- name: "5 - Create wireless link with all parameters"
+- name: 5 - Create wireless link with all parameters
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -113,8 +113,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/main.yml
index d1c4b6fe7..d775d1889 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/main.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/main.yml
@@ -1,227 +1,227 @@
---
-- name: "NETBOX_DEVICE TESTS"
- include_tasks: "netbox_device.yml"
+- name: NETBOX_DEVICE TESTS
+ ansible.builtin.include_tasks: netbox_device.yml
-- name: "NETBOX_DEVICE_INTERFACE TESTS"
- include_tasks: "netbox_device_interface.yml"
+- name: NETBOX_DEVICE_INTERFACE TESTS
+ ansible.builtin.include_tasks: netbox_device_interface.yml
-- name: "NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS"
- include_tasks: "netbox_device_interface_template.yml"
+- name: NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_device_interface_template.yml
-- name: "NETBOX_IP_ADDRESS TESTS"
- include_tasks: "netbox_ip_address.yml"
+- name: NETBOX_IP_ADDRESS TESTS
+ ansible.builtin.include_tasks: netbox_ip_address.yml
-- name: "NETBOX_PREFIX TESTS"
- include_tasks: "netbox_prefix.yml"
+- name: NETBOX_PREFIX TESTS
+ ansible.builtin.include_tasks: netbox_prefix.yml
-- name: "NETBOX_SITE TESTS"
- include_tasks: "netbox_site.yml"
+- name: NETBOX_SITE TESTS
+ ansible.builtin.include_tasks: netbox_site.yml
-- name: "NETBOX_SITE_GROUP TESTS"
- include_tasks: "netbox_site_group.yml"
+- name: NETBOX_SITE_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_site_group.yml
-- name: "NETBOX_CONTACT TESTS"
- include_tasks: "netbox_contact.yml"
+- name: NETBOX_CONTACT TESTS
+ ansible.builtin.include_tasks: netbox_contact.yml
-- name: "NETBOX_CONTACT_ROLE TESTS"
- include_tasks: "netbox_contact_role.yml"
+- name: NETBOX_CONTACT_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_contact_role.yml
-- name: "NETBOX_TENTANT TESTS"
- include_tasks: "netbox_tenant.yml"
+- name: NETBOX_TENTANT TESTS
+ ansible.builtin.include_tasks: netbox_tenant.yml
-- name: "NETBOX_TENTANT_GROUP TESTS"
- include_tasks: "netbox_tenant_group.yml"
+- name: NETBOX_TENTANT_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_tenant_group.yml
-- name: "NETBOX_RACK TESTS"
- include_tasks: "netbox_rack.yml"
+- name: NETBOX_RACK TESTS
+ ansible.builtin.include_tasks: netbox_rack.yml
-- name: "NETBOX_RACK_ROLE TESTS"
- include_tasks: "netbox_rack_role.yml"
+- name: NETBOX_RACK_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_rack_role.yml
-- name: "NETBOX_LOCATION TESTS"
- include_tasks: "netbox_location.yml"
+- name: NETBOX_LOCATION TESTS
+ ansible.builtin.include_tasks: netbox_location.yml
-- name: "NETBOX_MANUFACTURER TESTS"
- include_tasks: "netbox_manufacturer.yml"
+- name: NETBOX_MANUFACTURER TESTS
+ ansible.builtin.include_tasks: netbox_manufacturer.yml
-- name: "NETBOX_PLATFORM TESTS"
- include_tasks: "netbox_platform.yml"
+- name: NETBOX_PLATFORM TESTS
+ ansible.builtin.include_tasks: netbox_platform.yml
-- name: "NETBOX_DEVICE_TYPE TESTS"
- include_tasks: "netbox_device_type.yml"
+- name: NETBOX_DEVICE_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_device_type.yml
-- name: "NETBOX_DEVICE_ROLE TESTS"
- include_tasks: "netbox_device_role.yml"
+- name: NETBOX_DEVICE_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_device_role.yml
-- name: "NETBOX_IPAM_ROLE TESTS"
- include_tasks: "netbox_ipam_role.yml"
+- name: NETBOX_IPAM_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_ipam_role.yml
-- name: "NETBOX_VLAN_GROUP TESTS"
- include_tasks: "netbox_vlan_group.yml"
+- name: NETBOX_VLAN_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_vlan_group.yml
-- name: "NETBOX_VLAN TESTS"
- include_tasks: "netbox_vlan.yml"
+- name: NETBOX_VLAN TESTS
+ ansible.builtin.include_tasks: netbox_vlan.yml
-- name: "NETBOX_VRF TESTS"
- include_tasks: "netbox_vrf.yml"
+- name: NETBOX_VRF TESTS
+ ansible.builtin.include_tasks: netbox_vrf.yml
-- name: "NETBOX_RIR TESTS"
- include_tasks: "netbox_rir.yml"
+- name: NETBOX_RIR TESTS
+ ansible.builtin.include_tasks: netbox_rir.yml
-- name: "NETBOX_AGGREGATE TESTS"
- include_tasks: "netbox_aggregate.yml"
+- name: NETBOX_AGGREGATE TESTS
+ ansible.builtin.include_tasks: netbox_aggregate.yml
-- name: "NETBOX_REGION TESTS"
- include_tasks: "netbox_region.yml"
+- name: NETBOX_REGION TESTS
+ ansible.builtin.include_tasks: netbox_region.yml
-- name: "NETBOX_DEVICE_BAY TESTS"
- include_tasks: "netbox_device_bay.yml"
+- name: NETBOX_DEVICE_BAY TESTS
+ ansible.builtin.include_tasks: netbox_device_bay.yml
-- name: "NETBOX_DEVICE_BAY_TEMPLATE TESTS"
- include_tasks: "netbox_device_bay_template.yml"
+- name: NETBOX_DEVICE_BAY_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_device_bay_template.yml
-- name: "NETBOX_INVENTORY_ITEM TESTS"
- include_tasks: "netbox_inventory_item.yml"
+- name: NETBOX_INVENTORY_ITEM TESTS
+ ansible.builtin.include_tasks: netbox_inventory_item.yml
-- name: "NETBOX_VIRTUAL_MACHINE TESTS"
- include_tasks: "netbox_virtual_machine.yml"
+- name: NETBOX_VIRTUAL_MACHINE TESTS
+ ansible.builtin.include_tasks: netbox_virtual_machine.yml
-- name: "NETBOX_CLUSTER TESTS"
- include_tasks: "netbox_cluster.yml"
+- name: NETBOX_CLUSTER TESTS
+ ansible.builtin.include_tasks: netbox_cluster.yml
-- name: "NETBOX_CLUSTER_GROUP TESTS"
- include_tasks: "netbox_cluster_group.yml"
+- name: NETBOX_CLUSTER_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_cluster_group.yml
-- name: "NETBOX_CLUSTER_TYPE TESTS"
- include_tasks: "netbox_cluster_type.yml"
+- name: NETBOX_CLUSTER_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_cluster_type.yml
-- name: "NETBOX_VM_INTERFACE TESTS"
- include_tasks: "netbox_vm_interface.yml"
+- name: NETBOX_VM_INTERFACE TESTS
+ ansible.builtin.include_tasks: netbox_vm_interface.yml
-- name: "NETBOX_PROVIDER TESTS"
- include_tasks: "netbox_provider.yml"
+- name: NETBOX_PROVIDER TESTS
+ ansible.builtin.include_tasks: netbox_provider.yml
-- name: "NETBOX_PROVIDER_NETWORK TESTS"
- include_tasks: "netbox_provider_network.yml"
+- name: NETBOX_PROVIDER_NETWORK TESTS
+ ansible.builtin.include_tasks: netbox_provider_network.yml
-- name: "NETBOX_CIRCUIT_TYPE TESTS"
- include_tasks: "netbox_circuit_type.yml"
+- name: NETBOX_CIRCUIT_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_circuit_type.yml
-- name: "NETBOX_CIRCUIT TESTS"
- include_tasks: "netbox_circuit.yml"
+- name: NETBOX_CIRCUIT TESTS
+ ansible.builtin.include_tasks: netbox_circuit.yml
-- name: "NETBOX_CIRCUIT_TERMINATION TESTS"
- include_tasks: "netbox_circuit_termination.yml"
+- name: NETBOX_CIRCUIT_TERMINATION TESTS
+ ansible.builtin.include_tasks: netbox_circuit_termination.yml
-- name: "NETBOX_REAR_PORT TESTS"
- include_tasks: "netbox_rear_port.yml"
+- name: NETBOX_REAR_PORT TESTS
+ ansible.builtin.include_tasks: netbox_rear_port.yml
-- name: "NETBOX_REAR_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_rear_port_template.yml"
+- name: NETBOX_REAR_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_rear_port_template.yml
-- name: "NETBOX_FRONT_PORT TESTS"
- include_tasks: "netbox_front_port.yml"
+- name: NETBOX_FRONT_PORT TESTS
+ ansible.builtin.include_tasks: netbox_front_port.yml
-- name: "NETBOX_FRONT_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_front_port_template.yml"
+- name: NETBOX_FRONT_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_front_port_template.yml
-- name: "NETBOX_CONSOLE_PORT TESTS"
- include_tasks: "netbox_console_port.yml"
+- name: NETBOX_CONSOLE_PORT TESTS
+ ansible.builtin.include_tasks: netbox_console_port.yml
-- name: "NETBOX_CONSOLE_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_console_port_template.yml"
+- name: NETBOX_CONSOLE_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_console_port_template.yml
-- name: "NETBOX_CONSOLE_SERVER_PORT TESTS"
- include_tasks: "netbox_console_server_port.yml"
+- name: NETBOX_CONSOLE_SERVER_PORT TESTS
+ ansible.builtin.include_tasks: netbox_console_server_port.yml
-- name: "NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_console_server_port_template.yml"
+- name: NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_console_server_port_template.yml
-- name: "NETBOX_POWER_PANEL TESTS"
- include_tasks: "netbox_power_panel.yml"
+- name: NETBOX_POWER_PANEL TESTS
+ ansible.builtin.include_tasks: netbox_power_panel.yml
-- name: "NETBOX_POWER_FEED TESTS"
- include_tasks: "netbox_power_feed.yml"
+- name: NETBOX_POWER_FEED TESTS
+ ansible.builtin.include_tasks: netbox_power_feed.yml
-- name: "NETBOX_POWER_PORT TESTS"
- include_tasks: "netbox_power_port.yml"
+- name: NETBOX_POWER_PORT TESTS
+ ansible.builtin.include_tasks: netbox_power_port.yml
-- name: "NETBOX_POWER_PORT_TEMPLATE TESTS"
- include_tasks: "netbox_power_port_template.yml"
+- name: NETBOX_POWER_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_power_port_template.yml
-- name: "NETBOX_POWER_OUTLET TESTS"
- include_tasks: "netbox_power_outlet.yml"
+- name: NETBOX_POWER_OUTLET TESTS
+ ansible.builtin.include_tasks: netbox_power_outlet.yml
-- name: "NETBOX_POWER_OUTLET_TEMPLATE TESTS"
- include_tasks: "netbox_power_outlet_template.yml"
+- name: NETBOX_POWER_OUTLET_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_power_outlet_template.yml
-- name: "NETBOX_VIRTUAL_CHASSIS TESTS"
- include_tasks: "netbox_virtual_chassis.yml"
+- name: NETBOX_VIRTUAL_CHASSIS TESTS
+ ansible.builtin.include_tasks: netbox_virtual_chassis.yml
# Module has to be updated for 3.3
-#- name: "NETBOX_CABLE TESTS"
+# - name: "NETBOX_CABLE TESTS"
# include_tasks: "netbox_cable.yml"
-- name: "NETBOX_SERVICE TESTS"
- include_tasks: "netbox_service.yml"
+- name: NETBOX_SERVICE TESTS
+ ansible.builtin.include_tasks: netbox_service.yml
-- name: "NETBOX_LOOKUP TESTS"
- include_tasks: "netbox_lookup.yml"
+- name: NETBOX_LOOKUP TESTS
+ ansible.builtin.include_tasks: netbox_lookup.yml
-- name: "NETBOX_TAG_TESTS"
- include_tasks: "netbox_tag.yml"
+- name: NETBOX_TAG_TESTS
+ ansible.builtin.include_tasks: netbox_tag.yml
-- name: "NETBOX_ROUTE_TARGET_TESTS"
- include_tasks: "netbox_route_target.yml"
+- name: NETBOX_ROUTE_TARGET_TESTS
+ ansible.builtin.include_tasks: netbox_route_target.yml
-- name: "NETBOX_WIRELESS_LAN TESTS"
- include_tasks:
- file: "netbox_wireless_lan.yml"
+- name: NETBOX_WIRELESS_LAN TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan.yml
apply:
tags:
- netbox_wireless_lan
tags:
- netbox_wireless_lan
-- name: "NETBOX_WIRELESS_LAN_GROUP TESTS"
- include_tasks:
- file: "netbox_wireless_lan_group.yml"
+- name: NETBOX_WIRELESS_LAN_GROUP TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan_group.yml
apply:
tags:
- netbox_wireless_lan_group
tags:
- netbox_wireless_lan_group
-- name: "NETBOX_WIRELESS_LINK TESTS"
- include_tasks:
- file: "netbox_wireless_link.yml"
+- name: NETBOX_WIRELESS_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_link.yml
apply:
tags:
- netbox_wireless_link
tags:
- netbox_wireless_link
-- name: "NETBOX_CUSTOM_FIELD TESTS"
- include_tasks:
- file: "netbox_custom_field.yml"
+- name: NETBOX_CUSTOM_FIELD TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_field.yml
apply:
tags:
- netbox_custom_field
tags:
- netbox_custom_field
-- name: "NETBOX_CUSTOM_LINK TESTS"
- include_tasks:
- file: "netbox_custom_link.yml"
+- name: NETBOX_CUSTOM_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_link.yml
apply:
tags:
- netbox_custom_link
tags:
- netbox_custom_link
-- name: "NETBOX_EXPORT_TEMPLATE TESTS"
- include_tasks:
- file: "netbox_export_template.yml"
+- name: NETBOX_EXPORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_export_template.yml
apply:
tags:
- netbox_export_template
@@ -229,7 +229,7 @@
- netbox_export_template
# Must update for 3.7
-#- name: "NETBOX_WEBHOOK TESTS"
+# - name: "NETBOX_WEBHOOK TESTS"
# include_tasks:
# file: "netbox_webhook.yml"
# apply:
@@ -238,7 +238,7 @@
# tags:
# - netbox_webhook
-#- name: "NETBOX_L2VPN TESTS"
+# - name: "NETBOX_L2VPN TESTS"
# include_tasks:
# file: "netbox_l2vpn.yml"
# apply:
@@ -247,7 +247,7 @@
# tags:
# - netbox_l2vpn
-#- name: "NETBOX_L2VPN_TERMINATION TESTS"
+# - name: "NETBOX_L2VPN_TERMINATION TESTS"
# include_tasks:
# file: "netbox_l2vpn_termination.yml"
# apply:
@@ -256,63 +256,63 @@
# tags:
# - netbox_l2vpn_termination
-- name: "NETBOX_INVENTORY_ITEM_ROLE TESTS"
- include_tasks:
- file: "netbox_inventory_item_role.yml"
+- name: NETBOX_INVENTORY_ITEM_ROLE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_inventory_item_role.yml
apply:
tags:
- netbox_inventory_item_role
tags:
- netbox_inventory_item_role
-- name: "NETBOX_MODULE_TYPE TESTS"
- include_tasks:
- file: "netbox_module_type.yml"
+- name: NETBOX_MODULE_TYPE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_module_type.yml
apply:
tags:
- netbox_module_type
tags:
- netbox_module_type
-- name: "NETBOX_SERVICE_TEMPLATE TESTS"
- include_tasks:
- file: "netbox_service_template.yml"
+- name: NETBOX_SERVICE_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_service_template.yml
apply:
tags:
- netbox_service_template
tags:
- netbox_service_template
-- name: "NETBOX_ASN TESTS"
- include_tasks: "netbox_asn.yml"
+- name: NETBOX_ASN TESTS
+ ansible.builtin.include_tasks: netbox_asn.yml
-- name: "NETBOX_FHRP_GROUP TESTS"
- include_tasks: "netbox_fhrp_group.yml"
+- name: NETBOX_FHRP_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_fhrp_group.yml
-- name: "NETBOX_JOURNAL_ENTRY TESTS"
- include_tasks:
- file: "netbox_journal_entry.yml"
+- name: NETBOX_JOURNAL_ENTRY TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_journal_entry.yml
apply:
tags:
- netbox_journal_entry
tags:
- netbox_journal_entry
-- name: "NETBOX_FHRP_GROUP_ASSIGNMENT TESTS"
- include_tasks:
- file: "netbox_fhrp_group_assignment.yml"
+- name: NETBOX_FHRP_GROUP_ASSIGNMENT TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_fhrp_group_assignment.yml
apply:
tags:
- netbox_fhrp_group_assignmen
tags:
- netbox_fhrp_group_assignmen
-- name: "NETBOX_CONFIG_TEMPLATE"
- include_tasks: "netbox_config_template.yml"
+- name: NETBOX_CONFIG_TEMPLATE
+ ansible.builtin.include_tasks: netbox_config_template.yml
tags:
- netbox_config_template
-- name: "NETBOX_VIRTUAL_DISK"
- include_tasks: "netbox_virtual_disk.yml"
+- name: NETBOX_VIRTUAL_DISK
+ ansible.builtin.include_tasks: netbox_virtual_disk.yml
tags:
- netbox_virtual_disk
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_aggregate.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_aggregate.yml
index ac02641dc..d45b9c835 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_aggregate.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_aggregate.yml
@@ -7,35 +7,35 @@
- name: "AGGREGATE 1: Necessary info creation"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
- rir: "Example RIR"
+ prefix: 10.0.0.0/8
+ rir: Example RIR
state: present
register: test_one
- name: "AGGREGATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
- test_one['diff']['after']['state'] == "present"
- test_one['aggregate']['prefix'] == "10.0.0.0/8"
- #- test_one['aggregate']['family'] == 4
+ # - test_one['aggregate']['family'] == 4
- test_one['aggregate']['rir'] == 1
- test_one['msg'] == "aggregate 10.0.0.0/8 created"
- name: "AGGREGATE 2: Create duplicate"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
+ prefix: 10.0.0.0/8
state: present
register: test_two
- name: "AGGREGATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['aggregate']['prefix'] == "10.0.0.0/8"
@@ -46,19 +46,19 @@
- name: "AGGREGATE 3: ASSERT - Update"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
- rir: "Example RIR"
+ prefix: 10.0.0.0/8
+ rir: Example RIR
date_added: "1989-01-18"
- description: "Test Description"
+ description: Test Description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "AGGREGATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['date_added'] == "1989-01-18"
@@ -75,14 +75,14 @@
- name: "AGGREGATE 4: ASSERT - Delete"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "10.0.0.0/8"
+ prefix: 10.0.0.0/8
state: absent
register: test_four
- name: "AGGREGATE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['aggregate']['prefix'] == "10.0.0.0/8"
@@ -96,20 +96,20 @@
- name: "AGGREGATE 5: Necessary info creation"
netbox.netbox.netbox_aggregate:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- prefix: "2001::/32"
- rir: "Example RIR"
+ prefix: 2001::/32
+ rir: Example RIR
state: present
register: test_five
- name: "AGGREGATE 5: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
- test_five['diff']['after']['state'] == "present"
- test_five['aggregate']['prefix'] == "2001::/32"
- #- test_five['aggregate']['family'] == 6
+ # - test_five['aggregate']['family'] == 6
- test_five['aggregate']['rir'] == 1
- test_five['msg'] == "aggregate 2001::/32 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_asn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_asn.yml
index 346bafda9..fab4270a9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_asn.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_asn.yml
@@ -7,7 +7,7 @@
- name: "ASN 1: Test ASN creation"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
@@ -27,7 +27,7 @@
- name: "ASN 2: Create duplicate"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
@@ -45,14 +45,14 @@
- name: "ASN 3: Update ASN with other fields"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
rir: Example RIR
tenant: Test Tenant
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -73,7 +73,7 @@
- name: "ASN 4: ASSERT - Delete"
netbox.netbox.netbox_asn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
asn: 1111111111
state: absent
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cable.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cable.yml
index 5728986c0..d6f0ab02e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cable.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cable.yml
@@ -10,7 +10,7 @@
- name: "CABLE 1: Necessary info creation"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -24,7 +24,7 @@
register: test_one
- name: "CABLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -38,7 +38,7 @@
- name: "CABLE 2: Create duplicate"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -52,7 +52,7 @@
register: test_two
- name: "CABLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cable']['termination_a_type'] == "dcim.interface"
@@ -64,7 +64,7 @@
- name: "CABLE 3: Update Cable with other fields"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -81,13 +81,13 @@
length: 30
length_unit: m
tags:
- - "Schnozzberry"
- tenant: "Test Tenant"
+ - Schnozzberry
+ tenant: Test Tenant
state: present
register: test_three
- name: "CABLE 3: ASSERT - Update Cable with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "mmf-om4"
@@ -115,7 +115,7 @@
- name: "CABLE 4: ASSERT - Delete"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.interface
termination_a:
@@ -129,7 +129,7 @@
register: test_four
- name: "CABLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -139,7 +139,7 @@
- name: "CABLE 5: Connect Console Port and Console Server Port"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: dcim.consoleserverport
termination_a:
@@ -153,7 +153,7 @@
register: test_five
- name: "CABLE 5: ASSERT - Connect Console Port and Console Server Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -167,21 +167,21 @@
- name: "CABLE 6: Circuits Termination as side A"
netbox.netbox.netbox_cable:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
termination_a_type: circuits.circuittermination
termination_a:
- circuit: "Test Circuit Two"
- term_side: "A"
+ circuit: Test Circuit Two
+ term_side: A
termination_b_type: dcim.interface
termination_b:
- device: "test100"
- name: "GigabitEthernet2"
+ device: test100
+ name: GigabitEthernet2
state: present
register: test_six
- name: "CABLE 6: ASSERT - Circuits Termination as side A"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit.yml
index 1d8f1ec47..8bfd265ac 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit.yml
@@ -7,7 +7,7 @@
- name: "NETBOX_CIRCUIT 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -16,7 +16,7 @@
register: test_one
- name: "NETBOX_CIRCUIT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "NETBOX_CIRCUIT 2: Duplicate"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -38,7 +38,7 @@
register: test_two
- name: "NETBOX_CIRCUIT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit']['cid'] == "Test Circuit One"
@@ -49,7 +49,7 @@
- name: "NETBOX_CIRCUIT 3: Update provider with other fields"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
provider: Test Provider
@@ -59,12 +59,12 @@
install_date: "2018-12-25"
commit_rate: 10000
description: "Test circuit "
- comments: "FAST CIRCUIT"
+ comments: FAST CIRCUIT
state: present
register: test_three
- name: "NETBOX_CIRCUIT 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
@@ -87,14 +87,14 @@
- name: "NETBOX_CIRCUIT 4: Delete provider within netbox"
netbox.netbox.netbox_circuit:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
cid: Test Circuit One
state: absent
register: test_four
- name: "NETBOX_CIRCUIT 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['circuit']['cid'] == "Test Circuit One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml
index a37cdc680..cbc749f63 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_termination.yml
@@ -7,17 +7,17 @@
- name: "NETBOX_CIRCUIT_TERMINATION 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
- site: "Test Site"
+ site: Test Site
port_speed: 10000
state: present
register: test_one
- name: "NETBOX_CIRCUIT_TERMINATION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,7 +31,7 @@
- name: "NETBOX_CIRCUIT_TERMINATION 2: Duplicate"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
@@ -39,7 +39,7 @@
register: test_two
- name: "NETBOX_CIRCUIT_TERMINATION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit_termination']['circuit'] == 1
@@ -51,19 +51,19 @@
- name: "NETBOX_CIRCUIT_TERMINATION 3: Update provider with other fields"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
upstream_speed: 1000
xconnect_id: 10X100
pp_info: PP10-24
- description: "Test description"
+ description: Test description
state: present
register: test_three
- name: "NETBOX_CIRCUIT_TERMINATION 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['upstream_speed'] == 1000
@@ -83,17 +83,17 @@
- name: "NETBOX_CIRCUIT_TERMINATION 4: Create Z Side"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: Z
- site: "Test Site"
+ site: Test Site
port_speed: 10000
state: present
register: test_four
- name: "NETBOX_CIRCUIT_TERMINATION 4: ASSERT - Create Z Side"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,7 +107,7 @@
- name: "NETBOX_CIRCUIT_TERMINATION 5: Delete provider within netbox"
netbox.netbox.netbox_circuit_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
circuit: Test Circuit
term_side: A
@@ -115,7 +115,7 @@
register: test_five
- name: "NETBOX_CIRCUIT_TERMINATION 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['circuit_termination']['circuit'] == 1
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml
index 45f956e34..08dc67985 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_circuit_type.yml
@@ -7,14 +7,14 @@
- name: "CIRCUIT_TYPE 1: Necessary info creation"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: present
register: test_one
- name: "CIRCUIT_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CIRCUIT_TYPE 2: Create duplicate"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: present
register: test_two
- name: "CIRCUIT_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['circuit_type']['name'] == "Test Circuit Type One"
@@ -43,15 +43,15 @@
- name: "CIRCUIT_TYPE 3: User specified slug"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type Two"
- slug: "test-circuit-type-2"
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
state: present
register: test_three
- name: "CIRCUIT_TYPE 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type One"
+ name: Test Circuit Type One
state: absent
register: test_four
- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['circuit_type']['name'] == "Test Circuit Type One"
@@ -80,15 +80,15 @@
- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
netbox.netbox.netbox_circuit_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Circuit Type Two"
- slug: "test-circuit-type-2"
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
state: absent
register: test_five
- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['circuit_type']['name'] == "Test Circuit Type Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster.yml
index 11ba5ef82..b2810a1be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster.yml
@@ -7,15 +7,15 @@
- name: "CLUSTER 1: Necessary info creation"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
state: present
register: test_one
- name: "CLUSTER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "CLUSTER 2: Create duplicate"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
state: present
register: test_two
- name: "CLUSTER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster']['name'] == "Test Cluster One"
@@ -45,21 +45,21 @@
- name: "CLUSTER 3: Update"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
- cluster_type: "Test Cluster Type"
- cluster_group: "Test Cluster Group"
- site: "Test Site"
- comments: "Updated cluster"
- tenant: "Test Tenant"
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
+ cluster_group: Test Cluster Group
+ site: Test Site
+ comments: Updated cluster
+ tenant: Test Tenant
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "CLUSTER 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['group'] == 1
@@ -79,14 +79,14 @@
- name: "CLUSTER 4: ASSERT - Delete"
netbox.netbox.netbox_cluster:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster One"
+ name: Test Cluster One
state: absent
register: test_four
- name: "CLUSTER 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster']['name'] == "Test Cluster One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml
index 9f84518ce..64dda6c44 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_group.yml
@@ -7,14 +7,14 @@
- name: "CLUSTER_GROUP 1: Necessary info creation"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: present
register: test_one
- name: "CLUSTER_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CLUSTER_GROUP 2: Create duplicate"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: present
register: test_two
- name: "CLUSTER_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster_group']['name'] == "Test Cluster Group One"
@@ -43,15 +43,15 @@
- name: "CLUSTER_GROUP 3: User specified slug"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group Two"
- slug: "test-cluster-group-2"
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
state: present
register: test_three
- name: "CLUSTER_GROUP 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CLUSTER_GROUP 4: ASSERT - Delete"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group One"
+ name: Test Cluster Group One
state: absent
register: test_four
- name: "CLUSTER_GROUP 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster_group']['name'] == "Test Cluster Group One"
@@ -80,15 +80,15 @@
- name: "CLUSTER_GROUP 5: ASSERT - Delete"
netbox.netbox.netbox_cluster_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Group Two"
- slug: "test-cluster-group-2"
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
state: absent
register: test_five
- name: "CLUSTER_GROUP 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['cluster_group']['name'] == "Test Cluster Group Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml
index 11c88206f..bc09ac8a6 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_cluster_type.yml
@@ -7,14 +7,14 @@
- name: "CLUSTER_TYPE 1: Necessary info creation"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: present
register: test_one
- name: "CLUSTER_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CLUSTER_TYPE 2: Create duplicate"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: present
register: test_two
- name: "CLUSTER_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['cluster_type']['name'] == "Test Cluster Type One"
@@ -43,15 +43,15 @@
- name: "CLUSTER_TYPE 3: User specified slug"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type Two"
- slug: "test-cluster-type-2"
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
state: present
register: test_three
- name: "CLUSTER_TYPE 3: ASSERT - User specified slug"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -63,14 +63,14 @@
- name: "CLUSTER_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type One"
+ name: Test Cluster Type One
state: absent
register: test_four
- name: "CLUSTER_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['cluster_type']['name'] == "Test Cluster Type One"
@@ -80,15 +80,15 @@
- name: "CLUSTER_TYPE 5: ASSERT - Delete"
netbox.netbox.netbox_cluster_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Cluster Type Two"
- slug: "test-cluster-type-2"
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
state: absent
register: test_five
- name: "CLUSTER_TYPE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['cluster_type']['name'] == "Test Cluster Type Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_context.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_context.yml
index 2353dfbfa..910ec82ca 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_context.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_context.yml
@@ -7,16 +7,16 @@
- name: "CONFIG_CONTEXT 1: Necessary info creation"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"testvaule\" ] } }"
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
state: present
register: test_one
- name: "CONFIG_CONTEXT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,16 +31,16 @@
- name: "CONFIG_CONTEXT 2: Create duplicate"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"testvaule\" ] } }"
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
state: present
register: test_two
- name: "CONFIG_CONTEXT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['config_context']['name'] == "test_context"
@@ -49,18 +49,18 @@
- name: "CONFIG_CONTEXT 3: Update data and attach to site"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- description: "Updated test context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"updatedvaule\" ] } }"
+ name: test_context
+ description: Updated test context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
weight: 100
sites: [test-site]
state: present
register: test_three
- name: "CONFIG_CONTEXT 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['data'].testkey.testsubkey[0] == "updatedvaule"
@@ -73,16 +73,16 @@
- name: "CONFIG_CONTEXT 4: Detach from site"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
- data: "{ \"testkey\": { \"testsubkey\": [ \"updatedvaule\" ] } }"
+ name: test_context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
sites: []
state: present
register: test_four
- name: "CONFIG_CONTEXT 4: ASSERT - Detached"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['sites']|length == 0
@@ -92,14 +92,14 @@
- name: "CONFIG_CONTEXT 5: Delete"
netbox.netbox.netbox_config_context:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_context"
+ name: test_context
state: absent
register: test_five
- name: "CONFIG_CONTEXT 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_template.yml
index 9abf3ea05..af6cfb471 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_config_template.yml
@@ -7,16 +7,16 @@
- name: "CONFIG_TEMPLATES 1: Necessary info creation"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
- description: "Test template"
- template_code: "test template"
+ name: test_template
+ description: Test template
+ template_code: test template
state: present
register: test_one
- name: "CONFIG_TEMPLATES 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,16 +29,16 @@
- name: "CONFIG_TEMPLATES 2: Create duplicate"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
- description: "Test template"
- template_code: "test template"
+ name: test_template
+ description: Test template
+ template_code: test template
state: present
register: test_two
- name: "CONFIG_TEMPLATES 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['config_template']['name'] == "test_template"
@@ -47,16 +47,16 @@
- name: "CONFIG_TEMPLATES 3: Update data"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
- description: "Updated test template"
- template_code: "updated test template"
+ name: test_template
+ description: Updated test template
+ template_code: updated test template
state: present
register: test_three
- name: "CONFIG_TEMPLATES 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['template_code'] == "updated test template"
@@ -67,14 +67,14 @@
- name: "CONFIG_TEMPLATES 4: Delete"
netbox.netbox.netbox_config_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test_template"
+ name: test_template
state: absent
register: test_four
- name: "CONFIG_TEMPLATES 4: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['state'] == "absent"
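For reference, the post-conversion style that every hunk in these test files follows can be shown as one standalone create/assert pair: modules are called by their fully qualified collection names (netbox.netbox.* and ansible.builtin.assert), the netbox_token is quoted, and other plain scalars are left unquoted. This is an illustrative sketch only, reusing the placeholder URL and token from the tests above; the object name is hypothetical and the snippet is not part of the diff.

# Illustrative example (not part of the diff)
- name: "EXAMPLE: Create an object using the updated style"
  netbox.netbox.netbox_device_role:
    netbox_url: http://localhost:32768
    netbox_token: "0123456789abcdef0123456789abcdef01234567"
    data:
      name: Example Device Role   # hypothetical object name
      color: FFFFFF
    state: present
  register: example_create

- name: "EXAMPLE: Assert the object was created"
  ansible.builtin.assert:
    that:
      - example_create is changed
      - example_create['diff']['before']['state'] == "absent"
      - example_create['diff']['after']['state'] == "present"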
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port.yml
index 1c97cb8ad..6be028bec 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_PORT 1: Necessary info creation"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_PORT 2: Create duplicate"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_port']['name'] == "Console Port"
@@ -48,7 +48,7 @@
- name: "CONSOLE_PORT 3: Update Console Port with other fields"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port
device: test100
@@ -58,7 +58,7 @@
register: test_three
- name: "CONSOLE_PORT 3: ASSERT - Update Console Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -72,7 +72,7 @@
- name: "CONSOLE_PORT 4: Create Console Port for Delete Test"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port 2
device: test100
@@ -80,7 +80,7 @@
register: test_four
- name: "CONSOLE_PORT 4: ASSERT - Create Console Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -92,7 +92,7 @@
- name: "CONSOLE_PORT 5: Delete Console Port"
netbox.netbox.netbox_console_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port 2
device: test100
@@ -100,7 +100,7 @@
register: test_five
- name: "CONSOLE_PORT 5: ASSERT - Delete Console Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml
index b1bd1d1e4..ef6149214 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_port_template.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_port_template']['name'] == "Console Port Template"
@@ -48,7 +48,7 @@
- name: "CONSOLE_PORT_TEMPLATE 3: Update Console Port Template with other fields"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template
device_type: Cisco Test
@@ -57,7 +57,7 @@
register: test_three
- name: "CONSOLE_PORT_TEMPLATE 3: ASSERT - Update Console Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -69,7 +69,7 @@
- name: "CONSOLE_PORT_TEMPLATE 4: Create Console Port Template for Delete Test"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template 2
device_type: Cisco Test
@@ -77,7 +77,7 @@
register: test_four
- name: "CONSOLE_PORT_TEMPLATE 4: ASSERT - Create Console Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "CONSOLE_PORT_TEMPLATE 5: Delete Console Port Template"
netbox.netbox.netbox_console_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Port Template 2
device_type: Cisco Test
@@ -97,7 +97,7 @@
register: test_five
- name: "CONSOLE_PORT_TEMPLATE 5: ASSERT - Delete Console Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml
index f70345329..0ac7b65d0 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_SERVER_PORT 1: Necessary info creation"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_SERVER_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_SERVER_PORT 2: Create duplicate"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_SERVER_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_server_port']['name'] == "Console Server Port"
@@ -48,7 +48,7 @@
- name: "CONSOLE_SERVER_PORT 3: Update Console Server Port with other fields"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port
device: test100
@@ -58,7 +58,7 @@
register: test_three
- name: "CONSOLE_SERVER_PORT 3: ASSERT - Update Console Server Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -72,7 +72,7 @@
- name: "CONSOLE_SERVER_PORT 4: Create Console Server Port for Delete Test"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port 2
device: test100
@@ -80,7 +80,7 @@
register: test_four
- name: "CONSOLE_SERVER_PORT 4: ASSERT - Create Console Server Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -92,7 +92,7 @@
- name: "CONSOLE_SERVER_PORT 5: Delete Console Server Port"
netbox.netbox.netbox_console_server_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port 2
device: test100
@@ -100,7 +100,7 @@
register: test_five
- name: "CONSOLE_SERVER_PORT 5: ASSERT - Delete Console Server Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml
index 4ad47d40a..4c7dfc846 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_console_server_port_template.yml
@@ -10,7 +10,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -18,7 +18,7 @@
register: test_one
- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -38,7 +38,7 @@
register: test_two
- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['console_server_port_template']['name'] == "Console Server Port Template"
@@ -48,7 +48,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: Update Console Server Port Template with other fields"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template
device_type: Cisco Test
@@ -57,7 +57,7 @@
register: test_three
- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: ASSERT - Update Console Server Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "usb-a"
@@ -69,7 +69,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: Create Console Server Port Template for Delete Test"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template 2
device_type: Cisco Test
@@ -77,7 +77,7 @@
register: test_four
- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: ASSERT - Create Console Server Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: Delete Console Server Port Template"
netbox.netbox.netbox_console_server_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Console Server Port Template 2
device_type: Cisco Test
@@ -97,7 +97,7 @@
register: test_five
- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: ASSERT - Delete Console Server Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact.yml
index feb9f7e9f..f5ede9c1a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact.yml
@@ -4,16 +4,16 @@
### NETBOX_CONTACT
##
##
-- name: "1 - Test contact creation"
+- name: 1 - Test contact creation
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
+ name: Contact ABC
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['contact']['name'] == "Contact ABC"
- test_one['msg'] == "contact Contact ABC created"
-- name: "Test duplicate contact"
+- name: Test duplicate contact
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
+ name: Contact ABC
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['contact']['name'] == "Contact ABC"
- test_two['msg'] == "contact Contact ABC already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- title: "New Title"
+ name: Contact ABC
+ title: New Title
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['title'] == "New Title"
@@ -54,41 +54,41 @@
- test_three['contact']['title'] == "New Title"
- test_three['msg'] == "contact Contact ABC updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- state: "absent"
+ name: Contact ABC
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "contact Contact ABC deleted"
-- name: "5 - Create contact with all parameters"
+- name: 5 - Create contact with all parameters
netbox.netbox.netbox_contact:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Contact ABC"
- title: "Fancy title"
+ name: Contact ABC
+ title: Fancy title
phone: "12345678"
- email: "contact@contact.com"
+ email: contact@contact.com
tags:
- - "tagA"
- - "tagB"
- - "tagC"
+ - tagA
+ - tagB
+ - tagC
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact_role.yml
index fb875748e..c84703da9 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_contact_role.yml
@@ -7,14 +7,14 @@
- name: "CONTACT_ROLE 1: Necessary info creation"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
+ name: Test Contact Role
state: present
register: test_one
- name: "CONTACT_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "CONTACT_ROLE 2: Create duplicate"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
+ name: Test Contact Role
state: present
register: test_two
- name: "CONTACT ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['contact_role']['name'] == "Test Contact Role"
@@ -43,15 +43,15 @@
- name: "CONTACT_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Contact Role"
- description: "Update description"
+ name: Test Contact Role
+ description: Update description
state: present
register: test_three
- name: "IPAM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Update description"
@@ -63,14 +63,14 @@
- name: "CONTACT_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Contact Role
state: absent
register: test_four
- name: "CONTACT_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -80,14 +80,14 @@
- name: "CONTACT_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_contact_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Contact Role
state: absent
register: test_five
- name: "CONTACT_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['contact_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_field.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_field.yml
index 7b8525e8e..c15f2565a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_field.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_field.yml
@@ -7,17 +7,17 @@
- name: "CUSTOM_FIELD 1: Necessary info creation"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
type: text
state: present
register: test_one
- name: "CUSTOM_FIELD 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,16 +32,16 @@
- name: "CUSTOM_FIELD 2: Create duplicate"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: A_CustomField
state: present
register: test_two
- name: "CUSTOM_FIELD 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['custom_field']['name'] == "A_CustomField"
@@ -50,18 +50,18 @@
- name: "CUSTOM_FIELD 3: Update data and make it required"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
- name: "A_CustomField"
- description: "Added a description"
- required: yes
+ - dcim.device
+ name: A_CustomField
+ description: Added a description
+ required: true
state: present
register: test_three
- name: "CUSTOM_FIELD 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Added a description"
@@ -72,18 +72,18 @@
- name: "CUSTOM_FIELD 4: Change content type"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
- name: "A_CustomField"
- description: "Added a description"
- required: yes
+ - virtualization.virtualmachine
+ name: A_CustomField
+ description: Added a description
+ required: true
state: present
register: test_four
- name: "CUSTOM_FIELD 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -93,14 +93,14 @@
- name: "CUSTOM_FIELD 5: Delete"
netbox.netbox.netbox_custom_field:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "A_CustomField"
+ name: A_CustomField
state: absent
register: test_five
- name: "CUSTOM_FIELD 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
@@ -108,7 +108,7 @@
- test_five['msg'] == "custom_field A_CustomField deleted"
# Change in NetBox 3.7
-#- name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
+# - name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
# netbox.netbox.netbox_custom_field:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -121,7 +121,7 @@
# state: present
# register: test_six
-#- name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
+# - name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
# assert:
# that:
# - test_six is changed
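The custom-field hunks also normalise truthy values, so `required: yes` becomes the lowercase YAML boolean `required: true`, matching the booleans used throughout the rest of the conversion. A minimal sketch of the resulting task, again reusing the placeholder URL and token (the field name is hypothetical):

# Illustrative example (not part of the diff)
- name: "EXAMPLE: Required custom field with normalised booleans"
  netbox.netbox.netbox_custom_field:
    netbox_url: http://localhost:32768
    netbox_token: "0123456789abcdef0123456789abcdef01234567"
    data:
      content_types:
        - dcim.device
      name: Example_CustomField   # hypothetical field name
      type: text
      required: true              # previously written as "required: yes"
    state: present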
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_link.yml
index 8376d357b..b9bbcc8a7 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_link.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_custom_link.yml
@@ -7,10 +7,10 @@
- name: "CUSTOM_LINK 1: Necessary info creation"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -18,7 +18,7 @@
register: test_one
- name: "CUSTOM_LINK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -31,10 +31,10 @@
- name: "CUSTOM_LINK 2: Create duplicate"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -42,7 +42,7 @@
register: test_two
- name: "CUSTOM_LINK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['custom_link']['name'] == "Custom Link"
@@ -51,10 +51,10 @@
- name: "CUSTOM_FIELD 3: Update data and add weight"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -63,7 +63,7 @@
register: test_three
- name: "CUSTOM_FIELD 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['weight'] == 50
@@ -73,10 +73,10 @@
- name: "CUSTOM_LINK 4: Change content type"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -84,7 +84,7 @@
register: test_four
- name: "CUSTOM_LINK 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -94,10 +94,10 @@
- name: "CUSTOM_LINK 5: Delete"
netbox.netbox.netbox_custom_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Custom Link
link_text: Open Web management
link_url: !unsafe https://{{ obj.name }}.domain.local/
@@ -105,7 +105,7 @@
register: test_five
- name: "CUSTOM_LINK 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device.yml
index 5a791b242..a3a5d8308 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device.yml
@@ -4,22 +4,22 @@
### NETBOX_DEVICE
##
##
-- name: "1 - Device with required information"
+- name: 1 - Device with required information
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
+ name: R1
device_type:
id: "1"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == 'absent'
@@ -32,21 +32,21 @@
- test_one['device']['name'] == "R1"
- test_one['msg'] == "device R1 created"
-- name: "2 - Duplicate device"
+- name: 2 - Duplicate device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device']['name'] == "R1"
@@ -56,24 +56,24 @@
- test_two['device']['status'] == "staged"
- test_two['msg'] == "device R1 already exists"
-- name: "3 - Update device"
+- name: 3 - Update device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1"
- serial: "FXS1001"
+ name: R1
+ serial: FXS1001
local_context_data:
bgp_as: "65412"
- virtual_chassis: "VC1"
+ virtual_chassis: VC1
vc_position: 3
vc_priority: 15
- location: "Test Rack Group"
+ location: Test Rack Group
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['serial'] == "FXS1001"
@@ -94,20 +94,20 @@
- test_three['device']['location'] == 1
- test_three['msg'] == "device R1 updated"
-- name: "3.1 - Update device name using query_params"
+- name: 3.1 - Update device name using query_params
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1-changed-name"
- serial: "FXS1001"
+ name: R1-changed-name
+ serial: FXS1001
query_params:
- serial
state: present
register: test_three_dot_one
-- name: "3.1 - ASSERT"
- assert:
+- name: 3.1 - ASSERT
+ ansible.builtin.assert:
that:
- test_three_dot_one is changed
- test_three_dot_one['diff']['after']['name'] == "R1-changed-name"
@@ -119,27 +119,27 @@
- test_three_dot_one['device']['local_context_data']["bgp_as"] == "65412"
- test_three_dot_one['msg'] == "device R1-changed-name updated"
-- name: "4 - Create device with tags and assign to rack"
+- name: 4 - Create device with tags and assign to rack
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "TestR1"
+ name: TestR1
device_type: "1841"
- device_role: "Core Switch"
- site: "Test Site2"
- rack: "Test Rack Site 2"
+ device_role: Core Switch
+ site: Test Site2
+ rack: Test Rack Site 2
position: 35.5
- face: "Front"
+ face: Front
tags:
- - "schnozzberry"
- tenant: "Test Tenant"
+ - schnozzberry
+ tenant: Test Tenant
asset_tag: "1234"
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -155,53 +155,53 @@
- test_four['device']['asset_tag'] == '1234'
- test_four['msg'] == "device TestR1 created"
-- name: "5 - Delete previous device"
+- name: 5 - Delete previous device
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "TestR1"
+ name: TestR1
state: absent
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
- test_five['diff']['after']['state'] == "absent"
- test_five['msg'] == "device TestR1 deleted"
-- name: "6 - Delete R1"
+- name: 6 - Delete R1
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "R1-changed-name"
+ name: R1-changed-name
state: absent
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "present"
- test_six['diff']['after']['state'] == "absent"
- test_six['msg'] == "device R1-changed-name deleted"
-- name: "7 - Add primary_ip4/6 to test100"
+- name: 7 - Add primary_ip4/6 to test100
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "test100"
- primary_ip4: "172.16.180.1/24"
- primary_ip6: "2001::1:1/64"
+ name: test100
+ primary_ip4: 172.16.180.1/24
+ primary_ip6: 2001::1:1/64
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['after']['primary_ip4'] == 1
@@ -215,22 +215,22 @@
- test_seven['device']['primary_ip6'] == 2
- test_seven['msg'] == "device test100 updated"
-- name: "8 - Device with empty string name"
+- name: 8 - Device with empty string name
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: ""
device_type:
id: 1
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == 'absent'
@@ -240,4 +240,4 @@
- test_eight['device']['site'] == 1
- test_eight['device']['status'] == "staged"
- "'-' in test_eight['device']['name']"
- - "test_eight['device']['name'] | length == 36"
+ - test_eight['device']['name'] | length == 36
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay.yml
index 18a804354..b892d2160 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay.yml
@@ -7,15 +7,15 @@
- name: "DEVICE_BAY 1: Necessary info creation"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
+ device: Test Nexus One
+ name: Device Bay One
state: present
register: test_one
- name: "DEVICE_BAY 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "DEVICE_BAY 2: Create duplicate"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
+ device: Test Nexus One
+ name: Device Bay One
state: present
register: test_two
- name: "DEVICE_BAY 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_bay']['name'] == "Device Bay One"
@@ -45,18 +45,18 @@
- name: "DEVICE_BAY 3: ASSERT - Update"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "Test Nexus One"
- name: "Device Bay One"
- installed_device: "Test Nexus Child One"
+ device: Test Nexus One
+ name: Device Bay One
+ installed_device: Test Nexus Child One
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "DEVICE_BAY 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['installed_device'] == 5
@@ -70,14 +70,14 @@
- name: "DEVICE_BAY 4: ASSERT - Delete"
netbox.netbox.netbox_device_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Device Bay One"
+ name: Device Bay One
state: absent
register: test_four
- name: "DEVICE_BAY 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['device_bay']['name'] == "Device Bay One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml
index 4b6965033..c2ba1e32c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_bay_template.yml
@@ -7,7 +7,7 @@
- name: "DEVICE_BAY_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template One
@@ -15,7 +15,7 @@
register: test_one
- name: "DEVICE_BAY_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "DEVICE_BAY_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template One
@@ -35,7 +35,7 @@
register: test_two
- name: "DEVICE_BAY_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_bay_template']['name'] == "Device Bay Template One"
@@ -45,7 +45,7 @@
- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Create Device Bay Template for Delete Test"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: WS Test 3850
name: Device Bay Template Two
@@ -53,7 +53,7 @@
register: test_three
- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -65,7 +65,7 @@
- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
netbox.netbox.netbox_device_bay_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Bay Template Two
device_type: WS Test 3850
@@ -73,7 +73,7 @@
register: test_four
- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['device_bay_template']['name'] == "Device Bay Template Two"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface.yml
index fa3f10214..dd3c2383b 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface.yml
@@ -1,18 +1,18 @@
---
# NETBOX_DEVICE_INTERFACE
-- name: "1 - Interface with required information"
+- name: 1 - Interface with required information
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet3
- type: "1000Base-T (1GE)"
+ type: 1000Base-T (1GE)
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['msg'] == "interface GigabitEthernet3 created"
@@ -21,9 +21,9 @@
- test_one['interface']['name'] == "GigabitEthernet3"
- test_one['interface']['device'] == 1
-- name: "2 - Update test100 - GigabitEthernet3"
+- name: 2 - Update test100 - GigabitEthernet3
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -32,8 +32,8 @@
enabled: false
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "interface GigabitEthernet3 updated"
@@ -44,9 +44,9 @@
- test_two['interface']['enabled'] == false
- test_two['interface']['mtu'] == 1600
-- name: "3 - Delete interface test100 - GigabitEthernet3"
+- name: 3 - Delete interface test100 - GigabitEthernet3
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -54,17 +54,17 @@
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['msg'] == "interface GigabitEthernet3 deleted"
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
-- name: "4 - Create LAG with several specified options"
+- name: 4 - Create LAG with several specified options
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -76,8 +76,8 @@
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['msg'] == "interface port-channel1 created"
@@ -91,9 +91,9 @@
- test_four['interface']['mode'] == "access"
- test_four['interface']['mtu'] == 1600
-- name: "5 - Create interface and assign it to parent LAG"
+- name: 5 - Create interface and assign it to parent LAG
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -108,8 +108,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['msg'] == "interface GigabitEthernet3 created"
@@ -124,9 +124,9 @@
- test_five['interface']['mode'] == "access"
- test_five['interface']['mtu'] == 1600
-- name: "6 - Create interface as trunk port"
+- name: 6 - Create interface as trunk port
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -147,8 +147,8 @@
state: present
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['msg'] == "interface GigabitEthernet21 created"
@@ -164,50 +164,50 @@
- test_six['interface']['tagged_vlans'] == [2, 3]
- test_six['interface']['untagged_vlan'] == 1
-- name: "7 - Duplicate Interface"
+- name: 7 - Duplicate Interface
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet1
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- not test_seven['changed']
- test_seven['msg'] == "interface GigabitEthernet1 already exists"
- test_seven['interface']['name'] == "GigabitEthernet1"
- test_seven['interface']['device'] == 1
-- name: "Add port-channel1 to R1 to test finding proper port-channel1"
+- name: Add port-channel1 to R1 to test finding proper port-channel1
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "R1-Device"
- name: "port-channel1"
- type: "Link Aggregation Group (LAG)"
+ device: R1-Device
+ name: port-channel1
+ type: Link Aggregation Group (LAG)
-- name: "8 - Create interface and assign it to parent LAG - non dict"
+- name: 8 - Create interface and assign it to parent LAG - non dict
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: GigabitEthernet4
enabled: false
type: 1000Base-T (1GE)
- lag: "port-channel1"
+ lag: port-channel1
mtu: 1600
mgmt_only: false
mode: Access
state: present
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['msg'] == "interface GigabitEthernet4 created"
@@ -222,9 +222,9 @@
- test_eight['interface']['mode'] == "access"
- test_eight['interface']['mtu'] == 1600
-- name: "9 - Create interface on VC child"
+- name: 9 - Create interface on VC child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus Child One
@@ -233,8 +233,8 @@
state: present
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['msg'] == "interface Ethernet2/2 created"
@@ -245,21 +245,21 @@
- test_nine['interface']['enabled'] == true
- test_nine['interface']['type'] == "1000base-t"
-- name: "10 - Update interface on VC child"
+- name: 10 - Update interface on VC child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus One
name: Ethernet2/2
- description: "Updated child interface from parent device"
+ description: Updated child interface from parent device
type: 1000Base-T (1GE)
- update_vc_child: True
+ update_vc_child: true
state: present
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['msg'] == "interface Ethernet2/2 updated"
@@ -270,28 +270,28 @@
- test_ten['interface']['type'] == "1000base-t"
- test_ten['interface']['description'] == 'Updated child interface from parent device'
-- name: "11 - Update interface on VC child w/o update_vc_child"
+- name: 11 - Update interface on VC child w/o update_vc_child
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: Test Nexus One
name: Ethernet2/2
- description: "Updated child interface from parent device - test"
+ description: Updated child interface from parent device - test
type: 1000Base-T (1GE)
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is failed
- test_eleven['msg'] == "Must set update_vc_child to True to allow child device interface modification"
-- name: "12 - Create interface and mark it as connected"
+- name: 12 - Create interface and mark it as connected
netbox.netbox.netbox_device_interface:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
@@ -300,8 +300,8 @@
mark_connected: true
register: test_twelve
-- name: "12- ASSERT"
- assert:
+- name: 12- ASSERT
+ ansible.builtin.assert:
that:
- test_twelve is changed
- test_twelve['msg'] == "interface GigabitEthernet5 created"
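The interface tests also exercise the failure path: with `ignore_errors: true` (formerly `yes`) the play continues past an expected module error so a following ansible.builtin.assert can check `is failed`. A hedged sketch of that pattern, assuming the same virtual-chassis fixtures used in the tests above; the description text is hypothetical:

# Illustrative example (not part of the diff)
- name: "EXAMPLE: Update a VC child interface without update_vc_child"
  netbox.netbox.netbox_device_interface:
    netbox_url: http://localhost:32768
    netbox_token: "0123456789abcdef0123456789abcdef01234567"
    data:
      device: Test Nexus One
      name: Ethernet2/2
      description: Example update that should be rejected   # hypothetical description
      type: 1000Base-T (1GE)
    state: present
  ignore_errors: true   # formerly "ignore_errors: yes"
  register: example_failure

- name: "EXAMPLE: Assert the module failed as expected"
  ansible.builtin.assert:
    that:
      - example_failure is failed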
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml
index b6e98914f..abe8f3405 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_interface_template.yml
@@ -4,9 +4,9 @@
### NETBOX_DEVICE_INTERFACE_TEMPLATE
##
##
-- name: "1 - Interface with required information"
+- name: 1 - Interface with required information
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -14,8 +14,8 @@
type: 10gbase-t
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['msg'] == "interface_template 10GBASE-T (10GE) created"
@@ -25,9 +25,9 @@
- test_one['interface_template']['device_type'] == 2
- test_one['interface_template']['type'] == '10gbase-t'
-- name: "2 - Update 10GBASE-T (10GE)"
+- name: 2 - Update 10GBASE-T (10GE)
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -36,8 +36,8 @@
mgmt_only: true
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['msg'] == "interface_template 10GBASE-T (10GE) updated"
@@ -46,9 +46,9 @@
- test_two['interface_template']['device_type'] == 2
- test_two['interface_template']['mgmt_only'] == true
-- name: "3 - Delete interface template 10GBASE-T (10GE)"
+- name: 3 - Delete interface template 10GBASE-T (10GE)
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -57,17 +57,17 @@
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['msg'] == "interface_template 10GBASE-T (10GE) deleted"
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
-- name: "4 - Create LAG with several specified options"
+- name: 4 - Create LAG with several specified options
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -77,8 +77,8 @@
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['msg'] == "interface_template port channel template created"
@@ -89,9 +89,9 @@
- test_four['interface_template']['type'] == "lag"
- test_four['interface_template']['mgmt_only'] == false
-- name: "5 - Duplicate Interface Template port channel template"
+- name: 5 - Duplicate Interface Template port channel template
netbox.netbox.netbox_device_interface_template:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device_type: Arista Test
@@ -99,8 +99,8 @@
type: lag
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['msg'] == "interface_template port channel template already exists"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_role.yml
index 64ec9840f..b85ee2c86 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_role.yml
@@ -7,15 +7,15 @@
- name: "DEVICE_ROLE 1: Necessary info creation"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
- color: "FFFFFF"
+ name: Test Device Role
+ color: FFFFFF
state: present
register: test_one
- name: "DEVICE_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,15 +28,15 @@
- name: "DEVICE_ROLE 2: Create duplicate"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
- color: "FFFFFF"
+ name: Test Device Role
+ color: FFFFFF
state: present
register: test_two
- name: "DEVICE_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['device_role']['name'] == "Test Device Role"
@@ -47,16 +47,16 @@
- name: "DEVICE_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Device Role"
+ name: Test Device Role
color: "003EFF"
vm_role: false
state: present
register: test_three
- name: "DEVICE_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -70,14 +70,14 @@
- name: "DEVICE_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Device Role
state: absent
register: test_four
- name: "DEVICE_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -87,14 +87,14 @@
- name: "DEVICE_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_device_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Device Role
state: absent
register: test_five
- name: "DEVICE_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['device_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_type.yml
index 5587119f3..3027f6fd1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_device_type.yml
@@ -7,7 +7,7 @@
- name: "DEVICE_TYPE 1: Necessary info creation"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
model: ws-test-3750
@@ -16,7 +16,7 @@
register: test_one
- name: "DEVICE_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,16 +29,16 @@
- name: "DEVICE_TYPE 2: Create duplicate"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
- model: "ws-test-3750"
+ model: ws-test-3750
manufacturer: Test Manufacturer
state: present
register: test_two
- name: "DEVICE_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_one['device_type']['slug'] == "test-device-type"
@@ -49,7 +49,7 @@
- name: "DEVICE_TYPE 3: ASSERT - Update"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
slug: test-device-type
model: ws-test-3750
@@ -62,7 +62,7 @@
register: test_three
- name: "DEVICE_TYPE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['is_full_depth'] == false
@@ -79,14 +79,14 @@
- name: "DEVICE_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: test-device-type
state: absent
register: test_four
- name: "DEVICE_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -96,14 +96,14 @@
- name: "DEVICE_TYPE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "Test Device Type"
+ model: Test Device Type
state: absent
register: test_five
- name: "DEVICE_TYPE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['device_type'] == None
@@ -112,16 +112,16 @@
- name: "DEVICE_TYPE 6: Without Slug"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "WS Test 3850"
- manufacturer: "Test Manufacturer"
- subdevice_role: "parent"
+ model: WS Test 3850
+ manufacturer: Test Manufacturer
+ subdevice_role: parent
state: present
register: test_six
- name: "DEVICE_TYPE 6: ASSERT - Without Slug"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_export_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_export_template.yml
index b3dcea015..ffdc0424f 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_export_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_export_template.yml
@@ -7,10 +7,10 @@
- name: "EXPORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -19,7 +19,7 @@
register: test_one
- name: "EXPORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,10 +32,10 @@
- name: "EXPORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -44,7 +44,7 @@
register: test_two
- name: "EXPORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['export_template']['name'] == "Example Export Template"
@@ -53,20 +53,20 @@
- name: "EXPORT_TEMPLATE 3: Update data and remove as_attachment"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
{% for obj in queryset %}{{ obj.name }}{% endfor %}
- as_attachment: no
+ as_attachment: false
state: present
register: test_three
- name: "EXPORT_TEMPLATE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['as_attachment'] == false
@@ -76,10 +76,10 @@
- name: "EXPORT_TEMPLATE 4: Change content type"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -88,7 +88,7 @@
register: test_four
- name: "EXPORT_TEMPLATE 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -98,10 +98,10 @@
- name: "EXPORT_TEMPLATE 5: Delete"
netbox.netbox.netbox_export_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Export Template
description: Export Devices
template_code: !unsafe >-
@@ -110,7 +110,7 @@
register: test_five
- name: "EXPORT_TEMPLATE 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml
index 91788ef02..eed3f2f88 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group.yml
@@ -7,9 +7,9 @@
- name: "FHRP group 1: Test FHRP group creation"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
state: present
register: test_one
@@ -27,9 +27,9 @@
- name: "FHRP group 2: Create duplicate"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
state: present
register: test_two
@@ -45,15 +45,15 @@
- name: "FHRP group 3: Update FHRP group with other fields"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- protocol: "glbp"
+ protocol: glbp
group_id: 111
auth_type: md5
auth_key: 11111
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -76,7 +76,7 @@
- name: "FHRP group 4: ASSERT - Delete"
netbox.netbox.netbox_fhrp_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
group_id: 111
state: absent
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml
index 27f20c293..2dd2f6d61 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_fhrp_group_assignment.yml
@@ -7,7 +7,7 @@
- name: "FHRP group assignment 1: Test FHRP group assignment creation"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -31,7 +31,7 @@
- name: "FHRP group assignment 2: Create duplicate"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -53,7 +53,7 @@
- name: "FHRP group assignment 3: Update FHRP group assignment"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
@@ -75,7 +75,7 @@
- name: "FHRP group assignment 4: Delete FHRP group assignment"
netbox.netbox.netbox_fhrp_group_assignment:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
fhrp_group: 1
interface_type: dcim.interface
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port.yml
index 1f4c60a4d..7088bbeef 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port.yml
@@ -10,7 +10,7 @@
- name: "FRONT_PORT 1: Necessary info creation"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -20,7 +20,7 @@
register: test_one
- name: "FRONT_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,7 +34,7 @@
- name: "FRONT_PORT 2: Create duplicate"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -44,7 +44,7 @@
register: test_two
- name: "FRONT_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['front_port']['name'] == "Front Port"
@@ -56,7 +56,7 @@
- name: "FRONT_PORT 3: Update Front Port with other fields"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -68,7 +68,7 @@
register: test_three
- name: "FRONT_PORT 3: ASSERT - Update Front Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['rear_port_position'] == 5
@@ -84,7 +84,7 @@
- name: "FRONT_PORT 4: Create Front Port for Delete Test"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port 2
device: test100
@@ -94,7 +94,7 @@
register: test_four
- name: "FRONT_PORT 4: ASSERT - Create Front Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -108,7 +108,7 @@
- name: "FRONT_PORT 5: Delete Front Port"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port 2
device: test100
@@ -118,7 +118,7 @@
register: test_five
- name: "FRONT_PORT 5: ASSERT - Delete Front Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -128,7 +128,7 @@
- name: "FRONT_PORT 6: Create duplicate with rear_port dictionary"
netbox.netbox.netbox_front_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port
device: test100
@@ -140,7 +140,7 @@
register: test_six
- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port dictionary"
- assert:
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['front_port']['name'] == "Front Port"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml
index 30e907100..966279fa8 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_front_port_template.yml
@@ -10,7 +10,7 @@
- name: "FRONT_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -20,7 +20,7 @@
register: test_one
- name: "FRONT_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,7 +34,7 @@
- name: "FRONT_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -44,7 +44,7 @@
register: test_two
- name: "FRONT_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['front_port_template']['name'] == "Front Port Template"
@@ -56,7 +56,7 @@
- name: "FRONT_PORT_TEMPLATE 3: Update Front Port Template with other fields"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -67,7 +67,7 @@
register: test_three
- name: "FRONT_PORT_TEMPLATE 3: ASSERT - Update Front Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['rear_port_position'] == 5
@@ -81,7 +81,7 @@
- name: "FRONT_PORT_TEMPLATE 4: Create Front Port Template for Delete Test"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template 2
device_type: Cisco Test
@@ -91,7 +91,7 @@
register: test_four
- name: "FRONT_PORT_TEMPLATE 4: ASSERT - Create Front Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -105,7 +105,7 @@
- name: "FRONT_PORT_TEMPLATE 5: Delete Front Port Template"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template 2
device_type: Cisco Test
@@ -115,7 +115,7 @@
register: test_five
- name: "FRONT_PORT_TEMPLATE 5: ASSERT - Delete Front Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -125,7 +125,7 @@
- name: "FRONT_PORT 6: Create duplicate with rear_port_template dictionary"
netbox.netbox.netbox_front_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Front Port Template
device_type: Cisco Test
@@ -137,7 +137,7 @@
register: test_six
- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port_template dictionary"
- assert:
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['front_port_template']['name'] == "Front Port Template"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml
index cc038ac94..5b330c3a5 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item.yml
@@ -7,10 +7,10 @@
- name: "INVENTORY_ITEM 1: Necessary info creation"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: present
register: test_one
@@ -27,10 +27,10 @@
- name: "INVENTORY_ITEM 2: Create duplicate"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: present
register: test_two
@@ -45,18 +45,18 @@
- name: "INVENTORY_ITEM 3: Update properties"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
- manufacturer: "Cisco"
- part_id: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
+ manufacturer: Cisco
+ part_id: 10G-SFP+
serial: "1234"
asset_tag: "1234"
- description: "New SFP"
- discovered: True
+ description: New SFP
+ discovered: true
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -85,10 +85,10 @@
- name: "INVENTORY_ITEM 4: Delete inventory item"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "10G-SFP+"
+ device: test100
+ name: 10G-SFP+
state: absent
register: test_four
@@ -109,7 +109,7 @@
- name: "INVENTORY_ITEM 5: PREWORK - Create inventory item role"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Processor
color: FFFFFF
@@ -119,7 +119,7 @@
- name: "INVENTORY_ITEM 5: Create inventory item with role"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_processor
@@ -141,7 +141,7 @@
- name: "INVENTORY_ITEM 6: Create inventory item with missing role"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_processor
@@ -159,14 +159,14 @@
- name: "INVENTORY_ITEM 7: Create inventory item with component"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_component
- component_type: "dcim.interface"
+ component_type: dcim.interface
component:
name: GigabitEthernet2
- device: "test100"
+ device: test100
state: present
register: test_seven
@@ -185,13 +185,13 @@
- name: "INVENTORY_ITEM 8: Create inventory item with missing component_type"
netbox.netbox.netbox_inventory_item:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: test100
name: test_component
component:
name: GigabitEthernet2
- device: "test100"
+ device: test100
state: present
ignore_errors: true
register: test_eight
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml
index 1c2de3f03..71d86d747 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_inventory_item_role.yml
@@ -7,15 +7,15 @@
- name: "INVENTORY_ITEM_ROLE 1: Necessary info creation"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
- color: "FFFFFF"
+ name: Test Inventory Item Role
+ color: FFFFFF
state: present
register: test_one
- name: "INVENTORY_ITEM_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,15 +28,15 @@
- name: "INVENTORY_ITEM_ROLE 2: Create duplicate"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
- color: "FFFFFF"
+ name: Test Inventory Item Role
+ color: FFFFFF
state: present
register: test_two
- name: "INVENTORY_ITEM_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['inventory_item_role']['name'] == "Test Inventory Item Role"
@@ -47,15 +47,15 @@
- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Inventory Item Role"
+ name: Test Inventory Item Role
color: "003EFF"
state: present
register: test_three
- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -67,14 +67,14 @@
- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Inventory Item Role
state: absent
register: test_four
- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -84,14 +84,14 @@
- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_inventory_item_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Inventory Item Role
state: absent
register: test_five
- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['inventory_item_role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ip_address.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ip_address.yml
index c18cad9bd..8813f8dcb 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ip_address.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ip_address.yml
@@ -7,14 +7,14 @@
- name: "1 - Create IP address within NetBox with only required information - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,20 +22,20 @@
- test_one['msg'] == "ip_address 192.168.1.10/30 created"
- test_one['ip_address']['address'] == "192.168.1.10/30"
-- name: "2 - Update 192.168.1.10/30"
+- name: 2 - Update 192.168.1.10/30
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
- description: "Updated ip address"
+ description: Updated ip address
tags:
- - "Updated"
+ - Updated
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- test_two is changed
- test_two['diff']['after']['description'] == "Updated ip address"
@@ -48,14 +48,14 @@
- name: "3 - Delete IP - 192.168.1.10 - State: Absent"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.10/30
state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
@@ -65,14 +65,14 @@
- name: "4 - Create IP in global VRF - 192.168.1.20/30 - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.1.20/30
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -81,7 +81,7 @@
- test_four['ip_address']['address'] == "192.168.1.20/30"
# Enforce uniqueness in NetBox 3.7
-#- name: "5 - Create IP in global VRF - 192.168.1.20/30 - State: New"
+# - name: "5 - Create IP in global VRF - 192.168.1.20/30 - State: New"
# netbox.netbox.netbox_ip_address:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -90,7 +90,7 @@
# state: new
# register: test_five
-#- name: "5 - ASSERT"
+# - name: "5 - ASSERT"
# assert:
# that:
# - test_five is changed
@@ -102,14 +102,14 @@
- name: "6 - Create new address with only prefix specified - State: new"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 192.168.100.0/24
state: new
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
@@ -117,10 +117,10 @@
- test_six['msg'] == "ip_address 192.168.100.1/24 created"
- test_six['ip_address']['address'] == "192.168.100.1/24"
-- name: "7 - Create IP address with several specified"
+- name: 7 - Create IP address with several specified
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 172.16.1.20/24
@@ -130,12 +130,12 @@
role: Loopback
description: Test description
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "absent"
@@ -150,10 +150,10 @@
- test_seven['ip_address']['tenant'] == 1
- test_seven['ip_address']['vrf'] == 1
-- name: "8 - Create IP address and assign a nat_inside IP"
+- name: 8 - Create IP address and assign a nat_inside IP
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.10.1.30/16
@@ -163,8 +163,8 @@
vrf: Test VRF
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "absent"
@@ -178,7 +178,7 @@
- name: "9 - Create IP address on GigabitEthernet2 - test100 - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.10.200.30/16
@@ -187,8 +187,8 @@
device: test100
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "absent"
@@ -202,7 +202,7 @@
- name: "10 - Create IP address on GigabitEthernet2 - test100 - State: new"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 10.10.0.0/16
@@ -212,8 +212,8 @@
state: new
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['diff']['before']['state'] == "absent"
@@ -227,7 +227,7 @@
- name: "11 - Create IP address on GigabitEthernet2 - test100 - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 192.168.100.0/24
@@ -237,8 +237,8 @@
state: present
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is changed
- test_eleven['diff']['before']['state'] == "absent"
@@ -246,10 +246,10 @@
- test_eleven['msg'] == "ip_address 192.168.100.2/24 created"
- test_eleven['ip_address']['address'] == "192.168.100.2/24"
-- name: "12 - Duplicate - 192.168.100.2/24 on interface"
+- name: 12 - Duplicate - 192.168.100.2/24 on interface
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.100.2/24
assigned_object:
@@ -258,8 +258,8 @@
state: present
register: test_twelve
-- name: "12 - ASSERT"
- assert:
+- name: 12 - ASSERT
+ ansible.builtin.assert:
that:
- not test_twelve['changed']
- test_twelve['msg'] == "ip_address 192.168.100.2/24 already exists"
@@ -267,17 +267,17 @@
- test_twelve['ip_address']['assigned_object_type'] == "dcim.interface"
- test_twelve['ip_address']['assigned_object_id'] == 4
-- name: "13 - Duplicate - 192.168.100.2/24"
+- name: 13 - Duplicate - 192.168.100.2/24
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 192.168.100.2/24
state: present
register: test_thirteen
-- name: "13 - ASSERT"
- assert:
+- name: 13 - ASSERT
+ ansible.builtin.assert:
that:
- not test_thirteen['changed']
- test_thirteen['msg'] == "ip_address 192.168.100.2/24 already exists"
@@ -286,7 +286,7 @@
- name: "14 - Create IP address on Eth0 - test100-vm - State: present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
address: 10.188.1.100/24
@@ -295,8 +295,8 @@
virtual_machine: test100-vm
register: test_fourteen
-- name: "14 - ASSERT"
- assert:
+- name: 14 - ASSERT
+ ansible.builtin.assert:
that:
- test_fourteen is changed
- test_fourteen['diff']['before']['state'] == "absent"
@@ -308,7 +308,7 @@
- test_fourteen['ip_address']['assigned_object_id'] == 1
# Enforce uniqueness in NetBox 3.7
-#- name: "15 - Create same IP address on Eth0 - test101-vm - State: present"
+# - name: "15 - Create same IP address on Eth0 - test101-vm - State: present"
# netbox.netbox.netbox_ip_address:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -321,7 +321,7 @@
# state: "present"
# register: test_fifteen
-#- name: "15 - ASSERT"
+# - name: "15 - ASSERT"
# assert:
# that:
# - test_fifteen is changed
@@ -336,14 +336,14 @@
- name: "16 - Create IP address with no mask - State: Present"
netbox.netbox.netbox_ip_address:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
address: 10.120.10.1
state: present
register: test_sixteen
-- name: "16 - ASSERT"
- assert:
+- name: 16 - ASSERT
+ ansible.builtin.assert:
that:
- test_sixteen is changed
- test_sixteen['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml
index 1f1ff0f36..9f376c2ed 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_ipam_role.yml
@@ -7,14 +7,14 @@
- name: "IPAM_ROLE 1: Necessary info creation"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
state: present
register: test_one
- name: "IPAM_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "IPAM_ROLE 2: Create duplicate"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
state: present
register: test_two
- name: "IPAM_ROLE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['role']['name'] == "Test IPAM Role"
@@ -43,15 +43,15 @@
- name: "IPAM_ROLE 3: ASSERT - Update"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test IPAM Role"
+ name: Test IPAM Role
weight: 4096
state: present
register: test_three
- name: "IPAM_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['weight'] == 4096
@@ -63,14 +63,14 @@
- name: "IPAM_ROLE 4: ASSERT - Delete"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test IPAM Role
state: absent
register: test_four
- name: "IPAM_ROLE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -80,14 +80,14 @@
- name: "IPAM_ROLE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_ipam_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test IPAM Role
state: absent
register: test_five
- name: "IPAM_ROLE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['role'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml
index d65e64cb8..7eead8db1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_journal_entry.yml
@@ -7,7 +7,7 @@
- name: "JOURNAL ENTRY 1: Creation"
netbox.netbox.netbox_journal_entry:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
assigned_object_type: dcim.device
assigned_object_id: 1
@@ -17,7 +17,7 @@
register: test_one
- name: "JOURNAL_ENTRY 1: ASSERT - Creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml
index 292badb93..655be9455 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn.yml
@@ -7,7 +7,7 @@
- name: "L2VPN 1: Necessary info creation"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test L2VPN
type: vxlan
@@ -15,7 +15,7 @@
register: test_one
- name: "L2VPN 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "L2VPN 2: Create duplicate"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test L2VPN
type: vxlan
@@ -35,7 +35,7 @@
register: test_two
- name: "L2VPN 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['l2vpn']['name'] == "Test L2VPN"
@@ -45,24 +45,24 @@
- name: "L2VPN 4: ASSERT - Update"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test L2VPN"
+ name: Test L2VPN
type: vxlan
- tenant: "Test Tenant"
+ tenant: Test Tenant
description: Updated description
import_targets:
- - "4000:4000"
- - "5000:5000"
+ - 4000:4000
+ - 5000:5000
export_targets:
- - "6000:6000"
+ - 6000:6000
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "L2VPN: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['description'] == "Updated description"
@@ -80,15 +80,15 @@
- name: "L2VPN: ASSERT - Delete"
netbox.netbox.netbox_l2vpn:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test L2VPN"
+ name: Test L2VPN
type: vxlan
state: absent
register: test_six
- name: "L2VPN 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['l2vpn']['name'] == "Test L2VPN"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml
index a4560f407..0dd539943 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_l2vpn_termination.yml
@@ -7,7 +7,7 @@
- name: "L2VPN_TERMINATION 1: Necessary info creation"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
@@ -29,7 +29,7 @@
- name: "L2VPN_TERMINATION 2: Create duplicate"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
@@ -49,13 +49,13 @@
- name: "L2VPN_TERMINATION 3: Update"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
assigned_object_id: 1
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
@@ -73,7 +73,7 @@
- name: "L2VPN_TERMINATION 4: Delete"
netbox.netbox.netbox_l2vpn_termination:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
l2vpn: 1
assigned_object_type: dcim.interface
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_location.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_location.yml
index f2f7bb9db..2e37c7682 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_location.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_location.yml
@@ -7,7 +7,7 @@
- name: "LOCATION 1: Necessary info creation"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
site: Test Site
@@ -15,7 +15,7 @@
register: test_one
- name: "LOCATION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,7 +28,7 @@
- name: "LOCATION 2: Create duplicate"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
site: Test Site
@@ -36,7 +36,7 @@
register: test_two
- name: "LOCATION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['location']['name'] == "Location"
@@ -47,7 +47,7 @@
- name: "LOCATION 3: Update"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
parent_location: Parent Rack Group
@@ -56,7 +56,7 @@
register: test_three
- name: "LOCATION 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 2
@@ -70,14 +70,14 @@
- name: "LOCATION 4: Delete"
netbox.netbox.netbox_location:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Location
state: absent
register: test_four
- name: "LOCATION 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_lookup.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_lookup.yml
index 277454fcd..3d54e02b1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_lookup.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_lookup.yml
@@ -5,79 +5,86 @@
##
##
- name: "NETBOX_LOOKUP 1: Lookup returns exactly two sites"
- assert:
+ ansible.builtin.assert:
that: query_result == "3"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | count }}"
- name: "NETBOX_LOOKUP 2: Query doesn't return Wibble (sanity check json_query)"
- assert:
+ ansible.builtin.assert:
that: query_result == "0"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`Wibble`]')|count}}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`Wibble`]')
+ | count }}"
- name: "NETBOX_LOOKUP 3: Device query returns exactly one TestDeviceR1"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`TestDeviceR1`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`TestDeviceR1`]')
+ | count }}"
- name: "NETBOX_LOOKUP 4: VLAN ID 400 can be queried and is named 'Test VLAN'"
- assert:
+ ansible.builtin.assert:
that: query_result == 'Test VLAN'
vars:
- query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.vid==`400`].value.name'))[0] }}"
+ query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.vid==`400`].value.name'))[0]
+ }}"
- name: "NETBOX_LOOKUP 5: Add one of two devices for lookup filter test."
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "L1"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: L1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
tags:
- - "nolookup"
+ - nolookup
state: present
- name: "NETBOX_LOOKUP 6: Add two of two devices for lookup filter test."
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "L2"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site2"
- status: "Staged"
+ name: L2
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site2
+ status: Staged
tags:
- - "lookup"
+ - lookup
state: present
- name: "NETBOX_LOOKUP 7: Device query returns exactly the L2 device"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')|community.general.json_query('[?value.display==`L2`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ | community.general.json_query('[?value.display==`L2`]') | count }}"
- name: "NETBOX_LOOKUP 8: Device query specifying raw data returns payload without key/value dict"
- assert:
+ ansible.builtin.assert:
that: query_result == "1"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567', raw_data=True)|community.general.json_query('[?display==`L2`]')|count }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567',
+ raw_data=True) | community.general.json_query('[?display==`L2`]') | count }}"
- name: "NETBOX_LOOKUP 9: Device query specifying multiple sites, Make sure L1 and L2 are in the results"
- assert:
+ ansible.builtin.assert:
that:
- "'L1' in query_result"
- "'L2' in query_result"
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567', raw_data=True)|community.general.json_query('[*].display') }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768',
+ token='0123456789abcdef0123456789abcdef01234567', raw_data=True) | community.general.json_query('[*].display') }}"
- name: "NETBOX_LOOKUP 10: Device query by ID"
- assert:
+ ansible.builtin.assert:
that: query_result
vars:
- query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') }}"
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ }}"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml
index 4dd1907eb..39804659b 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_manufacturer.yml
@@ -7,14 +7,14 @@
- name: "MANUFACTURER 1: Necessary info creation"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: present
register: test_one
- name: "MANUFACTURER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "MANUFACTURER 2: Create duplicate"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: present
register: test_two
- name: "MANUFACTURER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['manufacturer']['name'] == "Test Manufacturer Two"
@@ -43,14 +43,14 @@
- name: "MANUFACTURER 3: Update"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: test manufacturer two
state: present
register: test_three
- name: "MANUFACTURER 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three['changed']
- test_three['manufacturer']['name'] == "test manufacturer two"
@@ -60,14 +60,14 @@
- name: "MANUFACTURER 4: ASSERT - Delete"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: test manufacturer two
state: absent
register: test_four
- name: "MANUFACTURER 3: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -77,14 +77,14 @@
- name: "MANUFACTURER 5: ASSERT - Delete non existing"
netbox.netbox.netbox_manufacturer:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Manufacturer Two
state: absent
register: test_five
- name: "MANUFACTURER 5: ASSERT - Delete non existing"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['manufacturer'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module.yml
index e8f090d8c..0a18f3894 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module.yml
@@ -7,7 +7,7 @@
- name: "MODULE 1: Necessary info creation"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -16,7 +16,7 @@
register: test_one
- name: "MODULE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "MODULE 2: Create duplicate"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -38,7 +38,7 @@
register: test_two
- name: "MODULE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['module']['device'] == "C9300-DEMO"
@@ -49,7 +49,7 @@
- name: "MODULE 3: ASSERT - Update"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -59,7 +59,7 @@
register: test_three
- name: "MODULE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['serial'] == "XXXNNNNXXXX"
@@ -72,7 +72,7 @@
- name: "MODULE 4: ASSERT - Delete"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -81,7 +81,7 @@
register: test_four
- name: "MODULE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -91,7 +91,7 @@
- name: "MODULE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
module_bay: Network Module
@@ -100,7 +100,7 @@
register: test_five
- name: "MODULE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_bay.yml
index 44bb76269..843a4767e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_bay.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_bay.yml
@@ -7,7 +7,7 @@
- name: "MODULE 1: Necessary info creation"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -16,7 +16,7 @@
register: test_one
- name: "MODULE BAY 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -29,7 +29,7 @@
- name: "MODULE BAY 2: Create duplicate"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -38,7 +38,7 @@
register: test_two
- name: "MODULE BAY 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['module_bay']['device'] == "C9300-DEMO"
@@ -49,7 +49,7 @@
- name: "MODULE BAY 3: ASSERT - Update"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -59,7 +59,7 @@
register: test_three
- name: "MODULE BAY 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['label'] == "TEST"
@@ -72,7 +72,7 @@
- name: "MODULE BAY 4: ASSERT - Delete"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -80,7 +80,7 @@
register: test_four
- name: "MODULE BAY 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -90,7 +90,7 @@
- name: "MODULE BAY 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module_bay:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
device: C9300-DEMO
name: Network Module
@@ -98,7 +98,7 @@
register: test_five
- name: "MODULE BAY 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module_bay'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_type.yml
index e1d51c4cb..8ab077803 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_type.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_module_type.yml
@@ -7,7 +7,7 @@
- name: "MODULE_TYPE 1: Necessary info creation"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
manufacturer: Test Manufacturer
@@ -15,7 +15,7 @@
register: test_one
- name: "MODULE_TYPE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "MODULE_TYPE 2: Create duplicate"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "ws-test-3750"
+ model: ws-test-3750
manufacturer: Test Manufacturer
state: present
register: test_two
- name: "MODULE_TYPE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_one['module_type']['model'] == "ws-test-3750"
@@ -45,7 +45,7 @@
- name: "MODULE_TYPE 3: ASSERT - Update"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
manufacturer: Test Manufacturer
@@ -54,7 +54,7 @@
register: test_three
- name: "MODULE_TYPE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['part_number'] == "ws-3750g-v2"
@@ -66,14 +66,14 @@
- name: "MODULE_TYPE 4: ASSERT - Delete"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: ws-test-3750
state: absent
register: test_four
- name: "MODULE_TYPE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -83,14 +83,14 @@
- name: "MODULE_TYPE 5: ASSERT - Delete non existing"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- model: "Test Module Type"
+ model: Test Module Type
state: absent
register: test_five
- name: "MODULE_TYPE 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['module_type'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_platform.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_platform.yml
index 1e3621add..8a36ef2c1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_platform.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_platform.yml
@@ -7,14 +7,14 @@
- name: "PLATFORM 1: Necessary info creation"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: present
register: test_one
- name: "PLATFORM 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "PLATFORM 2: Create duplicate"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: present
register: test_two
- name: "PLATFORM 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['platform']['name'] == "Test Platform"
@@ -43,7 +43,7 @@
- name: "PLATFORM 3: ASSERT - Update"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
manufacturer: Test Manufacturer
@@ -51,7 +51,7 @@
register: test_three
- name: "PLATFORM 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['manufacturer'] == 3
@@ -61,14 +61,14 @@
- name: "PLATFORM 4: ASSERT - Delete"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: absent
register: test_four
- name: "PLATFORM 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
@@ -78,14 +78,14 @@
- name: "PLATFORM 5: ASSERT - Delete non existing"
netbox.netbox.netbox_platform:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Platform
state: absent
register: test_five
- name: "PLATFORM 5: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not test_five['changed']
- test_five['platform'] == None
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_feed.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_feed.yml
index a02fd9129..f4ac71e01 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_feed.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_feed.yml
@@ -10,7 +10,7 @@
- name: "POWER_FEED 1: Necessary info creation"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_FEED 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_FEED 2: Create duplicate"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_FEED 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_feed']['name'] == "Power Feed"
@@ -48,7 +48,7 @@
- name: "POWER_FEED 3: Update power_feed with other fields"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed
power_panel: Power Panel
@@ -64,7 +64,7 @@
register: test_three
- name: "POWER_FEED 3: ASSERT - Update power_feed with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "offline"
@@ -90,7 +90,7 @@
- name: "POWER_FEED 4: Create Power Feed for Delete Test"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed 2
power_panel: Power Panel
@@ -98,7 +98,7 @@
register: test_four
- name: "POWER_FEED 4: ASSERT - Create Power Feed for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,11 +107,10 @@
- test_four['power_feed']['power_panel'] == 1
- test_four['msg'] == "power_feed Power Feed 2 created"
-
- name: "POWER_FEED 5: Delete Power Feed"
netbox.netbox.netbox_power_feed:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Feed 2
power_panel: Power Panel
@@ -119,7 +118,7 @@
register: test_five
- name: "POWER_FEED 5: ASSERT - Delete Power Feed"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml
index c7b5f7d3d..84f9d3255 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet.yml
@@ -10,7 +10,7 @@
- name: "POWER_OUTLET 1: Necessary info creation"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_OUTLET 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_OUTLET 2: Create duplicate"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_OUTLET 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_outlet']['name'] == "Power Outlet"
@@ -48,7 +48,7 @@
- name: "POWER_OUTLET 3: Update power_outlet with other fields"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet
device: Device Power Tests
@@ -60,7 +60,7 @@
register: test_three
- name: "POWER_OUTLET 3: ASSERT - Update power_outlet with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -78,7 +78,7 @@
- name: "POWER_OUTLET 4: Create Power Outlet for Delete Test"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet 2
device: Device Power Tests
@@ -86,7 +86,7 @@
register: test_four
- name: "POWER_OUTLET 4: ASSERT - Create Power Outlet for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -98,7 +98,7 @@
- name: "POWER_OUTLET 5: Delete Power Outlet"
netbox.netbox.netbox_power_outlet:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet 2
device: Device Power Tests
@@ -106,7 +106,7 @@
register: test_five
- name: "POWER_OUTLET 5: ASSERT - Delete Power Outlet"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml
index 96f283486..14ff49d0c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_outlet_template.yml
@@ -10,7 +10,7 @@
- name: "POWER_OUTLET_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_OUTLET_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_OUTLET_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_OUTLET_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_outlet_template']['name'] == "Power Outlet Template"
@@ -48,7 +48,7 @@
- name: "POWER_OUTLET_TEMPLATE 3: Update power_outlet_template with other fields"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template
device_type: Device Type Power Tests
@@ -59,7 +59,7 @@
register: test_three
- name: "POWER_OUTLET_TEMPLATE 3: ASSERT - Update power_outlet_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -75,7 +75,7 @@
- name: "POWER_OUTLET_TEMPLATE 4: Create Power Outlet Template for Delete Test"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template 2
device_type: Device Type Power Tests
@@ -83,7 +83,7 @@
register: test_four
- name: "POWER_OUTLET_TEMPLATE 4: ASSERT - Create Power Outlet Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -95,7 +95,7 @@
- name: "POWER_OUTLET_TEMPLATE 5: Delete Power Outlet Template"
netbox.netbox.netbox_power_outlet_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Outlet Template 2
device_type: Device Type Power Tests
@@ -103,7 +103,7 @@
register: test_five
- name: "POWER_OUTLET_TEMPLATE 5: ASSERT - Delete Power Outlet Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_panel.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_panel.yml
index 082a6b375..e568a75f4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_panel.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_panel.yml
@@ -10,7 +10,7 @@
- name: "POWER_PANEL 1: Necessary info creation"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -18,7 +18,7 @@
register: test_one
- name: "POWER_PANEL 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "POWER_PANEL 2: Create duplicate"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -38,7 +38,7 @@
register: test_two
- name: "POWER_PANEL 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_panel']['name'] == "Power Panel"
@@ -48,7 +48,7 @@
- name: "POWER_PANEL 3: Update power_panel with other fields"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel
site: Test Site
@@ -57,7 +57,7 @@
register: test_three
- name: "POWER_PANEL 3: ASSERT - Update power_panel with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['location'] == 1
@@ -69,7 +69,7 @@
- name: "POWER_PANEL 4: Create Power Panel for Delete Test"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel 2
site: Test Site
@@ -77,7 +77,7 @@
register: test_four
- name: "POWER_PANEL 4: ASSERT - Create Power Panel for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -89,7 +89,7 @@
- name: "POWER_PANEL 5: Delete Power Panel"
netbox.netbox.netbox_power_panel:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Panel 2
site: Test Site
@@ -97,7 +97,7 @@
register: test_five
- name: "POWER_PANEL 5: ASSERT - Delete Power Panel"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port.yml
index b6368bd92..311820ccd 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port.yml
@@ -10,7 +10,7 @@
- name: "POWER_PORT 0: Create device for testing power ports"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Power Tests
device_type: Cisco Test
@@ -21,7 +21,7 @@
- name: "POWER_PORT 1: Necessary info creation"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -29,7 +29,7 @@
register: test_one
- name: "POWER_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -41,7 +41,7 @@
- name: "POWER_PORT 2: Create duplicate"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -49,7 +49,7 @@
register: test_two
- name: "POWER_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_port']['name'] == "Power Port"
@@ -59,7 +59,7 @@
- name: "POWER_FEED 3: Update power_port with other fields"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port
device: Device Power Tests
@@ -71,7 +71,7 @@
register: test_three
- name: "POWER_FEED 3: ASSERT - Update power_port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -89,7 +89,7 @@
- name: "POWER_PORT 4: Create Power Port for Delete Test"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port 2
device: Device Power Tests
@@ -97,7 +97,7 @@
register: test_four
- name: "POWER_PORT 4: ASSERT - Create Power Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -109,7 +109,7 @@
- name: "POWER_PORT 5: Delete Power Port"
netbox.netbox.netbox_power_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port 2
device: Device Power Tests
@@ -117,7 +117,7 @@
register: test_five
- name: "POWER_PORT 5: ASSERT - Delete Power Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml
index aaa5eb43c..b413c29ce 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_power_port_template.yml
@@ -10,7 +10,7 @@
- name: "POWER_PORT_TEMPLATE 0.1: Create device type for testing power ports on device types"
netbox.netbox.netbox_device_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: Device Type Power Tests
manufacturer: Test Manufacturer
@@ -19,7 +19,7 @@
- name: "POWER_PORT_TEMPLATE 0.2: Create module type for testing power ports on module types"
netbox.netbox.netbox_module_type:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
model: Module Type Power Tests
manufacturer: Test Manufacturer
@@ -28,7 +28,7 @@
- name: "POWER_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -36,7 +36,7 @@
register: test_one
- name: "POWER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -48,7 +48,7 @@
- name: "POWER_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -56,7 +56,7 @@
register: test_two
- name: "POWER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['power_port_template']['name'] == "Power Port Template"
@@ -66,7 +66,7 @@
- name: "POWER_PORT_TEMPLATE 3: Update power_port_template with other fields"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template
device_type: Device Type Power Tests
@@ -77,7 +77,7 @@
register: test_three
- name: "POWER_PORT_TEMPLATE 3: ASSERT - Update power_port_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['type'] == "ita-e"
@@ -93,7 +93,7 @@
- name: "POWER_PORT_TEMPLATE 4: Create Power Port Template for Delete Test"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template 2
device_type: Device Type Power Tests
@@ -101,7 +101,7 @@
register: test_four
- name: "POWER_PORT_TEMPLATE 4: ASSERT - Create Power Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -113,7 +113,7 @@
- name: "POWER_PORT_TEMPLATE 5: Delete Power Port Template"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Power Port Template 2
device_type: Device Type Power Tests
@@ -121,7 +121,7 @@
register: test_five
- name: "POWER_PORT_TEMPLATE 5: ASSERT - Delete Power Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -131,7 +131,7 @@
- name: "POWER_PORT_TEMPLATE 6: Necessary info creation"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -139,7 +139,7 @@
register: test_six
- name: "POWER_PORT_TEMPLATE 6: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
@@ -151,7 +151,7 @@
- name: "POWER_PORT_TEMPLATE 7: Create duplicate"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -159,7 +159,7 @@
register: test_seven
- name: "POWER_PORT_TEMPLATE 7: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_seven['changed']
- test_seven['power_port_template']['name'] == "Module Power Port Template"
@@ -169,7 +169,7 @@
- name: "POWER_PORT_TEMPLATE 8: Update power_port_template with other fields"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -180,7 +180,7 @@
register: test_eight
- name: "POWER_PORT_TEMPLATE 8: ASSERT - Update power_port_template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['after']['type'] == "ita-e"
@@ -196,7 +196,7 @@
- name: "POWER_PORT_TEMPLATE 9: Delete Power Port Template"
netbox.netbox.netbox_power_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Module Power Port Template
module_type: Module Type Power Tests
@@ -204,7 +204,7 @@
register: test_nine
- name: "POWER_PORT_TEMPLATE 9: ASSERT - Delete Power Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_prefix.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_prefix.yml
index 59ed336ad..64adae855 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_prefix.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_prefix.yml
@@ -4,17 +4,17 @@
### NETBOX_PREFIX
##
##
-- name: "1 - Create prefix within NetBox with only required information"
+- name: 1 - Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,36 +22,36 @@
- test_one['msg'] == "prefix 10.156.0.0/19 created"
- test_one['prefix']['prefix'] == "10.156.0.0/19"
-- name: "2 - Duplicate"
+- name: 2 - Duplicate
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "prefix 10.156.0.0/19 already exists"
- test_two['prefix']['prefix'] == "10.156.0.0/19"
-- name: "3 - Update 10.156.0.0/19"
+- name: 3 - Update 10.156.0.0/19
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
site: Test Site
status: Reserved
- description: "This prefix has been updated"
+ description: This prefix has been updated
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['site'] == 1
@@ -63,27 +63,27 @@
- test_three['prefix']['status'] == "reserved"
- test_three['prefix']['description'] == "This prefix has been updated"
-- name: "4 - Delete prefix within netbox"
+- name: 4 - Delete prefix within netbox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "prefix 10.156.0.0/19 deleted"
-- name: "5 - Create prefix with several specified options"
+- name: 5 - Create prefix with several specified options
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
family: 4
prefix: 10.156.32.0/19
@@ -100,12 +100,12 @@
description: Test description
is_pool: true
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -123,34 +123,34 @@
- test_five['prefix']['is_pool'] == true
- test_five['prefix']['tags'][0] == 4
-- name: "6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist"
+- name: 6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- not test_six['changed']
- test_six['msg'] == "Parent prefix does not exist - 10.156.0.0/19"
-- name: "7 - Create prefix within NetBox with only required information"
+- name: 7 - Create prefix within NetBox with only required information
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.156.0.0/19
state: present
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "absent"
@@ -158,19 +158,19 @@
- test_seven['msg'] == "prefix 10.156.0.0/19 created"
- test_seven['prefix']['prefix'] == "10.156.0.0/19"
-- name: "8 - Get a new /24 inside 10.156.0.0/19 within NetBox"
+- name: 8 - Get a new /24 inside 10.156.0.0/19 within NetBox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "absent"
@@ -178,10 +178,10 @@
- test_eight['msg'] == "prefix 10.156.0.0/24 created"
- test_eight['prefix']['prefix'] == "10.156.0.0/24"
-- name: "9 - Create 10.157.0.0/19"
+- name: 9 - Create 10.157.0.0/19
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
prefix: 10.157.0.0/19
vrf: Test VRF
@@ -189,8 +189,8 @@
state: present
register: test_nine
-- name: "9 - ASSERT"
- assert:
+- name: 9 - ASSERT
+ ansible.builtin.assert:
that:
- test_nine is changed
- test_nine['diff']['before']['state'] == "absent"
@@ -200,21 +200,21 @@
- test_nine['prefix']['site'] == 1
- test_nine['prefix']['vrf'] == 1
-- name: "10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values"
+- name: 10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.157.0.0/19
prefix_length: 24
vrf: Test VRF
site: Test Site
state: present
- first_available: yes
+ first_available: true
register: test_ten
-- name: "10 - ASSERT"
- assert:
+- name: 10 - ASSERT
+ ansible.builtin.assert:
that:
- test_ten is changed
- test_ten['diff']['before']['state'] == "absent"
@@ -224,19 +224,19 @@
- test_ten['prefix']['site'] == 1
- test_ten['prefix']['vrf'] == 1
-- name: "11 - Get a new /24 inside 10.156.0.0/19 within NetBox"
+- name: 11 - Get a new /24 inside 10.156.0.0/19 within NetBox
netbox.netbox.netbox_prefix:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
parent: 10.156.0.0/19
prefix_length: 24
state: present
- first_available: yes
+ first_available: true
register: test_eleven
-- name: "11 - ASSERT"
- assert:
+- name: 11 - ASSERT
+ ansible.builtin.assert:
that:
- test_eleven is changed
- test_eleven['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider.yml
index cba224eb4..ec7b2c30a 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider.yml
@@ -7,14 +7,14 @@
- name: "NETBOX_PROVIDER 1: Create provider within NetBox with only required information"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: present
register: test_one
- name: "NETBOX_PROVIDER 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "NETBOX_PROVIDER 2: Duplicate"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: present
register: test_two
- name: "NETBOX_PROVIDER 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['provider']['name'] == "Test Provider One"
@@ -43,15 +43,15 @@
- name: "NETBOX_PROVIDER 3: Update provider with other fields"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
- comments: "BAD PROVIDER"
+ comments: BAD PROVIDER
state: present
register: test_three
- name: "NETBOX_PROVIDER 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "BAD PROVIDER"
@@ -63,14 +63,14 @@
- name: "NETBOX_PROVIDER 4: Delete provider within netbox"
netbox.netbox.netbox_provider:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test Provider One
state: absent
register: test_four
- name: "NETBOX_PROVIDER 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['provider']['name'] == "Test Provider One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider_network.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider_network.yml
index bf7d4bf97..6f8b2b331 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider_network.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_provider_network.yml
@@ -7,7 +7,7 @@
- name: "NETBOX_PROVIDER_NETWORK 1: Create provider network within NetBox with only required information"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -15,7 +15,7 @@
register: test_one
- name: "NETBOX_PROVIDER_NETWORK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,7 +26,7 @@
- name: "NETBOX_PROVIDER_NETWORK 2: Duplicate"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -34,7 +34,7 @@
register: test_two
- name: "NETBOX_PROVIDER_NETWORK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['provider_network']['name'] == "Test Provider Network One"
@@ -43,17 +43,17 @@
- name: "NETBOX_PROVIDER_NETWORK 3: Update provider network with other fields"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
description: Describe a Provider Network
- comments: "A provider network"
+ comments: A provider network
state: present
register: test_three
- name: "NETBOX_PROVIDER_NETWORK 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "A provider network"
@@ -66,7 +66,7 @@
- name: "NETBOX_PROVIDER_NETWORK 4: Delete provider within netbox"
netbox.netbox.netbox_provider_network:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
provider: Test Provider
name: Test Provider Network One
@@ -74,7 +74,7 @@
register: test_four
- name: "NETBOX_PROVIDER_NETWORK 4 : ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['provider_network']['name'] == "Test Provider Network One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack.yml
index 245dd0c29..5dc05c3a1 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack.yml
@@ -4,18 +4,18 @@
### NETBOX_RACK
##
##
-- name: "1 - Test rack creation"
+- name: 1 - Test rack creation
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
- site: "Test Site"
- location: "Test Rack Group"
+ name: Test rack one
+ site: Test Site
+ location: Test Rack Group
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -23,34 +23,34 @@
- test_one['rack']['name'] == "Test rack one"
- test_one['rack']['site'] == 1
-- name: "Test duplicate rack"
+- name: Test duplicate rack
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
+ name: Test rack one
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack']['name'] == "Test rack one"
- test_two['rack']['site'] == 1
- test_two['msg'] == "rack Test rack one already exists"
-- name: "3 - Create new rack with similar name"
+- name: 3 - Create new rack with similar name
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack - Test Site
site: Test Site
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -59,53 +59,53 @@
- test_three['rack']['site'] == 1
- test_three['msg'] == "rack Test rack - Test Site created"
-- name: "4 - Attempt to create Test rack one again"
+- name: 4 - Attempt to create Test rack one again
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- location: "Test Rack Group"
+ location: Test Rack Group
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- not test_four['changed']
- test_four['rack']['name'] == "Test rack one"
- test_four['rack']['site'] == 1
- test_four['msg'] == "rack Test rack one already exists"
-- name: "5 - Update Test rack one with more options"
+- name: 5 - Update Test rack one with more options
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- rack_role: "Test Rack Role"
- location: "Test Rack Group"
- facility_id: "EQUI10291"
- tenant: "Test Tenant"
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
status: Available
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
width: 23
u_height: 48
- type: "2-post frame"
+ type: 2-post frame
outer_width: 32
outer_depth: 24
- outer_unit: "Inches"
- comments: "Just testing rack module"
+ outer_unit: Inches
+ comments: Just testing rack module
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['asset_tag'] == "1234"
@@ -141,34 +141,34 @@
- test_five['rack']['width'] == 23
- test_five['msg'] == "rack Test rack one updated"
-- name: "6 - Update Test rack one with same options"
+- name: 6 - Update Test rack one with same options
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack one
site: Test Site
- rack_role: "Test Rack Role"
- location: "Test Rack Group"
- facility_id: "EQUI10291"
- tenant: "Test Tenant"
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
status: Available
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
width: 23
u_height: 48
- type: "2-post frame"
+ type: 2-post frame
outer_width: 32
outer_depth: 24
- outer_unit: "Inches"
- comments: "Just testing rack module"
+ outer_unit: Inches
+ comments: Just testing rack module
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is not changed
- test_six['rack']['name'] == "Test rack one"
@@ -189,36 +189,36 @@
- test_six['rack']['u_height'] == 48
- test_six['rack']['width'] == 23
-- name: "7 - Create rack with same asset tag and serial number"
+- name: 7 - Create rack with same asset tag and serial number
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test rack two
site: Test Site
- serial: "FXS10001"
+ serial: FXS10001
asset_tag: "1234"
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is failed
- "'asset tag already exists' in test_seven['msg']"
-- name: "8 - Test delete"
+- name: 8 - Test delete
netbox.netbox.netbox_rack:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test rack one"
- state: "absent"
+ name: Test rack one
+ state: absent
register: test_eight
-- name: "8 - ASSERT"
- assert:
+- name: 8 - ASSERT
+ ansible.builtin.assert:
that:
- test_eight is changed
- test_eight['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_group.yml
index 6a10cbf94..40dba8408 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_group.yml
@@ -7,7 +7,7 @@
- name: "RACK_GROUP 1: Necessary info creation"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
site: Test Site
@@ -15,7 +15,7 @@
register: test_one
- name: "RACK_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,7 +28,7 @@
- name: "RACK_GROUP 2: Create duplicate"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
site: Test Site
@@ -36,7 +36,7 @@
register: test_two
- name: "RACK_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack_group']['name'] == "Rack Group"
@@ -47,14 +47,14 @@
- name: "RACK_GROUP 3: ASSERT - Delete"
netbox.netbox.netbox_rack_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Group
state: absent
register: test_three
- name: "RACK_GROUP 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_role.yml
index 5ca9e44eb..ff0d6b4da 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_role.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rack_role.yml
@@ -7,15 +7,15 @@
- name: "RACK_ROLE 1: Necessary info creation"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
- color: "ffffff"
+ color: ffffff
state: present
register: test_one
- name: "RACK_ROLE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,14 +28,14 @@
- name: "RACK_ROLE 2: Create duplicate"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
state: present
register: test_two
- name: "RACK_ROLE 1: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rack_role']['name'] == "Rack Role"
@@ -46,7 +46,7 @@
- name: "RACK_ROLE 3: Update"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
color: "003EFF"
@@ -54,7 +54,7 @@
register: test_three
- name: "RACK_ROLE 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "003eff"
@@ -66,14 +66,14 @@
- name: "RACK_ROLE 4: Delete"
netbox.netbox.netbox_rack_role:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rack Role
state: absent
register: test_four
- name: "RACK_ROLE 4: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port.yml
index 10eb23098..d186233be 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port.yml
@@ -10,7 +10,7 @@
- name: "REAR_PORT 1: Necessary info creation"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -19,7 +19,7 @@
register: test_one
- name: "REAR_PORT 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,7 +32,7 @@
- name: "REAR_PORT 2: Create duplicate"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -41,7 +41,7 @@
register: test_two
- name: "REAR_PORT 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rear_port']['name'] == "Rear Port"
@@ -52,7 +52,7 @@
- name: "REAR_PORT 3: Update Rear Port with other fields"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: test100
@@ -63,7 +63,7 @@
register: test_three
- name: "REAR_PORT 3: ASSERT - Update Rear Port with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['positions'] == 5
@@ -78,7 +78,7 @@
- name: "REAR_PORT 4: Create Rear Port for Delete Test"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port 2
device: test100
@@ -87,7 +87,7 @@
register: test_four
- name: "REAR_PORT 4: ASSERT - Create Rear Port for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -100,7 +100,7 @@
- name: "REAR_PORT 5: Delete Rear Port"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port 2
device: test100
@@ -109,7 +109,7 @@
register: test_five
- name: "REAR_PORT 5: ASSERT - Delete Rear Port"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -119,7 +119,7 @@
- name: "REAR_PORT 6: Create second Rear Port"
netbox.netbox.netbox_rear_port:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port
device: Test Nexus One
@@ -128,7 +128,7 @@
register: test_six
- name: "REAR_PORT 6: ASSERT - Create second Rear Port"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml
index ca867619a..5342627d0 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rear_port_template.yml
@@ -10,7 +10,7 @@
- name: "REAR_PORT_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -19,7 +19,7 @@
register: test_one
- name: "REAR_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -32,7 +32,7 @@
- name: "REAR_PORT_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -41,7 +41,7 @@
register: test_two
- name: "REAR_PORT_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rear_port_template']['name'] == "Rear Port Template"
@@ -52,7 +52,7 @@
- name: "REAR_PORT_TEMPLATE 3: Update Rear Port Template with other fields"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template
device_type: Cisco Test
@@ -62,7 +62,7 @@
register: test_three
- name: "REAR_PORT_TEMPLATE 3: ASSERT - Update Rear Port Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['positions'] == 5
@@ -75,7 +75,7 @@
- name: "REAR_PORT_TEMPLATE 4: Create Rear Port Template for Delete Test"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Cisco Test
@@ -84,7 +84,7 @@
register: test_four
- name: "REAR_PORT_TEMPLATE 4: ASSERT - Create Rear Port Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -97,7 +97,7 @@
- name: "REAR_PORT_TEMPLATE 5: Delete Rear Port Template"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Cisco Test
@@ -106,7 +106,7 @@
register: test_five
- name: "REAR_PORT_TEMPLATE 5: ASSERT - Delete Rear Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
@@ -116,7 +116,7 @@
- name: "REAR_PORT_TEMPLATE 6: Create second Rear Port Template"
netbox.netbox.netbox_rear_port_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Rear Port Template 2
device_type: Arista Test
@@ -125,7 +125,7 @@
register: test_six
- name: "REAR_PORT_TEMPLATE 6: ASSERT - Create second Rear Port Template"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_region.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_region.yml
index 1359598af..0395db435 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_region.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_region.yml
@@ -7,14 +7,14 @@
- name: "REGION 1: Necessary info creation"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: present
register: test_one
- name: "REGION 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "REGION 2: Create duplicate"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: present
register: test_two
- name: "REGION 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['region']['name'] == "Test Region One"
@@ -43,15 +43,15 @@
- name: "REGION 3: ASSERT - Update"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
- parent_region: "Test Region"
+ name: Test Region One
+ parent_region: Test Region
state: present
register: test_three
- name: "REGION 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 1
@@ -63,14 +63,14 @@
- name: "REGION 4: ASSERT - Delete"
netbox.netbox.netbox_region:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Region One"
+ name: Test Region One
state: absent
register: test_four
- name: "REGION 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['region']['name'] == "Test Region One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rir.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rir.yml
index cc4935938..3a7bf79fa 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rir.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_rir.yml
@@ -7,14 +7,14 @@
- name: "RIR 1: Necessary info creation"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test RIR One
state: present
register: test_one
- name: "RIR 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "RIR 2: Create duplicate"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test RIR One
state: present
register: test_two
- name: "RIR 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['rir']['name'] == "Test RIR One"
@@ -43,15 +43,15 @@
- name: "RIR 3: ASSERT - Update"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test RIR One"
+ name: Test RIR One
is_private: true
state: present
register: test_three
- name: "RIR 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['is_private'] == true
@@ -63,14 +63,14 @@
- name: "RIR 4: ASSERT - Delete"
netbox.netbox.netbox_rir:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test RIR One"
+ name: Test RIR One
state: absent
register: test_four
- name: "RIR 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['rir']['name'] == "Test RIR One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_route_target.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_route_target.yml
index 354978dc2..c2576c4cc 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_route_target.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_route_target.yml
@@ -4,18 +4,18 @@
- name: "NETBOX_ROUTE_TARGET_ADD: Check Mode - Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Check Mode - Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -29,17 +29,17 @@
- name: "NETBOX_ROUTE_TARGET_ADD: Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -53,17 +53,17 @@
- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
+ name: 65000:65001
+ tenant: Test Tenant
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['route_target']['name'] == "65000:65001"
@@ -76,19 +76,19 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE: Check Mode - Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- description: "NEW DESCRIPTION"
- tenant: "Test Tenant"
+ name: 65000:65001
+ description: NEW DESCRIPTION
+ tenant: Test Tenant
tags:
- first
- second
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Check Mode - Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -103,18 +103,18 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE: Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
- description: "NEW DESCRIPTION"
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['route_target']['name'] == "65000:65001"
@@ -129,18 +129,18 @@
- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
- tenant: "Test Tenant"
- description: "NEW DESCRIPTION"
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
tags:
- first
- second
register: test_results
- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['route_target']['name'] == "65000:65001"
@@ -154,15 +154,15 @@
- name: "NETBOX_ROUTE_TARGET_DELETE: Check Mode - Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
- check_mode: yes
+ check_mode: true
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Check Mode - Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['diff']['after']['state'] == "absent"
@@ -173,14 +173,14 @@
- name: "NETBOX_ROUTE_TARGET_DELETE: Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is changed
- test_results['diff']['after']['state'] == "absent"
@@ -191,14 +191,14 @@
- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
netbox.netbox.netbox_route_target:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "65000:65001"
+ name: 65000:65001
state: absent
register: test_results
- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['msg'] == "route_target 65000:65001 already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service.yml
index 09a993c3d..d937e50a6 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service.yml
@@ -4,32 +4,32 @@
### NETBOX_SERVICE
##
##
-- name: "1 - Device with required information needs to add new service"
+- name: 1 - Device with required information needs to add new service
netbox.netbox.netbox_device:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "FOR_SERVICE"
- device_type: "Cisco Test"
- device_role: "Core Switch"
- site: "Test Site"
- status: "Staged"
+ name: FOR_SERVICE
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
state: present
- name: "NETBOX_SERVICE: Create new service"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_create
-- name: "NETBOX_SERVICE ASSERT - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
that:
- test_service_create is changed
- test_service_create['services']['name'] == "node-exporter"
@@ -41,18 +41,18 @@
- name: "NETBOX_SERVICE: Test idempotence"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_idempotence
-- name: "NETBOX_SERVICE ASSERT - Not changed"
- assert:
+- name: NETBOX_SERVICE ASSERT - Not changed
+ ansible.builtin.assert:
that:
- test_service_idempotence['services']['name'] == "node-exporter"
- test_service_idempotence['services']['ports'] == [9100]
@@ -61,22 +61,22 @@
- name: "NETBOX_SERVICE: Test update"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
ports:
- 9100
- 9200
protocol: TCP
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_service_update
-- name: "NETBOX_SERVICE ASSERT - Service has been updated"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been updated
+ ansible.builtin.assert:
that:
- test_service_update is changed
- test_service_update['diff']['after']['tags'][0] == 4
@@ -85,18 +85,18 @@
- name: "NETBOX_SERVICE: Test same details, but different protocol - Create"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: UDP
state: present
register: test_service_protocol
-- name: "NETBOX_SERVICE ASSERT - Different protocol - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Different protocol - Create
+ ansible.builtin.assert:
that:
- test_service_protocol is changed
- test_service_protocol['diff']['after']['state'] == "present"
@@ -108,18 +108,18 @@
- name: "NETBOX_SERVICE: Test service deletion"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "FOR_SERVICE"
- name: "node-exporter"
+ device: FOR_SERVICE
+ name: node-exporter
port: 9100
protocol: UDP
state: absent
register: test_service_delete
-- name: "NETBOX_SERVICE ASSERT - Service has been deleted"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been deleted
+ ansible.builtin.assert:
that:
- test_service_delete is changed
- test_service_delete['diff']['after']['state'] == "absent"
@@ -128,20 +128,20 @@
- name: "NETBOX_SERVICE: Test service IP addresses"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- device: "test100"
- name: "node-exporter"
+ device: test100
+ name: node-exporter
port: 9100
protocol: UDP
ipaddresses:
- - address: "172.16.180.1/24"
+ - address: 172.16.180.1/24
state: present
register: test_service_ip_addresses
-- name: "NETBOX_SERVICE ASSERT - Service has been created with IP address"
- assert:
+- name: NETBOX_SERVICE ASSERT - Service has been created with IP address
+ ansible.builtin.assert:
that:
- test_service_ip_addresses is changed
- test_service_ip_addresses['diff']['after']['state'] == "present"
@@ -154,38 +154,38 @@
- name: "NETBOX_SERVICE: Missing both device & virtual_machine options - Tests required_one_of"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "node-exporter"
+ name: node-exporter
port: 9100
protocol: UDP
ipaddresses:
- - address: "172.16.180.1/24"
+ - address: 172.16.180.1/24
state: present
- ignore_errors: yes
+ ignore_errors: true
register: test_service_required_one_of
-- name: "NETBOX_SERVICE ASSERT - Failed due to missing arguments"
- assert:
+- name: NETBOX_SERVICE ASSERT - Failed due to missing arguments
+ ansible.builtin.assert:
that:
- test_service_required_one_of is failed
- 'test_service_required_one_of["msg"] == "one of the following is required: device, virtual_machine"'
- name: "NETBOX_SERVICE: Create new service on virtual_machine"
netbox.netbox.netbox_service:
- netbox_url: "http://localhost:32768"
+ netbox_url: http://localhost:32768
netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "node-exporter"
+ virtual_machine: test100-vm
+ name: node-exporter
port: 9100
protocol: TCP
state: present
register: test_service_create_vm
-- name: "NETBOX_SERVICE ASSERT - Create"
- assert:
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
that:
- test_service_create_vm is changed
- test_service_create_vm['services']['name'] == "node-exporter"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service_template.yml
index f64509acf..507a566a4 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service_template.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_service_template.yml
@@ -7,7 +7,7 @@
- name: "SERVICE_TEMPLATE 1: Necessary info creation"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -17,7 +17,7 @@
register: test_one
- name: "SERVICE_TEMPLATE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,7 +30,7 @@
- name: "SERVICE_TEMPLATE 2: Create duplicate"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -40,7 +40,7 @@
register: test_two
- name: "SERVICE_TEMPLATE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['service_template']['name'] == "Service Template for SSH"
@@ -51,7 +51,7 @@
- name: "SERVICE_TEMPLATE 3: Update Service Template with other fields"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for SSH
ports:
@@ -62,7 +62,7 @@
register: test_three
- name: "SERVICE_TEMPLATE 3: ASSERT - Update Service Template with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['comments'] == "For SSH service"
@@ -75,7 +75,7 @@
- name: "SERVICE_TEMPLATE 4: Create Service Template for Delete Test"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for DNS
ports:
@@ -86,7 +86,7 @@
register: test_four
- name: "SERVICE_TEMPLATE 4: ASSERT - Create Service Template for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -100,14 +100,14 @@
- name: "SERVICE_TEMPLATE 5: Delete Service Template"
netbox.netbox.netbox_service_template:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Service Template for DNS
state: absent
register: test_five
- name: "SERVICE_TEMPLATE 5: ASSERT - Delete Service Template"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site.yml
index 6960eeb3d..a5861522e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site.yml
@@ -4,17 +4,17 @@
### NETBOX_SITE
##
##
-- name: "1 - Create site within NetBox with only required information"
+- name: 1 - Create site within NetBox with only required information
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: present
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,26 +22,26 @@
- test_one['site']['name'] == "Test - Colorado"
- test_one['msg'] == "site Test - Colorado created"
-- name: "2 - Duplicate"
+- name: 2 - Duplicate
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: present
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "site Test - Colorado already exists"
- test_two['site']['name'] == "Test - Colorado"
-- name: "3 - Update Test - Colorado"
+- name: 3 - Update Test - Colorado
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
status: Planned
@@ -49,8 +49,8 @@
state: present
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
@@ -60,10 +60,10 @@
- test_three['site']['status'] == "planned"
- test_three['site']['region'] == 1
-- name: "4 - Create site with all parameters"
+- name: 4 - Create site with all parameters
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - California
status: Planned
@@ -78,12 +78,12 @@
latitude: "22.169141"
longitude: "-100.994041"
comments: "### Placeholder"
- slug: "test_california"
+ slug: test_california
state: present
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -107,7 +107,7 @@
- name: "NETBOX_SITE_IDEM: Idempotency - Create duplicate site with all parameters"
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - California
status: Planned
@@ -122,12 +122,12 @@
latitude: "22.169141"
longitude: "-100.994041"
comments: "### Placeholder"
- slug: "test_california"
+ slug: test_california
state: present
register: test_results
- name: "NETBOX_SITE_IDEM: (ASSERT) Idempotency - Duplicate device site with all parameters"
- assert:
+ ansible.builtin.assert:
that:
- test_results is not changed
- test_results['site']['name'] == "Test - California"
@@ -146,17 +146,17 @@
- test_results['site']['comments'] == "### Placeholder"
- test_results['site']['slug'] == "test_california"
-- name: "5 - Delete site within netbox"
+- name: 5 - Delete site within netbox
netbox.netbox.netbox_site:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test - Colorado
state: absent
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site_group.yml
index 09e2df4c6..11b9cbb24 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_site_group.yml
@@ -7,14 +7,14 @@
- name: "SITE_GROUP 1: Necessary info creation"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: present
register: test_one
- name: "SITE_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -26,14 +26,14 @@
- name: "SITE_GROUP 2: Create duplicate"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: present
register: test_two
- name: "SITE_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['site_group']['name'] == "Site Group"
@@ -43,7 +43,7 @@
- name: "SITE_GROUP 3: Update"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
parent_site_group: Test Site Group
@@ -52,7 +52,7 @@
register: test_three
- name: "SITE_GROUP 3: ASSERT - Update"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['parent'] == 4
@@ -66,14 +66,14 @@
- name: "SITE_GROUP 4: Delete"
netbox.netbox.netbox_site_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Site Group
state: absent
register: test_four
- name: "SITE_GROUP 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tag.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tag.yml
index 38c4157c2..f3fcbac39 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tag.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tag.yml
@@ -7,16 +7,16 @@
- name: "TAG 1: ASSERT - Necessary info creation"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 test"
+ name: Test Tag 1
+ description: Tag 1 test
color: "0000ff"
state: present
register: test_one
- name: "TAG 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -30,16 +30,16 @@
- name: "TAG 2: Create duplicate"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 test"
+ name: Test Tag 1
+ description: Tag 1 test
color: "0000ff"
state: present
register: test_two
- name: "TAG 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tags']['name'] == "Test Tag 1"
@@ -48,16 +48,16 @@
- name: "TAG 3: ASSERT - Update"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
- description: "Tag 1 update test"
+ name: Test Tag 1
+ description: Tag 1 update test
color: "00ff00"
state: present
register: test_three
- name: "TAG 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['color'] == "00ff00"
@@ -70,14 +70,14 @@
- name: "TAG 4: ASSERT - Delete"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 1"
+ name: Test Tag 1
state: absent
register: test_four
- name: "TAG 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['state'] == "absent"
@@ -88,17 +88,17 @@
- name: "TAG 5: ASSERT - Necessary info creation"
netbox.netbox.netbox_tag:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tag 5"
- slug: "test-tag-five"
- description: "Tag 5 test"
+ name: Test Tag 5
+ slug: test-tag-five
+ description: Tag 5 test
color: "0000ff"
state: present
register: test_five
- name: "TAG 5: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant.yml
index 7c9af8b9f..0731e0268 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant.yml
@@ -4,16 +4,16 @@
### NETBOX_TENANT
##
##
-- name: "1 - Test tenant creation"
+- name: 1 - Test tenant creation
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
+ name: Tenant ABC
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,33 +22,33 @@
- test_one['tenant']['slug'] == "tenant-abc"
- test_one['msg'] == "tenant Tenant ABC created"
-- name: "Test duplicate tenant"
+- name: Test duplicate tenant
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
+ name: Tenant ABC
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tenant']['name'] == "Tenant ABC"
- test_two['tenant']['slug'] == "tenant-abc"
- test_two['msg'] == "tenant Tenant ABC already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- description: "Updated description"
+ name: Tenant ABC
+ description: Updated description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "Updated description"
@@ -57,42 +57,42 @@
- test_three['tenant']['description'] == "Updated description"
- test_three['msg'] == "tenant Tenant ABC updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- state: "absent"
+ name: Tenant ABC
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "tenant Tenant ABC deleted"
-- name: "5 - Create tenant with all parameters"
+- name: 5 - Create tenant with all parameters
netbox.netbox.netbox_tenant:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Tenant ABC"
- description: "ABC Incorporated"
+ name: Tenant ABC
+ description: ABC Incorporated
comments: "### This tenant is super cool"
- tenant_group: "Test Tenant Group"
- slug: "tenant_abc"
+ tenant_group: Test Tenant Group
+ slug: tenant_abc
tags:
- - "tagA"
- - "tagB"
- - "tagC"
+ - tagA
+ - tagB
+ - tagC
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml
index 4113a2260..f7f57f3c2 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_tenant_group.yml
@@ -4,16 +4,16 @@
### NETBOX_TENANT_GROUP
##
##
-- name: "1 - Test tenant group creation"
+- name: 1 - Test tenant group creation
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
+ name: Test Tenant Group Two
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -22,50 +22,50 @@
- test_one['tenant_group']['slug'] == "test-tenant-group-two"
- test_one['msg'] == "tenant_group Test Tenant Group Two created"
-- name: "Test duplicate tenant group"
+- name: Test duplicate tenant group
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
+ name: Test Tenant Group Two
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['tenant_group']['name'] == "Test Tenant Group Two"
- test_two['tenant_group']['slug'] == "test-tenant-group-two"
- test_two['msg'] == "tenant_group Test Tenant Group Two already exists"
-- name: "3 - Test delete"
+- name: 3 - Test delete
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group Two"
- state: "absent"
+ name: Test Tenant Group Two
+ state: absent
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "present"
- test_three['diff']['after']['state'] == "absent"
- test_three['msg'] == "tenant_group Test Tenant Group Two deleted"
-- name: "4 - Test tenant group creation with custom slug"
+- name: 4 - Test tenant group creation with custom slug
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group ABC"
- slug: "test_tenant_group_four"
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -74,17 +74,17 @@
- test_four['tenant_group']['slug'] == "test_tenant_group_four"
- test_four['msg'] == "tenant_group Test Tenant Group ABC created"
-- name: "5 - Test child tenant group creation"
+- name: 5 - Test child tenant group creation
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Child Test Tenant Group"
+ name: Child Test Tenant Group
parent_tenant_group: "{{ test_four.tenant_group.slug }}"
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -93,35 +93,35 @@
- test_five['tenant_group']['parent'] == test_four.tenant_group.id
- test_five['msg'] == "tenant_group Child Test Tenant Group created"
-- name: "6 - Test child tenant group deletion"
+- name: 6 - Test child tenant group deletion
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Child Test Tenant Group"
- state: "absent"
+ name: Child Test Tenant Group
+ state: absent
register: test_six
-- name: "6 - ASSERT"
- assert:
+- name: 6 - ASSERT
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['diff']['before']['state'] == "present"
- test_six['diff']['after']['state'] == "absent"
- test_six['msg'] == "tenant_group Child Test Tenant Group deleted"
-- name: "7 - Test deletion of the tenant group with custom slug"
+- name: 7 - Test deletion of the tenant group with custom slug
netbox.netbox.netbox_tenant_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test Tenant Group ABC"
- slug: "test_tenant_group_four"
- state: "absent"
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
+ state: absent
register: test_seven
-- name: "7 - ASSERT"
- assert:
+- name: 7 - ASSERT
+ ansible.builtin.assert:
that:
- test_seven is changed
- test_seven['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml
index 04ada8411..395be8395 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_chassis.yml
@@ -10,7 +10,7 @@
- name: "VIRTUAL_CHASSIS 0: Create device for testing virtual chassis"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Virtual Chassis Tests
device_type: Cisco Test
@@ -23,15 +23,15 @@
- name: "VIRTUAL_CHASSIS 1: Necessary info creation"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
state: present
register: test_one
- name: "VIRTUAL_CHASSIS 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -43,15 +43,15 @@
- name: "VIRTUAL_CHASSIS 2: Create duplicate"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
state: present
register: test_two
- name: "VIRTUAL_CHASSIS 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_chassis']['master'] == 11
@@ -61,16 +61,16 @@
- name: "POWER_FEED 3: Update virtual_chassis with other fields"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "First VC"
+ name: First VC
master: Device Virtual Chassis Tests
domain: Domain Text
state: present
register: test_three
- name: "POWER_FEED 3: ASSERT - Update virtual_chassis with other fields"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['domain'] == "Domain Text"
@@ -82,7 +82,7 @@
- name: "VIRTUAL_CHASSIS 4: Create device for testing virtual chassis deletion"
netbox.netbox.netbox_device:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Device Virtual Chassis Tests 2
device_type: Cisco Test
@@ -95,15 +95,15 @@
- name: "VIRTUAL_CHASSIS 4: Create Virtual Chassis for Delete Test"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Second VC"
+ name: Second VC
master: Device Virtual Chassis Tests 2
state: present
register: test_four
- name: "VIRTUAL_CHASSIS 4: ASSERT - Create Virtual Chassis for Delete Test"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "absent"
@@ -115,15 +115,15 @@
- name: "VIRTUAL_CHASSIS 5: Delete Virtual Chassis"
netbox.netbox.netbox_virtual_chassis:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Second VC"
+ name: Second VC
master: Device Virtual Chassis Tests 2
state: absent
register: test_five
- name: "VIRTUAL_CHASSIS 5: ASSERT - Delete Virtual Chassis"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "present"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml
index 999d3edc3..5b17eaddf 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_disk.yml
@@ -7,16 +7,16 @@
- name: "NETBOX_VIRTUAL_DISK 1: Necessary info creation"
netbox.netbox.netbox_virtual_disk:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "disk0"
+ virtual_machine: test100-vm
+ name: disk0
size: 50
state: present
register: test_one
- name: "NETBOX_VIRTUAL_DISK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -28,16 +28,16 @@
- name: "NETBOX_VIRTUAL_DISK 2: Create duplicate"
netbox.netbox.netbox_virtual_disk:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "disk0"
+ virtual_machine: test100-vm
+ name: disk0
size: 50
state: present
register: test_two
- name: "NETBOX_VIRTUAL_DISK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_disk']['name'] == "disk0"
@@ -47,18 +47,18 @@
- name: "NETBOX_VIRTUAL_DISK 3: Update"
netbox.netbox.netbox_virtual_disk:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "disk0"
+ virtual_machine: test100-vm
+ name: disk0
size: 60
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "NETBOX_VIRTUAL_DISK 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['size'] == 60
@@ -71,15 +71,15 @@
- name: "NETBOX_VIRTUAL_DISK 4: ASSERT - Delete"
netbox.netbox.netbox_virtual_disk:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "disk0"
- virtual_machine: "test100-vm"
+ name: disk0
+ virtual_machine: test100-vm
state: absent
register: test_four
- name: "NETBOX_VIRTUAL_DISK 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['virtual_disk']['name'] == "disk0"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml
index 79d3882bc..3fb0e4319 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_virtual_machine.yml
@@ -7,15 +7,15 @@
- name: "VIRTUAL_MACHINE 1: Necessary info creation"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
state: present
register: test_one
- name: "VIRTUAL_MACHINE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "VIRTUAL_MACHINE 2: Create duplicate"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
state: present
register: test_two
- name: "VIRTUAL_MACHINE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['virtual_machine']['name'] == "Test VM One"
@@ -45,21 +45,21 @@
- name: "VIRTUAL_MACHINE 3: Update"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
vcpus: 8.5
memory: 8
- status: "Planned"
- virtual_machine_role: "Test VM Role"
+ status: Planned
+ virtual_machine_role: Test VM Role
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "VIRTUAL_MACHINE 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['vcpus'] == 8.5
@@ -79,21 +79,21 @@
- name: "VIRTUAL_MACHINE 4: Test idempotence"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
- cluster: "Test Cluster"
+ name: Test VM One
+ cluster: Test Cluster
vcpus: 8.5
memory: 8
- status: "Planned"
- virtual_machine_role: "Test VM Role"
+ status: Planned
+ virtual_machine_role: Test VM Role
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four_idempotence
- name: "VIRTUAL_MACHINE 4: ASSERT - Not changed"
- assert:
+ ansible.builtin.assert:
that:
- test_four_idempotence is not changed
- test_four_idempotence['virtual_machine']['name'] == "Test VM One"
@@ -108,14 +108,14 @@
- name: "VIRTUAL_MACHINE 5: Delete"
netbox.netbox.netbox_virtual_machine:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VM One"
+ name: Test VM One
state: absent
register: test_five
- name: "VIRTUAL_MACHINE 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['virtual_machine']['name'] == "Test VM One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan.yml
index d95abbeca..463873ca5 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan.yml
@@ -7,7 +7,7 @@
- name: "VLAN 1: Necessary info creation"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
@@ -15,7 +15,7 @@
register: test_one
- name: "VLAN 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,7 +27,7 @@
- name: "VLAN 2: Create duplicate"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
@@ -35,7 +35,7 @@
register: test_two
- name: "VLAN 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['vlan']['name'] == "Test VLAN 500"
@@ -45,18 +45,18 @@
- name: "VLAN 3: Create VLAN with same name, but different site"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
site: Test Site
tenant: Test Tenant
- vlan_group: "Test VLAN Group"
+ vlan_group: Test VLAN Group
state: present
register: test_three
- name: "VLAN 3: ASSERT - Create VLAN with same name, but different site"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -70,23 +70,23 @@
- name: "VLAN 4: ASSERT - Update"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
vid: 500
- tenant: "Test Tenant"
- vlan_group: "Test VLAN Group"
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
status: Reserved
vlan_role: Network of care
description: Updated description
- site: "Test Site"
+ site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "VLAN 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['status'] == "reserved"
@@ -106,41 +106,41 @@
- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
vid: 500
- tenant: "Test Tenant"
- vlan_group: "Test VLAN Group"
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
status: Reserved
vlan_role: Network of care
description: Updated description
- site: "Test Site"
+ site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: idempotent_vlan_group
- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
- assert:
+ ansible.builtin.assert:
that:
- idempotent_vlan_group is not changed
- name: "VLAN: Create VLAN with same name, but different vlan_group"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VLAN 500
vid: 500
site: Test Site
tenant: Test Tenant
- vlan_group: "Test VLAN Group 2"
+ vlan_group: Test VLAN Group 2
state: present
register: new_vlan_group
- name: "VLAN: ASSERT - Create VLAN with same name, but different vlan_group"
- assert:
+ ansible.builtin.assert:
that:
- new_vlan_group is changed
- new_vlan_group['diff']['before']['state'] == "absent"
@@ -154,15 +154,15 @@
- name: "VLAN 5: ASSERT - Delete more than one result"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
state: absent
- ignore_errors: yes
+ ignore_errors: true
register: test_five
- name: "VLAN 5: ASSERT - Delete more than one result"
- assert:
+ ansible.builtin.assert:
that:
- test_five is failed
- test_five['msg'] == "More than one result returned for Test VLAN 500"
@@ -170,16 +170,16 @@
- name: "VLAN 6: ASSERT - Delete"
netbox.netbox.netbox_vlan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VLAN 500"
+ name: Test VLAN 500
site: Test Site
- vlan_group: "Test VLAN Group"
+ vlan_group: Test VLAN Group
state: absent
register: test_six
- name: "VLAN 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['vlan']['name'] == "Test VLAN 500"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml
index 2be4fd3df..9dd4d1789 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vlan_group.yml
@@ -7,16 +7,16 @@
- name: "VLAN_GROUP 1: Necessary info creation"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
+ name: VLAN Group One
+ scope_type: dcim.site
scope: Test Site
state: present
register: results
- name: "VLAN_GROUP 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -30,16 +30,16 @@
- name: "VLAN_GROUP 2: Create duplicate"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
+ name: VLAN Group One
+ scope_type: dcim.site
scope: Test Site
state: present
register: results
- name: "VLAN_GROUP 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not results['changed']
- results['vlan_group']['name'] == "VLAN Group One"
@@ -51,16 +51,16 @@
- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
- scope_type: "dcim.site"
- scope: "Test Site2"
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site2
state: present
register: results
- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['vlan_group']['name'] == "VLAN Group One"
@@ -72,15 +72,15 @@
- name: "VLAN_GROUP 4: ASSERT - Create vlan group, no site"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group One"
+ name: VLAN Group One
state: present
- ignore_errors: yes
+ ignore_errors: true
register: results
- name: "VLAN_GROUP 4: ASSERT - Create with same name, different site"
- assert:
+ ansible.builtin.assert:
that:
- results is failed
- results['msg'] == "More than one result returned for VLAN Group One"
@@ -88,16 +88,16 @@
- name: "VLAN_GROUP 5: ASSERT - Delete"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: VLAN Group One
- scope_type: "dcim.site"
+ scope_type: dcim.site
scope: Test Site2
state: absent
register: results
- name: "VLAN_GROUP 5: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "present"
@@ -111,16 +111,16 @@
- name: "VLAN_GROUP 6: ASSERT - Delete non existing"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: VLAN Group One
- scope_type: "dcim.site"
+ scope_type: dcim.site
scope: Test Site2
state: absent
register: results
- name: "VLAN_GROUP 6: ASSERT - Delete non existing`"
- assert:
+ ansible.builtin.assert:
that:
- not results['changed']
- results['vlan_group'] == None
@@ -129,16 +129,16 @@
- name: "VLAN_GROUP 7: Necessary info creation - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
state: present
register: results
- name: "VLAN_GROUP 7: ASSERT - Necessary info creation - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -152,16 +152,16 @@
- name: "VLAN_GROUP 8: Necessary info creation - scope_type: dcim.rack"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Rack"
- scope_type: "dcim.rack"
+ name: VLAN Group Rack
+ scope_type: dcim.rack
scope: Test Rack
state: present
register: results
- name: "VLAN_GROUP 8: ASSERT - Necessary info creation - scope_type: dcim.rack"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -175,16 +175,16 @@
- name: "VLAN_GROUP 9: Necessary info creation - scope_type: dcim.region"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Region"
- scope_type: "dcim.region"
+ name: VLAN Group Region
+ scope_type: dcim.region
scope: Test Region
state: present
register: results
- name: "VLAN_GROUP 9: ASSERT - Necessary info creation - scope_type: dcim.region"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -198,16 +198,16 @@
- name: "VLAN_GROUP 10: Necessary info creation - scope_type: dcim.sitegroup"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Site Group"
- scope_type: "dcim.sitegroup"
+ name: VLAN Group Site Group
+ scope_type: dcim.sitegroup
scope: Test Site Group
state: present
register: results
- name: "VLAN_GROUP 10: ASSERT - Necessary info creation - scope_type: dcim.sitegroup"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['state'] == "absent"
@@ -219,7 +219,7 @@
- results['msg'] == "vlan_group VLAN Group Site Group created"
# Commented out due to invalid content type being reported back by API
-#- name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
+# - name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
# netbox.netbox.netbox_vlan_group:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -230,7 +230,7 @@
# state: present
# register: results
-#- name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
+# - name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
# assert:
# that:
# - results is changed
@@ -241,8 +241,8 @@
# - results['vlan_group']['scope_type'] == "virtualization.cluster"
# - results['vlan_group']['scope_id'] == 1
# - results['msg'] == "vlan_group VLAN Group Cluster created"
-#
-#- name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
+
+# - name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
# netbox.netbox.netbox_vlan_group:
# netbox_url: http://localhost:32768
# netbox_token: 0123456789abcdef0123456789abcdef01234567
@@ -252,8 +252,8 @@
# scope: Test Cluster Group
# state: present
# register: results
-#
-#- name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
+
+# - name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
# assert:
# that:
# - results is changed
@@ -268,17 +268,17 @@
- name: "VLAN_GROUP 12: Update Description - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
- description: "Ansible updated description"
+ description: Ansible updated description
state: present
register: results
- name: "VLAN_GROUP 12: ASSERT - Update Description - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is changed
- results['diff']['before']['description'] == ""
@@ -293,17 +293,17 @@
- name: "VLAN_GROUP 12: Update Description (IDEM) - scope_type: dcim.location"
netbox.netbox.netbox_vlan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "VLAN Group Location"
- scope_type: "dcim.location"
+ name: VLAN Group Location
+ scope_type: dcim.location
scope: Test Rack Group
- description: "Ansible updated description"
+ description: Ansible updated description
state: present
register: results
- name: "VLAN_GROUP 12: ASSERT - Update Description (IDEM) - scope_type: dcim.location"
- assert:
+ ansible.builtin.assert:
that:
- results is not changed
- results['vlan_group']['name'] == "VLAN Group Location"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml
index 799d04b16..653f31e0e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vm_interface.yml
@@ -7,15 +7,15 @@
- name: "NETBOX_VM_INTERFACE 1: Necessary info creation"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
state: present
register: test_one
- name: "NETBOX_VM_INTERFACE 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,15 +27,15 @@
- name: "NETBOX_VM_INTERFACE 2: Create duplicate"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
state: present
register: test_two
- name: "NETBOX_VM_INTERFACE 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['interface']['name'] == "Eth10"
@@ -45,30 +45,30 @@
- name: "NETBOX_VM_INTERFACE 3: Updated"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth10"
+ virtual_machine: test100-vm
+ name: Eth10
enabled: false
mtu: 9000
mac_address: "00:00:00:AA:AA:01"
- description: "Updated test100-vm"
+ description: Updated test100-vm
mode: Tagged
- #untagged_vlan:
+ # untagged_vlan:
# name: Wireless
# site: Test Site
- #tagged_vlans:
+ # tagged_vlans:
# - name: Data
# site: Test Site
# - name: VoIP
# site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_three
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['enabled'] == false
@@ -76,8 +76,8 @@
- test_three['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
- test_three['diff']['after']['description'] == "Updated test100-vm"
- test_three['diff']['after']['mode'] == "tagged"
- #- test_three['diff']['after']['untagged_vlan'] == 1
- #- test_three['diff']['after']['tagged_vlans'] == [2, 3]
+ # - test_three['diff']['after']['untagged_vlan'] == 1
+ # - test_three['diff']['after']['tagged_vlans'] == [2, 3]
- test_three['diff']['after']['tags'][0] == 4
- test_three['interface']['name'] == "Eth10"
- test_three['interface']['virtual_machine'] == 1
@@ -86,23 +86,23 @@
- test_three['interface']['mac_address'] == "00:00:00:AA:AA:01"
- test_three['interface']['description'] == "Updated test100-vm"
- test_three['interface']['mode'] == "tagged"
- #- test_three['interface']['untagged_vlan'] == 1
- #- test_three['interface']['tagged_vlans'] == [2, 3]
+ # - test_three['interface']['untagged_vlan'] == 1
+ # - test_three['interface']['tagged_vlans'] == [2, 3]
- test_three['interface']['tags'][0] == 4
- test_three['msg'] == "interface Eth10 updated"
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Eth10"
- virtual_machine: "test100-vm"
+ name: Eth10
+ virtual_machine: test100-vm
state: absent
register: test_four
- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['interface']['name'] == "Eth10"
@@ -112,30 +112,30 @@
- name: "NETBOX_VM_INTERFACE 5: Attempt to update interface with same name on other VMs"
netbox.netbox.netbox_vm_interface:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- virtual_machine: "test100-vm"
- name: "Eth0"
+ virtual_machine: test100-vm
+ name: Eth0
enabled: false
mtu: 9000
mac_address: "00:00:00:AA:AA:01"
- description: "Updated test100-vm Eth0 intf"
+ description: Updated test100-vm Eth0 intf
mode: Tagged
- #untagged_vlan:
+ # untagged_vlan:
# name: Wireless
# site: Test Site
- #tagged_vlans:
+ # tagged_vlans:
# - name: Data
# site: Test Site
# - name: VoIP
# site: Test Site
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_five
- name: "NETBOX_VM_INTERFACE 5: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['enabled'] == false
@@ -143,8 +143,8 @@
- test_five['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
- test_five['diff']['after']['description'] == "Updated test100-vm Eth0 intf"
- test_five['diff']['after']['mode'] == "tagged"
- #- test_five['diff']['after']['untagged_vlan'] == 1
- #- test_five['diff']['after']['tagged_vlans'] == [2, 3]
+ # - test_five['diff']['after']['untagged_vlan'] == 1
+ # - test_five['diff']['after']['tagged_vlans'] == [2, 3]
- test_five['diff']['after']['tags'][0] == 4
- test_five['interface']['name'] == "Eth0"
- test_five['interface']['virtual_machine'] == 1
@@ -153,7 +153,7 @@
- test_five['interface']['mac_address'] == "00:00:00:AA:AA:01"
- test_five['interface']['description'] == "Updated test100-vm Eth0 intf"
- test_five['interface']['mode'] == "tagged"
- #- test_five['interface']['untagged_vlan'] == 1
- #- test_five['interface']['tagged_vlans'] == [2, 3]
+ # - test_five['interface']['untagged_vlan'] == 1
+ # - test_five['interface']['tagged_vlans'] == [2, 3]
- test_five['interface']['tags'][0] == 4
- test_five['msg'] == "interface Eth0 updated"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vrf.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vrf.yml
index abf9bab0e..1da7ae382 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vrf.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_vrf.yml
@@ -7,14 +7,14 @@
- name: "VRF 1: Necessary info creation"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
state: present
register: test_one
- name: "VRF 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -25,14 +25,14 @@
- name: "VRF 2: Create duplicate"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
state: present
register: test_two
- name: "VRF 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['vrf']['name'] == "Test VRF One"
@@ -41,7 +41,7 @@
- name: "VRF 3: Create VRF with same name, but different tenant"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Test VRF One
tenant: Test Tenant
@@ -49,7 +49,7 @@
register: test_three
- name: "VRF 3: ASSERT - Create VRF with same name, but different site"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['before']['state'] == "absent"
@@ -61,25 +61,25 @@
- name: "VRF 4: ASSERT - Update"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
rd: "65001:1"
- enforce_unique: False
- tenant: "Test Tenant"
+ enforce_unique: false
+ tenant: Test Tenant
description: Updated description
import_targets:
- - "4000:4000"
- - "5000:5000"
+ - 4000:4000
+ - 5000:5000
export_targets:
- - "5000:5000"
+ - 5000:5000
tags:
- - "Schnozzberry"
+ - Schnozzberry
state: present
register: test_four
- name: "VRF 4: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['rd'] == "65001:1"
@@ -101,15 +101,15 @@
- name: "VRF 5: ASSERT - Delete more than one result"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
state: absent
- ignore_errors: yes
+ ignore_errors: true
register: test_five
- name: "VRF 5: ASSERT - Delete more than one result"
- assert:
+ ansible.builtin.assert:
that:
- test_five is failed
- test_five['msg'] == "More than one result returned for Test VRF One"
@@ -117,15 +117,15 @@
- name: "VRF 6: ASSERT - Delete"
netbox.netbox.netbox_vrf:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Test VRF One"
+ name: Test VRF One
tenant: Test Tenant
state: absent
register: test_six
- name: "VRF 6: ASSERT - Delete"
- assert:
+ ansible.builtin.assert:
that:
- test_six is changed
- test_six['vrf']['name'] == "Test VRF One"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_webhook.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_webhook.yml
index f2f5d517e..a3d5a3825 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_webhook.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_webhook.yml
@@ -7,12 +7,12 @@
- name: "WEBHOOK 1: Necessary info creation"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url
body_template: !unsafe >-
{{ data }}
@@ -20,7 +20,7 @@
register: test_one
- name: "WEBHOOK 1: ASSERT - Necessary info creation"
- assert:
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -34,12 +34,12 @@
- name: "WEBHOOK 2: Create duplicate"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
+ - dcim.device
name: Example Webhook
- type_create: yes
+ type_create: true
payload_url: https://payload.url
body_template: !unsafe >-
{{ data }}
@@ -47,7 +47,7 @@
register: test_two
- name: "WEBHOOK 2: ASSERT - Create duplicate"
- assert:
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['webhook']['name'] == "Example Webhook"
@@ -56,12 +56,12 @@
- name: "WEBHOOK 3: Update data and add on delete"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "dcim.device"
- type_create: yes
- type_delete: yes
+ - dcim.device
+ type_create: true
+ type_delete: true
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -70,7 +70,7 @@
register: test_three
- name: "WEBHOOK 3: ASSERT - Updated"
- assert:
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['webhook']['name'] == "Example Webhook"
@@ -79,10 +79,10 @@
- name: "WEBHOOK 4: Change content type"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -91,7 +91,7 @@
register: test_four
- name: "WEBHOOK 4: ASSERT - Change content type"
- assert:
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
@@ -101,10 +101,10 @@
- name: "WEBHOOK 5: Delete"
netbox.netbox.netbox_webhook:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
content_types:
- - "virtualization.virtualmachine"
+ - virtualization.virtualmachine
name: Example Webhook
payload_url: https://payload.url
body_template: !unsafe >-
@@ -113,7 +113,7 @@
register: test_five
- name: "WEBHOOK 5: ASSERT - Deleted"
- assert:
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['after']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml
index e59af42ee..e70fd3e59 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan.yml
@@ -4,16 +4,16 @@
### NETBOX_WIRELESS_LAN
##
##
-- name: "1 - Test wireless LAN creation"
+- name: 1 - Test wireless LAN creation
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
+ ssid: Wireless LAN One
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['wireless_lan']['ssid'] == "Wireless LAN One"
- test_one['msg'] == "wireless_lan Wireless LAN One created"
-- name: "Test duplicate wireless LAN"
+- name: Test duplicate wireless LAN
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
+ ssid: Wireless LAN One
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['wireless_lan']['ssid'] == "Wireless LAN One"
- test_two['msg'] == "wireless_lan Wireless LAN One already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
- description: "New Description"
+ ssid: Wireless LAN One
+ description: New Description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "New Description"
@@ -54,27 +54,27 @@
- test_three['wireless_lan']['description'] == "New Description"
- test_three['msg'] == "wireless_lan Wireless LAN One updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- ssid: "Wireless LAN One"
- state: "absent"
+ ssid: Wireless LAN One
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_lan Wireless LAN One deleted"
-- name: "5 - Create wireless LAN with all parameters"
+- name: 5 - Create wireless LAN with all parameters
netbox.netbox.netbox_wireless_lan:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
ssid: Wireless Network One
description: Cool Wireless Network
@@ -88,8 +88,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
@@ -99,5 +99,5 @@
- test_five['wireless_lan']['auth_type'] == "wpa-enterprise"
- test_five['wireless_lan']['auth_cipher'] == "aes"
- test_five['wireless_lan']['auth_psk'] == "psk123456"
- #- test_five['wireless_lan']['tags'] | length == 3
+ # - test_five['wireless_lan']['tags'] | length == 3
- test_five['msg'] == "wireless_lan Wireless Network One created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml
index 51ae3a74f..57daaf62c 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_lan_group.yml
@@ -4,16 +4,16 @@
### NETBOX_WIRELESS_LAN_GROUP
##
##
-- name: "1 - Test wireless LAN group creation"
+- name: 1 - Test wireless LAN group creation
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
+ name: Wireless LAN Group One
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -21,32 +21,32 @@
- test_one['wireless_lan_group']['name'] == "Wireless LAN Group One"
- test_one['msg'] == "wireless_lan_group Wireless LAN Group One created"
-- name: "Test duplicate wireless LAN group"
+- name: Test duplicate wireless LAN group
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
+ name: Wireless LAN Group One
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['wireless_lan_group']['name'] == "Wireless LAN Group One"
- test_two['msg'] == "wireless_lan_group Wireless LAN Group One already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
- description: "New Description"
+ name: Wireless LAN Group One
+ description: New Description
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['description'] == "New Description"
@@ -54,27 +54,27 @@
- test_three['wireless_lan_group']['description'] == "New Description"
- test_three['msg'] == "wireless_lan_group Wireless LAN Group One updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
- name: "Wireless LAN Group One"
- state: "absent"
+ name: Wireless LAN Group One
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_lan_group Wireless LAN Group One deleted"
-- name: "5 - Create wireless LAN group with all parameters"
+- name: 5 - Create wireless LAN group with all parameters
netbox.netbox.netbox_wireless_lan_group:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
name: Wireless LAN Group One
description: Cool Wireless LAN Group
@@ -85,8 +85,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml
index baa4574d1..b026db81e 100644
--- a/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v3.7/tasks/netbox_wireless_link.yml
@@ -4,10 +4,10 @@
### NETBOX_WIRELESS_LINK
##
##
-- name: "1 - Test wireless link creation"
+- name: 1 - Test wireless link creation
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -17,8 +17,8 @@
name: wlink1
register: test_one
-- name: "1 - ASSERT"
- assert:
+- name: 1 - ASSERT
+ ansible.builtin.assert:
that:
- test_one is changed
- test_one['diff']['before']['state'] == "absent"
@@ -27,10 +27,10 @@
- test_one['wireless_link']['interface_b'] == 5
- test_one['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 created"
-- name: "Test duplicate wireless link"
+- name: Test duplicate wireless link
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -40,16 +40,16 @@
name: wlink1
register: test_two
-- name: "2 - ASSERT"
- assert:
+- name: 2 - ASSERT
+ ansible.builtin.assert:
that:
- not test_two['changed']
- test_two['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 already exists"
-- name: "3 - Test update"
+- name: 3 - Test update
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -60,18 +60,18 @@
status: planned
register: test_three
-- name: "3 - ASSERT"
- assert:
+- name: 3 - ASSERT
+ ansible.builtin.assert:
that:
- test_three is changed
- test_three['diff']['after']['status'] == "planned"
- test_three['wireless_link']['status'] == "planned"
- test_three['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 updated"
-- name: "4 - Test delete"
+- name: 4 - Test delete
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -79,21 +79,21 @@
interface_b:
device: test100
name: wlink1
- state: "absent"
+ state: absent
register: test_four
-- name: "4 - ASSERT"
- assert:
+- name: 4 - ASSERT
+ ansible.builtin.assert:
that:
- test_four is changed
- test_four['diff']['before']['state'] == "present"
- test_four['diff']['after']['state'] == "absent"
- test_four['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 deleted"
-- name: "5 - Create wireless link with all parameters"
+- name: 5 - Create wireless link with all parameters
netbox.netbox.netbox_wireless_link:
netbox_url: http://localhost:32768
- netbox_token: 0123456789abcdef0123456789abcdef01234567
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
data:
interface_a:
device: Test Nexus One
@@ -113,8 +113,8 @@
state: present
register: test_five
-- name: "5 - ASSERT"
- assert:
+- name: 5 - ASSERT
+ ansible.builtin.assert:
that:
- test_five is changed
- test_five['diff']['before']['state'] == "absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/main.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/main.yml
new file mode 100644
index 000000000..11eab01a0
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/main.yml
@@ -0,0 +1,330 @@
+---
+- name: NETBOX_DEVICE TESTS
+ ansible.builtin.include_tasks: netbox_device.yml
+
+- name: NETBOX_DEVICE_INTERFACE TESTS
+ ansible.builtin.include_tasks: netbox_device_interface.yml
+
+- name: NETBOX_DEVICE_INTERFACE_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_device_interface_template.yml
+
+- name: NETBOX_IP_ADDRESS TESTS
+ ansible.builtin.include_tasks: netbox_ip_address.yml
+
+- name: NETBOX_PREFIX TESTS
+ ansible.builtin.include_tasks: netbox_prefix.yml
+
+- name: NETBOX_SITE TESTS
+ ansible.builtin.include_tasks: netbox_site.yml
+
+- name: NETBOX_SITE_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_site_group.yml
+
+- name: NETBOX_CONTACT TESTS
+ ansible.builtin.include_tasks: netbox_contact.yml
+
+- name: NETBOX_CONTACT_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_contact_role.yml
+
+- name: NETBOX_TENANT TESTS
+ ansible.builtin.include_tasks: netbox_tenant.yml
+
+- name: NETBOX_TENANT_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_tenant_group.yml
+
+- name: NETBOX_RACK TESTS
+ ansible.builtin.include_tasks: netbox_rack.yml
+
+- name: NETBOX_RACK_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_rack_role.yml
+
+- name: NETBOX_LOCATION TESTS
+ ansible.builtin.include_tasks: netbox_location.yml
+
+- name: NETBOX_MANUFACTURER TESTS
+ ansible.builtin.include_tasks: netbox_manufacturer.yml
+
+- name: NETBOX_PLATFORM TESTS
+ ansible.builtin.include_tasks: netbox_platform.yml
+
+- name: NETBOX_DEVICE_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_device_type.yml
+
+- name: NETBOX_DEVICE_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_device_role.yml
+
+- name: NETBOX_IPAM_ROLE TESTS
+ ansible.builtin.include_tasks: netbox_ipam_role.yml
+
+- name: NETBOX_VLAN_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_vlan_group.yml
+
+- name: NETBOX_VLAN TESTS
+ ansible.builtin.include_tasks: netbox_vlan.yml
+
+- name: NETBOX_VRF TESTS
+ ansible.builtin.include_tasks: netbox_vrf.yml
+
+- name: NETBOX_RIR TESTS
+ ansible.builtin.include_tasks: netbox_rir.yml
+
+- name: NETBOX_AGGREGATE TESTS
+ ansible.builtin.include_tasks: netbox_aggregate.yml
+
+- name: NETBOX_REGION TESTS
+ ansible.builtin.include_tasks: netbox_region.yml
+
+- name: NETBOX_DEVICE_BAY TESTS
+ ansible.builtin.include_tasks: netbox_device_bay.yml
+
+- name: NETBOX_DEVICE_BAY_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_device_bay_template.yml
+
+- name: NETBOX_INVENTORY_ITEM TESTS
+ ansible.builtin.include_tasks: netbox_inventory_item.yml
+
+- name: NETBOX_VIRTUAL_MACHINE TESTS
+ ansible.builtin.include_tasks: netbox_virtual_machine.yml
+
+- name: NETBOX_CLUSTER TESTS
+ ansible.builtin.include_tasks: netbox_cluster.yml
+
+- name: NETBOX_CLUSTER_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_cluster_group.yml
+
+- name: NETBOX_CLUSTER_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_cluster_type.yml
+
+- name: NETBOX_VM_INTERFACE TESTS
+ ansible.builtin.include_tasks: netbox_vm_interface.yml
+
+- name: NETBOX_PROVIDER TESTS
+ ansible.builtin.include_tasks: netbox_provider.yml
+
+- name: NETBOX_PROVIDER_NETWORK TESTS
+ ansible.builtin.include_tasks: netbox_provider_network.yml
+
+- name: NETBOX_CIRCUIT_TYPE TESTS
+ ansible.builtin.include_tasks: netbox_circuit_type.yml
+
+- name: NETBOX_CIRCUIT TESTS
+ ansible.builtin.include_tasks: netbox_circuit.yml
+
+- name: NETBOX_CIRCUIT_TERMINATION TESTS
+ ansible.builtin.include_tasks: netbox_circuit_termination.yml
+
+- name: NETBOX_REAR_PORT TESTS
+ ansible.builtin.include_tasks: netbox_rear_port.yml
+
+- name: NETBOX_REAR_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_rear_port_template.yml
+
+- name: NETBOX_FRONT_PORT TESTS
+ ansible.builtin.include_tasks: netbox_front_port.yml
+
+- name: NETBOX_FRONT_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_front_port_template.yml
+
+- name: NETBOX_CONSOLE_PORT TESTS
+ ansible.builtin.include_tasks: netbox_console_port.yml
+
+- name: NETBOX_CONSOLE_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_console_port_template.yml
+
+- name: NETBOX_CONSOLE_SERVER_PORT TESTS
+ ansible.builtin.include_tasks: netbox_console_server_port.yml
+
+- name: NETBOX_CONSOLE_SERVER_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_console_server_port_template.yml
+
+- name: NETBOX_POWER_PANEL TESTS
+ ansible.builtin.include_tasks: netbox_power_panel.yml
+
+- name: NETBOX_POWER_FEED TESTS
+ ansible.builtin.include_tasks: netbox_power_feed.yml
+
+- name: NETBOX_POWER_PORT TESTS
+ ansible.builtin.include_tasks: netbox_power_port.yml
+
+- name: NETBOX_POWER_PORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_power_port_template.yml
+
+- name: NETBOX_POWER_OUTLET TESTS
+ ansible.builtin.include_tasks: netbox_power_outlet.yml
+
+- name: NETBOX_POWER_OUTLET_TEMPLATE TESTS
+ ansible.builtin.include_tasks: netbox_power_outlet_template.yml
+
+- name: NETBOX_VIRTUAL_CHASSIS TESTS
+ ansible.builtin.include_tasks: netbox_virtual_chassis.yml
+
+# Module has to be updated for 3.3
+# - name: "NETBOX_CABLE TESTS"
+# include_tasks: "netbox_cable.yml"
+
+- name: NETBOX_SERVICE TESTS
+ ansible.builtin.include_tasks: netbox_service.yml
+
+- name: NETBOX_LOOKUP TESTS
+ ansible.builtin.include_tasks: netbox_lookup.yml
+
+- name: NETBOX_TAG TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_tag.yml
+ apply:
+ tags:
+ - netbox_tag
+ tags:
+ - netbox_tag
+
+- name: NETBOX_ROUTE_TARGET TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_route_target.yml
+ apply:
+ tags:
+ - netbox_route_target
+ tags:
+ - netbox_route_target
+
+- name: NETBOX_WIRELESS_LAN TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan.yml
+ apply:
+ tags:
+ - netbox_wireless_lan
+ tags:
+ - netbox_wireless_lan
+
+- name: NETBOX_WIRELESS_LAN_GROUP TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_lan_group.yml
+ apply:
+ tags:
+ - netbox_wireless_lan_group
+ tags:
+ - netbox_wireless_lan_group
+
+- name: NETBOX_WIRELESS_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_wireless_link.yml
+ apply:
+ tags:
+ - netbox_wireless_link
+ tags:
+ - netbox_wireless_link
+
+- name: NETBOX_CUSTOM_FIELD TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_field.yml
+ apply:
+ tags:
+ - netbox_custom_field
+ tags:
+ - netbox_custom_field
+
+- name: NETBOX_CUSTOM_LINK TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_custom_link.yml
+ apply:
+ tags:
+ - netbox_custom_link
+ tags:
+ - netbox_custom_link
+
+- name: NETBOX_EXPORT_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_export_template.yml
+ apply:
+ tags:
+ - netbox_export_template
+ tags:
+ - netbox_export_template
+
+# Must update for 3.7
+# - name: "NETBOX_WEBHOOK TESTS"
+# include_tasks:
+# file: "netbox_webhook.yml"
+# apply:
+# tags:
+# - netbox_webhook
+# tags:
+# - netbox_webhook
+
+# - name: "NETBOX_L2VPN TESTS"
+# include_tasks:
+# file: "netbox_l2vpn.yml"
+# apply:
+# tags:
+# - netbox_l2vpn
+# tags:
+# - netbox_l2vpn
+
+# - name: "NETBOX_L2VPN_TERMINATION TESTS"
+# include_tasks:
+# file: "netbox_l2vpn_termination.yml"
+# apply:
+# tags:
+# - netbox_l2vpn_termination
+# tags:
+# - netbox_l2vpn_termination
+
+- name: NETBOX_INVENTORY_ITEM_ROLE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_inventory_item_role.yml
+ apply:
+ tags:
+ - netbox_inventory_item_role
+ tags:
+ - netbox_inventory_item_role
+
+- name: NETBOX_MODULE_TYPE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_module_type.yml
+ apply:
+ tags:
+ - netbox_module_type
+ tags:
+ - netbox_module_type
+
+- name: NETBOX_SERVICE_TEMPLATE TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_service_template.yml
+ apply:
+ tags:
+ - netbox_service_template
+ tags:
+ - netbox_service_template
+
+- name: NETBOX_ASN TESTS
+ ansible.builtin.include_tasks: netbox_asn.yml
+
+- name: NETBOX_FHRP_GROUP TESTS
+ ansible.builtin.include_tasks: netbox_fhrp_group.yml
+
+- name: NETBOX_JOURNAL_ENTRY TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_journal_entry.yml
+ apply:
+ tags:
+ - netbox_journal_entry
+ tags:
+ - netbox_journal_entry
+
+- name: NETBOX_FHRP_GROUP_ASSIGNMENT TESTS
+ ansible.builtin.include_tasks:
+ file: netbox_fhrp_group_assignment.yml
+ apply:
+ tags:
+      - netbox_fhrp_group_assignment
+    tags:
+      - netbox_fhrp_group_assignment
+
+- name: NETBOX_CONFIG_TEMPLATE
+ ansible.builtin.include_tasks: netbox_config_template.yml
+ tags:
+ - netbox_config_template
+
+- name: NETBOX_VIRTUAL_DISK
+ ansible.builtin.include_tasks: netbox_virtual_disk.yml
+ tags:
+ - netbox_virtual_disk
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_aggregate.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_aggregate.yml
new file mode 100644
index 000000000..d45b9c835
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_aggregate.yml
@@ -0,0 +1,115 @@
+---
+##
+##
+### NETBOX_AGGREGATE
+##
+##
+- name: "AGGREGATE 1: Necessary info creation"
+ netbox.netbox.netbox_aggregate:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.0.0.0/8
+ rir: Example RIR
+ state: present
+ register: test_one
+
+- name: "AGGREGATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['aggregate']['prefix'] == "10.0.0.0/8"
+ # - test_one['aggregate']['family'] == 4
+ - test_one['aggregate']['rir'] == 1
+ - test_one['msg'] == "aggregate 10.0.0.0/8 created"
+
+- name: "AGGREGATE 2: Create duplicate"
+ netbox.netbox.netbox_aggregate:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.0.0.0/8
+ state: present
+ register: test_two
+
+- name: "AGGREGATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['aggregate']['prefix'] == "10.0.0.0/8"
+ - test_two['aggregate']['family'] == 4
+ - test_two['aggregate']['rir'] == 1
+ - test_two['msg'] == "aggregate 10.0.0.0/8 already exists"
+
+- name: "AGGREGATE 3: ASSERT - Update"
+ netbox.netbox.netbox_aggregate:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.0.0.0/8
+ rir: Example RIR
+ date_added: "1989-01-18"
+ description: Test Description
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "AGGREGATE 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['date_added'] == "1989-01-18"
+ - test_three['diff']['after']['description'] == "Test Description"
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['aggregate']['prefix'] == "10.0.0.0/8"
+ - test_three['aggregate']['family'] == 4
+ - test_three['aggregate']['rir'] == 1
+ - test_three['aggregate']['date_added'] == "1989-01-18"
+ - test_three['aggregate']['description'] == "Test Description"
+ - test_three['aggregate']['tags'][0] == 4
+ - test_three['msg'] == "aggregate 10.0.0.0/8 updated"
+
+- name: "AGGREGATE 4: ASSERT - Delete"
+ netbox.netbox.netbox_aggregate:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.0.0.0/8
+ state: absent
+ register: test_four
+
+- name: "AGGREGATE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['aggregate']['prefix'] == "10.0.0.0/8"
+ - test_four['aggregate']['family'] == 4
+ - test_four['aggregate']['rir'] == 1
+ - test_four['aggregate']['date_added'] == "1989-01-18"
+ - test_four['aggregate']['description'] == "Test Description"
+ - test_four['aggregate']['tags'][0] == 4
+ - test_four['msg'] == "aggregate 10.0.0.0/8 deleted"
+
+- name: "AGGREGATE 5: Necessary info creation"
+ netbox.netbox.netbox_aggregate:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 2001::/32
+ rir: Example RIR
+ state: present
+ register: test_five
+
+- name: "AGGREGATE 5: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['aggregate']['prefix'] == "2001::/32"
+ # - test_five['aggregate']['family'] == 6
+ - test_five['aggregate']['rir'] == 1
+ - test_five['msg'] == "aggregate 2001::/32 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_asn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_asn.yml
new file mode 100644
index 000000000..fab4270a9
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_asn.yml
@@ -0,0 +1,88 @@
+---
+##
+##
+### NETBOX_ASN
+##
+##
+- name: "ASN 1: Test ASN creation"
+ netbox.netbox.netbox_asn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ asn: 1111111111
+ rir: Example RIR
+ state: present
+ register: test_one
+
+- name: "ASN 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['asn']['asn'] == 1111111111
+ - test_one['asn']['rir'] == 1
+ - test_one['msg'] == "asn 1111111111 created"
+
+- name: "ASN 2: Create duplicate"
+ netbox.netbox.netbox_asn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ asn: 1111111111
+ rir: Example RIR
+ state: present
+ register: test_two
+
+- name: "ASN 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['asn']['asn'] == 1111111111
+ - test_two['asn']['rir'] == 1
+ - test_two['msg'] == "asn 1111111111 already exists"
+
+- name: "ASN 3: Update ASN with other fields"
+ netbox.netbox.netbox_asn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ asn: 1111111111
+ rir: Example RIR
+ tenant: Test Tenant
+ description: Test description
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "ASN 3: ASSERT - Update ASN with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['tenant'] == 1
+ - test_three['diff']['after']['description'] == "Test description"
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['asn']['asn'] == 1111111111
+ - test_three['asn']['rir'] == 1
+ - test_three['asn']['tenant'] == 1
+ - test_three['asn']['description'] == "Test description"
+ - test_three['asn']['tags'][0] == 4
+ - test_three['msg'] == "asn 1111111111 updated"
+
+- name: "ASN 4: ASSERT - Delete"
+ netbox.netbox.netbox_asn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ asn: 1111111111
+ state: absent
+ register: test_four
+
+- name: "ASN 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "asn 1111111111 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cable.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cable.yml
new file mode 100644
index 000000000..d6f0ab02e
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cable.yml
@@ -0,0 +1,193 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_CABLE
+##
+##
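+# Covers interface-to-interface cables (create, idempotent re-run, update,
+# delete) plus console port and circuit termination endpoints.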
+- name: "CABLE 1: Necessary info creation"
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: dcim.interface
+ termination_a:
+ device: Test Nexus Child One
+ name: Ethernet2/2
+ termination_b_type: dcim.interface
+ termination_b:
+ device: Test Nexus Child One
+ name: Ethernet2/1
+ state: present
+ register: test_one
+
+- name: "CABLE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['cable']['termination_a_type'] == "dcim.interface"
+ - test_one['cable']['termination_a_id'] == 15
+ - test_one['cable']['termination_b_type'] == "dcim.interface"
+ - test_one['cable']['termination_b_id'] == 2
+ - test_one['msg'] == "cable dcim.interface Ethernet2/2 <> dcim.interface Ethernet2/1 created"
+
+- name: "CABLE 2: Create duplicate"
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: dcim.interface
+ termination_a:
+ device: Test Nexus Child One
+ name: Ethernet2/2
+ termination_b_type: dcim.interface
+ termination_b:
+ device: Test Nexus Child One
+ name: Ethernet2/1
+ state: present
+ register: test_two
+
+- name: "CABLE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['cable']['termination_a_type'] == "dcim.interface"
+ - test_two['cable']['termination_a_id'] == 15
+ - test_two['cable']['termination_b_type'] == "dcim.interface"
+ - test_two['cable']['termination_b_id'] == 2
+ - test_two['msg'] == "cable dcim.interface Ethernet2/2 <> dcim.interface Ethernet2/1 already exists"
+
+- name: "CABLE 3: Update Cable with other fields"
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: dcim.interface
+ termination_a:
+ device: Test Nexus Child One
+ name: Ethernet2/2
+ termination_b_type: dcim.interface
+ termination_b:
+ device: Test Nexus Child One
+ name: Ethernet2/1
+ type: mmf-om4
+ status: planned
+ label: label123
+ color: abcdef
+ length: 30
+ length_unit: m
+ tags:
+ - Schnozzberry
+ tenant: Test Tenant
+ state: present
+ register: test_three
+
+- name: "CABLE 3: ASSERT - Update Cable with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "mmf-om4"
+ - test_three['diff']['after']['status'] == "planned"
+ - test_three['diff']['after']['label'] == "label123"
+ - test_three['diff']['after']['color'] == "abcdef"
+ - test_three['diff']['after']['length'] == 30
+ - test_three['diff']['after']['length_unit'] == "m"
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['diff']['after']['tenant'] == 1
+ - test_three['cable']['termination_a_type'] == "dcim.interface"
+ - test_three['cable']['termination_a_id'] == 15
+ - test_three['cable']['termination_b_type'] == "dcim.interface"
+ - test_three['cable']['termination_b_id'] == 2
+ - test_three['cable']['type'] == "mmf-om4"
+ - test_three['cable']['status'] == "planned"
+ - test_three['cable']['label'] == "label123"
+ - test_three['cable']['color'] == "abcdef"
+ - test_three['cable']['length'] == 30
+ - test_three['cable']['length_unit'] == "m"
+ - test_three['cable']['tags'][0] == 4
+ - test_three['cable']['tenant'] == 1
+ - test_three['msg'] == "cable dcim.interface Ethernet2/2 <> dcim.interface Ethernet2/1 updated"
+
+- name: "CABLE 4: ASSERT - Delete"
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: dcim.interface
+ termination_a:
+ device: Test Nexus Child One
+ name: Ethernet2/2
+ termination_b_type: dcim.interface
+ termination_b:
+ device: Test Nexus Child One
+ name: Ethernet2/1
+ state: absent
+ register: test_four
+
+- name: "CABLE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "cable dcim.interface Ethernet2/2 <> dcim.interface Ethernet2/1 deleted"
+
+- name: "CABLE 5: Connect Console Port and Console Server Port"
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: dcim.consoleserverport
+ termination_a:
+ name: Console Server Port
+ device: test100
+ termination_b_type: dcim.consoleport
+ termination_b:
+ name: Console Port
+ device: test100
+ state: present
+ register: test_five
+
+- name: "CABLE 5: ASSERT - Connect Console Port and Console Server Port"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['cable']['termination_a_type'] == "dcim.consoleserverport"
+ - test_five['cable']['termination_a_id'] == 1
+ - test_five['cable']['termination_b_type'] == "dcim.consoleport"
+ - test_five['cable']['termination_b_id'] == 1
+ - test_five['msg'] == "cable dcim.consoleserverport Console Server Port <> dcim.consoleport Console Port created"
+
+- name: "CABLE 6: Circuits Termination as side A"
+ netbox.netbox.netbox_cable:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ termination_a_type: circuits.circuittermination
+ termination_a:
+ circuit: Test Circuit Two
+ term_side: A
+ termination_b_type: dcim.interface
+ termination_b:
+ device: test100
+ name: GigabitEthernet2
+ state: present
+ register: test_six
+
+- name: "CABLE 6: ASSERT - Circuits Termination as side A"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "absent"
+ - test_six['diff']['after']['state'] == "present"
+ - test_six['cable']['termination_a_type'] == "circuits.circuittermination"
+ - test_six['cable']['termination_a_id'] == 1
+ - test_six['cable']['termination_b_type'] == "dcim.interface"
+ - test_six['cable']['termination_b_id'] == 4
+ - test_six['msg'] == "cable circuits.circuittermination 1 <> dcim.interface GigabitEthernet2 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit.yml
new file mode 100644
index 000000000..8bfd265ac
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit.yml
@@ -0,0 +1,109 @@
+---
+##
+##
+### NETBOX_CIRCUIT
+##
+##
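+# Creates a circuit with only required fields, confirms the re-run is a
+# no-op, then updates status, tenant, dates and rates before deleting it.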
+- name: "NETBOX_CIRCUIT 1: Create provider within NetBox with only required information"
+ netbox.netbox.netbox_circuit:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ cid: Test Circuit One
+ provider: Test Provider
+ circuit_type: Test Circuit Type
+ state: present
+ register: test_one
+
+- name: "NETBOX_CIRCUIT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['circuit']['cid'] == "Test Circuit One"
+ - test_one['circuit']['provider'] == 1
+ - test_one['circuit']['type'] == 1
+ - test_one['msg'] == "circuit Test Circuit One created"
+
+- name: "NETBOX_CIRCUIT 2: Duplicate"
+ netbox.netbox.netbox_circuit:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ cid: Test Circuit One
+ provider: Test Provider
+ circuit_type: Test Circuit Type
+ state: present
+ register: test_two
+
+- name: "NETBOX_CIRCUIT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['circuit']['cid'] == "Test Circuit One"
+ - test_two['circuit']['provider'] == 1
+ - test_two['circuit']['type'] == 1
+ - test_two['msg'] == "circuit Test Circuit One already exists"
+
+- name: "NETBOX_CIRCUIT 3: Update provider with other fields"
+ netbox.netbox.netbox_circuit:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ cid: Test Circuit One
+ provider: Test Provider
+ circuit_type: Test Circuit Type
+ status: Planned
+ tenant: Test Tenant
+ install_date: "2018-12-25"
+ commit_rate: 10000
+ description: "Test circuit "
+ comments: FAST CIRCUIT
+ state: present
+ register: test_three
+
+- name: "NETBOX_CIRCUIT 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['status'] == "planned"
+ - test_three['diff']['after']['tenant'] == 1
+ - test_three['diff']['after']['install_date'] == "2018-12-25"
+ - test_three['diff']['after']['commit_rate'] == 10000
+ - test_three['diff']['after']['description'] == "Test circuit"
+ - test_three['diff']['after']['comments'] == "FAST CIRCUIT"
+ - test_three['circuit']['cid'] == "Test Circuit One"
+ - test_three['circuit']['provider'] == 1
+ - test_three['circuit']['type'] == 1
+ - test_three['circuit']['status'] == "planned"
+ - test_three['circuit']['tenant'] == 1
+ - test_three['circuit']['install_date'] == "2018-12-25"
+ - test_three['circuit']['commit_rate'] == 10000
+ - test_three['circuit']['description'] == "Test circuit"
+ - test_three['circuit']['comments'] == "FAST CIRCUIT"
+ - test_three['msg'] == "circuit Test Circuit One updated"
+
+- name: "NETBOX_CIRCUIT 4: Delete provider within netbox"
+ netbox.netbox.netbox_circuit:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ cid: Test Circuit One
+ state: absent
+ register: test_four
+
+- name: "NETBOX_CIRCUIT 4 : ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['circuit']['cid'] == "Test Circuit One"
+ - test_four['circuit']['provider'] == 1
+ - test_four['circuit']['type'] == 1
+ - test_four['circuit']['status'] == "planned"
+ - test_four['circuit']['tenant'] == 1
+ - test_four['circuit']['install_date'] == "2018-12-25"
+ - test_four['circuit']['commit_rate'] == 10000
+ - test_four['circuit']['description'] == "Test circuit"
+ - test_four['circuit']['comments'] == "FAST CIRCUIT"
+ - test_four['msg'] == "circuit Test Circuit One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_termination.yml
new file mode 100644
index 000000000..cbc749f63
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_termination.yml
@@ -0,0 +1,129 @@
+---
+##
+##
+### NETBOX_CIRCUIT_TERMINATION
+##
+##
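+# Exercises both the A and Z sides of a circuit termination, including an
+# update with upstream speed and cross-connect details.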
+- name: "NETBOX_CIRCUIT_TERMINATION 1: Create provider within NetBox with only required information"
+ netbox.netbox.netbox_circuit_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ circuit: Test Circuit
+ term_side: A
+ site: Test Site
+ port_speed: 10000
+ state: present
+ register: test_one
+
+- name: "NETBOX_CIRCUIT_TERMINATION 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['circuit_termination']['circuit'] == 1
+ - test_one['circuit_termination']['term_side'] == "A"
+ - test_one['circuit_termination']['site'] == 1
+ - test_one['circuit_termination']['port_speed'] == 10000
+ - test_one['msg'] == "circuit_termination test_circuit_a created"
+
+- name: "NETBOX_CIRCUIT_TERMINATION 2: Duplicate"
+ netbox.netbox.netbox_circuit_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ circuit: Test Circuit
+ term_side: A
+ state: present
+ register: test_two
+
+- name: "NETBOX_CIRCUIT_TERMINATION 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['circuit_termination']['circuit'] == 1
+ - test_two['circuit_termination']['term_side'] == "A"
+ - test_two['circuit_termination']['site'] == 1
+ - test_two['circuit_termination']['port_speed'] == 10000
+ - test_two['msg'] == "circuit_termination test_circuit_a already exists"
+
+- name: "NETBOX_CIRCUIT_TERMINATION 3: Update provider with other fields"
+ netbox.netbox.netbox_circuit_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ circuit: Test Circuit
+ term_side: A
+ upstream_speed: 1000
+ xconnect_id: 10X100
+ pp_info: PP10-24
+ description: Test description
+ state: present
+ register: test_three
+
+- name: "NETBOX_CIRCUIT_TERMINATION 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['upstream_speed'] == 1000
+ - test_three['diff']['after']['xconnect_id'] == "10X100"
+ - test_three['diff']['after']['pp_info'] == "PP10-24"
+ - test_three['diff']['after']['description'] == "Test description"
+ - test_three['circuit_termination']['circuit'] == 1
+ - test_three['circuit_termination']['term_side'] == "A"
+ - test_three['circuit_termination']['site'] == 1
+ - test_three['circuit_termination']['port_speed'] == 10000
+ - test_three['circuit_termination']['upstream_speed'] == 1000
+ - test_three['circuit_termination']['xconnect_id'] == "10X100"
+ - test_three['circuit_termination']['pp_info'] == "PP10-24"
+ - test_three['circuit_termination']['description'] == "Test description"
+ - test_three['msg'] == "circuit_termination test_circuit_a updated"
+
+- name: "NETBOX_CIRCUIT_TERMINATION 4: Create Z Side"
+ netbox.netbox.netbox_circuit_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ circuit: Test Circuit
+ term_side: Z
+ site: Test Site
+ port_speed: 10000
+ state: present
+ register: test_four
+
+- name: "NETBOX_CIRCUIT_TERMINATION 4: ASSERT - Create Z Side"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['circuit_termination']['circuit'] == 1
+ - test_four['circuit_termination']['term_side'] == "Z"
+ - test_four['circuit_termination']['site'] == 1
+ - test_four['circuit_termination']['port_speed'] == 10000
+ - test_four['msg'] == "circuit_termination test_circuit_z created"
+
+- name: "NETBOX_CIRCUIT_TERMINATION 5: Delete provider within netbox"
+ netbox.netbox.netbox_circuit_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ circuit: Test Circuit
+ term_side: A
+ state: absent
+ register: test_five
+
+- name: "NETBOX_CIRCUIT_TERMINATION 5: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['circuit_termination']['circuit'] == 1
+ - test_five['circuit_termination']['term_side'] == "A"
+ - test_five['circuit_termination']['site'] == 1
+ - test_five['circuit_termination']['port_speed'] == 10000
+ - test_five['circuit_termination']['upstream_speed'] == 1000
+ - test_five['circuit_termination']['xconnect_id'] == "10X100"
+ - test_five['circuit_termination']['pp_info'] == "PP10-24"
+ - test_five['circuit_termination']['description'] == "Test description"
+ - test_five['msg'] == "circuit_termination test_circuit_a deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_type.yml
new file mode 100644
index 000000000..08dc67985
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_circuit_type.yml
@@ -0,0 +1,96 @@
+---
+##
+##
+### NETBOX_CIRCUIT_TYPE
+##
+##
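+# Checks circuit type creation with an auto-generated slug, a duplicate
+# re-run, creation with a user-specified slug, and deletion of both objects.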
+- name: "CIRCUIT_TYPE 1: Necessary info creation"
+ netbox.netbox.netbox_circuit_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Circuit Type One
+ state: present
+ register: test_one
+
+- name: "CIRCUIT_TYPE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['circuit_type']['name'] == "Test Circuit Type One"
+ - test_one['circuit_type']['slug'] == "test-circuit-type-one"
+ - test_one['msg'] == "circuit_type Test Circuit Type One created"
+
+- name: "CIRCUIT_TYPE 2: Create duplicate"
+ netbox.netbox.netbox_circuit_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Circuit Type One
+ state: present
+ register: test_two
+
+- name: "CIRCUIT_TYPE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['circuit_type']['name'] == "Test Circuit Type One"
+ - test_two['circuit_type']['slug'] == "test-circuit-type-one"
+ - test_two['msg'] == "circuit_type Test Circuit Type One already exists"
+
+- name: "CIRCUIT_TYPE 3: User specified slug"
+ netbox.netbox.netbox_circuit_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
+ state: present
+ register: test_three
+
+- name: "CIRCUIT_TYPE 3: ASSERT - User specified slug"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['circuit_type']['name'] == "Test Circuit Type Two"
+ - test_three['circuit_type']['slug'] == "test-circuit-type-2"
+ - test_three['msg'] == "circuit_type Test Circuit Type Two created"
+
+- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
+ netbox.netbox.netbox_circuit_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Circuit Type One
+ state: absent
+ register: test_four
+
+- name: "CIRCUIT_TYPE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['circuit_type']['name'] == "Test Circuit Type One"
+ - test_four['circuit_type']['slug'] == "test-circuit-type-one"
+ - test_four['msg'] == "circuit_type Test Circuit Type One deleted"
+
+- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
+ netbox.netbox.netbox_circuit_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Circuit Type Two
+ slug: test-circuit-type-2
+ state: absent
+ register: test_five
+
+- name: "CIRCUIT_TYPE 5: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['circuit_type']['name'] == "Test Circuit Type Two"
+ - test_five['circuit_type']['slug'] == "test-circuit-type-2"
+ - test_five['msg'] == "circuit_type Test Circuit Type Two deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster.yml
new file mode 100644
index 000000000..b2810a1be
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster.yml
@@ -0,0 +1,98 @@
+---
+##
+##
+### NETBOX_CLUSTER
+##
+##
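+# Creates a cluster from its type alone, then layers on group, site,
+# tenant, tags and comments before deleting it.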
+- name: "CLUSTER 1: Necessary info creation"
+ netbox.netbox.netbox_cluster:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
+ state: present
+ register: test_one
+
+- name: "CLUSTER 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['cluster']['name'] == "Test Cluster One"
+ - test_one['cluster']['type'] == 1
+ - test_one['msg'] == "cluster Test Cluster One created"
+
+- name: "CLUSTER 2: Create duplicate"
+ netbox.netbox.netbox_cluster:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
+ state: present
+ register: test_two
+
+- name: "CLUSTER 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['cluster']['name'] == "Test Cluster One"
+ - test_two['cluster']['type'] == 1
+ - test_two['msg'] == "cluster Test Cluster One already exists"
+
+- name: "CLUSTER 3: Update"
+ netbox.netbox.netbox_cluster:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster One
+ cluster_type: Test Cluster Type
+ cluster_group: Test Cluster Group
+ site: Test Site
+ comments: Updated cluster
+ tenant: Test Tenant
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "CLUSTER 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['group'] == 1
+ - test_three['diff']['after']['site'] == 1
+ - test_three['diff']['after']['comments'] == "Updated cluster"
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['diff']['after']['tenant'] == 1
+ - test_three['cluster']['name'] == "Test Cluster One"
+ - test_three['cluster']['type'] == 1
+ - test_three['cluster']['group'] == 1
+ - test_three['cluster']['site'] == 1
+ - test_three['cluster']['comments'] == "Updated cluster"
+ - test_three['cluster']['tags'][0] == 4
+ - test_three['cluster']['tenant'] == 1
+ - test_three['msg'] == "cluster Test Cluster One updated"
+
+- name: "CLUSTER 4: ASSERT - Delete"
+ netbox.netbox.netbox_cluster:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster One
+ state: absent
+ register: test_four
+
+- name: "CLUSTER 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['cluster']['name'] == "Test Cluster One"
+ - test_four['cluster']['type'] == 1
+ - test_four['cluster']['group'] == 1
+ - test_four['cluster']['site'] == 1
+ - test_four['cluster']['comments'] == "Updated cluster"
+ - test_four['cluster']['tags'][0] == 4
+ - test_four['msg'] == "cluster Test Cluster One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_group.yml
new file mode 100644
index 000000000..64dda6c44
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_group.yml
@@ -0,0 +1,96 @@
+---
+##
+##
+### NETBOX_CLUSTER_GROUP
+##
+##
+- name: "CLUSTER_GROUP 1: Necessary info creation"
+ netbox.netbox.netbox_cluster_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Group One
+ state: present
+ register: test_one
+
+- name: "CLUSTER_GROUP 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['cluster_group']['name'] == "Test Cluster Group One"
+ - test_one['cluster_group']['slug'] == "test-cluster-group-one"
+ - test_one['msg'] == "cluster_group Test Cluster Group One created"
+
+- name: "CLUSTER_GROUP 2: Create duplicate"
+ netbox.netbox.netbox_cluster_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Group One
+ state: present
+ register: test_two
+
+- name: "CLUSTER_GROUP 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['cluster_group']['name'] == "Test Cluster Group One"
+ - test_two['cluster_group']['slug'] == "test-cluster-group-one"
+ - test_two['msg'] == "cluster_group Test Cluster Group One already exists"
+
+- name: "CLUSTER_GROUP 3: User specified slug"
+ netbox.netbox.netbox_cluster_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
+ state: present
+ register: test_three
+
+- name: "CLUSTER_GROUP 3: ASSERT - User specified slug"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['cluster_group']['name'] == "Test Cluster Group Two"
+ - test_three['cluster_group']['slug'] == "test-cluster-group-2"
+ - test_three['msg'] == "cluster_group Test Cluster Group Two created"
+
+- name: "CLUSTER_GROUP 4: ASSERT - Delete"
+ netbox.netbox.netbox_cluster_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Group One
+ state: absent
+ register: test_four
+
+- name: "CLUSTER_GROUP 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['cluster_group']['name'] == "Test Cluster Group One"
+ - test_four['cluster_group']['slug'] == "test-cluster-group-one"
+ - test_four['msg'] == "cluster_group Test Cluster Group One deleted"
+
+- name: "CLUSTER_GROUP 5: ASSERT - Delete"
+ netbox.netbox.netbox_cluster_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Group Two
+ slug: test-cluster-group-2
+ state: absent
+ register: test_five
+
+- name: "CLUSTER_GROUP 5: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['cluster_group']['name'] == "Test Cluster Group Two"
+ - test_five['cluster_group']['slug'] == "test-cluster-group-2"
+ - test_five['msg'] == "cluster_group Test Cluster Group Two deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_type.yml
new file mode 100644
index 000000000..bc09ac8a6
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_cluster_type.yml
@@ -0,0 +1,96 @@
+---
+##
+##
+### NETBOX_CLUSTER_TYPE
+##
+##
+- name: "CLUSTER_TYPE 1: Necessary info creation"
+ netbox.netbox.netbox_cluster_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Type One
+ state: present
+ register: test_one
+
+- name: "CLUSTER_TYPE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['cluster_type']['name'] == "Test Cluster Type One"
+ - test_one['cluster_type']['slug'] == "test-cluster-type-one"
+ - test_one['msg'] == "cluster_type Test Cluster Type One created"
+
+- name: "CLUSTER_TYPE 2: Create duplicate"
+ netbox.netbox.netbox_cluster_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Type One
+ state: present
+ register: test_two
+
+- name: "CLUSTER_TYPE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['cluster_type']['name'] == "Test Cluster Type One"
+ - test_two['cluster_type']['slug'] == "test-cluster-type-one"
+ - test_two['msg'] == "cluster_type Test Cluster Type One already exists"
+
+- name: "CLUSTER_TYPE 3: User specified slug"
+ netbox.netbox.netbox_cluster_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
+ state: present
+ register: test_three
+
+- name: "CLUSTER_TYPE 3: ASSERT - User specified slug"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['cluster_type']['name'] == "Test Cluster Type Two"
+ - test_three['cluster_type']['slug'] == "test-cluster-type-2"
+ - test_three['msg'] == "cluster_type Test Cluster Type Two created"
+
+- name: "CLUSTER_TYPE 4: ASSERT - Delete"
+ netbox.netbox.netbox_cluster_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Type One
+ state: absent
+ register: test_four
+
+- name: "CLUSTER_TYPE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['cluster_type']['name'] == "Test Cluster Type One"
+ - test_four['cluster_type']['slug'] == "test-cluster-type-one"
+ - test_four['msg'] == "cluster_type Test Cluster Type One deleted"
+
+- name: "CLUSTER_TYPE 5: ASSERT - Delete"
+ netbox.netbox.netbox_cluster_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Cluster Type Two
+ slug: test-cluster-type-2
+ state: absent
+ register: test_five
+
+- name: "CLUSTER_TYPE 5: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['cluster_type']['name'] == "Test Cluster Type Two"
+ - test_five['cluster_type']['slug'] == "test-cluster-type-2"
+ - test_five['msg'] == "cluster_type Test Cluster Type Two deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_context.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_context.yml
new file mode 100644
index 000000000..910ec82ca
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_context.yml
@@ -0,0 +1,107 @@
+---
+##
+##
+### NETBOX_CONFIG_CONTEXTS
+##
+##
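+# Creates a config context from inline JSON data, verifies idempotence,
+# updates the data while attaching it to a site, detaches it, and deletes it.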
+- name: "CONFIG_CONTEXT 1: Necessary info creation"
+ netbox.netbox.netbox_config_context:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
+ state: present
+ register: test_one
+
+- name: "CONFIG_CONTEXT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['config_context']['name'] == "test_context"
+ - test_one['config_context']['description'] == "Test context"
+ - test_one['config_context']['is_active'] == true
+ - test_one['config_context']['weight'] == 1000
+ - test_one['config_context']['data'].testkey.testsubkey[0] == "testvaule"
+ - test_one['msg'] == "config_context test_context created"
+
+- name: "CONFIG_CONTEXT 2: Create duplicate"
+ netbox.netbox.netbox_config_context:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_context
+ description: Test context
+ data: '{ "testkey": { "testsubkey": [ "testvaule" ] } }'
+ state: present
+ register: test_two
+
+- name: "CONFIG_CONTEXT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['config_context']['name'] == "test_context"
+ - test_two['msg'] == "config_context test_context already exists"
+
+- name: "CONFIG_CONTEXT 3: Update data and attach to site"
+ netbox.netbox.netbox_config_context:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_context
+ description: Updated test context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
+ weight: 100
+ sites: [test-site]
+ state: present
+ register: test_three
+
+- name: "CONFIG_CONTEXT 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['data'].testkey.testsubkey[0] == "updatedvaule"
+ - test_three['diff']['after']['description'] == "Updated test context"
+ - test_three['diff']['after']['weight'] == 100
+ - test_three['diff']['after']['sites'][0] == 1
+ - test_three['config_context']['name'] == "test_context"
+ - test_three['msg'] == "config_context test_context updated"
+
+- name: "CONFIG_CONTEXT 4: Detach from site"
+ netbox.netbox.netbox_config_context:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_context
+ data: '{ "testkey": { "testsubkey": [ "updatedvaule" ] } }'
+ sites: []
+ state: present
+ register: test_four
+
+- name: "CONFIG_CONTEXT 4: ASSERT - Detached"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['sites']|length == 0
+ - test_four['config_context']['name'] == "test_context"
+ - test_four['msg'] == "config_context test_context updated"
+
+- name: "CONFIG_CONTEXT 5: Delete"
+ netbox.netbox.netbox_config_context:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_context
+ state: absent
+ register: test_five
+
+- name: "CONFIG_CONTEXT 5: ASSERT - Deleted"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['config_context']['name'] == "test_context"
+ - test_five['msg'] == "config_context test_context deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_template.yml
new file mode 100644
index 000000000..af6cfb471
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_config_template.yml
@@ -0,0 +1,82 @@
+---
+##
+##
+### NETBOX_CONFIG_TEMPLATES
+##
+##
+- name: "CONFIG_TEMPLATES 1: Necessary info creation"
+ netbox.netbox.netbox_config_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_template
+ description: Test template
+ template_code: test template
+ state: present
+ register: test_one
+
+- name: "CONFIG_TEMPLATES 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['config_template']['name'] == "test_template"
+ - test_one['config_template']['description'] == "Test template"
+ - test_one['config_template']['template_code'] == "test template"
+ - test_one['msg'] == "config_template test_template created"
+
+- name: "CONFIG_TEMPLATES 2: Create duplicate"
+ netbox.netbox.netbox_config_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_template
+ description: Test template
+ template_code: test template
+ state: present
+ register: test_two
+
+- name: "CONFIG_TEMPLATES 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['config_template']['name'] == "test_template"
+ - test_two['msg'] == "config_template test_template already exists"
+
+- name: "CONFIG_TEMPLATES 3: Update data"
+ netbox.netbox.netbox_config_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_template
+ description: Updated test template
+ template_code: updated test template
+ state: present
+ register: test_three
+
+- name: "CONFIG_TEMPLATES 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['template_code'] == "updated test template"
+ - test_three['diff']['after']['description'] == "Updated test template"
+ - test_three['config_template']['name'] == "test_template"
+ - test_three['msg'] == "config_template test_template updated"
+
+- name: "CONFIG_TEMPLATES 4: Delete"
+ netbox.netbox.netbox_config_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test_template
+ state: absent
+ register: test_four
+
+- name: "CONFIG_TEMPLATES 4: ASSERT - Deleted"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['config_template']['name'] == "test_template"
+ - test_four['msg'] == "config_template test_template deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port.yml
new file mode 100644
index 000000000..6be028bec
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port.yml
@@ -0,0 +1,108 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_CONSOLE_PORT
+##
+##
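+# Standard create/duplicate/update/delete pass for console ports; a second
+# port is created solely so the delete path can be exercised.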
+- name: "CONSOLE_PORT 1: Necessary info creation"
+ netbox.netbox.netbox_console_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port
+ device: test100
+ state: present
+ register: test_one
+
+- name: "CONSOLE_PORT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['console_port']['name'] == "Console Port"
+ - test_one['console_port']['device'] == 1
+ - test_one['msg'] == "console_port Console Port created"
+
+- name: "CONSOLE_PORT 2: Create duplicate"
+ netbox.netbox.netbox_console_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port
+ device: test100
+ state: present
+ register: test_two
+
+- name: "CONSOLE_PORT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['console_port']['name'] == "Console Port"
+ - test_two['console_port']['device'] == 1
+ - test_two['msg'] == "console_port Console Port already exists"
+
+- name: "CONSOLE_PORT 3: Update Console Port with other fields"
+ netbox.netbox.netbox_console_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port
+ device: test100
+ type: usb-a
+ description: test description
+ state: present
+ register: test_three
+
+- name: "CONSOLE_PORT 3: ASSERT - Update Console Port with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "usb-a"
+ - test_three['diff']['after']['description'] == "test description"
+ - test_three['console_port']['name'] == "Console Port"
+ - test_three['console_port']['device'] == 1
+ - test_three['console_port']['type'] == "usb-a"
+ - test_three['console_port']['description'] == "test description"
+ - test_three['msg'] == "console_port Console Port updated"
+
+- name: "CONSOLE_PORT 4: Create Console Port for Delete Test"
+ netbox.netbox.netbox_console_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port 2
+ device: test100
+ state: present
+ register: test_four
+
+- name: "CONSOLE_PORT 4: ASSERT - Create Console Port for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['console_port']['name'] == "Console Port 2"
+ - test_four['console_port']['device'] == 1
+ - test_four['msg'] == "console_port Console Port 2 created"
+
+- name: "CONSOLE_PORT 5: Delete Console Port"
+ netbox.netbox.netbox_console_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port 2
+ device: test100
+ state: absent
+ register: test_five
+
+- name: "CONSOLE_PORT 5: ASSERT - Delete Console Port"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "console_port Console Port 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port_template.yml
new file mode 100644
index 000000000..ef6149214
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_port_template.yml
@@ -0,0 +1,105 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_CONSOLE_PORT_TEMPLATE
+##
+##
+- name: "CONSOLE_PORT_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_console_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port Template
+ device_type: Cisco Test
+ state: present
+ register: test_one
+
+- name: "CONSOLE_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['console_port_template']['name'] == "Console Port Template"
+ - test_one['console_port_template']['device_type'] == 1
+ - test_one['msg'] == "console_port_template Console Port Template created"
+
+- name: "CONSOLE_PORT_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_console_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port Template
+ device_type: Cisco Test
+ state: present
+ register: test_two
+
+- name: "CONSOLE_PORT_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['console_port_template']['name'] == "Console Port Template"
+ - test_two['console_port_template']['device_type'] == 1
+ - test_two['msg'] == "console_port_template Console Port Template already exists"
+
+- name: "CONSOLE_PORT_TEMPLATE 3: Update Console Port Template with other fields"
+ netbox.netbox.netbox_console_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port Template
+ device_type: Cisco Test
+ type: usb-a
+ state: present
+ register: test_three
+
+- name: "CONSOLE_PORT_TEMPLATE 3: ASSERT - Update Console Port Template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "usb-a"
+ - test_three['console_port_template']['name'] == "Console Port Template"
+ - test_three['console_port_template']['device_type'] == 1
+ - test_three['console_port_template']['type'] == "usb-a"
+ - test_three['msg'] == "console_port_template Console Port Template updated"
+
+- name: "CONSOLE_PORT_TEMPLATE 4: Create Console Port Template for Delete Test"
+ netbox.netbox.netbox_console_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port Template 2
+ device_type: Cisco Test
+ state: present
+ register: test_four
+
+- name: "CONSOLE_PORT_TEMPLATE 4: ASSERT - Create Console Port Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['console_port_template']['name'] == "Console Port Template 2"
+ - test_four['console_port_template']['device_type'] == 1
+ - test_four['msg'] == "console_port_template Console Port Template 2 created"
+
+- name: "CONSOLE_PORT_TEMPLATE 5: Delete Console Port Template"
+ netbox.netbox.netbox_console_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Port Template 2
+ device_type: Cisco Test
+ state: absent
+ register: test_five
+
+- name: "CONSOLE_PORT_TEMPLATE 5: ASSERT - Delete Console Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "console_port_template Console Port Template 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port.yml
new file mode 100644
index 000000000..0ac7b65d0
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port.yml
@@ -0,0 +1,108 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_CONSOLE_SERVER_PORT
+##
+##
+- name: "CONSOLE_SERVER_PORT 1: Necessary info creation"
+ netbox.netbox.netbox_console_server_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port
+ device: test100
+ state: present
+ register: test_one
+
+- name: "CONSOLE_SERVER_PORT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['console_server_port']['name'] == "Console Server Port"
+ - test_one['console_server_port']['device'] == 1
+ - test_one['msg'] == "console_server_port Console Server Port created"
+
+- name: "CONSOLE_SERVER_PORT 2: Create duplicate"
+ netbox.netbox.netbox_console_server_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port
+ device: test100
+ state: present
+ register: test_two
+
+- name: "CONSOLE_SERVER_PORT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['console_server_port']['name'] == "Console Server Port"
+ - test_two['console_server_port']['device'] == 1
+ - test_two['msg'] == "console_server_port Console Server Port already exists"
+
+- name: "CONSOLE_SERVER_PORT 3: Update Console Server Port with other fields"
+ netbox.netbox.netbox_console_server_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port
+ device: test100
+ type: usb-a
+ description: test description
+ state: present
+ register: test_three
+
+- name: "CONSOLE_SERVER_PORT 3: ASSERT - Update Console Server Port with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "usb-a"
+ - test_three['diff']['after']['description'] == "test description"
+ - test_three['console_server_port']['name'] == "Console Server Port"
+ - test_three['console_server_port']['device'] == 1
+ - test_three['console_server_port']['type'] == "usb-a"
+ - test_three['console_server_port']['description'] == "test description"
+ - test_three['msg'] == "console_server_port Console Server Port updated"
+
+- name: "CONSOLE_SERVER_PORT 4: Create Console Server Port for Delete Test"
+ netbox.netbox.netbox_console_server_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port 2
+ device: test100
+ state: present
+ register: test_four
+
+- name: "CONSOLE_SERVER_PORT 4: ASSERT - Create Console Server Port for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['console_server_port']['name'] == "Console Server Port 2"
+ - test_four['console_server_port']['device'] == 1
+ - test_four['msg'] == "console_server_port Console Server Port 2 created"
+
+- name: "CONSOLE_SERVER_PORT 5: Delete Console Server Port"
+ netbox.netbox.netbox_console_server_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port 2
+ device: test100
+ state: absent
+ register: test_five
+
+- name: "CONSOLE_SERVER_PORT 5: ASSERT - Delete Console Server Port"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "console_server_port Console Server Port 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port_template.yml
new file mode 100644
index 000000000..4c7dfc846
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_console_server_port_template.yml
@@ -0,0 +1,105 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_CONSOLE_SERVER_PORT_TEMPLATE
+##
+##
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_console_server_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port Template
+ device_type: Cisco Test
+ state: present
+ register: test_one
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['console_server_port_template']['name'] == "Console Server Port Template"
+ - test_one['console_server_port_template']['device_type'] == 1
+ - test_one['msg'] == "console_server_port_template Console Server Port Template created"
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_console_server_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port Template
+ device_type: Cisco Test
+ state: present
+ register: test_two
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['console_server_port_template']['name'] == "Console Server Port Template"
+ - test_two['console_server_port_template']['device_type'] == 1
+ - test_two['msg'] == "console_server_port_template Console Server Port Template already exists"
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: Update Console Server Port Template with other fields"
+ netbox.netbox.netbox_console_server_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port Template
+ device_type: Cisco Test
+ type: usb-a
+ state: present
+ register: test_three
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 3: ASSERT - Update Console Server Port Template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "usb-a"
+ - test_three['console_server_port_template']['name'] == "Console Server Port Template"
+ - test_three['console_server_port_template']['device_type'] == 1
+ - test_three['console_server_port_template']['type'] == "usb-a"
+ - test_three['msg'] == "console_server_port_template Console Server Port Template updated"
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: Create Console Server Port Template for Delete Test"
+ netbox.netbox.netbox_console_server_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port Template 2
+ device_type: Cisco Test
+ state: present
+ register: test_four
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 4: ASSERT - Create Console Server Port Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['console_server_port_template']['name'] == "Console Server Port Template 2"
+ - test_four['console_server_port_template']['device_type'] == 1
+ - test_four['msg'] == "console_server_port_template Console Server Port Template 2 created"
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: Delete Console Server Port Template"
+ netbox.netbox.netbox_console_server_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Console Server Port Template 2
+ device_type: Cisco Test
+ state: absent
+ register: test_five
+
+- name: "CONSOLE_SERVER_PORT_TEMPLATE 5: ASSERT - Delete Console Server Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "console_server_port_template Console Server Port Template 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact.yml
new file mode 100644
index 000000000..f5ede9c1a
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact.yml
@@ -0,0 +1,100 @@
+---
+##
+##
+### NETBOX_CONTACT
+##
+##
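+# Creates a contact with just a name, checks idempotence and updates, then
+# recreates it with title, phone, email and tags.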
+- name: 1 - Test contact creation
+ netbox.netbox.netbox_contact:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Contact ABC
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['contact']['name'] == "Contact ABC"
+ - test_one['msg'] == "contact Contact ABC created"
+
+- name: 2 - Test duplicate contact
+ netbox.netbox.netbox_contact:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Contact ABC
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['contact']['name'] == "Contact ABC"
+ - test_two['msg'] == "contact Contact ABC already exists"
+
+- name: 3 - Test update
+ netbox.netbox.netbox_contact:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Contact ABC
+ title: New Title
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['title'] == "New Title"
+ - test_three['contact']['name'] == "Contact ABC"
+ - test_three['contact']['title'] == "New Title"
+ - test_three['msg'] == "contact Contact ABC updated"
+
+- name: 4 - Test delete
+ netbox.netbox.netbox_contact:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Contact ABC
+ state: absent
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "contact Contact ABC deleted"
+
+- name: 5 - Create contact with all parameters
+ netbox.netbox.netbox_contact:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Contact ABC
+ title: Fancy title
+ phone: "12345678"
+ email: contact@contact.com
+ tags:
+ - tagA
+ - tagB
+ - tagC
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['contact']['name'] == "Contact ABC"
+ - test_five['contact']['title'] == "Fancy title"
+ - test_five['contact']['phone'] == "12345678"
+ - test_five['contact']['tags'] | length == 3
+ - test_five['msg'] == "contact Contact ABC created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact_role.yml
new file mode 100644
index 000000000..c84703da9
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_contact_role.yml
@@ -0,0 +1,94 @@
+---
+##
+##
+### NETBOX_CONTACT_ROLE
+##
+##
+- name: "CONTACT_ROLE 1: Necessary info creation"
+ netbox.netbox.netbox_contact_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Contact Role
+ state: present
+ register: test_one
+
+- name: "CONTACT_ROLE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['contact_role']['name'] == "Test Contact Role"
+ - test_one['contact_role']['slug'] == "test-contact-role"
+ - test_one['msg'] == "contact_role Test Contact Role created"
+
+- name: "CONTACT_ROLE 2: Create duplicate"
+ netbox.netbox.netbox_contact_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Contact Role
+ state: present
+ register: test_two
+
+- name: "CONTACT ROLE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['contact_role']['name'] == "Test Contact Role"
+ - test_two['contact_role']['slug'] == "test-contact-role"
+ - test_two['msg'] == "contact_role Test Contact Role already exists"
+
+- name: "CONTACT_ROLE 3: ASSERT - Update"
+ netbox.netbox.netbox_contact_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Contact Role
+ description: Update description
+ state: present
+ register: test_three
+
+- name: "IPAM_ROLE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['description'] == "Update description"
+ - test_three['contact_role']['name'] == "Test Contact Role"
+ - test_three['contact_role']['slug'] == "test-contact-role"
+ - test_three['contact_role']['description'] == "Update description"
+ - test_three['msg'] == "contact_role Test Contact Role updated"
+
+- name: "CONTACT_ROLE 4: ASSERT - Delete"
+ netbox.netbox.netbox_contact_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Contact Role
+ state: absent
+ register: test_four
+
+- name: "CONTACT_ROLE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "contact_role Test Contact Role deleted"
+
+- name: "CONTACT_ROLE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_contact_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Contact Role
+ state: absent
+ register: test_five
+
+- name: "CONTACT_ROLE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['contact_role'] == None
+ - test_five['msg'] == "contact_role Test Contact Role already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_field.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_field.yml
new file mode 100644
index 000000000..a4aba8608
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_field.yml
@@ -0,0 +1,129 @@
+---
+##
+##
+### NETBOX_CUSTOM_FIELD
+##
+##
+- name: "CUSTOM_FIELD 1: Necessary info creation"
+ netbox.netbox.netbox_custom_field:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: A_CustomField
+ type: text
+ state: present
+ register: test_one
+
+- name: "CUSTOM_FIELD 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['custom_field']['name'] == "A_CustomField"
+ - test_one['custom_field']['required'] == false
+ - test_one['custom_field']['object_types'] == ["dcim.device"]
+ - test_one['custom_field']['type'] == "text"
+ - test_one['custom_field']['weight'] == 100
+ - test_one['msg'] == "custom_field A_CustomField created"
+
+- name: "CUSTOM_FIELD 2: Create duplicate"
+ netbox.netbox.netbox_custom_field:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: A_CustomField
+ state: present
+ register: test_two
+
+- name: "CUSTOM_FIELD 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['custom_field']['name'] == "A_CustomField"
+ - test_two['msg'] == "custom_field A_CustomField already exists"
+
+- name: "CUSTOM_FIELD 3: Update data and make it required"
+ netbox.netbox.netbox_custom_field:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: A_CustomField
+ description: Added a description
+ required: true
+ state: present
+ register: test_three
+
+- name: "CUSTOM_FIELD 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['description'] == "Added a description"
+ - test_three['diff']['after']['required'] == true
+ - test_three['custom_field']['name'] == "A_CustomField"
+ - test_three['msg'] == "custom_field A_CustomField updated"
+
+- name: "CUSTOM_FIELD 4: Change content type"
+ netbox.netbox.netbox_custom_field:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - virtualization.virtualmachine
+ name: A_CustomField
+ description: Added a description
+ required: true
+ state: present
+ register: test_four
+
+- name: "CUSTOM_FIELD 4: ASSERT - Change content type"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['object_types'] == ["virtualization.virtualmachine"]
+ - test_four['custom_field']['name'] == "A_CustomField"
+ - test_four['msg'] == "custom_field A_CustomField updated"
+
+- name: "CUSTOM_FIELD 5: Delete"
+ netbox.netbox.netbox_custom_field:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: A_CustomField
+ state: absent
+ register: test_five
+
+- name: "CUSTOM_FIELD 5: ASSERT - Deleted"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['custom_field']['name'] == "A_CustomField"
+ - test_five['msg'] == "custom_field A_CustomField deleted"
+
+# Change in NetBox 3.7
+# - name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
+# netbox.netbox.netbox_custom_field:
+# netbox_url: http://localhost:32768
+# netbox_token: 0123456789abcdef0123456789abcdef01234567
+# data:
+# object_types:
+# - "dcim.device"
+# name: A_CustomField
+# type: text
+# ui_visibility: hidden-ifunset
+# state: present
+# register: test_six
+
+# - name: "CUSTOM_FIELD 6: UI Visibility (hidden-ifunset)"
+# assert:
+# that:
+# - test_six is changed
+# - test_six['custom_field']['name'] == "A_CustomField"
+# - test_six['custom_field']['ui_visibility'] == "hidden-ifunset"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_link.yml
new file mode 100644
index 000000000..3aa29d958
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_custom_link.yml
@@ -0,0 +1,113 @@
+---
+##
+##
+### NETBOX_CUSTOM_LINK
+##
+##
+- name: "CUSTOM_LINK 1: Necessary info creation"
+ netbox.netbox.netbox_custom_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: Custom Link
+ link_text: Open Web management
+ link_url: !unsafe https://{{ obj.name }}.domain.local/
+ state: present
+ register: test_one
+
+- name: "CUSTOM_LINK 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['custom_link']['name'] == "Custom Link"
+ - test_one['custom_link']['object_types'] == ["dcim.device"]
+ - test_one['custom_link']['link_text'] == "Open Web management"
+ - test_one['msg'] == "custom_link Custom Link created"
+
+- name: "CUSTOM_LINK 2: Create duplicate"
+ netbox.netbox.netbox_custom_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: Custom Link
+ link_text: Open Web management
+ link_url: !unsafe https://{{ obj.name }}.domain.local/
+ state: present
+ register: test_two
+
+- name: "CUSTOM_LINK 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['custom_link']['name'] == "Custom Link"
+ - test_two['msg'] == "custom_link Custom Link already exists"
+
+- name: "CUSTOM_FIELD 3: Update data and add weight"
+ netbox.netbox.netbox_custom_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: Custom Link
+ link_text: Open Web management
+ link_url: !unsafe https://{{ obj.name }}.domain.local/
+ weight: 50
+ state: present
+ register: test_three
+
+- name: "CUSTOM_FIELD 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['weight'] == 50
+ - test_three['custom_link']['name'] == "Custom Link"
+ - test_three['msg'] == "custom_link Custom Link updated"
+
+- name: "CUSTOM_LINK 4: Change content type"
+ netbox.netbox.netbox_custom_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - virtualization.virtualmachine
+ name: Custom Link
+ link_text: Open Web management
+ link_url: !unsafe https://{{ obj.name }}.domain.local/
+ state: present
+ register: test_four
+
+- name: "CUSTOM_LINK 4: ASSERT - Change content type"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['object_types'] == ["virtualization.virtualmachine"]
+ - test_four['custom_link']['name'] == "Custom Link"
+ - test_four['msg'] == "custom_link Custom Link updated"
+
+- name: "CUSTOM_LINK 5: Delete"
+ netbox.netbox.netbox_custom_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - virtualization.virtualmachine
+ name: Custom Link
+ link_text: Open Web management
+ link_url: !unsafe https://{{ obj.name }}.domain.local/
+ state: absent
+ register: test_five
+
+- name: "CUSTOM_LINK 5: ASSERT - Deleted"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['custom_link']['name'] == "Custom Link"
+ - test_five['msg'] == "custom_link Custom Link deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device.yml
new file mode 100644
index 000000000..4d42e6e71
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device.yml
@@ -0,0 +1,243 @@
+---
+##
+##
+### NETBOX_DEVICE
+##
+##
+- name: 1 - Device with required information
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: R1
+ device_type:
+ id: "1"
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ state: present
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == 'absent'
+ - test_one['diff']['after']['state'] == 'present'
+ - test_one['device']['name'] == "R1"
+ - test_one['device']['role'] == 1
+ - test_one['device']['device_type'] == 1
+ - test_one['device']['site'] == 1
+ - test_one['device']['status'] == "staged"
+ - test_one['device']['name'] == "R1"
+ - test_one['msg'] == "device R1 created"
+
+- name: 2 - Duplicate device
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: R1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ state: present
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['device']['name'] == "R1"
+ - test_two['device']['role'] == 1
+ - test_two['device']['device_type'] == 1
+ - test_two['device']['site'] == 1
+ - test_two['device']['status'] == "staged"
+ - test_two['msg'] == "device R1 already exists"
+
+- name: 3 - Update device
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: R1
+ serial: FXS1001
+ local_context_data:
+ bgp_as: "65412"
+ virtual_chassis: VC1
+ vc_position: 3
+ vc_priority: 15
+ location: Test Rack Group
+ state: present
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['serial'] == "FXS1001"
+ - test_three['diff']['after']['local_context_data']["bgp_as"] == "65412"
+ - test_three['diff']['after']['virtual_chassis'] == 1
+ - test_three['diff']['after']['vc_position'] == 3
+ - test_three['diff']['after']['vc_priority'] == 15
+ - test_three['device']['name'] == "R1"
+ - test_three['device']['role'] == 1
+ - test_three['device']['device_type'] == 1
+ - test_three['device']['site'] == 1
+ - test_three['device']['status'] == "staged"
+ - test_three['device']['serial'] == "FXS1001"
+ - test_three['device']['local_context_data']["bgp_as"] == "65412"
+ - test_three['device']['virtual_chassis'] == 1
+ - test_three['device']['vc_position'] == 3
+ - test_three['device']['vc_priority'] == 15
+ - test_three['device']['location'] == 1
+ - test_three['msg'] == "device R1 updated"
+
+- name: 3.1 - Update device name using query_params
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: R1-changed-name
+ serial: FXS1001
+ query_params:
+ - serial
+ state: present
+ register: test_three_dot_one
+
+- name: 3.1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three_dot_one is changed
+ - test_three_dot_one['diff']['after']['name'] == "R1-changed-name"
+ - test_three_dot_one['device']['role'] == 1
+ - test_three_dot_one['device']['device_type'] == 1
+ - test_three_dot_one['device']['site'] == 1
+ - test_three_dot_one['device']['status'] == "staged"
+ - test_three_dot_one['device']['serial'] == "FXS1001"
+ - test_three_dot_one['device']['local_context_data']["bgp_as"] == "65412"
+ - test_three_dot_one['msg'] == "device R1-changed-name updated"
+
+- name: 4 - Create device with tags and assign to rack
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: TestR1
+ device_type: "1841"
+ device_role: Core Switch
+ site: Test Site2
+ rack: Test Rack Site 2
+ position: 35.5
+ face: Front
+ tags:
+ - schnozzberry
+ tenant: Test Tenant
+ asset_tag: "1234"
+ state: present
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['device']['name'] == "TestR1"
+ - test_four['device']['role'] == 1
+ - test_four['device']['device_type'] == 5
+ - test_four['device']['site'] == 2
+ - test_four['device']['status'] == "active"
+ - test_four['device']['rack'] == 1
+ - test_four['device']['tags'][0] == 4
+ - test_four['device']['tenant'] == 1
+ - test_four['device']['asset_tag'] == '1234'
+ - test_four['msg'] == "device TestR1 created"
+
+- name: 5 - Delete previous device
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: TestR1
+ state: absent
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "device TestR1 deleted"
+
+- name: 6 - Delete R1
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: R1-changed-name
+ state: absent
+ register: test_six
+
+- name: 6 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "present"
+ - test_six['diff']['after']['state'] == "absent"
+ - test_six['msg'] == "device R1-changed-name deleted"
+
+- name: 7 - Add primary_ip4/6 to test100
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test100
+ primary_ip4: 172.16.180.1/24
+ primary_ip6: 2001::1:1/64
+ state: present
+ register: test_seven
+
+- name: 7 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_seven is changed
+ - test_seven['diff']['after']['primary_ip4'] == 1
+ - test_seven['diff']['after']['primary_ip6'] == 2
+ - test_seven['device']['name'] == "test100"
+ - test_seven['device']['role'] == 1
+ - test_seven['device']['device_type'] == 1
+ - test_seven['device']['site'] == 1
+ - test_seven['device']['status'] == "active"
+ - test_seven['device']['primary_ip4'] == 1
+ - test_seven['device']['primary_ip6'] == 2
+ - test_seven['msg'] == "device test100 updated"
+
+- name: 8 - Device with empty string name
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: ""
+ device_type:
+ id: 1
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ state: present
+ register: test_eight
+
+- name: 8 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eight is changed
+ - test_eight['diff']['before']['state'] == 'absent'
+ - test_eight['diff']['after']['state'] == 'present'
+ - test_eight['device']['role'] == 1
+ - test_eight['device']['device_type'] == 1
+ - test_eight['device']['site'] == 1
+ - test_eight['device']['status'] == "staged"
+ - "'-' in test_eight['device']['name']"
+ - test_eight['device']['name'] | length == 36
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay.yml
new file mode 100644
index 000000000..b892d2160
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay.yml
@@ -0,0 +1,87 @@
+---
+##
+##
+### NETBOX_DEVICE_BAY
+##
+##
+- name: "DEVICE_BAY 1: Necessary info creation"
+ netbox.netbox.netbox_device_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: Test Nexus One
+ name: Device Bay One
+ state: present
+ register: test_one
+
+- name: "DEVICE_BAY 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['device_bay']['name'] == "Device Bay One"
+ - test_one['device_bay']['device'] == 4
+ - test_one['msg'] == "device_bay Device Bay One created"
+
+- name: "DEVICE_BAY 2: Create duplicate"
+ netbox.netbox.netbox_device_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: Test Nexus One
+ name: Device Bay One
+ state: present
+ register: test_two
+
+- name: "DEVICE_BAY 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['device_bay']['name'] == "Device Bay One"
+ - test_two['device_bay']['device'] == 4
+ - test_two['msg'] == "device_bay Device Bay One already exists"
+
+- name: "DEVICE_BAY 3: ASSERT - Update"
+ netbox.netbox.netbox_device_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: Test Nexus One
+ name: Device Bay One
+ installed_device: Test Nexus Child One
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "DEVICE_BAY 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['installed_device'] == 5
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['device_bay']['name'] == "Device Bay One"
+ - test_three['device_bay']['device'] == 4
+ - test_three['device_bay']['installed_device'] == 5
+ - test_three['device_bay']['tags'][0] == 4
+ - test_three['msg'] == "device_bay Device Bay One updated"
+
+- name: "DEVICE_BAY 4: ASSERT - Delete"
+ netbox.netbox.netbox_device_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Device Bay One
+ state: absent
+ register: test_four
+
+- name: "DEVICE_BAY 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['device_bay']['name'] == "Device Bay One"
+ - test_four['device_bay']['device'] == 4
+ - test_four['device_bay']['installed_device'] == 5
+ - test_four['device_bay']['tags'][0] == 4
+ - test_four['msg'] == "device_bay Device Bay One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay_template.yml
new file mode 100644
index 000000000..c2ba1e32c
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_bay_template.yml
@@ -0,0 +1,81 @@
+---
+##
+##
+### NETBOX_DEVICE_BAY_TEMPLATE
+##
+##
+- name: "DEVICE_BAY_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_device_bay_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: WS Test 3850
+ name: Device Bay Template One
+ state: present
+ register: test_one
+
+- name: "DEVICE_BAY_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['device_bay_template']['name'] == "Device Bay Template One"
+ - test_one['device_bay_template']['device_type'] == 7
+ - test_one['msg'] == "device_bay_template Device Bay Template One created"
+
+- name: "DEVICE_BAY_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_device_bay_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: WS Test 3850
+ name: Device Bay Template One
+ state: present
+ register: test_two
+
+- name: "DEVICE_BAY_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['device_bay_template']['name'] == "Device Bay Template One"
+ - test_two['device_bay_template']['device_type'] == 7
+ - test_two['msg'] == "device_bay_template Device Bay Template One already exists"
+
+- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Create Device Bay Template for Delete Test"
+ netbox.netbox.netbox_device_bay_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: WS Test 3850
+ name: Device Bay Template Two
+ state: present
+ register: test_three
+
+- name: "DEVICE_BAY_TEMPLATE 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['device_bay_template']['name'] == "Device Bay Template Two"
+ - test_three['device_bay_template']['device_type'] == 7
+ - test_three['msg'] == "device_bay_template Device Bay Template Two created"
+
+- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
+ netbox.netbox.netbox_device_bay_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Device Bay Template Two
+ device_type: WS Test 3850
+ state: absent
+ register: test_four
+
+- name: "DEVICE_BAY_TEMPLATE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['device_bay_template']['name'] == "Device Bay Template Two"
+ - test_four['device_bay_template']['device_type'] == 7
+ - test_four['msg'] == "device_bay_template Device Bay Template Two deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface.yml
new file mode 100644
index 000000000..dd3c2383b
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface.yml
@@ -0,0 +1,312 @@
+---
+# NETBOX_DEVICE_INTERFACE
+
+- name: 1 - Interface with required information
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet3
+ type: 1000Base-T (1GE)
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['msg'] == "interface GigabitEthernet3 created"
+ - test_one['diff']['before']['state'] == 'absent'
+ - test_one['diff']['after']['state'] == 'present'
+ - test_one['interface']['name'] == "GigabitEthernet3"
+ - test_one['interface']['device'] == 1
+
+- name: 2 - Update test100 - GigabitEthernet3
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet3
+ mtu: 1600
+ enabled: false
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_two is changed
+ - test_two['msg'] == "interface GigabitEthernet3 updated"
+ - test_two['diff']['after']['enabled'] == false
+ - test_two['diff']['after']['mtu'] == 1600
+ - test_two['interface']['name'] == "GigabitEthernet3"
+ - test_two['interface']['device'] == 1
+ - test_two['interface']['enabled'] == false
+ - test_two['interface']['mtu'] == 1600
+
+- name: 3 - Delete interface test100 - GigabitEthernet3
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet3
+ state: absent
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['msg'] == "interface GigabitEthernet3 deleted"
+ - test_three['diff']['before']['state'] == "present"
+ - test_three['diff']['after']['state'] == "absent"
+
+- name: 4 - Create LAG with several specified options
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: port-channel1
+ type: Link Aggregation Group (LAG)
+ mtu: 1600
+ mgmt_only: false
+ mode: Access
+ state: present
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['msg'] == "interface port-channel1 created"
+ - test_four['diff']['before']['state'] == 'absent'
+ - test_four['diff']['after']['state'] == 'present'
+ - test_four['interface']['name'] == "port-channel1"
+ - test_four['interface']['device'] == 1
+ - test_four['interface']['enabled'] == true
+ - test_four['interface']['type'] == "lag"
+ - test_four['interface']['mgmt_only'] == false
+ - test_four['interface']['mode'] == "access"
+ - test_four['interface']['mtu'] == 1600
+
+- name: 5 - Create interface and assign it to parent LAG
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet3
+ enabled: false
+ type: 1000Base-T (1GE)
+ lag:
+ name: port-channel1
+ mtu: 1600
+ mgmt_only: false
+ mode: Access
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['msg'] == "interface GigabitEthernet3 created"
+ - test_five['diff']['before']['state'] == 'absent'
+ - test_five['diff']['after']['state'] == 'present'
+ - test_five['interface']['name'] == "GigabitEthernet3"
+ - test_five['interface']['device'] == 1
+ - test_five['interface']['enabled'] == false
+ - test_five['interface']['type'] == "1000base-t"
+ - test_five['interface']['mgmt_only'] == false
+ - test_five['interface']['lag'] == test_four["interface"]["id"]
+ - test_five['interface']['mode'] == "access"
+ - test_five['interface']['mtu'] == 1600
+
+- name: 6 - Create interface as trunk port
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet21
+ enabled: false
+ type: 1000Base-T (1GE)
+ untagged_vlan:
+ name: Wireless
+ site: Test Site
+ tagged_vlans:
+ - name: Data
+ site: Test Site
+ - name: VoIP
+ site: Test Site
+ mtu: 1600
+ mgmt_only: true
+ mode: Tagged
+ state: present
+ register: test_six
+
+- name: 6 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['msg'] == "interface GigabitEthernet21 created"
+ - test_six['diff']['before']['state'] == 'absent'
+ - test_six['diff']['after']['state'] == 'present'
+ - test_six['interface']['name'] == "GigabitEthernet21"
+ - test_six['interface']['device'] == 1
+ - test_six['interface']['enabled'] == false
+ - test_six['interface']['type'] == "1000base-t"
+ - test_six['interface']['mgmt_only'] == true
+ - test_six['interface']['mode'] == "tagged"
+ - test_six['interface']['mtu'] == 1600
+ - test_six['interface']['tagged_vlans'] == [2, 3]
+ - test_six['interface']['untagged_vlan'] == 1
+
+- name: 7 - Duplicate Interface
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet1
+ register: test_seven
+
+- name: 7 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_seven['changed']
+ - test_seven['msg'] == "interface GigabitEthernet1 already exists"
+ - test_seven['interface']['name'] == "GigabitEthernet1"
+ - test_seven['interface']['device'] == 1
+
+- name: Add port-channel1 to R1 to test finding proper port-channel1
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: R1-Device
+ name: port-channel1
+ type: Link Aggregation Group (LAG)
+
+- name: 8 - Create interface and assign it to parent LAG - non dict
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet4
+ enabled: false
+ type: 1000Base-T (1GE)
+ lag: port-channel1
+ mtu: 1600
+ mgmt_only: false
+ mode: Access
+ state: present
+ register: test_eight
+
+- name: 8 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eight is changed
+ - test_eight['msg'] == "interface GigabitEthernet4 created"
+ - test_eight['diff']['before']['state'] == 'absent'
+ - test_eight['diff']['after']['state'] == 'present'
+ - test_eight['interface']['name'] == "GigabitEthernet4"
+ - test_eight['interface']['device'] == 1
+ - test_eight['interface']['enabled'] == false
+ - test_eight['interface']['type'] == "1000base-t"
+ - test_eight['interface']['mgmt_only'] == false
+ - test_eight['interface']['lag'] == 10
+ - test_eight['interface']['mode'] == "access"
+ - test_eight['interface']['mtu'] == 1600
+
+- name: 9 - Create interface on VC child
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: Test Nexus Child One
+ name: Ethernet2/2
+ type: 1000Base-T (1GE)
+ state: present
+ register: test_nine
+
+- name: 9 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_nine is changed
+ - test_nine['msg'] == "interface Ethernet2/2 created"
+ - test_nine['diff']['before']['state'] == 'absent'
+ - test_nine['diff']['after']['state'] == 'present'
+ - test_nine['interface']['name'] == "Ethernet2/2"
+ - test_nine['interface']['device'] == 5
+ - test_nine['interface']['enabled'] == true
+ - test_nine['interface']['type'] == "1000base-t"
+
+- name: 10 - Update interface on VC child
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: Test Nexus One
+ name: Ethernet2/2
+ description: Updated child interface from parent device
+ type: 1000Base-T (1GE)
+ update_vc_child: true
+ state: present
+ register: test_ten
+
+- name: 10 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_ten is changed
+ - test_ten['msg'] == "interface Ethernet2/2 updated"
+ - test_ten['diff']['after']['description'] == 'Updated child interface from parent device'
+ - test_ten['interface']['name'] == "Ethernet2/2"
+ - test_ten['interface']['device'] == 5
+ - test_ten['interface']['enabled'] == true
+ - test_ten['interface']['type'] == "1000base-t"
+ - test_ten['interface']['description'] == 'Updated child interface from parent device'
+
+- name: 11 - Update interface on VC child w/o update_vc_child
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: Test Nexus One
+ name: Ethernet2/2
+ description: Updated child interface from parent device - test
+ type: 1000Base-T (1GE)
+ state: present
+ ignore_errors: true
+ register: test_eleven
+
+- name: 11 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eleven is failed
+ - test_eleven['msg'] == "Must set update_vc_child to True to allow child device interface modification"
+
+- name: 12 - Create interface and mark it as connected
+ netbox.netbox.netbox_device_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: GigabitEthernet5
+ type: 1000Base-T (1GE)
+ mark_connected: true
+ register: test_twelve
+
+- name: 12 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_twelve is changed
+ - test_twelve['msg'] == "interface GigabitEthernet5 created"
+ - test_twelve['diff']['before']['state'] == 'absent'
+ - test_twelve['diff']['after']['state'] == 'present'
+ - test_twelve['interface']['name'] == "GigabitEthernet5"
+ - test_twelve['interface']['device'] == 1
+ - test_twelve['interface']['mark_connected'] == true
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface_template.yml
new file mode 100644
index 000000000..abe8f3405
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_interface_template.yml
@@ -0,0 +1,109 @@
+---
+##
+##
+### NETBOX_DEVICE_INTERFACE_TEMPLATE
+##
+##
+- name: 1 - Interface with required information
+ netbox.netbox.netbox_device_interface_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: Arista Test
+ name: 10GBASE-T (10GE)
+ type: 10gbase-t
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['msg'] == "interface_template 10GBASE-T (10GE) created"
+ - test_one['diff']['before']['state'] == 'absent'
+ - test_one['diff']['after']['state'] == 'present'
+ - test_one['interface_template']['name'] == "10GBASE-T (10GE)"
+ - test_one['interface_template']['device_type'] == 2
+ - test_one['interface_template']['type'] == '10gbase-t'
+
+- name: 2 - Update 10GBASE-T (10GE)
+ netbox.netbox.netbox_device_interface_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: Arista Test
+ name: 10GBASE-T (10GE)
+ type: 10gbase-t
+ mgmt_only: true
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_two is changed
+ - test_two['msg'] == "interface_template 10GBASE-T (10GE) updated"
+ - test_two['diff']['after']['mgmt_only'] == true
+ - test_two['interface_template']['name'] == "10GBASE-T (10GE)"
+ - test_two['interface_template']['device_type'] == 2
+ - test_two['interface_template']['mgmt_only'] == true
+
+- name: 3 - Delete interface template 10GBASE-T (10GE)
+ netbox.netbox.netbox_device_interface_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: Arista Test
+ name: 10GBASE-T (10GE)
+ type: 10gbase-t
+ state: absent
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['msg'] == "interface_template 10GBASE-T (10GE) deleted"
+ - test_three['diff']['before']['state'] == "present"
+ - test_three['diff']['after']['state'] == "absent"
+
+- name: 4 - Create LAG with several specified options
+ netbox.netbox.netbox_device_interface_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: Arista Test
+ name: port channel template
+ type: lag
+ mgmt_only: false
+ state: present
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['msg'] == "interface_template port channel template created"
+ - test_four['diff']['before']['state'] == 'absent'
+ - test_four['diff']['after']['state'] == 'present'
+ - test_four['interface_template']['name'] == "port channel template"
+ - test_four['interface_template']['device_type'] == 2
+ - test_four['interface_template']['type'] == "lag"
+ - test_four['interface_template']['mgmt_only'] == false
+
+- name: 5 - Duplicate Interface Template port channel template
+ netbox.netbox.netbox_device_interface_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device_type: Arista Test
+ name: port channel template
+ type: lag
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['msg'] == "interface_template port channel template already exists"
+ - test_five['interface_template']['name'] == "port channel template"
+ - test_five['interface_template']['device_type'] == 2
+ - test_five['interface_template']['type'] == "lag"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_role.yml
new file mode 100644
index 000000000..b85ee2c86
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_role.yml
@@ -0,0 +1,101 @@
+---
+##
+##
+### NETBOX_DEVICE_ROLE
+##
+##
+- name: "DEVICE_ROLE 1: Necessary info creation"
+ netbox.netbox.netbox_device_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Device Role
+ color: FFFFFF
+ state: present
+ register: test_one
+
+- name: "DEVICE_ROLE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['device_role']['name'] == "Test Device Role"
+ - test_one['device_role']['slug'] == "test-device-role"
+ - test_one['device_role']['color'] == "ffffff"
+ - test_one['msg'] == "device_role Test Device Role created"
+
+- name: "DEVICE_ROLE 2: Create duplicate"
+ netbox.netbox.netbox_device_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Device Role
+ color: FFFFFF
+ state: present
+ register: test_two
+
+- name: "DEVICE_ROLE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['device_role']['name'] == "Test Device Role"
+ - test_two['device_role']['slug'] == "test-device-role"
+ - test_two['device_role']['color'] == "ffffff"
+ - test_two['msg'] == "device_role Test Device Role already exists"
+
+- name: "DEVICE_ROLE 3: ASSERT - Update"
+ netbox.netbox.netbox_device_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Device Role
+ color: "003EFF"
+ vm_role: false
+ state: present
+ register: test_three
+
+- name: "DEVICE_ROLE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['color'] == "003eff"
+ - test_three['diff']['after']['vm_role'] == false
+ - test_three['device_role']['name'] == "Test Device Role"
+ - test_three['device_role']['slug'] == "test-device-role"
+ - test_three['device_role']['color'] == "003eff"
+ - test_three['device_role']['vm_role'] == false
+ - test_three['msg'] == "device_role Test Device Role updated"
+
+- name: "DEVICE_ROLE 4: ASSERT - Delete"
+ netbox.netbox.netbox_device_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Device Role
+ state: absent
+ register: test_four
+
+- name: "DEVICE_ROLE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "device_role Test Device Role deleted"
+
+- name: "DEVICE_ROLE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_device_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Device Role
+ state: absent
+ register: test_five
+
+- name: "DEVICE_ROLE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['device_role'] == None
+ - test_five['msg'] == "device_role Test Device Role already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_type.yml
new file mode 100644
index 000000000..3027f6fd1
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_device_type.yml
@@ -0,0 +1,132 @@
+---
+##
+##
+### NETBOX_DEVICE_TYPE
+##
+##
+- name: "DEVICE_TYPE 1: Necessary info creation"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ slug: test-device-type
+ model: ws-test-3750
+ manufacturer: Test Manufacturer
+ state: present
+ register: test_one
+
+- name: "DEVICE_TYPE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['device_type']['slug'] == "test-device-type"
+ - test_one['device_type']['model'] == "ws-test-3750"
+ - test_one['device_type']['manufacturer'] == 3
+ - test_one['msg'] == "device_type test-device-type created"
+
+- name: "DEVICE_TYPE 2: Create duplicate"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ slug: test-device-type
+ model: ws-test-3750
+ manufacturer: Test Manufacturer
+ state: present
+ register: test_two
+
+- name: "DEVICE_TYPE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['device_type']['slug'] == "test-device-type"
+ - test_two['device_type']['model'] == "ws-test-3750"
+ - test_two['device_type']['manufacturer'] == 3
+ - test_two['msg'] == "device_type test-device-type already exists"
+
+- name: "DEVICE_TYPE 3: ASSERT - Update"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ slug: test-device-type
+ model: ws-test-3750
+ manufacturer: Test Manufacturer
+ part_number: ws-3750g-v2
+ u_height: 1.5
+ is_full_depth: false
+ subdevice_role: parent
+ state: present
+ register: test_three
+
+- name: "DEVICE_TYPE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['is_full_depth'] == false
+ - test_three['diff']['after']['part_number'] == "ws-3750g-v2"
+ - test_three['diff']['after']['subdevice_role'] == "parent"
+ - test_three['device_type']['slug'] == "test-device-type"
+ - test_three['device_type']['model'] == "ws-test-3750"
+ - test_three['device_type']['manufacturer'] == 3
+ - test_three['device_type']['is_full_depth'] == false
+ - test_three['device_type']['part_number'] == "ws-3750g-v2"
+ - test_three['device_type']['subdevice_role'] == "parent"
+ - test_three['msg'] == "device_type test-device-type updated"
+
+- name: "DEVICE_TYPE 4: ASSERT - Delete"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: test-device-type
+ state: absent
+ register: test_four
+
+- name: "DEVICE_TYPE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "device_type test-device-type deleted"
+
+- name: "DEVICE_TYPE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: Test Device Type
+ state: absent
+ register: test_five
+
+- name: "DEVICE_TYPE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['device_type'] == None
+ - test_five['msg'] == "device_type Test Device Type already absent"
+
+- name: "DEVICE_TYPE 6: Without Slug"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: WS Test 3850
+ manufacturer: Test Manufacturer
+ subdevice_role: parent
+ state: present
+ register: test_six
+
+- name: "DEVICE_TYPE 6: ASSERT - Without Slug"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "absent"
+ - test_six['diff']['after']['state'] == "present"
+ - test_six['device_type']['slug'] == "ws-test-3850"
+ - test_six['device_type']['model'] == "WS Test 3850"
+ - test_six['device_type']['manufacturer'] == 3
+ - test_six['msg'] == "device_type WS Test 3850 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_export_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_export_template.yml
new file mode 100644
index 000000000..1e7535c08
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_export_template.yml
@@ -0,0 +1,118 @@
+---
+##
+##
+### NETBOX_EXPORT_TEMPLATE
+##
+##
+- name: "EXPORT_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_export_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: Example Export Template
+ description: Export Devices
+ template_code: !unsafe >-
+ {% for obj in queryset %}{{ obj.name }}{% endfor %}
+ state: present
+ register: test_one
+
+- name: "EXPORT_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['export_template']['name'] == "Example Export Template"
+ - test_one['export_template']['object_types'] == ["dcim.device"]
+ - test_one['export_template']['description'] == "Export Devices"
+ - test_one['msg'] == "export_template Example Export Template created"
+
+- name: "EXPORT_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_export_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: Example Export Template
+ description: Export Devices
+ template_code: !unsafe >-
+ {% for obj in queryset %}{{ obj.name }}{% endfor %}
+ state: present
+ register: test_two
+
+- name: "EXPORT_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['export_template']['name'] == "Example Export Template"
+ - test_two['msg'] == "export_template Example Export Template already exists"
+
+- name: "EXPORT_TEMPLATE 3: Update data and remove as_attachment"
+ netbox.netbox.netbox_export_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - dcim.device
+ name: Example Export Template
+ description: Export Devices
+ template_code: !unsafe >-
+ {% for obj in queryset %}{{ obj.name }}{% endfor %}
+ as_attachment: false
+ state: present
+ register: test_three
+
+- name: "EXPORT_TEMPLATE 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['as_attachment'] == false
+ - test_three['export_template']['name'] == "Example Export Template"
+ - test_three['msg'] == "export_template Example Export Template updated"
+
+- name: "EXPORT_TEMPLATE 4: Change content type"
+ netbox.netbox.netbox_export_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - virtualization.virtualmachine
+ name: Example Export Template
+ description: Export Devices
+ template_code: !unsafe >-
+ {% for obj in queryset %}{{ obj.name }}{% endfor %}
+ state: present
+ register: test_four
+
+- name: "EXPORT_TEMPLATE 4: ASSERT - Change content type"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['object_types'] == ["virtualization.virtualmachine"]
+ - test_four['export_template']['name'] == "Example Export Template"
+ - test_four['msg'] == "export_template Example Export Template updated"
+
+- name: "EXPORT_TEMPLATE 5: Delete"
+ netbox.netbox.netbox_export_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ object_types:
+ - virtualization.virtualmachine
+ name: Example Export Template
+ description: Export Devices
+ template_code: !unsafe >-
+ {% for obj in queryset %}{{ obj.name }}{% endfor %}
+ state: absent
+ register: test_five
+
+- name: "EXPORT_TEMPLATE 5: ASSERT - Deleted"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['export_template']['name'] == "Example Export Template"
+ - test_five['msg'] == "export_template Example Export Template deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group.yml
new file mode 100644
index 000000000..eed3f2f88
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group.yml
@@ -0,0 +1,91 @@
+---
+##
+##
+### NETBOX_FHRP_GROUP
+##
+##
+- name: "FHRP group 1: Test FHRP group creation"
+ netbox.netbox.netbox_fhrp_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ protocol: glbp
+ group_id: 111
+ state: present
+ register: test_one
+
+- name: "FHRP group: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['fhrp_group']['group_id'] == 111
+ - test_one['fhrp_group']['protocol'] == "glbp"
+ - test_one['msg'] == "fhrp_group 111 created"
+
+- name: "FHRP group 2: Create duplicate"
+ netbox.netbox.netbox_fhrp_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ protocol: glbp
+ group_id: 111
+ state: present
+ register: test_two
+
+- name: "FHRP group 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['fhrp_group']['group_id'] == 111
+ - test_two['fhrp_group']['protocol'] == "glbp"
+ - test_two['msg'] == "fhrp_group 111 already exists"
+
+- name: "FHRP group 3: Update FHRP group with other fields"
+ netbox.netbox.netbox_fhrp_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ protocol: glbp
+ group_id: 111
+ auth_type: md5
+ auth_key: 11111
+ description: Test description
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "FHRP group 3: ASSERT - Update FHRP group with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['auth_type'] == "md5"
+ - test_three['diff']['after']['auth_key'] == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
+ - test_three['diff']['after']['description'] == "Test description"
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['fhrp_group']['group_id'] == 111
+ - test_three['fhrp_group']['protocol'] == "glbp"
+ - test_three['fhrp_group']['auth_type'] == "md5"
+ - test_three['fhrp_group']['auth_key'] == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
+ - test_three['fhrp_group']['description'] == "Test description"
+ - test_three['fhrp_group']['tags'][0] == 4
+ - test_three['msg'] == "fhrp_group 111 updated"
+
+- name: "FHRP group 4: ASSERT - Delete"
+ netbox.netbox.netbox_fhrp_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ group_id: 111
+ state: absent
+ register: test_four
+
+- name: "FHRP group 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "fhrp_group 111 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group_assignment.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group_assignment.yml
new file mode 100644
index 000000000..2dd2f6d61
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_fhrp_group_assignment.yml
@@ -0,0 +1,92 @@
+---
+##
+##
+### NETBOX_FHRP_GROUP_ASSIGNMENT
+##
+##
+- name: "FHRP group assignment 1: Test FHRP group assignment creation"
+ netbox.netbox.netbox_fhrp_group_assignment:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ fhrp_group: 1
+ interface_type: dcim.interface
+ interface_id: 1
+ priority: 1
+ state: present
+ register: test_one
+
+- name: "FHRP group assignment: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['fhrp_group_assignment']['group'] == 1
+ - test_one['fhrp_group_assignment']['interface_type'] == "dcim.interface"
+ - test_one['fhrp_group_assignment']['interface_id'] == 1
+ - test_one['fhrp_group_assignment']['priority'] == 1
+ - test_one['msg'] == "fhrp_group_assignment fhrp_group 1 > dcim.interface 1 created"
+
+- name: "FHRP group assignment 2: Create duplicate"
+ netbox.netbox.netbox_fhrp_group_assignment:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ fhrp_group: 1
+ interface_type: dcim.interface
+ interface_id: 1
+ priority: 1
+ state: present
+ register: test_two
+
+- name: "FHRP group assignment 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['fhrp_group_assignment']['group'] == 1
+ - test_two['fhrp_group_assignment']['interface_type'] == "dcim.interface"
+ - test_two['fhrp_group_assignment']['interface_id'] == 1
+ - test_two['fhrp_group_assignment']['priority'] == 1
+ - test_two['msg'] == "fhrp_group_assignment fhrp_group 1 > dcim.interface 1 already exists"
+
+- name: "FHRP group assignment 3: Update FHRP group assignment"
+ netbox.netbox.netbox_fhrp_group_assignment:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ fhrp_group: 1
+ interface_type: dcim.interface
+ interface_id: 1
+ priority: 2
+ state: present
+ register: test_three
+
+- name: "FHRP group assignment 3: ASSERT - Update FHRP group assignment"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['fhrp_group_assignment']['group'] == 1
+ - test_three['fhrp_group_assignment']['interface_type'] == "dcim.interface"
+ - test_three['fhrp_group_assignment']['interface_id'] == 1
+ - test_three['fhrp_group_assignment']['priority'] == 2
+ - test_three['msg'] == "fhrp_group_assignment fhrp_group 1 > dcim.interface 1 updated"
+
+- name: "FHRP group assignment 4: Delete FHRP group assignment"
+ netbox.netbox.netbox_fhrp_group_assignment:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ fhrp_group: 1
+ interface_type: dcim.interface
+ interface_id: 1
+ state: absent
+ register: test_four
+
+- name: "FHRP group assignment 3: ASSERT - Delete FHRP group assignment"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "fhrp_group_assignment fhrp_group 1 > dcim.interface 1 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port.yml
new file mode 100644
index 000000000..7088bbeef
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port.yml
@@ -0,0 +1,150 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_FRONT_PORT
+##
+##
+- name: "FRONT_PORT 1: Necessary info creation"
+ netbox.netbox.netbox_front_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port
+ device: test100
+ type: bnc
+ rear_port: Rear Port
+ state: present
+ register: test_one
+
+- name: "FRONT_PORT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['front_port']['name'] == "Front Port"
+ - test_one['front_port']['device'] == 1
+ - test_one['front_port']['type'] == "bnc"
+ - test_one['front_port']['rear_port'] == 1
+ - test_one['msg'] == "front_port Front Port created"
+
+- name: "FRONT_PORT 2: Create duplicate"
+ netbox.netbox.netbox_front_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port
+ device: test100
+ type: bnc
+ rear_port: Rear Port
+ state: present
+ register: test_two
+
+- name: "FRONT_PORT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['front_port']['name'] == "Front Port"
+ - test_two['front_port']['device'] == 1
+ - test_two['front_port']['type'] == "bnc"
+ - test_two['front_port']['rear_port'] == 1
+ - test_two['msg'] == "front_port Front Port already exists"
+
+- name: "FRONT_PORT 3: Update Front Port with other fields"
+ netbox.netbox.netbox_front_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port
+ device: test100
+ type: bnc
+ rear_port: Rear Port
+ rear_port_position: 5
+ description: test description
+ state: present
+ register: test_three
+
+- name: "FRONT_PORT 3: ASSERT - Update Front Port with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['rear_port_position'] == 5
+ - test_three['diff']['after']['description'] == "test description"
+ - test_three['front_port']['name'] == "Front Port"
+ - test_three['front_port']['device'] == 1
+ - test_three['front_port']['type'] == "bnc"
+ - test_three['front_port']['rear_port'] == 1
+ - test_three['front_port']['rear_port_position'] == 5
+ - test_three['front_port']['description'] == "test description"
+ - test_three['msg'] == "front_port Front Port updated"
+
+- name: "FRONT_PORT 4: Create Front Port for Delete Test"
+ netbox.netbox.netbox_front_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port 2
+ device: test100
+ type: bnc
+ rear_port: Rear Port
+ state: present
+ register: test_four
+
+- name: "FRONT_PORT 4: ASSERT - Create Front Port for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['front_port']['name'] == "Front Port 2"
+ - test_four['front_port']['device'] == 1
+ - test_four['front_port']['type'] == "bnc"
+ - test_four['front_port']['rear_port'] == 1
+ - test_four['msg'] == "front_port Front Port 2 created"
+
+- name: "FRONT_PORT 5: Delete Front Port"
+ netbox.netbox.netbox_front_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port 2
+ device: test100
+ type: bnc
+ rear_port: Rear Port
+ state: absent
+ register: test_five
+
+- name: "FRONT_PORT 5: ASSERT - Delete Front Port"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "front_port Front Port 2 deleted"
+
+- name: "FRONT_PORT 6: Create duplicate with rear_port dictionary"
+ netbox.netbox.netbox_front_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port
+ device: test100
+ type: bnc
+ rear_port:
+ device: test100
+ name: Rear Port
+ state: present
+ register: test_six
+
+- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port dictionary"
+ ansible.builtin.assert:
+ that:
+ - not test_six['changed']
+ - test_six['front_port']['name'] == "Front Port"
+ - test_six['front_port']['device'] == 1
+ - test_six['front_port']['type'] == "bnc"
+ - test_six['front_port']['rear_port'] == 1
+ - test_six['msg'] == "front_port Front Port already exists"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port_template.yml
new file mode 100644
index 000000000..966279fa8
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_front_port_template.yml
@@ -0,0 +1,147 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_FRONT_PORT_TEMPLATE
+##
+##
+- name: "FRONT_PORT_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_front_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port Template
+ device_type: Cisco Test
+ type: bnc
+ rear_port_template: Rear Port Template
+ state: present
+ register: test_one
+
+- name: "FRONT_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['front_port_template']['name'] == "Front Port Template"
+ - test_one['front_port_template']['device_type'] == 1
+ - test_one['front_port_template']['type'] == "bnc"
+ - test_one['front_port_template']['rear_port'] == 1
+ - test_one['msg'] == "front_port_template Front Port Template created"
+
+- name: "FRONT_PORT_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_front_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port Template
+ device_type: Cisco Test
+ type: bnc
+ rear_port_template: Rear Port Template
+ state: present
+ register: test_two
+
+- name: "FRONT_PORT_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['front_port_template']['name'] == "Front Port Template"
+ - test_two['front_port_template']['device_type'] == 1
+ - test_two['front_port_template']['type'] == "bnc"
+ - test_two['front_port_template']['rear_port'] == 1
+ - test_two['msg'] == "front_port_template Front Port Template already exists"
+
+- name: "FRONT_PORT_TEMPLATE 3: Update Front Port Template with other fields"
+ netbox.netbox.netbox_front_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port Template
+ device_type: Cisco Test
+ type: bnc
+ rear_port_template: Rear Port Template
+ rear_port_template_position: 5
+ state: present
+ register: test_three
+
+- name: "FRONT_PORT_TEMPLATE 3: ASSERT - Update Front Port Template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['rear_port_position'] == 5
+ - test_three['front_port_template']['name'] == "Front Port Template"
+ - test_three['front_port_template']['device_type'] == 1
+ - test_three['front_port_template']['type'] == "bnc"
+ - test_three['front_port_template']['rear_port_position'] == 5
+ - test_three['front_port_template']['rear_port'] == 1
+ - test_three['msg'] == "front_port_template Front Port Template updated"
+
+- name: "FRONT_PORT_TEMPLATE 4: Create Front Port Template for Delete Test"
+ netbox.netbox.netbox_front_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port Template 2
+ device_type: Cisco Test
+ type: bnc
+ rear_port_template: Rear Port Template
+ state: present
+ register: test_four
+
+- name: "FRONT_PORT_TEMPLATE 4: ASSERT - Create Front Port Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['front_port_template']['name'] == "Front Port Template 2"
+ - test_four['front_port_template']['device_type'] == 1
+ - test_four['front_port_template']['type'] == "bnc"
+ - test_four['front_port_template']['rear_port'] == 1
+ - test_four['msg'] == "front_port_template Front Port Template 2 created"
+
+- name: "FRONT_PORT_TEMPLATE 5: Delete Front Port Template"
+ netbox.netbox.netbox_front_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port Template 2
+ device_type: Cisco Test
+ type: bnc
+ rear_port_template: Rear Port Template
+ state: absent
+ register: test_five
+
+- name: "FRONT_PORT_TEMPLATE 5: ASSERT - Delete Front Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "front_port_template Front Port Template 2 deleted"
+
+- name: "FRONT_PORT 6: Create duplicate with rear_port_template dictionary"
+ netbox.netbox.netbox_front_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Front Port Template
+ device_type: Cisco Test
+ type: bnc
+ rear_port_template:
+ device: Cisco Test
+ name: Rear Port Template
+ state: present
+ register: test_six
+
+- name: "FRONT_PORT 6: ASSERT - Create duplicate with rear_port_template dictionary"
+ ansible.builtin.assert:
+ that:
+ - not test_six['changed']
+ - test_six['front_port_template']['name'] == "Front Port Template"
+ - test_six['front_port_template']['device_type'] == 1
+ - test_six['front_port_template']['type'] == "bnc"
+ - test_six['front_port_template']['rear_port'] == 1
+ - test_six['msg'] == "front_port_template Front Port Template already exists"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item.yml
new file mode 100644
index 000000000..5b330c3a5
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item.yml
@@ -0,0 +1,203 @@
+---
+##
+##
+### NETBOX_INVENTORY_ITEM
+##
+##
+- name: "INVENTORY_ITEM 1: Necessary info creation"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: 10G-SFP+
+ state: present
+ register: test_one
+
+- name: "INVENTORY_ITEM 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one.diff.before.state == "absent"
+ - test_one.diff.after.state == "present"
+ - test_one.inventory_item.name == "10G-SFP+"
+ - test_one.inventory_item.device == 1
+ - test_one.msg == "inventory_item 10G-SFP+ created"
+
+- name: "INVENTORY_ITEM 2: Create duplicate"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: 10G-SFP+
+ state: present
+ register: test_two
+
+- name: "INVENTORY_ITEM 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two.changed
+ - test_two.inventory_item.name == "10G-SFP+"
+ - test_two.inventory_item.device == 1
+ - test_two.msg == "inventory_item 10G-SFP+ already exists"
+
+- name: "INVENTORY_ITEM 3: Update properties"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: 10G-SFP+
+ manufacturer: Cisco
+ part_id: 10G-SFP+
+ serial: "1234"
+ asset_tag: "1234"
+ description: New SFP
+ discovered: true
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "INVENTORY_ITEM 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three.diff.after.asset_tag == "1234"
+ - test_three.diff.after.serial == "1234"
+ - test_three.diff.after.description == "New SFP"
+ - test_three.diff.after.manufacturer == 1
+ - test_three.diff.after.part_id == "10G-SFP+"
+ - test_three.diff.after.tags[0] == 4
+ - test_three.diff.after.discovered == True
+ - test_three.inventory_item.name == "10G-SFP+"
+ - test_three.inventory_item.device == 1
+ - test_three.inventory_item.asset_tag == "1234"
+ - test_three.inventory_item.serial == "1234"
+ - test_three.inventory_item.description == "New SFP"
+ - test_three.inventory_item.manufacturer == 1
+ - test_three.inventory_item.part_id == "10G-SFP+"
+ - test_three.inventory_item.tags[0] == 4
+ - test_three.inventory_item.discovered == True
+ - test_three.msg == "inventory_item 10G-SFP+ updated"
+
+- name: "INVENTORY_ITEM 4: Delete inventory item"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: 10G-SFP+
+ state: absent
+ register: test_four
+
+- name: "INVENTORY_ITEM 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four.inventory_item.name == "10G-SFP+"
+ - test_four.inventory_item.device == 1
+ - test_four.inventory_item.asset_tag == "1234"
+ - test_four.inventory_item.serial == "1234"
+ - test_four.inventory_item.description == "New SFP"
+ - test_four.inventory_item.manufacturer == 1
+ - test_four.inventory_item.part_id == "10G-SFP+"
+ - test_four.inventory_item.tags[0] == 4
+ - test_four.msg == "inventory_item 10G-SFP+ deleted"
+
+- name: "INVENTORY_ITEM 5: PREWORK - Create inventory item role"
+ netbox.netbox.netbox_inventory_item_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Processor
+ color: FFFFFF
+ state: present
+ register: test_five_prework
+
+- name: "INVENTORY_ITEM 5: Create inventory item with role"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: test_processor
+ inventory_item_role: Processor
+ state: present
+ register: test_five
+
+- name: "INVENTORY_ITEM 5: ASSERT - Inventory item creation with role"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five.diff.before.state == "absent"
+ - test_five.diff.after.state == "present"
+ - test_five.inventory_item.name == "test_processor"
+ - test_five.inventory_item.role == test_five_prework.inventory_item_role.id
+ - test_five.inventory_item.device == 1
+ - test_five.msg == "inventory_item test_processor created"
+
+- name: "INVENTORY_ITEM 6: Create inventory item with missing role"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: test_processor
+ inventory_item_role: Foo
+ state: present
+ ignore_errors: true
+ register: test_six
+
+- name: "INVENTORY_ITEM 6: ASSERT - Inventory item creation with missing role"
+ ansible.builtin.assert:
+ that:
+ - test_six.failed
+ - test_six.msg == "Could not resolve id of inventory_item_role: Foo"
+
+- name: "INVENTORY_ITEM 7: Create inventory item with component"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: test_component
+ component_type: dcim.interface
+ component:
+ name: GigabitEthernet2
+ device: test100
+ state: present
+ register: test_seven
+
+- name: "INVENTORY_ITEM 7: ASSERT - Inventory item creation with component"
+ ansible.builtin.assert:
+ that:
+ - test_seven is changed
+ - test_seven.diff.before.state == "absent"
+ - test_seven.diff.after.state == "present"
+ - test_seven.inventory_item.name == "test_component"
+ - test_seven.inventory_item.component_type == "dcim.interface"
+ - test_seven.inventory_item.component_id == 4
+ - test_seven.inventory_item.device == 1
+ - test_seven.msg == "inventory_item test_component created"
+
+- name: "INVENTORY_ITEM 8: Create inventory item with missing component_type"
+ netbox.netbox.netbox_inventory_item:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: test_component
+ component:
+ name: GigabitEthernet2
+ device: test100
+ state: present
+ ignore_errors: true
+ register: test_eight
+
+- name: "INVENTORY_ITEM 8: ASSERT - Inventory item creation with missing component_type"
+ ansible.builtin.assert:
+ that:
+ - test_eight.failed
+ - test_eight.msg == "parameters are required together: component_type, component"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item_role.yml
new file mode 100644
index 000000000..71d86d747
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_inventory_item_role.yml
@@ -0,0 +1,98 @@
+---
+##
+##
+### NETBOX_INVENTORY_ITEM_ROLE
+##
+##
+- name: "INVENTORY_ITEM_ROLE 1: Necessary info creation"
+ netbox.netbox.netbox_inventory_item_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Inventory Item Role
+ color: FFFFFF
+ state: present
+ register: test_one
+
+- name: "INVENTORY_ITEM_ROLE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['inventory_item_role']['name'] == "Test Inventory Item Role"
+ - test_one['inventory_item_role']['slug'] == "test-inventory-item-role"
+ - test_one['inventory_item_role']['color'] == "ffffff"
+ - test_one['msg'] == "inventory_item_role Test Inventory Item Role created"
+
+- name: "INVENTORY_ITEM_ROLE 2: Create duplicate"
+ netbox.netbox.netbox_inventory_item_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Inventory Item Role
+ color: FFFFFF
+ state: present
+ register: test_two
+
+- name: "INVENTORY_ITEM_ROLE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['inventory_item_role']['name'] == "Test Inventory Item Role"
+ - test_two['inventory_item_role']['slug'] == "test-inventory-item-role"
+ - test_two['inventory_item_role']['color'] == "ffffff"
+ - test_two['msg'] == "inventory_item_role Test Inventory Item Role already exists"
+
+- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
+ netbox.netbox.netbox_inventory_item_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Inventory Item Role
+ color: "003EFF"
+ state: present
+ register: test_three
+
+- name: "INVENTORY_ITEM_ROLE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['color'] == "003eff"
+ - test_three['inventory_item_role']['name'] == "Test Inventory Item Role"
+ - test_three['inventory_item_role']['slug'] == "test-inventory-item-role"
+ - test_three['inventory_item_role']['color'] == "003eff"
+ - test_three['msg'] == "inventory_item_role Test Inventory Item Role updated"
+
+- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
+ netbox.netbox.netbox_inventory_item_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Inventory Item Role
+ state: absent
+ register: test_four
+
+- name: "INVENTORY_ITEM_ROLE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "inventory_item_role Test Inventory Item Role deleted"
+
+- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_inventory_item_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Inventory Item Role
+ state: absent
+ register: test_five
+
+- name: "INVENTORY_ITEM_ROLE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['inventory_item_role'] == None
+ - test_five['msg'] == "inventory_item_role Test Inventory Item Role already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ip_address.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ip_address.yml
new file mode 100644
index 000000000..8813f8dcb
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ip_address.yml
@@ -0,0 +1,352 @@
+---
+##
+##
+### NETBOX_IP_ADDRESS
+##
+##
+- name: "1 - Create IP address within NetBox with only required information - State: Present"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 192.168.1.10/30
+ state: present
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['msg'] == "ip_address 192.168.1.10/30 created"
+ - test_one['ip_address']['address'] == "192.168.1.10/30"
+
+- name: 2 - Update 192.168.1.10/30
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 192.168.1.10/30
+ description: Updated ip address
+ tags:
+ - Updated
+ state: present
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_two is changed
+ - test_two['diff']['after']['description'] == "Updated ip address"
+ - test_two['diff']['after']['tags'][0] == 10
+ - test_two['msg'] == "ip_address 192.168.1.10/30 updated"
+ - test_two['ip_address']['address'] == "192.168.1.10/30"
+ - test_two['ip_address']['tags'][0] == 10
+ - test_two['ip_address']['description'] == "Updated ip address"
+
+- name: "3 - Delete IP - 192.168.1.10 - State: Absent"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 192.168.1.10/30
+ state: absent
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "present"
+ - test_three['diff']['after']['state'] == "absent"
+ - test_three['msg'] == "ip_address 192.168.1.10/30 deleted"
+
+- name: "4 - Create IP in global VRF - 192.168.1.20/30 - State: Present"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 192.168.1.20/30
+ state: present
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['msg'] == "ip_address 192.168.1.20/30 created"
+ - test_four['ip_address']['address'] == "192.168.1.20/30"
+
+# Enforce uniqueness in NetBox 3.7
+# - name: "5 - Create IP in global VRF - 192.168.1.20/30 - State: New"
+# netbox.netbox.netbox_ip_address:
+# netbox_url: http://localhost:32768
+# netbox_token: 0123456789abcdef0123456789abcdef01234567
+# data:
+# address: 192.168.1.20/30
+# state: new
+# register: test_five
+
+# - name: "5 - ASSERT"
+# assert:
+# that:
+# - test_five is changed
+# - test_five['diff']['before']['state'] == "absent"
+# - test_five['diff']['after']['state'] == "present"
+# - test_five['msg'] == "ip_address 192.168.1.20/30 created"
+# - test_five['ip_address']['address'] == "192.168.1.20/30"
+
+- name: "6 - Create new address with only prefix specified - State: new"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 192.168.100.0/24
+ state: new
+ register: test_six
+
+- name: 6 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "absent"
+ - test_six['diff']['after']['state'] == "present"
+ - test_six['msg'] == "ip_address 192.168.100.1/24 created"
+ - test_six['ip_address']['address'] == "192.168.100.1/24"
+
+- name: 7 - Create IP address with several specified
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ address: 172.16.1.20/24
+ vrf: Test VRF
+ tenant: Test Tenant
+ status: Reserved
+ role: Loopback
+ description: Test description
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_seven
+
+- name: 7 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_seven is changed
+ - test_seven['diff']['before']['state'] == "absent"
+ - test_seven['diff']['after']['state'] == "present"
+ - test_seven['msg'] == "ip_address 172.16.1.20/24 created"
+ - test_seven['ip_address']['address'] == "172.16.1.20/24"
+ - test_seven['ip_address']['description'] == "Test description"
+ - test_seven['ip_address']['family'] == 4
+ - test_seven['ip_address']['role'] == "loopback"
+ - test_seven['ip_address']['status'] == "reserved"
+ - test_seven['ip_address']['tags'][0] == 4
+ - test_seven['ip_address']['tenant'] == 1
+ - test_seven['ip_address']['vrf'] == 1
+
+- name: 8 - Create IP address and assign a nat_inside IP
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ address: 10.10.1.30/16
+ vrf: Test VRF
+ nat_inside:
+ address: 172.16.1.20
+ vrf: Test VRF
+ register: test_eight
+
+- name: 8 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eight is changed
+ - test_eight['diff']['before']['state'] == "absent"
+ - test_eight['diff']['after']['state'] == "present"
+ - test_eight['msg'] == "ip_address 10.10.1.30/16 created"
+ - test_eight['ip_address']['address'] == "10.10.1.30/16"
+ - test_eight['ip_address']['family'] == 4
+ - test_eight['ip_address'].get('nat_inside')
+ - test_eight['ip_address']['vrf'] == 1
+
+- name: "9 - Create IP address on GigabitEthernet2 - test100 - State: present"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ address: 10.10.200.30/16
+ assigned_object:
+ name: GigabitEthernet2
+ device: test100
+ register: test_nine
+
+- name: 9 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_nine is changed
+ - test_nine['diff']['before']['state'] == "absent"
+ - test_nine['diff']['after']['state'] == "present"
+ - test_nine['msg'] == "ip_address 10.10.200.30/16 created"
+ - test_nine['ip_address']['address'] == "10.10.200.30/16"
+ - test_nine['ip_address']['family'] == 4
+ - test_nine['ip_address']['assigned_object_type'] == "dcim.interface"
+ - test_nine['ip_address']['assigned_object_id'] == 4
+
+- name: "10 - Create IP address on GigabitEthernet2 - test100 - State: new"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ prefix: 10.10.0.0/16
+ assigned_object:
+ name: GigabitEthernet2
+ device: test100
+ state: new
+ register: test_ten
+
+- name: 10 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_ten is changed
+ - test_ten['diff']['before']['state'] == "absent"
+ - test_ten['diff']['after']['state'] == "present"
+ - test_ten['msg'] == "ip_address 10.10.0.1/16 created"
+ - test_ten['ip_address']['address'] == "10.10.0.1/16"
+ - test_ten['ip_address']['family'] == 4
+ - test_ten['ip_address']['assigned_object_type'] == "dcim.interface"
+ - test_ten['ip_address']['assigned_object_id'] == 4
+
+- name: "11 - Create IP address on GigabitEthernet2 - test100 - State: present"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ prefix: 192.168.100.0/24
+ assigned_object:
+ name: GigabitEthernet2
+ device: test100
+ state: present
+ register: test_eleven
+
+- name: 11 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eleven is changed
+ - test_eleven['diff']['before']['state'] == "absent"
+ - test_eleven['diff']['after']['state'] == "present"
+ - test_eleven['msg'] == "ip_address 192.168.100.2/24 created"
+ - test_eleven['ip_address']['address'] == "192.168.100.2/24"
+
+- name: 12 - Duplicate - 192.168.100.2/24 on interface
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 192.168.100.2/24
+ assigned_object:
+ name: GigabitEthernet2
+ device: test100
+ state: present
+ register: test_twelve
+
+- name: 12 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_twelve['changed']
+ - test_twelve['msg'] == "ip_address 192.168.100.2/24 already exists"
+ - test_twelve['ip_address']['address'] == "192.168.100.2/24"
+ - test_twelve['ip_address']['assigned_object_type'] == "dcim.interface"
+ - test_twelve['ip_address']['assigned_object_id'] == 4
+
+- name: 13 - Duplicate - 192.168.100.2/24
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 192.168.100.2/24
+ state: present
+ register: test_thirteen
+
+- name: 13 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_thirteen['changed']
+ - test_thirteen['msg'] == "ip_address 192.168.100.2/24 already exists"
+ - test_thirteen['ip_address']['address'] == "192.168.100.2/24"
+
+- name: "14 - Create IP address on Eth0 - test100-vm - State: present"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ address: 10.188.1.100/24
+ assigned_object:
+ name: Eth0
+ virtual_machine: test100-vm
+ register: test_fourteen
+
+- name: 14 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_fourteen is changed
+ - test_fourteen['diff']['before']['state'] == "absent"
+ - test_fourteen['diff']['after']['state'] == "present"
+ - test_fourteen['msg'] == "ip_address 10.188.1.100/24 created"
+ - test_fourteen['ip_address']['address'] == "10.188.1.100/24"
+ - test_fourteen['ip_address']['family'] == 4
+ - test_fourteen['ip_address']['assigned_object_type'] == "virtualization.vminterface"
+ - test_fourteen['ip_address']['assigned_object_id'] == 1
+
+# Enforce uniqueness in NetBox 3.7
+# - name: "15 - Create same IP address on Eth0 - test101-vm - State: present"
+# netbox.netbox.netbox_ip_address:
+# netbox_url: http://localhost:32768
+# netbox_token: 0123456789abcdef0123456789abcdef01234567
+# data:
+# family: 4
+# address: 10.188.1.100/24
+# assigned_object:
+# name: Eth0
+# virtual_machine: test101-vm
+# state: "present"
+# register: test_fifteen
+
+# - name: "15 - ASSERT"
+# assert:
+# that:
+# - test_fifteen is changed
+# - test_fifteen['diff']['before']['state'] == "absent"
+# - test_fifteen['diff']['after']['state'] == "present"
+# - test_fifteen['msg'] == "ip_address 10.188.1.100/24 created"
+# - test_fifteen['ip_address']['address'] == "10.188.1.100/24"
+# - test_fifteen['ip_address']['family'] == 4
+# - test_fifteen['ip_address']['assigned_object_type'] == "virtualization.vminterface"
+# - test_fifteen['ip_address']['assigned_object_id'] == 6
+
+- name: "16 - Create IP address with no mask - State: Present"
+ netbox.netbox.netbox_ip_address:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ address: 10.120.10.1
+ state: present
+ register: test_sixteen
+
+- name: 16 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_sixteen is changed
+ - test_sixteen['diff']['before']['state'] == "absent"
+ - test_sixteen['diff']['after']['state'] == "present"
+ - test_sixteen['msg'] == "ip_address 10.120.10.1/32 created"
+ - test_sixteen['ip_address']['address'] == "10.120.10.1/32"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ipam_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ipam_role.yml
new file mode 100644
index 000000000..9f376c2ed
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_ipam_role.yml
@@ -0,0 +1,94 @@
+---
+##
+##
+### NETBOX_IPAM_ROLE
+##
+##
+- name: "IPAM_ROLE 1: Necessary info creation"
+ netbox.netbox.netbox_ipam_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test IPAM Role
+ state: present
+ register: test_one
+
+- name: "IPAM_ROLE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['role']['name'] == "Test IPAM Role"
+ - test_one['role']['slug'] == "test-ipam-role"
+ - test_one['msg'] == "role Test IPAM Role created"
+
+- name: "IPAM_ROLE 2: Create duplicate"
+ netbox.netbox.netbox_ipam_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test IPAM Role
+ state: present
+ register: test_two
+
+- name: "IPAM_ROLE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['role']['name'] == "Test IPAM Role"
+ - test_two['role']['slug'] == "test-ipam-role"
+ - test_two['msg'] == "role Test IPAM Role already exists"
+
+- name: "IPAM_ROLE 3: ASSERT - Update"
+ netbox.netbox.netbox_ipam_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test IPAM Role
+ weight: 4096
+ state: present
+ register: test_three
+
+- name: "IPAM_ROLE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['weight'] == 4096
+ - test_three['role']['name'] == "Test IPAM Role"
+ - test_three['role']['slug'] == "test-ipam-role"
+ - test_three['role']['weight'] == 4096
+ - test_three['msg'] == "role Test IPAM Role updated"
+
+- name: "IPAM_ROLE 4: ASSERT - Delete"
+ netbox.netbox.netbox_ipam_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test IPAM Role
+ state: absent
+ register: test_four
+
+- name: "IPAM_ROLE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "role Test IPAM Role deleted"
+
+- name: "IPAM_ROLE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_ipam_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test IPAM Role
+ state: absent
+ register: test_five
+
+- name: "IPAM_ROLE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['role'] == None
+ - test_five['msg'] == "role Test IPAM Role already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_journal_entry.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_journal_entry.yml
new file mode 100644
index 000000000..7eead8db1
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_journal_entry.yml
@@ -0,0 +1,26 @@
+---
+##
+##
+### NETBOX_JOURNAL_ENTRY
+##
+##
+- name: "JOURNAL ENTRY 1: Creation"
+ netbox.netbox.netbox_journal_entry:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ assigned_object_type: dcim.device
+ assigned_object_id: 1
+ comments: |
+ Comment on device
+ state: new
+ register: test_one
+
+- name: "JOURNAL_ENTRY 1: ASSERT - Creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['journal_entry']['kind'] == "info"
+ - test_one['msg'] == "journal_entry created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn.yml
new file mode 100644
index 000000000..655be9455
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn.yml
@@ -0,0 +1,99 @@
+---
+##
+##
+### NETBOX_L2VPN
+##
+##
+- name: "L2VPN 1: Necessary info creation"
+ netbox.netbox.netbox_l2vpn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test L2VPN
+ type: vxlan
+ state: present
+ register: test_one
+
+- name: "L2VPN 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['l2vpn']['name'] == "Test L2VPN"
+ - test_one['l2vpn']['type'] == "vxlan"
+ - test_one['msg'] == "l2vpn Test L2VPN created"
+
+- name: "L2VPN 2: Create duplicate"
+ netbox.netbox.netbox_l2vpn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test L2VPN
+ type: vxlan
+ state: present
+ register: test_two
+
+- name: "L2VPN 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['l2vpn']['name'] == "Test L2VPN"
+ - test_two['l2vpn']['type'] == "vxlan"
+ - test_two['msg'] == "l2vpn Test L2VPN already exists"
+
+- name: "L2VPN 4: ASSERT - Update"
+ netbox.netbox.netbox_l2vpn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test L2VPN
+ type: vxlan
+ tenant: Test Tenant
+ description: Updated description
+ import_targets:
+ - 4000:4000
+ - 5000:5000
+ export_targets:
+ - 6000:6000
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_four
+
+- name: "L2VPN: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['description'] == "Updated description"
+ - test_four['diff']['after']['import_targets'] == [1, 2]
+ - test_four['diff']['after']['export_targets'] == [3]
+ - test_four['diff']['after']['tags'][0] == 4
+ - test_four['l2vpn']['name'] == "Test L2VPN"
+ - test_four['l2vpn']['tenant'] == 1
+ - test_four['l2vpn']['import_targets'] == [1, 2]
+ - test_four['l2vpn']['export_targets'] == [3]
+ - test_four['l2vpn']['description'] == "Updated description"
+ - test_four['l2vpn']['tags'][0] == 4
+ - test_four['msg'] == "l2vpn Test L2VPN updated"
+
+- name: "L2VPN: ASSERT - Delete"
+ netbox.netbox.netbox_l2vpn:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test L2VPN
+ type: vxlan
+ state: absent
+ register: test_six
+
+- name: "L2VPN 6: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['l2vpn']['name'] == "Test L2VPN"
+ - test_six['l2vpn']['tenant'] == 1
+ - test_six['l2vpn']['type'] == "vxlan"
+ - test_six['l2vpn']['description'] == "Updated description"
+ - test_six['l2vpn']['tags'][0] == 4
+ - test_six['msg'] == "l2vpn Test L2VPN deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn_termination.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn_termination.yml
new file mode 100644
index 000000000..0dd539943
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_l2vpn_termination.yml
@@ -0,0 +1,94 @@
+---
+##
+##
+### NETBOX_L2VPN_TERMINATION
+##
+##
+- name: "L2VPN_TERMINATION 1: Necessary info creation"
+ netbox.netbox.netbox_l2vpn_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ l2vpn: 1
+ assigned_object_type: dcim.interface
+ assigned_object_id: 1
+ state: present
+ register: test_one
+
+- name: "L2VPN_TERMINATION 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['l2vpn_termination']['l2vpn'] == 1
+ - test_one['l2vpn_termination']['assigned_object_type'] == "dcim.interface"
+ - test_one['l2vpn_termination']['assigned_object_id'] == 1
+ - test_one['msg'] == "l2vpn_termination l2vpn 1 <> dcim.interface 1 created"
+
+- name: "L2VPN_TERMINATION 2: Create duplicate"
+ netbox.netbox.netbox_l2vpn_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ l2vpn: 1
+ assigned_object_type: dcim.interface
+ assigned_object_id: 1
+ state: present
+ register: test_two
+
+- name: "L2VPN_TERMINATION 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['l2vpn_termination']['l2vpn'] == 1
+ - test_two['l2vpn_termination']['assigned_object_type'] == "dcim.interface"
+ - test_two['l2vpn_termination']['assigned_object_id'] == 1
+ - test_two['msg'] == "l2vpn_termination l2vpn 1 <> dcim.interface 1 already exists"
+
+- name: "L2VPN_TERMINATION 3: Update"
+ netbox.netbox.netbox_l2vpn_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ l2vpn: 1
+ assigned_object_type: dcim.interface
+ assigned_object_id: 1
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "L2VPN_TERMINATION 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['l2vpn_termination']['l2vpn'] == 1
+ - test_three['l2vpn_termination']['assigned_object_type'] == "dcim.interface"
+ - test_three['l2vpn_termination']['assigned_object_id'] == 1
+ - test_three['l2vpn_termination']['tags'][0] == 4
+ - test_three['msg'] == "l2vpn_termination l2vpn 1 <> dcim.interface 1 updated"
+
+- name: "L2VPN_TERMINATION 4: Delete"
+ netbox.netbox.netbox_l2vpn_termination:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ l2vpn: 1
+ assigned_object_type: dcim.interface
+ assigned_object_id: 1
+ state: absent
+ register: test_four
+
+- name: "L2VPN_TERMINATION 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['l2vpn_termination']['l2vpn'] == 1
+ - test_four['l2vpn_termination']['assigned_object_type'] == "dcim.interface"
+ - test_four['l2vpn_termination']['assigned_object_id'] == 1
+ - test_four['l2vpn_termination']['tags'][0] == 4
+ - test_four['msg'] == "l2vpn_termination l2vpn 1 <> dcim.interface 1 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_location.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_location.yml
new file mode 100644
index 000000000..2e37c7682
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_location.yml
@@ -0,0 +1,85 @@
+---
+##
+##
+### NETBOX_LOCATION
+##
+##
+- name: "LOCATION 1: Necessary info creation"
+ netbox.netbox.netbox_location:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Location
+ site: Test Site
+ state: present
+ register: test_one
+
+- name: "LOCATION 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['location']['name'] == "Location"
+ - test_one['location']['slug'] == "location"
+ - test_one['location']['site'] == 1
+ - test_one['msg'] == "location Location created"
+
+- name: "LOCATION 2: Create duplicate"
+ netbox.netbox.netbox_location:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Location
+ site: Test Site
+ state: present
+ register: test_two
+
+- name: "LOCATION 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['location']['name'] == "Location"
+ - test_two['location']['slug'] == "location"
+ - test_two['location']['site'] == 1
+ - test_two['msg'] == "location Location already exists"
+
+- name: "LOCATION 3: Update"
+ netbox.netbox.netbox_location:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Location
+ parent_location: Parent Rack Group
+ description: This is a location
+ state: present
+ register: test_three
+
+- name: "LOCATION 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['parent'] == 2
+ - test_three['diff']['after']['description'] == "This is a location"
+ - test_three['location']['name'] == "Location"
+ - test_three['location']['slug'] == "location"
+ - test_three['location']['parent'] == 2
+ - test_three['location']['description'] == "This is a location"
+ - test_three['msg'] == "location Location updated"
+
+- name: "LOCATION 4: Delete"
+ netbox.netbox.netbox_location:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Location
+ state: absent
+ register: test_four
+
+- name: "LOCATION 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "location Location deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_lookup.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_lookup.yml
new file mode 100644
index 000000000..3d54e02b1
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_lookup.yml
@@ -0,0 +1,90 @@
+---
+##
+##
+### NETBOX_LOOKUP
+##
+##
+- name: "NETBOX_LOOKUP 1: Lookup returns exactly two sites"
+ ansible.builtin.assert:
+ that: query_result == "3"
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'sites', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | count }}"
+
+- name: "NETBOX_LOOKUP 2: Query doesn't return Wibble (sanity check json_query)"
+ ansible.builtin.assert:
+ that: query_result == "0"
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`Wibble`]')
+ | count }}"
+
+- name: "NETBOX_LOOKUP 3: Device query returns exactly one TestDeviceR1"
+ ansible.builtin.assert:
+ that: query_result == "1"
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.display==`TestDeviceR1`]')
+ | count }}"
+
+- name: "NETBOX_LOOKUP 4: VLAN ID 400 can be queried and is named 'Test VLAN'"
+ ansible.builtin.assert:
+ that: query_result == 'Test VLAN'
+ vars:
+ query_result: "{{ (query('netbox.netbox.nb_lookup', 'vlans', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567') | community.general.json_query('[?value.vid==`400`].value.name'))[0]
+ }}"
+
+- name: "NETBOX_LOOKUP 5: Add one of two devices for lookup filter test."
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: L1
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ tags:
+ - nolookup
+ state: present
+
+- name: "NETBOX_LOOKUP 6: Add two of two devices for lookup filter test."
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: L2
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site2
+ status: Staged
+ tags:
+ - lookup
+ state: present
+
+- name: "NETBOX_LOOKUP 7: Device query returns exactly the L2 device"
+ ansible.builtin.assert:
+ that: query_result == "1"
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ | community.general.json_query('[?value.display==`L2`]') | count }}"
+
+- name: "NETBOX_LOOKUP 8: Device query specifying raw data returns payload without key/value dict"
+ ansible.builtin.assert:
+ that: query_result == "1"
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch tag=lookup', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567',
+ raw_data=True) | community.general.json_query('[?display==`L2`]') | count }}"
+
+- name: "NETBOX_LOOKUP 9: Device query specifying multiple sites, Make sure L1 and L2 are in the results"
+ ansible.builtin.assert:
+ that:
+ - "'L1' in query_result"
+ - "'L2' in query_result"
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='role=core-switch site=test-site site=test-site2', api_endpoint='http://localhost:32768',
+ token='0123456789abcdef0123456789abcdef01234567', raw_data=True) | community.general.json_query('[*].display') }}"
+
+- name: "NETBOX_LOOKUP 10: Device query by ID"
+ ansible.builtin.assert:
+ that: query_result
+ vars:
+ query_result: "{{ query('netbox.netbox.nb_lookup', 'devices', api_filter='id=1', api_endpoint='http://localhost:32768', token='0123456789abcdef0123456789abcdef01234567')
+ }}"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_manufacturer.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_manufacturer.yml
new file mode 100644
index 000000000..39804659b
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_manufacturer.yml
@@ -0,0 +1,91 @@
+---
+##
+##
+### NETBOX_MANUFACTURER
+##
+##
+- name: "MANUFACTURER 1: Necessary info creation"
+ netbox.netbox.netbox_manufacturer:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Manufacturer Two
+ state: present
+ register: test_one
+
+- name: "MANUFACTURER 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['manufacturer']['name'] == "Test Manufacturer Two"
+ - test_one['manufacturer']['slug'] == "test-manufacturer-two"
+ - test_one['msg'] == "manufacturer Test Manufacturer Two created"
+
+- name: "MANUFACTURER 2: Create duplicate"
+ netbox.netbox.netbox_manufacturer:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Manufacturer Two
+ state: present
+ register: test_two
+
+- name: "MANUFACTURER 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['manufacturer']['name'] == "Test Manufacturer Two"
+ - test_two['manufacturer']['slug'] == "test-manufacturer-two"
+ - test_two['msg'] == "manufacturer Test Manufacturer Two already exists"
+
+- name: "MANUFACTURER 3: Update"
+ netbox.netbox.netbox_manufacturer:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test manufacturer two
+ state: present
+ register: test_three
+
+- name: "MANUFACTURER 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three['changed']
+ - test_three['manufacturer']['name'] == "test manufacturer two"
+ - test_three['manufacturer']['slug'] == "test-manufacturer-two"
+ - test_three['msg'] == "manufacturer test manufacturer two updated"
+
+- name: "MANUFACTURER 4: ASSERT - Delete"
+ netbox.netbox.netbox_manufacturer:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: test manufacturer two
+ state: absent
+ register: test_four
+
+- name: "MANUFACTURER 3: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "manufacturer test manufacturer two deleted"
+
+- name: "MANUFACTURER 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_manufacturer:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Manufacturer Two
+ state: absent
+ register: test_five
+
+- name: "MANUFACTURER 5: ASSERT - Delete non existing"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['manufacturer'] == None
+ - test_five['msg'] == "manufacturer Test Manufacturer Two already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module.yml
new file mode 100644
index 000000000..0a18f3894
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module.yml
@@ -0,0 +1,107 @@
+---
+##
+##
+### NETBOX_MODULE
+##
+##
+- name: "MODULE 1: Necessary info creation"
+ netbox.netbox.netbox_module:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ module_bay: Network Module
+ module_type: C9300-NM-8X
+ state: present
+ register: test_one
+
+- name: "MODULE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['module']['device'] == "C9300-DEMO"
+ - test_one['module']['module_bay'] == "Network Module"
+ - test_one['module']['module_type'] == "C9300-NM-8X"
+ - test_one['msg'] == "module ws-test-3750 created"
+
+- name: "MODULE 2: Create duplicate"
+ netbox.netbox.netbox_module:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ module_bay: Network Module
+ module_type: C9300-NM-8X
+ state: present
+ register: test_two
+
+- name: "MODULE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['module']['device'] == "C9300-DEMO"
+ - test_two['module']['module_bay'] == "Network Module"
+ - test_two['module']['module_type'] == "C9300-NM-8X"
+ - test_two['msg'] == "module C9300-NM-8X already exists in slot Network Module of C9300-DEMO"
+
+- name: "MODULE 3: ASSERT - Update"
+ netbox.netbox.netbox_module:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ module_bay: Network Module
+ module_type: C9300-NM-8X
+ serial: XXXNNNNXXXX
+ state: present
+ register: test_three
+
+- name: "MODULE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['serial'] == "XXXNNNNXXXX"
+ - test_three['module']['device'] == "C9300-DEMO"
+ - test_three['module']['module_bay'] == "Network Module"
+ - test_three['module']['module_type'] == "C9300-NM-8X"
+ - test_three['module']['serial'] == "XXXNNNNXXXX"
+ - test_three['msg'] == "module C9300-DEMO - Network Module - C9300-NM-8X updated"
+
+- name: "MODULE 4: ASSERT - Delete"
+ netbox.netbox.netbox_module:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ module_bay: Network Module
+ module_type: C9300-NM-8X
+ state: absent
+ register: test_four
+
+- name: "MODULE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "module C9300-NM-8X deleted in slot Network Module of C9300-DEMO"
+
+- name: "MODULE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_module:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ module_bay: Network Module
+ module_type: C9300-NM-2Y
+ state: absent
+ register: test_five
+
+- name: "MODULE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['module'] == None
+ - test_five['msg'] == "module Test Module Type already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_bay.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_bay.yml
new file mode 100644
index 000000000..843a4767e
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_bay.yml
@@ -0,0 +1,105 @@
+---
+##
+##
+### NETBOX_MODULE_BAY
+##
+##
+- name: "MODULE 1: Necessary info creation"
+ netbox.netbox.netbox_module_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ name: Network Module
+ position: 0
+ state: present
+ register: test_one
+
+- name: "MODULE BAY 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['module_bay']['device'] == "C9300-DEMO"
+ - test_one['module_bay']['name'] == "Network Module"
+ - test_one['module_bay']['position'] == "0"
+ - test_one['msg'] == "module_bay Network Module in C9300-DEMO position 0 created"
+
+- name: "MODULE BAY 2: Create duplicate"
+ netbox.netbox.netbox_module_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ name: Network Module
+ position: 0
+ state: present
+ register: test_two
+
+- name: "MODULE BAY 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['module_bay']['device'] == "C9300-DEMO"
+ - test_two['module_bay']['name'] == "Network Module"
+ - test_two['module_bay']['position'] == "0"
+ - test_two['msg'] == "module_bay Network Module in C9300-DEMO position 0 already exists"
+
+- name: "MODULE BAY 3: ASSERT - Update"
+ netbox.netbox.netbox_module_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ name: Network Module
+ position: 0
+ label: TEST
+ state: present
+ register: test_three
+
+- name: "MODULE BAY 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['label'] == "TEST"
+ - test_three['module_bay']['device'] == "C9300-DEMO"
+ - test_three['module_bay']['name'] == "Network Module"
+ - test_three['module_bay']['position'] == "0"
+ - test_three['module_bay']['label'] == "TEST"
+ - test_three['msg'] == "module_bay Network Module in C9300-DEMO position 0 updated with label TEST"
+
+- name: "MODULE BAY 4: ASSERT - Delete"
+ netbox.netbox.netbox_module_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ name: Network Module
+ state: absent
+ register: test_four
+
+- name: "MODULE BAY 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "module_bay Network Module in C9300-DEMO position 0 deleted"
+
+- name: "MODULE BAY 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_module_bay:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: C9300-DEMO
+ name: Network Module
+ state: absent
+ register: test_five
+
+- name: "MODULE BAY 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['module_bay'] == None
+ - test_five['msg'] == "module_bay Network Module already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_type.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_type.yml
new file mode 100644
index 000000000..8ab077803
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_module_type.yml
@@ -0,0 +1,97 @@
+---
+##
+##
+### NETBOX_MODULE_TYPE
+##
+##
+- name: "MODULE_TYPE 1: Necessary info creation"
+ netbox.netbox.netbox_module_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: ws-test-3750
+ manufacturer: Test Manufacturer
+ state: present
+ register: test_one
+
+- name: "MODULE_TYPE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['module_type']['model'] == "ws-test-3750"
+ - test_one['module_type']['manufacturer'] == 3
+ - test_one['msg'] == "module_type ws-test-3750 created"
+
+- name: "MODULE_TYPE 2: Create duplicate"
+ netbox.netbox.netbox_module_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: ws-test-3750
+ manufacturer: Test Manufacturer
+ state: present
+ register: test_two
+
+- name: "MODULE_TYPE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['module_type']['model'] == "ws-test-3750"
+ - test_two['module_type']['manufacturer'] == 3
+ - test_two['msg'] == "module_type ws-test-3750 already exists"
+
+- name: "MODULE_TYPE 3: ASSERT - Update"
+ netbox.netbox.netbox_module_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: ws-test-3750
+ manufacturer: Test Manufacturer
+ part_number: ws-3750g-v2
+ state: present
+ register: test_three
+
+- name: "MODULE_TYPE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['part_number'] == "ws-3750g-v2"
+ - test_three['module_type']['model'] == "ws-test-3750"
+ - test_three['module_type']['manufacturer'] == 3
+ - test_three['module_type']['part_number'] == "ws-3750g-v2"
+ - test_three['msg'] == "module_type ws-test-3750 updated"
+
+- name: "MODULE_TYPE 4: ASSERT - Delete"
+ netbox.netbox.netbox_module_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: ws-test-3750
+ state: absent
+ register: test_four
+
+- name: "MODULE_TYPE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "module_type ws-test-3750 deleted"
+
+- name: "MODULE_TYPE 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_module_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: Test Module Type
+ state: absent
+ register: test_five
+
+- name: "MODULE_TYPE 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['module_type'] == None
+ - test_five['msg'] == "module_type Test Module Type already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_platform.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_platform.yml
new file mode 100644
index 000000000..8a36ef2c1
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_platform.yml
@@ -0,0 +1,92 @@
+---
+##
+##
+### NETBOX_PLATFORM
+##
+##
+- name: "PLATFORM 1: Necessary info creation"
+ netbox.netbox.netbox_platform:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Platform
+ state: present
+ register: test_one
+
+- name: "PLATFORM 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['platform']['name'] == "Test Platform"
+ - test_one['platform']['slug'] == "test-platform"
+ - test_one['msg'] == "platform Test Platform created"
+
+- name: "PLATFORM 2: Create duplicate"
+ netbox.netbox.netbox_platform:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Platform
+ state: present
+ register: test_two
+
+- name: "PLATFORM 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['platform']['name'] == "Test Platform"
+ - test_two['platform']['slug'] == "test-platform"
+ - test_two['msg'] == "platform Test Platform already exists"
+
+- name: "PLATFORM 3: ASSERT - Update"
+ netbox.netbox.netbox_platform:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Platform
+ manufacturer: Test Manufacturer
+ state: present
+ register: test_three
+
+- name: "PLATFORM 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['manufacturer'] == 3
+ - test_three['platform']['manufacturer'] == 3
+ - test_three['msg'] == "platform Test Platform updated"
+
+- name: "PLATFORM 4: ASSERT - Delete"
+ netbox.netbox.netbox_platform:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Platform
+ state: absent
+ register: test_four
+
+- name: "PLATFORM 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "platform Test Platform deleted"
+
+- name: "PLATFORM 5: ASSERT - Delete non existing"
+ netbox.netbox.netbox_platform:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Platform
+ state: absent
+ register: test_five
+
+- name: "PLATFORM 5: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not test_five['changed']
+ - test_five['platform'] == None
+ - test_five['msg'] == "platform Test Platform already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_feed.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_feed.yml
new file mode 100644
index 000000000..f4ac71e01
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_feed.yml
@@ -0,0 +1,126 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_POWER_FEED
+##
+##
+- name: "POWER_FEED 1: Necessary info creation"
+ netbox.netbox.netbox_power_feed:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Feed
+ power_panel: Power Panel
+ state: present
+ register: test_one
+
+- name: "POWER_FEED 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['power_feed']['name'] == "Power Feed"
+ - test_one['power_feed']['power_panel'] == 1
+ - test_one['msg'] == "power_feed Power Feed created"
+
+- name: "POWER_FEED 2: Create duplicate"
+ netbox.netbox.netbox_power_feed:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Feed
+ power_panel: Power Panel
+ state: present
+ register: test_two
+
+- name: "POWER_FEED 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['power_feed']['name'] == "Power Feed"
+ - test_two['power_feed']['power_panel'] == 1
+ - test_two['msg'] == "power_feed Power Feed already exists"
+
+- name: "POWER_FEED 3: Update power_feed with other fields"
+ netbox.netbox.netbox_power_feed:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Feed
+ power_panel: Power Panel
+ status: offline
+ type: redundant
+ supply: dc
+ phase: three-phase
+ voltage: 400
+ amperage: 32
+ max_utilization: 25
+ comments: totally normal power feed
+ state: present
+ register: test_three
+
+- name: "POWER_FEED 3: ASSERT - Update power_feed with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['status'] == "offline"
+ - test_three['diff']['after']['type'] == "redundant"
+ - test_three['diff']['after']['supply'] == "dc"
+ - test_three['diff']['after']['phase'] == "three-phase"
+ - test_three['diff']['after']['voltage'] == 400
+ - test_three['diff']['after']['amperage'] == 32
+ - test_three['diff']['after']['max_utilization'] == 25
+ - test_three['diff']['after']['comments'] == "totally normal power feed"
+ - test_three['power_feed']['name'] == "Power Feed"
+ - test_three['power_feed']['power_panel'] == 1
+ - test_three['power_feed']['status'] == "offline"
+ - test_three['power_feed']['type'] == "redundant"
+ - test_three['power_feed']['supply'] == "dc"
+ - test_three['power_feed']['phase'] == "three-phase"
+ - test_three['power_feed']['voltage'] == 400
+ - test_three['power_feed']['amperage'] == 32
+ - test_three['power_feed']['max_utilization'] == 25
+ - test_three['power_feed']['comments'] == "totally normal power feed"
+ - test_three['msg'] == "power_feed Power Feed updated"
+
+- name: "POWER_FEED 4: Create Power Feed for Delete Test"
+ netbox.netbox.netbox_power_feed:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Feed 2
+ power_panel: Power Panel
+ state: present
+ register: test_four
+
+- name: "POWER_FEED 4: ASSERT - Create Power Feed for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['power_feed']['name'] == "Power Feed 2"
+ - test_four['power_feed']['power_panel'] == 1
+ - test_four['msg'] == "power_feed Power Feed 2 created"
+
+- name: "POWER_FEED 5: Delete Power Feed"
+ netbox.netbox.netbox_power_feed:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Feed 2
+ power_panel: Power Panel
+ state: absent
+ register: test_five
+
+- name: "POWER_FEED 5: ASSERT - Delete Power Feed"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "power_feed Power Feed 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet.yml
new file mode 100644
index 000000000..84f9d3255
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet.yml
@@ -0,0 +1,114 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_POWER_OUTLET
+##
+##
+- name: "POWER_OUTLET 1: Necessary info creation"
+ netbox.netbox.netbox_power_outlet:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet
+ device: Device Power Tests
+ state: present
+ register: test_one
+
+- name: "POWER_OUTLET 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['power_outlet']['name'] == "Power Outlet"
+ - test_one['power_outlet']['device'] == 10
+ - test_one['msg'] == "power_outlet Power Outlet created"
+
+- name: "POWER_OUTLET 2: Create duplicate"
+ netbox.netbox.netbox_power_outlet:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet
+ device: Device Power Tests
+ state: present
+ register: test_two
+
+- name: "POWER_OUTLET 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['power_outlet']['name'] == "Power Outlet"
+ - test_two['power_outlet']['device'] == 10
+ - test_two['msg'] == "power_outlet Power Outlet already exists"
+
+- name: "POWER_OUTLET 3: Update power_outlet with other fields"
+ netbox.netbox.netbox_power_outlet:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet
+ device: Device Power Tests
+ type: ita-e
+ power_port: Power Port
+ feed_leg: B
+ description: test description
+ state: present
+ register: test_three
+
+- name: "POWER_OUTLET 3: ASSERT - Update power_outlet with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "ita-e"
+ - test_three['diff']['after']['power_port'] == 1
+ - test_three['diff']['after']['feed_leg'] == "B"
+ - test_three['diff']['after']['description'] == "test description"
+ - test_three['power_outlet']['name'] == "Power Outlet"
+ - test_three['power_outlet']['device'] == 10
+ - test_three['power_outlet']['type'] == "ita-e"
+ - test_three['power_outlet']['power_port'] == 1
+ - test_three['power_outlet']['feed_leg'] == "B"
+ - test_three['power_outlet']['description'] == "test description"
+ - test_three['msg'] == "power_outlet Power Outlet updated"
+
+- name: "POWER_OUTLET 4: Create Power Outlet for Delete Test"
+ netbox.netbox.netbox_power_outlet:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet 2
+ device: Device Power Tests
+ state: present
+ register: test_four
+
+- name: "POWER_OUTLET 4: ASSERT - Create Power Outlet for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['power_outlet']['name'] == "Power Outlet 2"
+ - test_four['power_outlet']['device'] == 10
+ - test_four['msg'] == "power_outlet Power Outlet 2 created"
+
+- name: "POWER_OUTLET 5: Delete Power Outlet"
+ netbox.netbox.netbox_power_outlet:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet 2
+ device: Device Power Tests
+ state: absent
+ register: test_five
+
+- name: "POWER_OUTLET 5: ASSERT - Delete Power Outlet"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "power_outlet Power Outlet 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet_template.yml
new file mode 100644
index 000000000..14ff49d0c
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_outlet_template.yml
@@ -0,0 +1,111 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_POWER_OUTLET_TEMPLATE
+##
+##
+- name: "POWER_OUTLET_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_power_outlet_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet Template
+ device_type: Device Type Power Tests
+ state: present
+ register: test_one
+
+- name: "POWER_OUTLET_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['power_outlet_template']['name'] == "Power Outlet Template"
+ - test_one['power_outlet_template']['device_type'] == 8
+ - test_one['msg'] == "power_outlet_template Power Outlet Template created"
+
+- name: "POWER_OUTLET_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_power_outlet_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet Template
+ device_type: Device Type Power Tests
+ state: present
+ register: test_two
+
+- name: "POWER_OUTLET_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['power_outlet_template']['name'] == "Power Outlet Template"
+ - test_two['power_outlet_template']['device_type'] == 8
+ - test_two['msg'] == "power_outlet_template Power Outlet Template already exists"
+
+- name: "POWER_OUTLET_TEMPLATE 3: Update power_outlet_template with other fields"
+ netbox.netbox.netbox_power_outlet_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet Template
+ device_type: Device Type Power Tests
+ type: ita-e
+ power_port_template: Power Port Template
+ feed_leg: B
+ state: present
+ register: test_three
+
+- name: "POWER_OUTLET_TEMPLATE 3: ASSERT - Update power_outlet_template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "ita-e"
+ - test_three['diff']['after']['power_port'] == 1
+ - test_three['diff']['after']['feed_leg'] == "B"
+ - test_three['power_outlet_template']['name'] == "Power Outlet Template"
+ - test_three['power_outlet_template']['device_type'] == 8
+ - test_three['power_outlet_template']['type'] == "ita-e"
+ - test_three['power_outlet_template']['power_port'] == 1
+ - test_three['power_outlet_template']['feed_leg'] == "B"
+ - test_three['msg'] == "power_outlet_template Power Outlet Template updated"
+
+- name: "POWER_OUTLET_TEMPLATE 4: Create Power Outlet Template for Delete Test"
+ netbox.netbox.netbox_power_outlet_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet Template 2
+ device_type: Device Type Power Tests
+ state: present
+ register: test_four
+
+- name: "POWER_OUTLET_TEMPLATE 4: ASSERT - Create Power Outlet Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['power_outlet_template']['name'] == "Power Outlet Template 2"
+ - test_four['power_outlet_template']['device_type'] == 8
+ - test_four['msg'] == "power_outlet_template Power Outlet Template 2 created"
+
+- name: "POWER_OUTLET_TEMPLATE 5: Delete Power Outlet Template"
+ netbox.netbox.netbox_power_outlet_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Outlet Template 2
+ device_type: Device Type Power Tests
+ state: absent
+ register: test_five
+
+- name: "POWER_OUTLET_TEMPLATE 5: ASSERT - Delete Power Outlet Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "power_outlet_template Power Outlet Template 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_panel.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_panel.yml
new file mode 100644
index 000000000..e568a75f4
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_panel.yml
@@ -0,0 +1,105 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_POWER_PANEL
+##
+##
+- name: "POWER_PANEL 1: Necessary info creation"
+ netbox.netbox.netbox_power_panel:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Panel
+ site: Test Site
+ state: present
+ register: test_one
+
+- name: "POWER_PANEL 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['power_panel']['name'] == "Power Panel"
+ - test_one['power_panel']['site'] == 1
+ - test_one['msg'] == "power_panel Power Panel created"
+
+- name: "POWER_PANEL 2: Create duplicate"
+ netbox.netbox.netbox_power_panel:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Panel
+ site: Test Site
+ state: present
+ register: test_two
+
+- name: "POWER_PANEL 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['power_panel']['name'] == "Power Panel"
+ - test_two['power_panel']['site'] == 1
+ - test_two['msg'] == "power_panel Power Panel already exists"
+
+- name: "POWER_PANEL 3: Update power_panel with other fields"
+ netbox.netbox.netbox_power_panel:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Panel
+ site: Test Site
+ location: Test Rack Group
+ state: present
+ register: test_three
+
+- name: "POWER_PANEL 3: ASSERT - Update power_panel with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['location'] == 1
+ - test_three['power_panel']['name'] == "Power Panel"
+ - test_three['power_panel']['site'] == 1
+ - test_three['power_panel']['location'] == 1
+ - test_three['msg'] == "power_panel Power Panel updated"
+
+- name: "POWER_PANEL 4: Create Power Panel for Delete Test"
+ netbox.netbox.netbox_power_panel:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Panel 2
+ site: Test Site
+ state: present
+ register: test_four
+
+- name: "POWER_PANEL 4: ASSERT - Create Power Panel for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['power_panel']['name'] == "Power Panel 2"
+ - test_four['power_panel']['site'] == 1
+ - test_four['msg'] == "power_panel Power Panel 2 created"
+
+- name: "POWER_PANEL 5: Delete Power Panel"
+ netbox.netbox.netbox_power_panel:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Panel 2
+ site: Test Site
+ state: absent
+ register: test_five
+
+- name: "POWER_PANEL 5: ASSERT - Delete Power Panel"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "power_panel Power Panel 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port.yml
new file mode 100644
index 000000000..311820ccd
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port.yml
@@ -0,0 +1,125 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_POWER_PORT
+##
+##
+- name: "POWER_PORT 0: Create device for testing power ports"
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Device Power Tests
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ state: present
+
+- name: "POWER_PORT 1: Necessary info creation"
+ netbox.netbox.netbox_power_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port
+ device: Device Power Tests
+ state: present
+ register: test_one
+
+- name: "POWER_PORT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['power_port']['name'] == "Power Port"
+ - test_one['power_port']['device'] == 10
+ - test_one['msg'] == "power_port Power Port created"
+
+- name: "POWER_PORT 2: Create duplicate"
+ netbox.netbox.netbox_power_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port
+ device: Device Power Tests
+ state: present
+ register: test_two
+
+- name: "POWER_PORT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['power_port']['name'] == "Power Port"
+ - test_two['power_port']['device'] == 10
+ - test_two['msg'] == "power_port Power Port already exists"
+
+- name: "POWER_FEED 3: Update power_port with other fields"
+ netbox.netbox.netbox_power_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port
+ device: Device Power Tests
+ type: ita-e
+ allocated_draw: 10
+ maximum_draw: 20
+ description: test description
+ state: present
+ register: test_three
+
+- name: "POWER_FEED 3: ASSERT - Update power_port with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "ita-e"
+ - test_three['diff']['after']['allocated_draw'] == 10
+ - test_three['diff']['after']['maximum_draw'] == 20
+ - test_three['diff']['after']['description'] == "test description"
+ - test_three['power_port']['name'] == "Power Port"
+ - test_three['power_port']['device'] == 10
+ - test_three['power_port']['type'] == "ita-e"
+ - test_three['power_port']['allocated_draw'] == 10
+ - test_three['power_port']['maximum_draw'] == 20
+ - test_three['power_port']['description'] == "test description"
+ - test_three['msg'] == "power_port Power Port updated"
+
+- name: "POWER_PORT 4: Create Power Port for Delete Test"
+ netbox.netbox.netbox_power_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port 2
+ device: Device Power Tests
+ state: present
+ register: test_four
+
+- name: "POWER_PORT 4: ASSERT - Create Power Port for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['power_port']['name'] == "Power Port 2"
+ - test_four['power_port']['device'] == 10
+ - test_four['msg'] == "power_port Power Port 2 created"
+
+- name: "POWER_PORT 5: Delete Power Port"
+ netbox.netbox.netbox_power_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port 2
+ device: Device Power Tests
+ state: absent
+ register: test_five
+
+- name: "POWER_PORT 5: ASSERT - Delete Power Port"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "power_port Power Port 2 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port_template.yml
new file mode 100644
index 000000000..b413c29ce
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_power_port_template.yml
@@ -0,0 +1,212 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_POWER_PORT_TEMPLATE
+##
+##
+- name: "POWER_PORT_TEMPLATE 0.1: Create device type for testing power ports on device types"
+ netbox.netbox.netbox_device_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: Device Type Power Tests
+ manufacturer: Test Manufacturer
+ state: present
+
+- name: "POWER_PORT_TEMPLATE 0.2: Create module type for testing power ports on module types"
+ netbox.netbox.netbox_module_type:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ model: Module Type Power Tests
+ manufacturer: Test Manufacturer
+ state: present
+
+- name: "POWER_PORT_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port Template
+ device_type: Device Type Power Tests
+ state: present
+ register: test_one
+
+- name: "POWER_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['power_port_template']['name'] == "Power Port Template"
+ - test_one['power_port_template']['device_type'] == 8
+ - test_one['msg'] == "power_port_template Power Port Template created"
+
+- name: "POWER_PORT_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port Template
+ device_type: Device Type Power Tests
+ state: present
+ register: test_two
+
+- name: "POWER_PORT_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['power_port_template']['name'] == "Power Port Template"
+ - test_two['power_port_template']['device_type'] == 8
+ - test_two['msg'] == "power_port_template Power Port Template already exists"
+
+- name: "POWER_PORT_TEMPLATE 3: Update power_port_template with other fields"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port Template
+ device_type: Device Type Power Tests
+ type: ita-e
+ allocated_draw: 10
+ maximum_draw: 20
+ state: present
+ register: test_three
+
+- name: "POWER_PORT_TEMPLATE 3: ASSERT - Update power_port_template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['type'] == "ita-e"
+ - test_three['diff']['after']['allocated_draw'] == 10
+ - test_three['diff']['after']['maximum_draw'] == 20
+ - test_three['power_port_template']['name'] == "Power Port Template"
+ - test_three['power_port_template']['device_type'] == 8
+ - test_three['power_port_template']['type'] == "ita-e"
+ - test_three['power_port_template']['allocated_draw'] == 10
+ - test_three['power_port_template']['maximum_draw'] == 20
+ - test_three['msg'] == "power_port_template Power Port Template updated"
+
+- name: "POWER_PORT_TEMPLATE 4: Create Power Port Template for Delete Test"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port Template 2
+ device_type: Device Type Power Tests
+ state: present
+ register: test_four
+
+- name: "POWER_PORT_TEMPLATE 4: ASSERT - Create Power Port Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['power_port_template']['name'] == "Power Port Template 2"
+ - test_four['power_port_template']['device_type'] == 8
+ - test_four['msg'] == "power_port_template Power Port Template 2 created"
+
+- name: "POWER_PORT_TEMPLATE 5: Delete Power Port Template"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Power Port Template 2
+ device_type: Device Type Power Tests
+ state: absent
+ register: test_five
+
+- name: "POWER_PORT_TEMPLATE 5: ASSERT - Delete Power Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "power_port_template Power Port Template 2 deleted"
+
+- name: "POWER_PORT_TEMPLATE 6: Necessary info creation"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Module Power Port Template
+ module_type: Module Type Power Tests
+ state: present
+ register: test_six
+
+- name: "POWER_PORT_TEMPLATE 6: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "absent"
+ - test_six['diff']['after']['state'] == "present"
+ - test_six['power_port_template']['name'] == "Module Power Port Template"
+ - test_six['power_port_template']['module_type'] == 1
+ - test_six['msg'] == "power_port_template Module Power Port Template created"
+
+- name: "POWER_PORT_TEMPLATE 7: Create duplicate"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Module Power Port Template
+ module_type: Module Type Power Tests
+ state: present
+ register: test_seven
+
+- name: "POWER_PORT_TEMPLATE 7: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_seven['changed']
+ - test_seven['power_port_template']['name'] == "Module Power Port Template"
+ - test_seven['power_port_template']['module_type'] == 1
+ - test_seven['msg'] == "power_port_template Module Power Port Template already exists"
+
+- name: "POWER_PORT_TEMPLATE 8: Update power_port_template with other fields"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Module Power Port Template
+ module_type: Module Type Power Tests
+ type: ita-e
+ allocated_draw: 10
+ maximum_draw: 20
+ state: present
+ register: test_eight
+
+- name: "POWER_PORT_TEMPLATE 8: ASSERT - Update power_port_template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_eight is changed
+ - test_eight['diff']['after']['type'] == "ita-e"
+ - test_eight['diff']['after']['allocated_draw'] == 10
+ - test_eight['diff']['after']['maximum_draw'] == 20
+ - test_eight['power_port_template']['name'] == "Module Power Port Template"
+ - test_eight['power_port_template']['module_type'] == 1
+ - test_eight['power_port_template']['type'] == "ita-e"
+ - test_eight['power_port_template']['allocated_draw'] == 10
+ - test_eight['power_port_template']['maximum_draw'] == 20
+ - test_eight['msg'] == "power_port_template Module Power Port Template updated"
+
+- name: "POWER_PORT_TEMPLATE 9: Delete Power Port Template"
+ netbox.netbox.netbox_power_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Module Power Port Template
+ module_type: Module Type Power Tests
+ state: absent
+ register: test_nine
+
+- name: "POWER_PORT_TEMPLATE 9: ASSERT - Delete Power Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_nine is changed
+ - test_nine['diff']['before']['state'] == "present"
+ - test_nine['diff']['after']['state'] == "absent"
+ - test_nine['msg'] == "power_port_template Module Power Port Template deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_prefix.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_prefix.yml
new file mode 100644
index 000000000..64adae855
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_prefix.yml
@@ -0,0 +1,245 @@
+---
+##
+##
+### NETBOX_PREFIX
+##
+##
+- name: 1 - Create prefix within NetBox with only required information
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.156.0.0/19
+ state: present
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['msg'] == "prefix 10.156.0.0/19 created"
+ - test_one['prefix']['prefix'] == "10.156.0.0/19"
+
+- name: 2 - Duplicate
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.156.0.0/19
+ state: present
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['msg'] == "prefix 10.156.0.0/19 already exists"
+ - test_two['prefix']['prefix'] == "10.156.0.0/19"
+
+- name: 3 - Update 10.156.0.0/19
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.156.0.0/19
+ site: Test Site
+ status: Reserved
+ description: This prefix has been updated
+ state: present
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['site'] == 1
+ - test_three['diff']['after']['status'] == "reserved"
+ - test_three['diff']['after']['description'] == "This prefix has been updated"
+ - test_three['msg'] == "prefix 10.156.0.0/19 updated"
+ - test_three['prefix']['prefix'] == "10.156.0.0/19"
+ - test_three['prefix']['site'] == 1
+ - test_three['prefix']['status'] == "reserved"
+ - test_three['prefix']['description'] == "This prefix has been updated"
+
+- name: 4 - Delete prefix within netbox
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.156.0.0/19
+ state: absent
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "prefix 10.156.0.0/19 deleted"
+
+- name: 5 - Create prefix with several specified options
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ family: 4
+ prefix: 10.156.32.0/19
+ site: Test Site
+ vrf: Test VRF
+ tenant: Test Tenant
+ vlan:
+ name: Test VLAN
+ site: Test Site
+ tenant: Test Tenant
+ vlan_group: Test Vlan Group
+ status: Reserved
+ prefix_role: Network of care
+ description: Test description
+ is_pool: true
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['msg'] == "prefix 10.156.32.0/19 created"
+ - test_five['prefix']['prefix'] == "10.156.32.0/19"
+ - test_five['prefix']['family'] == 4
+ - test_five['prefix']['site'] == 1
+ - test_five['prefix']['vrf'] == 1
+ - test_five['prefix']['tenant'] == 1
+ - test_five['prefix']['vlan'] == 4
+ - test_five['prefix']['status'] == "reserved"
+ - test_five['prefix']['role'] == 1
+ - test_five['prefix']['description'] == "Test description"
+ - test_five['prefix']['is_pool'] == true
+ - test_five['prefix']['tags'][0] == 4
+
+- name: 6 - Get a new /24 inside 10.156.0.0/19 within NetBox - Parent doesn't exist
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ parent: 10.156.0.0/19
+ prefix_length: 24
+ state: present
+ first_available: true
+ register: test_six
+
+- name: 6 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_six['changed']
+ - test_six['msg'] == "Parent prefix does not exist - 10.156.0.0/19"
+
+- name: 7 - Create prefix within NetBox with only required information
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.156.0.0/19
+ state: present
+ register: test_seven
+
+- name: 7 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_seven is changed
+ - test_seven['diff']['before']['state'] == "absent"
+ - test_seven['diff']['after']['state'] == "present"
+ - test_seven['msg'] == "prefix 10.156.0.0/19 created"
+ - test_seven['prefix']['prefix'] == "10.156.0.0/19"
+
+- name: 8 - Get a new /24 inside 10.156.0.0/19 within NetBox
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ parent: 10.156.0.0/19
+ prefix_length: 24
+ state: present
+ first_available: true
+ register: test_eight
+
+- name: 8 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eight is changed
+ - test_eight['diff']['before']['state'] == "absent"
+ - test_eight['diff']['after']['state'] == "present"
+ - test_eight['msg'] == "prefix 10.156.0.0/24 created"
+ - test_eight['prefix']['prefix'] == "10.156.0.0/24"
+
+- name: 9 - Create 10.157.0.0/19
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ prefix: 10.157.0.0/19
+ vrf: Test VRF
+ site: Test Site
+ state: present
+ register: test_nine
+
+- name: 9 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_nine is changed
+ - test_nine['diff']['before']['state'] == "absent"
+ - test_nine['diff']['after']['state'] == "present"
+ - test_nine['msg'] == "prefix 10.157.0.0/19 created"
+ - test_nine['prefix']['prefix'] == "10.157.0.0/19"
+ - test_nine['prefix']['site'] == 1
+ - test_nine['prefix']['vrf'] == 1
+
+- name: 10 - Get a new /24 inside 10.157.0.0/19 within NetBox with additional values
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ parent: 10.157.0.0/19
+ prefix_length: 24
+ vrf: Test VRF
+ site: Test Site
+ state: present
+ first_available: true
+ register: test_ten
+
+- name: 10 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_ten is changed
+ - test_ten['diff']['before']['state'] == "absent"
+ - test_ten['diff']['after']['state'] == "present"
+ - test_ten['msg'] == "prefix 10.157.0.0/24 created"
+ - test_ten['prefix']['prefix'] == "10.157.0.0/24"
+ - test_ten['prefix']['site'] == 1
+ - test_ten['prefix']['vrf'] == 1
+
+- name: 11 - Get a new /24 inside 10.156.0.0/19 within NetBox
+ netbox.netbox.netbox_prefix:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ parent: 10.156.0.0/19
+ prefix_length: 24
+ state: present
+ first_available: true
+ register: test_eleven
+
+- name: 11 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eleven is changed
+ - test_eleven['diff']['before']['state'] == "absent"
+ - test_eleven['diff']['after']['state'] == "present"
+ - test_eleven['msg'] == "prefix 10.156.1.0/24 created"
+ - test_eleven['prefix']['prefix'] == "10.156.1.0/24"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider.yml
new file mode 100644
index 000000000..ec7b2c30a
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider.yml
@@ -0,0 +1,79 @@
+---
+##
+##
+### NETBOX_PROVIDER
+##
+##
+- name: "NETBOX_PROVIDER 1: Create provider within NetBox with only required information"
+ netbox.netbox.netbox_provider:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Provider One
+ state: present
+ register: test_one
+
+- name: "NETBOX_PROVIDER 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['provider']['name'] == "Test Provider One"
+ - test_one['provider']['slug'] == "test-provider-one"
+ - test_one['msg'] == "provider Test Provider One created"
+
+- name: "NETBOX_PROVIDER 2: Duplicate"
+ netbox.netbox.netbox_provider:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Provider One
+ state: present
+ register: test_two
+
+- name: "NETBOX_PROVIDER 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['provider']['name'] == "Test Provider One"
+ - test_two['provider']['slug'] == "test-provider-one"
+ - test_two['msg'] == "provider Test Provider One already exists"
+
+- name: "NETBOX_PROVIDER 3: Update provider with other fields"
+ netbox.netbox.netbox_provider:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Provider One
+ comments: BAD PROVIDER
+ state: present
+ register: test_three
+
+- name: "NETBOX_PROVIDER 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['comments'] == "BAD PROVIDER"
+ - test_three['provider']['name'] == "Test Provider One"
+ - test_three['provider']['slug'] == "test-provider-one"
+ - test_three['provider']['comments'] == "BAD PROVIDER"
+ - test_three['msg'] == "provider Test Provider One updated"
+
+- name: "NETBOX_PROVIDER 4: Delete provider within netbox"
+ netbox.netbox.netbox_provider:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Provider One
+ state: absent
+ register: test_four
+
+- name: "NETBOX_PROVIDER 4 : ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['provider']['name'] == "Test Provider One"
+ - test_four['provider']['slug'] == "test-provider-one"
+ - test_four['provider']['comments'] == "BAD PROVIDER"
+ - test_four['msg'] == "provider Test Provider One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider_network.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider_network.yml
new file mode 100644
index 000000000..6f8b2b331
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_provider_network.yml
@@ -0,0 +1,83 @@
+---
+##
+##
+### NETBOX_PROVIDER_NETWORK
+##
+##
+- name: "NETBOX_PROVIDER_NETWORK 1: Create provider network within NetBox with only required information"
+ netbox.netbox.netbox_provider_network:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ provider: Test Provider
+ name: Test Provider Network One
+ state: present
+ register: test_one
+
+- name: "NETBOX_PROVIDER_NETWORK 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['provider_network']['name'] == "Test Provider Network One"
+ - test_one['msg'] == "provider_network Test Provider Network One created"
+
+- name: "NETBOX_PROVIDER_NETWORK 2: Duplicate"
+ netbox.netbox.netbox_provider_network:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ provider: Test Provider
+ name: Test Provider Network One
+ state: present
+ register: test_two
+
+- name: "NETBOX_PROVIDER_NETWORK 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['provider_network']['name'] == "Test Provider Network One"
+ - test_two['msg'] == "provider_network Test Provider Network One already exists"
+
+- name: "NETBOX_PROVIDER_NETWORK 3: Update provider network with other fields"
+ netbox.netbox.netbox_provider_network:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ provider: Test Provider
+ name: Test Provider Network One
+ description: Describe a Provider Network
+ comments: A provider network
+ state: present
+ register: test_three
+
+- name: "NETBOX_PROVIDER_NETWORK 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['comments'] == "A provider network"
+ - test_three['diff']['after']['description'] == "Describe a Provider Network"
+ - test_three['provider_network']['name'] == "Test Provider Network One"
+ - test_three['provider_network']['comments'] == "A provider network"
+ - test_three['provider_network']['description'] == "Describe a Provider Network"
+ - test_three['msg'] == "provider_network Test Provider Network One updated"
+
+- name: "NETBOX_PROVIDER_NETWORK 4: Delete provider within netbox"
+ netbox.netbox.netbox_provider_network:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ provider: Test Provider
+ name: Test Provider Network One
+ state: absent
+ register: test_four
+
+- name: "NETBOX_PROVIDER_NETWORK 4 : ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['provider_network']['name'] == "Test Provider Network One"
+ - test_four['provider_network']['comments'] == "A provider network"
+ - test_four['provider_network']['description'] == "Describe a Provider Network"
+ - test_four['msg'] == "provider_network Test Provider Network One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack.yml
new file mode 100644
index 000000000..5dc05c3a1
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack.yml
@@ -0,0 +1,226 @@
+---
+##
+##
+### NETBOX_RACK
+##
+##
+- name: 1 - Test rack creation
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack one
+ site: Test Site
+ location: Test Rack Group
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['rack']['name'] == "Test rack one"
+ - test_one['rack']['site'] == 1
+
+- name: 2 - Test duplicate rack
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack one
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['rack']['name'] == "Test rack one"
+ - test_two['rack']['site'] == 1
+ - test_two['msg'] == "rack Test rack one already exists"
+
+- name: 3 - Create new rack with similar name
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack - Test Site
+ site: Test Site
+ state: present
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['rack']['name'] == "Test rack - Test Site"
+ - test_three['rack']['site'] == 1
+ - test_three['msg'] == "rack Test rack - Test Site created"
+
+- name: 4 - Attempt to create Test rack one again
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack one
+ site: Test Site
+ location: Test Rack Group
+ state: present
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_four['changed']
+ - test_four['rack']['name'] == "Test rack one"
+ - test_four['rack']['site'] == 1
+ - test_four['msg'] == "rack Test rack one already exists"
+
+- name: 5 - Update Test rack one with more options
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack one
+ site: Test Site
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
+ status: Available
+ serial: FXS10001
+ asset_tag: "1234"
+ width: 23
+ u_height: 48
+ type: 2-post frame
+ outer_width: 32
+ outer_depth: 24
+ outer_unit: Inches
+ comments: Just testing rack module
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['asset_tag'] == "1234"
+ - test_five['diff']['after']['comments'] == "Just testing rack module"
+ - test_five['diff']['after']['facility_id'] == "EQUI10291"
+ - test_five['diff']['after']['outer_depth'] == 24
+ - test_five['diff']['after']['outer_unit'] == "in"
+ - test_five['diff']['after']['outer_width'] == 32
+ - test_five['diff']['after']['role'] == 1
+ - test_five['diff']['after']['serial'] == "FXS10001"
+ - test_five['diff']['after']['status'] == "available"
+ - test_five['diff']['after']['tenant'] == 1
+ - test_five['diff']['after']['tags'][0] == 4
+ - test_five['diff']['after']['type'] == "2-post-frame"
+ - test_five['diff']['after']['u_height'] == 48
+ - test_five['diff']['after']['width'] == 23
+ - test_five['rack']['name'] == "Test rack one"
+ - test_five['rack']['site'] == 1
+ - test_five['rack']['asset_tag'] == "1234"
+ - test_five['rack']['comments'] == "Just testing rack module"
+ - test_five['rack']['facility_id'] == "EQUI10291"
+ - test_five['rack']['location'] == 1
+ - test_five['rack']['outer_depth'] == 24
+ - test_five['rack']['outer_unit'] == "in"
+ - test_five['rack']['outer_width'] == 32
+ - test_five['rack']['role'] == 1
+ - test_five['rack']['serial'] == "FXS10001"
+ - test_five['rack']['status'] == "available"
+ - test_five['rack']['tenant'] == 1
+ - test_five['rack']['tags'][0] == 4
+ - test_five['rack']['type'] == "2-post-frame"
+ - test_five['rack']['u_height'] == 48
+ - test_five['rack']['width'] == 23
+ - test_five['msg'] == "rack Test rack one updated"
+
+- name: 6 - Update Test rack one with same options
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack one
+ site: Test Site
+ rack_role: Test Rack Role
+ location: Test Rack Group
+ facility_id: EQUI10291
+ tenant: Test Tenant
+ status: Available
+ serial: FXS10001
+ asset_tag: "1234"
+ width: 23
+ u_height: 48
+ type: 2-post frame
+ outer_width: 32
+ outer_depth: 24
+ outer_unit: Inches
+ comments: Just testing rack module
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_six
+
+- name: 6 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_six is not changed
+ - test_six['rack']['name'] == "Test rack one"
+ - test_six['rack']['site'] == 1
+ - test_six['rack']['asset_tag'] == "1234"
+ - test_six['rack']['comments'] == "Just testing rack module"
+ - test_six['rack']['facility_id'] == "EQUI10291"
+ - test_six['rack']['location'] == 1
+ - test_six['rack']['outer_depth'] == 24
+ - test_six['rack']['outer_unit'] == "in"
+ - test_six['rack']['outer_width'] == 32
+ - test_six['rack']['role'] == 1
+ - test_six['rack']['serial'] == "FXS10001"
+ - test_six['rack']['status'] == "available"
+ - test_six['rack']['tenant'] == 1
+ - test_six['rack']['tags'][0] == 4
+ - test_six['rack']['type'] == "2-post-frame"
+ - test_six['rack']['u_height'] == 48
+ - test_six['rack']['width'] == 23
+
+- name: 7 - Create rack with same asset tag and serial number
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack two
+ site: Test Site
+ serial: FXS10001
+ asset_tag: "1234"
+ state: present
+ ignore_errors: true
+ register: test_seven
+
+- name: 7 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_seven is failed
+ - "'asset tag already exists' in test_seven['msg']"
+
+- name: 8 - Test delete
+ netbox.netbox.netbox_rack:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test rack one
+ state: absent
+ register: test_eight
+
+- name: 8 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_eight is changed
+ - test_eight['diff']['before']['state'] == "present"
+ - test_eight['diff']['after']['state'] == "absent"
+ - test_eight['msg'] == "rack Test rack one deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_group.yml
new file mode 100644
index 000000000..40dba8408
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_group.yml
@@ -0,0 +1,62 @@
+---
+##
+##
+### NETBOX_RACK_GROUP
+##
+##
+- name: "RACK_GROUP 1: Necessary info creation"
+ netbox.netbox.netbox_rack_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Group
+ site: Test Site
+ state: present
+ register: test_one
+
+- name: "RACK_GROUP 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['rack_group']['name'] == "Rack Group"
+ - test_one['rack_group']['slug'] == "rack-group"
+ - test_one['rack_group']['site'] == 1
+ - test_one['msg'] == "rack_group Rack Group created"
+
+- name: "RACK_GROUP 2: Create duplicate"
+ netbox.netbox.netbox_rack_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Group
+ site: Test Site
+ state: present
+ register: test_two
+
+- name: "RACK_GROUP 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['rack_group']['name'] == "Rack Group"
+ - test_two['rack_group']['slug'] == "rack-group"
+ - test_two['rack_group']['site'] == 1
+ - test_two['msg'] == "rack_group Rack Group already exists"
+
+- name: "RACK_GROUP 3: ASSERT - Delete"
+ netbox.netbox.netbox_rack_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Group
+ state: absent
+ register: test_three
+
+- name: "RACK_GROUP 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "present"
+ - test_three['diff']['after']['state'] == "absent"
+ - test_three['msg'] == "rack_group Rack Group deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_role.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_role.yml
new file mode 100644
index 000000000..ff0d6b4da
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rack_role.yml
@@ -0,0 +1,81 @@
+---
+##
+##
+### NETBOX_RACK_ROLE
+##
+##
+- name: "RACK_ROLE 1: Necessary info creation"
+ netbox.netbox.netbox_rack_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Role
+ color: ffffff
+ state: present
+ register: test_one
+
+- name: "RACK_ROLE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['rack_role']['name'] == "Rack Role"
+ - test_one['rack_role']['slug'] == "rack-role"
+ - test_one['rack_role']['color'] == "ffffff"
+ - test_one['msg'] == "rack_role Rack Role created"
+
+- name: "RACK_ROLE 2: Create duplicate"
+ netbox.netbox.netbox_rack_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Role
+ state: present
+ register: test_two
+
+- name: "RACK_ROLE 1: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['rack_role']['name'] == "Rack Role"
+ - test_two['rack_role']['slug'] == "rack-role"
+ - test_two['rack_role']['color'] == "ffffff"
+ - test_two['msg'] == "rack_role Rack Role already exists"
+
+- name: "RACK_ROLE 3: Update"
+ netbox.netbox.netbox_rack_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Role
+ color: "003EFF"
+ state: present
+ register: test_three
+
+- name: "RACK_ROLE 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['color'] == "003eff"
+ - test_three['rack_role']['name'] == "Rack Role"
+ - test_three['rack_role']['slug'] == "rack-role"
+ - test_three['rack_role']['color'] == "003eff"
+ - test_three['msg'] == "rack_role Rack Role updated"
+
+- name: "RACK_ROLE 4: Delete"
+ netbox.netbox.netbox_rack_role:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rack Role
+ state: absent
+ register: test_four
+
+- name: "RACK_ROLE 4: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "rack_role Rack Role deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port.yml
new file mode 100644
index 000000000..d186233be
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port.yml
@@ -0,0 +1,139 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_REAR_PORT
+##
+##
+- name: "REAR_PORT 1: Necessary info creation"
+ netbox.netbox.netbox_rear_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port
+ device: test100
+ type: bnc
+ state: present
+ register: test_one
+
+- name: "REAR_PORT 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['rear_port']['name'] == "Rear Port"
+ - test_one['rear_port']['device'] == 1
+ - test_one['rear_port']['type'] == "bnc"
+ - test_one['msg'] == "rear_port Rear Port created"
+
+- name: "REAR_PORT 2: Create duplicate"
+ netbox.netbox.netbox_rear_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port
+ device: test100
+ type: bnc
+ state: present
+ register: test_two
+
+- name: "REAR_PORT 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['rear_port']['name'] == "Rear Port"
+ - test_two['rear_port']['device'] == 1
+ - test_two['rear_port']['type'] == "bnc"
+ - test_two['msg'] == "rear_port Rear Port already exists"
+
+- name: "REAR_PORT 3: Update Rear Port with other fields"
+ netbox.netbox.netbox_rear_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port
+ device: test100
+ type: bnc
+ positions: 5
+ description: test description
+ state: present
+ register: test_three
+
+- name: "REAR_PORT 3: ASSERT - Update Rear Port with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['positions'] == 5
+ - test_three['diff']['after']['description'] == "test description"
+ - test_three['rear_port']['name'] == "Rear Port"
+ - test_three['rear_port']['device'] == 1
+ - test_three['rear_port']['type'] == "bnc"
+ - test_three['rear_port']['positions'] == 5
+ - test_three['rear_port']['description'] == "test description"
+ - test_three['msg'] == "rear_port Rear Port updated"
+
+- name: "REAR_PORT 4: Create Rear Port for Delete Test"
+ netbox.netbox.netbox_rear_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port 2
+ device: test100
+ type: bnc
+ state: present
+ register: test_four
+
+- name: "REAR_PORT 4: ASSERT - Create Rear Port for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['rear_port']['name'] == "Rear Port 2"
+ - test_four['rear_port']['device'] == 1
+ - test_four['rear_port']['type'] == "bnc"
+ - test_four['msg'] == "rear_port Rear Port 2 created"
+
+- name: "REAR_PORT 5: Delete Rear Port"
+ netbox.netbox.netbox_rear_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port 2
+ device: test100
+ type: bnc
+ state: absent
+ register: test_five
+
+- name: "REAR_PORT 5: ASSERT - Delete Rear Port"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "rear_port Rear Port 2 deleted"
+
+- name: "REAR_PORT 6: Create second Rear Port"
+ netbox.netbox.netbox_rear_port:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port
+ device: Test Nexus One
+ type: bnc
+ state: present
+ register: test_six
+
+- name: "REAR_PORT 6: ASSERT - Create second Rear Port"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "absent"
+ - test_six['diff']['after']['state'] == "present"
+ - test_six['rear_port']['name'] == "Rear Port"
+ - test_six['rear_port']['device'] == 4
+ - test_six['rear_port']['type'] == "bnc"
+ - test_six['msg'] == "rear_port Rear Port created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port_template.yml
new file mode 100644
index 000000000..5342627d0
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rear_port_template.yml
@@ -0,0 +1,136 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_REAR_PORT_TEMPLATE
+##
+##
+- name: "REAR_PORT_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_rear_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port Template
+ device_type: Cisco Test
+ type: bnc
+ state: present
+ register: test_one
+
+- name: "REAR_PORT_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['rear_port_template']['name'] == "Rear Port Template"
+ - test_one['rear_port_template']['device_type'] == 1
+ - test_one['rear_port_template']['type'] == "bnc"
+ - test_one['msg'] == "rear_port_template Rear Port Template created"
+
+- name: "REAR_PORT_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_rear_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port Template
+ device_type: Cisco Test
+ type: bnc
+ state: present
+ register: test_two
+
+- name: "REAR_PORT_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['rear_port_template']['name'] == "Rear Port Template"
+ - test_two['rear_port_template']['device_type'] == 1
+ - test_two['rear_port_template']['type'] == "bnc"
+ - test_two['msg'] == "rear_port_template Rear Port Template already exists"
+
+- name: "REAR_PORT_TEMPLATE 3: Update Rear Port Template with other fields"
+ netbox.netbox.netbox_rear_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port Template
+ device_type: Cisco Test
+ type: bnc
+ positions: 5
+ state: present
+ register: test_three
+
+- name: "REAR_PORT_TEMPLATE 3: ASSERT - Update Rear Port Template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['positions'] == 5
+ - test_three['rear_port_template']['name'] == "Rear Port Template"
+ - test_three['rear_port_template']['device_type'] == 1
+ - test_three['rear_port_template']['type'] == "bnc"
+ - test_three['rear_port_template']['positions'] == 5
+ - test_three['msg'] == "rear_port_template Rear Port Template updated"
+
+- name: "REAR_PORT_TEMPLATE 4: Create Rear Port Template for Delete Test"
+ netbox.netbox.netbox_rear_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port Template 2
+ device_type: Cisco Test
+ type: bnc
+ state: present
+ register: test_four
+
+- name: "REAR_PORT_TEMPLATE 4: ASSERT - Create Rear Port Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['rear_port_template']['name'] == "Rear Port Template 2"
+ - test_four['rear_port_template']['device_type'] == 1
+ - test_four['rear_port_template']['type'] == "bnc"
+ - test_four['msg'] == "rear_port_template Rear Port Template 2 created"
+
+- name: "REAR_PORT_TEMPLATE 5: Delete Rear Port Template"
+ netbox.netbox.netbox_rear_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port Template 2
+ device_type: Cisco Test
+ type: bnc
+ state: absent
+ register: test_five
+
+- name: "REAR_PORT_TEMPLATE 5: ASSERT - Delete Rear Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "rear_port_template Rear Port Template 2 deleted"
+
+- name: "REAR_PORT_TEMPLATE 6: Create second Rear Port Template"
+ netbox.netbox.netbox_rear_port_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Rear Port Template 2
+ device_type: Arista Test
+ type: bnc
+ state: present
+ register: test_six
+
+- name: "REAR_PORT_TEMPLATE 6: ASSERT - Create second Rear Port Template"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "absent"
+ - test_six['diff']['after']['state'] == "present"
+ - test_six['rear_port_template']['name'] == "Rear Port Template 2"
+ - test_six['rear_port_template']['device_type'] == 2
+ - test_six['rear_port_template']['type'] == "bnc"
+ - test_six['msg'] == "rear_port_template Rear Port Template 2 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_region.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_region.yml
new file mode 100644
index 000000000..0395db435
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_region.yml
@@ -0,0 +1,79 @@
+---
+##
+##
+### NETBOX_REGION
+##
+##
+- name: "REGION 1: Necessary info creation"
+ netbox.netbox.netbox_region:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Region One
+ state: present
+ register: test_one
+
+- name: "REGION 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['region']['name'] == "Test Region One"
+ - test_one['region']['slug'] == "test-region-one"
+ - test_one['msg'] == "region Test Region One created"
+
+- name: "REGION 2: Create duplicate"
+ netbox.netbox.netbox_region:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Region One
+ state: present
+ register: test_two
+
+- name: "REGION 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['region']['name'] == "Test Region One"
+ - test_two['region']['slug'] == "test-region-one"
+ - test_two['msg'] == "region Test Region One already exists"
+
+- name: "REGION 3: ASSERT - Update"
+ netbox.netbox.netbox_region:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Region One
+ parent_region: Test Region
+ state: present
+ register: test_three
+
+- name: "REGION 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['parent'] == 1
+ - test_three['region']['name'] == "Test Region One"
+ - test_three['region']['slug'] == "test-region-one"
+ - test_three['region']['parent'] == 1
+ - test_three['msg'] == "region Test Region One updated"
+
+- name: "REGION 4: ASSERT - Delete"
+ netbox.netbox.netbox_region:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Region One
+ state: absent
+ register: test_four
+
+- name: "REGION 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['region']['name'] == "Test Region One"
+ - test_four['region']['slug'] == "test-region-one"
+ - test_four['region']['parent'] == 1
+ - test_four['msg'] == "region Test Region One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rir.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rir.yml
new file mode 100644
index 000000000..3a7bf79fa
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_rir.yml
@@ -0,0 +1,79 @@
+---
+##
+##
+### NETBOX_RIR
+##
+##
+- name: "RIR 1: Necessary info creation"
+ netbox.netbox.netbox_rir:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test RIR One
+ state: present
+ register: test_one
+
+- name: "RIR 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['rir']['name'] == "Test RIR One"
+ - test_one['rir']['slug'] == "test-rir-one"
+ - test_one['msg'] == "rir Test RIR One created"
+
+- name: "RIR 2: Create duplicate"
+ netbox.netbox.netbox_rir:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test RIR One
+ state: present
+ register: test_two
+
+- name: "RIR 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['rir']['name'] == "Test RIR One"
+ - test_two['rir']['slug'] == "test-rir-one"
+ - test_two['msg'] == "rir Test RIR One already exists"
+
+- name: "RIR 3: ASSERT - Update"
+ netbox.netbox.netbox_rir:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test RIR One
+ is_private: true
+ state: present
+ register: test_three
+
+- name: "RIR 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['is_private'] == true
+ - test_three['rir']['name'] == "Test RIR One"
+ - test_three['rir']['slug'] == "test-rir-one"
+ - test_three['rir']['is_private'] == true
+ - test_three['msg'] == "rir Test RIR One updated"
+
+- name: "RIR 4: ASSERT - Delete"
+ netbox.netbox.netbox_rir:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test RIR One
+ state: absent
+ register: test_four
+
+- name: "RIR 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['rir']['name'] == "Test RIR One"
+ - test_four['rir']['slug'] == "test-rir-one"
+ - test_four['rir']['is_private'] == true
+ - test_four['msg'] == "rir Test RIR One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_route_target.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_route_target.yml
new file mode 100644
index 000000000..c2576c4cc
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_route_target.yml
@@ -0,0 +1,204 @@
+---
+#
+# ADD (CHECK MODE)
+- name: "NETBOX_ROUTE_TARGET_ADD: Check Mode - Add all fields except description"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ tenant: Test Tenant
+ tags:
+ - first
+ - second
+ check_mode: true
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Check Mode - Add all fields except description"
+ ansible.builtin.assert:
+ that:
+ - test_results is changed
+ - test_results['route_target']['name'] == "65000:65001"
+ - test_results['route_target']['tenant'] == 1
+ - test_results['route_target']['tags'] | length == 2
+ - test_results['diff']['after']['state'] == "present"
+ - test_results['diff']['before']['state'] == "absent"
+ - test_results['msg'] == "route_target 65000:65001 created"
+
+# ADD
+- name: "NETBOX_ROUTE_TARGET_ADD: Add all fields except description"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ tenant: Test Tenant
+ tags:
+ - first
+ - second
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_ADD: (ASSERT) Add all fields except description"
+ ansible.builtin.assert:
+ that:
+ - test_results is changed
+ - test_results['route_target']['name'] == "65000:65001"
+ - test_results['route_target']['tenant'] == 1
+ - test_results['route_target']['tags'] | length == 2
+ - test_results['diff']['after']['state'] == "present"
+ - test_results['diff']['before']['state'] == "absent"
+ - test_results['msg'] == "route_target 65000:65001 created"
+
+# ADD (IDEMPOTENT)
+- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ tenant: Test Tenant
+ tags:
+ - first
+ - second
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_ADD_IDEM: (IDEMPOTENT) Add all fields except description"
+ ansible.builtin.assert:
+ that:
+ - test_results is not changed
+ - test_results['route_target']['name'] == "65000:65001"
+ - test_results['route_target']['tenant'] == 1
+ - test_results['route_target']['tags'] | length == 2
+ - test_results['msg'] == "route_target 65000:65001 already exists"
+
+#
+# UPDATE (CHECK MODE)
+- name: "NETBOX_ROUTE_TARGET_UPDATE: Check Mode - Update description"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ description: NEW DESCRIPTION
+ tenant: Test Tenant
+ tags:
+ - first
+ - second
+ check_mode: true
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Check Mode - Update description"
+ ansible.builtin.assert:
+ that:
+ - test_results is changed
+ - test_results['route_target']['name'] == "65000:65001"
+ - test_results['route_target']['tenant'] == 1
+ - test_results['route_target']['tags'] | length == 2
+ - test_results['route_target']['description'] == "NEW DESCRIPTION"
+ - test_results['diff']['after']['description'] == "NEW DESCRIPTION"
+ - test_results['diff']['before']['description'] == ""
+ - test_results['msg'] == "route_target 65000:65001 updated"
+
+# UPDATE
+- name: "NETBOX_ROUTE_TARGET_UPDATE: Update description"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
+ tags:
+ - first
+ - second
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_UPDATE: (ASSERT) Update description"
+ ansible.builtin.assert:
+ that:
+ - test_results is changed
+ - test_results['route_target']['name'] == "65000:65001"
+ - test_results['route_target']['tenant'] == 1
+ - test_results['route_target']['tags'] | length == 2
+ - test_results['route_target']['description'] == "NEW DESCRIPTION"
+ - test_results['diff']['after']['description'] == "NEW DESCRIPTION"
+ - test_results['diff']['before']['description'] == ""
+ - test_results['msg'] == "route_target 65000:65001 updated"
+
+# UPDATE (IDEMPOTENT)
+- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ tenant: Test Tenant
+ description: NEW DESCRIPTION
+ tags:
+ - first
+ - second
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_UPDATE_IDEM: (IDEMPOTENT) Update description"
+ ansible.builtin.assert:
+ that:
+ - test_results is not changed
+ - test_results['route_target']['name'] == "65000:65001"
+ - test_results['route_target']['tenant'] == 1
+ - test_results['route_target']['tags'] | length == 2
+ - test_results['route_target']['description'] == "NEW DESCRIPTION"
+ - test_results['msg'] == "route_target 65000:65001 already exists"
+
+#
+# DELETE (CHECK MODE)
+- name: "NETBOX_ROUTE_TARGET_DELETE: Check Mode - Delete route target"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ state: absent
+ check_mode: true
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Check Mode - Delete route target"
+ ansible.builtin.assert:
+ that:
+ - test_results is changed
+ - test_results['diff']['after']['state'] == "absent"
+ - test_results['diff']['before']['state'] == "present"
+ - test_results['msg'] == "route_target 65000:65001 deleted"
+
+# DELETE
+- name: "NETBOX_ROUTE_TARGET_DELETE: Delete route target"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ state: absent
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_DELETE: (ASSERT) Delete route target"
+ ansible.builtin.assert:
+ that:
+ - test_results is changed
+ - test_results['diff']['after']['state'] == "absent"
+ - test_results['diff']['before']['state'] == "present"
+ - test_results['msg'] == "route_target 65000:65001 deleted"
+
+# DELETE (IDEMPOTENT)
+- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
+ netbox.netbox.netbox_route_target:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: 65000:65001
+ state: absent
+ register: test_results
+
+- name: "NETBOX_ROUTE_TARGET_DELETE_IDEM: (IDEMPOTENT) Delete route target"
+ ansible.builtin.assert:
+ that:
+ - test_results is not changed
+ - test_results['msg'] == "route_target 65000:65001 already absent"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service.yml
new file mode 100644
index 000000000..d937e50a6
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service.yml
@@ -0,0 +1,196 @@
+---
+##
+##
+### NETBOX_SERVICE
+##
+##
+- name: 1 - Create device with required information for service tests
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: FOR_SERVICE
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ status: Staged
+ state: present
+
+- name: "NETBOX_SERVICE: Create new service"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: FOR_SERVICE
+ name: node-exporter
+ port: 9100
+ protocol: TCP
+ state: present
+ register: test_service_create
+
+- name: NETBOX_SERVICE ASSERT - Create
+ ansible.builtin.assert:
+ that:
+ - test_service_create is changed
+ - test_service_create['services']['name'] == "node-exporter"
+ - test_service_create['services']['ports'] == [9100]
+ - test_service_create['services']['protocol'] == "tcp"
+ - test_service_create['diff']['after']['state'] == "present"
+ - test_service_create['diff']['before']['state'] == "absent"
+ - test_service_create['msg'] == "services node-exporter created"
+
+- name: "NETBOX_SERVICE: Test idempotence"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: FOR_SERVICE
+ name: node-exporter
+ port: 9100
+ protocol: TCP
+ state: present
+ register: test_service_idempotence
+
+- name: NETBOX_SERVICE ASSERT - Not changed
+ ansible.builtin.assert:
+ that:
+ - test_service_idempotence['services']['name'] == "node-exporter"
+ - test_service_idempotence['services']['ports'] == [9100]
+ - test_service_idempotence['services']['protocol'] == "tcp"
+ - test_service_idempotence['msg'] == "services node-exporter already exists"
+
+- name: "NETBOX_SERVICE: Test update"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: FOR_SERVICE
+ name: node-exporter
+ ports:
+ - 9100
+ - 9200
+ protocol: TCP
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_service_update
+
+- name: NETBOX_SERVICE ASSERT - Service has been updated
+ ansible.builtin.assert:
+ that:
+ - test_service_update is changed
+ - test_service_update['diff']['after']['tags'][0] == 4
+ - test_service_update['diff']['after']['ports'] == [9100, 9200]
+ - test_service_update['msg'] == "services node-exporter updated"
+
+- name: "NETBOX_SERVICE: Test same details, but different protocol - Create"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: FOR_SERVICE
+ name: node-exporter
+ port: 9100
+ protocol: UDP
+ state: present
+ register: test_service_protocol
+
+- name: NETBOX_SERVICE ASSERT - Different protocol - Create
+ ansible.builtin.assert:
+ that:
+ - test_service_protocol is changed
+ - test_service_protocol['diff']['after']['state'] == "present"
+ - test_service_protocol['diff']['before']['state'] == "absent"
+ - test_service_protocol['services']['name'] == "node-exporter"
+ - test_service_protocol['services']['ports'] == [9100]
+ - test_service_protocol['services']['protocol'] == "udp"
+ - test_service_protocol['msg'] == "services node-exporter created"
+
+- name: "NETBOX_SERVICE: Test service deletion"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: FOR_SERVICE
+ name: node-exporter
+ port: 9100
+ protocol: UDP
+ state: absent
+ register: test_service_delete
+
+- name: NETBOX_SERVICE ASSERT - Service has been deleted
+ ansible.builtin.assert:
+ that:
+ - test_service_delete is changed
+ - test_service_delete['diff']['after']['state'] == "absent"
+ - test_service_delete['diff']['before']['state'] == "present"
+ - test_service_delete['msg'] == "services node-exporter deleted"
+
+- name: "NETBOX_SERVICE: Test service IP addresses"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ device: test100
+ name: node-exporter
+ port: 9100
+ protocol: UDP
+ ipaddresses:
+ - address: 172.16.180.1/24
+ state: present
+ register: test_service_ip_addresses
+
+- name: NETBOX_SERVICE ASSERT - Service has been created with IP address
+ ansible.builtin.assert:
+ that:
+ - test_service_ip_addresses is changed
+ - test_service_ip_addresses['diff']['after']['state'] == "present"
+ - test_service_ip_addresses['diff']['before']['state'] == "absent"
+ - test_service_ip_addresses['services']['name'] == "node-exporter"
+ - test_service_ip_addresses['services']['ports'] == [9100]
+ - test_service_ip_addresses['services']['protocol'] == "udp"
+ - test_service_ip_addresses['services']['ipaddresses'] is defined
+ - test_service_ip_addresses['msg'] == "services node-exporter created"
+
+- name: "NETBOX_SERVICE: Missing both device & virtual_machine options - Tests required_one_of"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: node-exporter
+ port: 9100
+ protocol: UDP
+ ipaddresses:
+ - address: 172.16.180.1/24
+ state: present
+ ignore_errors: true
+ register: test_service_required_one_of
+
+- name: NETBOX_SERVICE ASSERT - Failed due to missing arguments
+ ansible.builtin.assert:
+ that:
+ - test_service_required_one_of is failed
+ - 'test_service_required_one_of["msg"] == "one of the following is required: device, virtual_machine"'
+
+- name: "NETBOX_SERVICE: Create new service on virtual_machine"
+ netbox.netbox.netbox_service:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: node-exporter
+ port: 9100
+ protocol: TCP
+ state: present
+ register: test_service_create_vm
+
+- name: NETBOX_SERVICE ASSERT - Create on virtual machine
+ ansible.builtin.assert:
+ that:
+ - test_service_create_vm is changed
+ - test_service_create_vm['services']['name'] == "node-exporter"
+ - test_service_create_vm['services']['ports'] == [9100]
+ - test_service_create_vm['services']['protocol'] == "tcp"
+ - test_service_create_vm['diff']['after']['state'] == "present"
+ - test_service_create_vm['diff']['before']['state'] == "absent"
+ - test_service_create_vm['msg'] == "services node-exporter created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service_template.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service_template.yml
new file mode 100644
index 000000000..507a566a4
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_service_template.yml
@@ -0,0 +1,115 @@
+---
+##
+##
+### NETBOX_SERVICE_TEMPLATE
+##
+##
+- name: "SERVICE_TEMPLATE 1: Necessary info creation"
+ netbox.netbox.netbox_service_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Service Template for SSH
+ ports:
+ - 22
+ protocol: tcp
+ state: present
+ register: test_one
+
+- name: "SERVICE_TEMPLATE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['service_template']['name'] == "Service Template for SSH"
+ - test_one['service_template']['ports'] == [22]
+ - test_one['service_template']['protocol'] == "tcp"
+ - test_one['msg'] == "service_template Service Template for SSH created"
+
+- name: "SERVICE_TEMPLATE 2: Create duplicate"
+ netbox.netbox.netbox_service_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Service Template for SSH
+ ports:
+ - 22
+ protocol: tcp
+ state: present
+ register: test_two
+
+- name: "SERVICE_TEMPLATE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['service_template']['name'] == "Service Template for SSH"
+ - test_two['service_template']['ports'] == [22]
+ - test_two['service_template']['protocol'] == "tcp"
+ - test_two['msg'] == "service_template Service Template for SSH already exists"
+
+- name: "SERVICE_TEMPLATE 3: Update Service Template with other fields"
+ netbox.netbox.netbox_service_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Service Template for SSH
+ ports:
+ - 22
+ protocol: tcp
+ comments: For SSH service
+ state: present
+ register: test_three
+
+- name: "SERVICE_TEMPLATE 3: ASSERT - Update Service Template with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['comments'] == "For SSH service"
+ - test_three['service_template']['name'] == "Service Template for SSH"
+ - test_three['service_template']['ports'] == [22]
+ - test_three['service_template']['protocol'] == "tcp"
+ - test_three['service_template']['comments'] == "For SSH service"
+ - test_three['msg'] == "service_template Service Template for SSH updated"
+
+- name: "SERVICE_TEMPLATE 4: Create Service Template for Delete Test"
+ netbox.netbox.netbox_service_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Service Template for DNS
+ ports:
+ - 53
+ protocol: udp
+ comments: Domain Name System
+ state: present
+ register: test_four
+
+- name: "SERVICE_TEMPLATE 4: ASSERT - Create Service Template for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['service_template']['name'] == "Service Template for DNS"
+ - test_four['service_template']['ports'] == [53]
+ - test_four['service_template']['protocol'] == "udp"
+ - test_four['service_template']['comments'] == "Domain Name System"
+ - test_four['msg'] == "service_template Service Template for DNS created"
+
+- name: "SERVICE_TEMPLATE 5: Delete Service Template"
+ netbox.netbox.netbox_service_template:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Service Template for DNS
+ state: absent
+ register: test_five
+
+- name: "SERVICE_TEMPLATE 5: ASSERT - Delete Service Template"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "service_template Service Template for DNS deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site.yml
new file mode 100644
index 000000000..a5861522e
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site.yml
@@ -0,0 +1,165 @@
+---
+##
+##
+### NETBOX_SITE
+##
+##
+- name: 1 - Create site within NetBox with only required information
+ netbox.netbox.netbox_site:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test - Colorado
+ state: present
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['site']['name'] == "Test - Colorado"
+ - test_one['msg'] == "site Test - Colorado created"
+
+- name: 2 - Duplicate
+ netbox.netbox.netbox_site:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test - Colorado
+ state: present
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['msg'] == "site Test - Colorado already exists"
+ - test_two['site']['name'] == "Test - Colorado"
+
+- name: 3 - Update Test - Colorado
+ netbox.netbox.netbox_site:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test - Colorado
+ status: Planned
+ region: Test Region
+ state: present
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['status'] == "planned"
+ - test_three['diff']['after']['region'] == 1
+ - test_three['msg'] == "site Test - Colorado updated"
+ - test_three['site']['name'] == "Test - Colorado"
+ - test_three['site']['status'] == "planned"
+ - test_three['site']['region'] == 1
+
+- name: 4 - Create site with all parameters
+ netbox.netbox.netbox_site:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test - California
+ status: Planned
+ region: Test Region
+ site_group: Test Site Group
+ tenant: Test Tenant
+ facility: EquinoxCA7
+ time_zone: America/Los Angeles
+ description: This is a test description
+ physical_address: Hollywood, CA, 90210
+ shipping_address: Hollywood, CA, 90210
+ latitude: "22.169141"
+ longitude: "-100.994041"
+ comments: "### Placeholder"
+ slug: test_california
+ state: present
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['site']['name'] == "Test - California"
+ - test_four['msg'] == "site Test - California created"
+ - test_four['site']['status'] == "planned"
+ - test_four['site']['region'] == 1
+ - test_four['site']['group'] == 4
+ - test_four['site']['tenant'] == 1
+ - test_four['site']['facility'] == "EquinoxCA7"
+ - test_four['site']['time_zone'] == "America/Los_Angeles"
+ - test_four['site']['description'] == "This is a test description"
+ - test_four['site']['physical_address'] == "Hollywood, CA, 90210"
+ - test_four['site']['shipping_address'] == "Hollywood, CA, 90210"
+ - test_four['site']['latitude'] == 22.169141
+ - test_four['site']['longitude'] == -100.994041
+ - test_four['site']['comments'] == "### Placeholder"
+ - test_four['site']['slug'] == "test_california"
+
+- name: "NETBOX_SITE_IDEM: Idempotency - Create duplicate site with all parameters"
+ netbox.netbox.netbox_site:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test - California
+ status: Planned
+ region: Test Region
+ site_group: Test Site Group
+ tenant: Test Tenant
+ facility: EquinoxCA7
+ time_zone: America/Los Angeles
+ description: This is a test description
+ physical_address: Hollywood, CA, 90210
+ shipping_address: Hollywood, CA, 90210
+ latitude: "22.169141"
+ longitude: "-100.994041"
+ comments: "### Placeholder"
+ slug: test_california
+ state: present
+ register: test_results
+
+- name: "NETBOX_SITE_IDEM: (ASSERT) Idempotency - Duplicate device site with all parameters"
+ ansible.builtin.assert:
+ that:
+ - test_results is not changed
+ - test_results['site']['name'] == "Test - California"
+ - test_results['msg'] == "site Test - California already exists"
+ - test_results['site']['status'] == "planned"
+ - test_results['site']['region'] == 1
+ - test_results['site']['group'] == 4
+ - test_results['site']['tenant'] == 1
+ - test_results['site']['facility'] == "EquinoxCA7"
+ - test_results['site']['time_zone'] == "America/Los_Angeles"
+ - test_results['site']['description'] == "This is a test description"
+ - test_results['site']['physical_address'] == "Hollywood, CA, 90210"
+ - test_results['site']['shipping_address'] == "Hollywood, CA, 90210"
+ - test_results['site']['latitude'] == 22.169141
+ - test_results['site']['longitude'] == -100.994041
+ - test_results['site']['comments'] == "### Placeholder"
+ - test_results['site']['slug'] == "test_california"
+
+- name: 5 - Delete site within NetBox
+ netbox.netbox.netbox_site:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test - Colorado
+ state: absent
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['site']['name'] == "Test - Colorado"
+ - test_five['msg'] == "site Test - Colorado deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site_group.yml
new file mode 100644
index 000000000..11b9cbb24
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_site_group.yml
@@ -0,0 +1,81 @@
+---
+##
+##
+### NETBOX_SITE_GROUP
+##
+##
+- name: "SITE_GROUP 1: Necessary info creation"
+ netbox.netbox.netbox_site_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Site Group
+ state: present
+ register: test_one
+
+- name: "SITE_GROUP 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['site_group']['name'] == "Site Group"
+ - test_one['site_group']['slug'] == "site-group"
+ - test_one['msg'] == "site_group Site Group created"
+
+- name: "SITE_GROUP 2: Create duplicate"
+ netbox.netbox.netbox_site_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Site Group
+ state: present
+ register: test_two
+
+- name: "SITE_GROUP 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['site_group']['name'] == "Site Group"
+ - test_two['site_group']['slug'] == "site-group"
+ - test_two['msg'] == "site_group Site Group already exists"
+
+- name: "SITE_GROUP 3: Update"
+ netbox.netbox.netbox_site_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Site Group
+ parent_site_group: Test Site Group
+ description: This is a site group
+ state: present
+ register: test_three
+
+- name: "SITE_GROUP 3: ASSERT - Update"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['parent'] == 4
+ - test_three['diff']['after']['description'] == "This is a site group"
+ - test_three['site_group']['name'] == "Site Group"
+ - test_three['site_group']['slug'] == "site-group"
+ - test_three['site_group']['parent'] == 4
+ - test_three['site_group']['description'] == "This is a site group"
+ - test_three['msg'] == "site_group Site Group updated"
+
+- name: "SITE_GROUP 4: Delete"
+ netbox.netbox.netbox_site_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Site Group
+ state: absent
+ register: test_four
+
+- name: "SITE_GROUP 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "site_group Site Group deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tag.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tag.yml
new file mode 100644
index 000000000..f3fcbac39
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tag.yml
@@ -0,0 +1,110 @@
+---
+##
+##
+### NETBOX_TAGS
+##
+##
+- name: "TAG 1: ASSERT - Necessary info creation"
+ netbox.netbox.netbox_tag:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tag 1
+ description: Tag 1 test
+ color: "0000ff"
+ state: present
+ register: test_one
+
+- name: "TAG 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['tags']['color'] == "0000ff"
+ - test_one['tags']['description'] == "Tag 1 test"
+ - test_one['tags']['name'] == "Test Tag 1"
+ - test_one['tags']['slug'] == "test-tag-1"
+ - test_one['msg'] == "tags Test Tag 1 created"
+
+- name: "TAG 2: Create duplicate"
+ netbox.netbox.netbox_tag:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tag 1
+ description: Tag 1 test
+ color: "0000ff"
+ state: present
+ register: test_two
+
+- name: "TAG 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['tags']['name'] == "Test Tag 1"
+ - test_two['msg'] == "tags Test Tag 1 already exists"
+
+- name: "TAG 3: ASSERT - Update"
+ netbox.netbox.netbox_tag:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tag 1
+ description: Tag 1 update test
+ color: "00ff00"
+ state: present
+ register: test_three
+
+- name: "TAG 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['color'] == "00ff00"
+ - test_three['diff']['after']['description'] == "Tag 1 update test"
+ - test_three['tags']['name'] == "Test Tag 1"
+ - test_three['tags']['description'] == "Tag 1 update test"
+ - test_three['tags']['color'] == "00ff00"
+ - test_three['msg'] == "tags Test Tag 1 updated"
+
+- name: "TAG 4: ASSERT - Delete"
+ netbox.netbox.netbox_tag:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tag 1
+ state: absent
+ register: test_four
+
+- name: "TAG 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['tags']['name'] == "Test Tag 1"
+ - test_four['tags']['slug'] == "test-tag-1"
+ - test_four['msg'] == "tags Test Tag 1 deleted"
+
+- name: "TAG 5: ASSERT - Necessary info creation"
+ netbox.netbox.netbox_tag:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tag 5
+ slug: test-tag-five
+ description: Tag 5 test
+ color: "0000ff"
+ state: present
+ register: test_five
+
+- name: "TAG 5: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['tags']['color'] == "0000ff"
+ - test_five['tags']['description'] == "Tag 5 test"
+ - test_five['tags']['name'] == "Test Tag 5"
+ - test_five['tags']['slug'] == "test-tag-five"
+ - test_five['msg'] == "tags Test Tag 5 created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant.yml
new file mode 100644
index 000000000..0731e0268
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant.yml
@@ -0,0 +1,106 @@
+---
+##
+##
+### NETBOX_TENANT
+##
+##
+- name: 1 - Test tenant creation
+ netbox.netbox.netbox_tenant:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Tenant ABC
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['tenant']['name'] == "Tenant ABC"
+ - test_one['tenant']['slug'] == "tenant-abc"
+ - test_one['msg'] == "tenant Tenant ABC created"
+
+- name: 2 - Test duplicate tenant
+ netbox.netbox.netbox_tenant:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Tenant ABC
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['tenant']['name'] == "Tenant ABC"
+ - test_two['tenant']['slug'] == "tenant-abc"
+ - test_two['msg'] == "tenant Tenant ABC already exists"
+
+- name: 3 - Test update
+ netbox.netbox.netbox_tenant:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Tenant ABC
+ description: Updated description
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['description'] == "Updated description"
+ - test_three['tenant']['name'] == "Tenant ABC"
+ - test_three['tenant']['slug'] == "tenant-abc"
+ - test_three['tenant']['description'] == "Updated description"
+ - test_three['msg'] == "tenant Tenant ABC updated"
+
+- name: 4 - Test delete
+ netbox.netbox.netbox_tenant:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Tenant ABC
+ state: absent
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "tenant Tenant ABC deleted"
+
+- name: 5 - Create tenant with all parameters
+ netbox.netbox.netbox_tenant:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Tenant ABC
+ description: ABC Incorporated
+ comments: "### This tenant is super cool"
+ tenant_group: Test Tenant Group
+ slug: tenant_abc
+ tags:
+ - tagA
+ - tagB
+ - tagC
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['tenant']['name'] == "Tenant ABC"
+ - test_five['tenant']['slug'] == "tenant_abc"
+ - test_five['tenant']['description'] == "ABC Incorporated"
+ - test_five['tenant']['comments'] == "### This tenant is super cool"
+ - test_five['tenant']['group'] == 1
+ - test_five['tenant']['tags'] | length == 3
+ - test_five['msg'] == "tenant Tenant ABC created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant_group.yml
new file mode 100644
index 000000000..f7f57f3c2
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_tenant_group.yml
@@ -0,0 +1,129 @@
+---
+##
+##
+### NETBOX_TENANT_GROUP
+##
+##
+- name: 1 - Test tenant group creation
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tenant Group Two
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['tenant_group']['name'] == "Test Tenant Group Two"
+ - test_one['tenant_group']['slug'] == "test-tenant-group-two"
+ - test_one['msg'] == "tenant_group Test Tenant Group Two created"
+
+- name: 2 - Test duplicate tenant group
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tenant Group Two
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['tenant_group']['name'] == "Test Tenant Group Two"
+ - test_two['tenant_group']['slug'] == "test-tenant-group-two"
+ - test_two['msg'] == "tenant_group Test Tenant Group Two already exists"
+
+- name: 3 - Test delete
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tenant Group Two
+ state: absent
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "present"
+ - test_three['diff']['after']['state'] == "absent"
+ - test_three['msg'] == "tenant_group Test Tenant Group Two deleted"
+
+- name: 4 - Test tenant group creation with custom slug
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['tenant_group']['name'] == "Test Tenant Group ABC"
+ - test_four['tenant_group']['slug'] == "test_tenant_group_four"
+ - test_four['msg'] == "tenant_group Test Tenant Group ABC created"
+
+- name: 5 - Test child tenant group creation
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Child Test Tenant Group
+ parent_tenant_group: "{{ test_four.tenant_group.slug }}"
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['tenant_group']['name'] == "Child Test Tenant Group"
+ - test_five['tenant_group']['parent'] == test_four.tenant_group.id
+ - test_five['msg'] == "tenant_group Child Test Tenant Group created"
+
+- name: 6 - Test child tenant group deletion
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Child Test Tenant Group
+ state: absent
+ register: test_six
+
+- name: 6 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['diff']['before']['state'] == "present"
+ - test_six['diff']['after']['state'] == "absent"
+ - test_six['msg'] == "tenant_group Child Test Tenant Group deleted"
+
+- name: 7 - Test deletion of the tenant group with custom slug
+ netbox.netbox.netbox_tenant_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test Tenant Group ABC
+ slug: test_tenant_group_four
+ state: absent
+ register: test_seven
+
+- name: 7 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_seven is changed
+ - test_seven['diff']['before']['state'] == "present"
+ - test_seven['diff']['after']['state'] == "absent"
+ - test_seven['msg'] == "tenant_group Test Tenant Group ABC deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_chassis.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_chassis.yml
new file mode 100644
index 000000000..395be8395
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_chassis.yml
@@ -0,0 +1,131 @@
+---
+# © 2020 Nokia
+# Licensed under the GNU General Public License v3.0 only
+# SPDX-License-Identifier: GPL-3.0-only
+##
+##
+### NETBOX_VIRTUAL_CHASSIS
+##
+##
+- name: "VIRTUAL_CHASSIS 0: Create device for testing virtual chassis"
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Device Virtual Chassis Tests
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ vc_position: 1
+ vc_priority: 1
+ state: present
+
+- name: "VIRTUAL_CHASSIS 1: Necessary info creation"
+ netbox.netbox.netbox_virtual_chassis:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: First VC
+ master: Device Virtual Chassis Tests
+ state: present
+ register: test_one
+
+- name: "VIRTUAL_CHASSIS 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['virtual_chassis']['master'] == 11
+ - test_one['virtual_chassis']['name'] == "First VC"
+ - test_one['msg'] == "virtual_chassis First VC created"
+
+- name: "VIRTUAL_CHASSIS 2: Create duplicate"
+ netbox.netbox.netbox_virtual_chassis:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: First VC
+ master: Device Virtual Chassis Tests
+ state: present
+ register: test_two
+
+- name: "VIRTUAL_CHASSIS 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['virtual_chassis']['master'] == 11
+ - test_two['virtual_chassis']['name'] == "First VC"
+ - test_two['msg'] == "virtual_chassis First VC already exists"
+
+- name: "POWER_FEED 3: Update virtual_chassis with other fields"
+ netbox.netbox.netbox_virtual_chassis:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: First VC
+ master: Device Virtual Chassis Tests
+ domain: Domain Text
+ state: present
+ register: test_three
+
+- name: "POWER_FEED 3: ASSERT - Update virtual_chassis with other fields"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['domain'] == "Domain Text"
+ - test_three['virtual_chassis']['master'] == 11
+ - test_three['virtual_chassis']['domain'] == "Domain Text"
+ - test_three['virtual_chassis']['name'] == "First VC"
+ - test_three['msg'] == "virtual_chassis First VC updated"
+
+- name: "VIRTUAL_CHASSIS 4: Create device for testing virtual chassis deletion"
+ netbox.netbox.netbox_device:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Device Virtual Chassis Tests 2
+ device_type: Cisco Test
+ device_role: Core Switch
+ site: Test Site
+ vc_position: 1
+ vc_priority: 15
+ state: present
+
+- name: "VIRTUAL_CHASSIS 4: Create Virtual Chassis for Delete Test"
+ netbox.netbox.netbox_virtual_chassis:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Second VC
+ master: Device Virtual Chassis Tests 2
+ state: present
+ register: test_four
+
+- name: "VIRTUAL_CHASSIS 4: ASSERT - Create Virtual Chassis for Delete Test"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "absent"
+ - test_four['diff']['after']['state'] == "present"
+ - test_four['virtual_chassis']['master'] == 12
+ - test_four['virtual_chassis']['name'] == "Second VC"
+ - test_four['msg'] == "virtual_chassis Second VC created"
+
+- name: "VIRTUAL_CHASSIS 5: Delete Virtual Chassis"
+ netbox.netbox.netbox_virtual_chassis:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Second VC
+ master: Device Virtual Chassis Tests 2
+ state: absent
+ register: test_five
+
+- name: "VIRTUAL_CHASSIS 5: ASSERT - Delete Virtual Chassis"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "present"
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['msg'] == "virtual_chassis Second VC deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_disk.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_disk.yml
new file mode 100644
index 000000000..5b17eaddf
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_disk.yml
@@ -0,0 +1,87 @@
+---
+##
+##
+### NETBOX_VIRTUAL_DISK
+##
+##
+- name: "NETBOX_VIRTUAL_DISK 1: Necessary info creation"
+ netbox.netbox.netbox_virtual_disk:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: disk0
+ size: 50
+ state: present
+ register: test_one
+
+- name: "NETBOX_VIRTUAL_DISK 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['virtual_disk']['name'] == "disk0"
+ - test_one['virtual_disk']['virtual_machine'] == 1
+ - test_one['msg'] == "virtual_disk disk0 created"
+
+- name: "NETBOX_VIRTUAL_DISK 2: Create duplicate"
+ netbox.netbox.netbox_virtual_disk:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: disk0
+ size: 50
+ state: present
+ register: test_two
+
+- name: "NETBOX_VIRTUAL_DISK 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['virtual_disk']['name'] == "disk0"
+ - test_two['virtual_disk']['virtual_machine'] == 1
+ - test_two['msg'] == "virtual_disk disk0 already exists"
+
+- name: "NETBOX_VIRTUAL_DISK 3: Update"
+ netbox.netbox.netbox_virtual_disk:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: disk0
+ size: 60
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "NETBOX_VIRTUAL_DISK 4: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['size'] == 60
+ - test_three['virtual_disk']['name'] == "disk0"
+ - test_three['virtual_disk']['virtual_machine'] == 1
+ - test_three['virtual_disk']['size'] == 60
+ - test_three['virtual_disk']['tags'][0] == 4
+ - test_three['msg'] == "virtual_disk disk0 updated"
+
+- name: "NETBOX_VIRTUAL_DISK 4: ASSERT - Delete"
+ netbox.netbox.netbox_virtual_disk:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: disk0
+ virtual_machine: test100-vm
+ state: absent
+ register: test_four
+
+- name: "NETBOX_VIRTUAL_DISK 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['virtual_disk']['name'] == "disk0"
+ - test_four['virtual_disk']['virtual_machine'] == 1
+ - test_four['msg'] == "virtual_disk disk0 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_machine.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_machine.yml
new file mode 100644
index 000000000..3fb0e4319
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_virtual_machine.yml
@@ -0,0 +1,128 @@
+---
+##
+##
+### NETBOX_VIRTUAL_MACHINES
+##
+##
+- name: "VIRTUAL_MACHINE 1: Necessary info creation"
+ netbox.netbox.netbox_virtual_machine:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VM One
+ cluster: Test Cluster
+ state: present
+ register: test_one
+
+- name: "VIRTUAL_MACHINE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['virtual_machine']['name'] == "Test VM One"
+ - test_one['virtual_machine']['cluster'] == 1
+ - test_one['msg'] == "virtual_machine Test VM One created"
+
+- name: "VIRTUAL_MACHINE 2: Create duplicate"
+ netbox.netbox.netbox_virtual_machine:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VM One
+ cluster: Test Cluster
+ state: present
+ register: test_two
+
+- name: "VIRTUAL_MACHINE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['virtual_machine']['name'] == "Test VM One"
+ - test_two['virtual_machine']['cluster'] == 1
+ - test_two['msg'] == "virtual_machine Test VM One already exists"
+
+- name: "VIRTUAL_MACHINE 3: Update"
+ netbox.netbox.netbox_virtual_machine:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VM One
+ cluster: Test Cluster
+ vcpus: 8.5
+ memory: 8
+ status: Planned
+ virtual_machine_role: Test VM Role
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "VIRTUAL_MACHINE 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['vcpus'] == 8.5
+ - test_three['diff']['after']['memory'] == 8
+ - test_three['diff']['after']['status'] == "planned"
+ - test_three['diff']['after']['role'] == 2
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['virtual_machine']['name'] == "Test VM One"
+ - test_three['virtual_machine']['cluster'] == 1
+ - test_three['virtual_machine']['vcpus'] == 8.5
+ - test_three['virtual_machine']['memory'] == 8
+ - test_three['virtual_machine']['status'] == "planned"
+ - test_three['virtual_machine']['role'] == 2
+ - test_three['virtual_machine']['tags'][0] == 4
+ - test_three['msg'] == "virtual_machine Test VM One updated"
+
+- name: "VIRTUAL_MACHINE 4: Test idempotence"
+ netbox.netbox.netbox_virtual_machine:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VM One
+ cluster: Test Cluster
+ vcpus: 8.5
+ memory: 8
+ status: Planned
+ virtual_machine_role: Test VM Role
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_four_idempotence
+
+- name: "VIRTUAL_MACHINE 4: ASSERT - Not changed"
+ ansible.builtin.assert:
+ that:
+ - test_four_idempotence is not changed
+ - test_four_idempotence['virtual_machine']['name'] == "Test VM One"
+ - test_four_idempotence['virtual_machine']['cluster'] == 1
+ - test_four_idempotence['virtual_machine']['vcpus'] == 8.5
+ - test_four_idempotence['virtual_machine']['memory'] == 8
+ - test_four_idempotence['virtual_machine']['status'] == "planned"
+ - test_four_idempotence['virtual_machine']['role'] == 2
+ - test_four_idempotence['virtual_machine']['tags'][0] == 4
+ - test_four_idempotence['msg'] == "virtual_machine Test VM One already exists"
+
+- name: "VIRTUAL_MACHINE 5: Delete"
+ netbox.netbox.netbox_virtual_machine:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VM One
+ state: absent
+ register: test_five
+
+- name: "VIRTUAL_MACHINE 5: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['virtual_machine']['name'] == "Test VM One"
+ - test_five['virtual_machine']['cluster'] == 1
+ - test_five['virtual_machine']['vcpus'] == 8.5
+ - test_five['virtual_machine']['memory'] == 8
+ - test_five['virtual_machine']['status'] == "planned"
+ - test_five['virtual_machine']['role'] == 2
+ - test_five['virtual_machine']['tags'][0] == 4
+ - test_five['msg'] == "virtual_machine Test VM One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan.yml
new file mode 100644
index 000000000..463873ca5
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan.yml
@@ -0,0 +1,193 @@
+---
+##
+##
+### NETBOX_VLAN
+##
+##
+- name: "VLAN 1: Necessary info creation"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ vid: 500
+ state: present
+ register: test_one
+
+- name: "VLAN 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['vlan']['name'] == "Test VLAN 500"
+ - test_one['vlan']['vid'] == 500
+ - test_one['msg'] == "vlan Test VLAN 500 created"
+
+- name: "VLAN 2: Create duplicate"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ vid: 500
+ state: present
+ register: test_two
+
+- name: "VLAN 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['vlan']['name'] == "Test VLAN 500"
+ - test_two['vlan']['vid'] == 500
+ - test_two['msg'] == "vlan Test VLAN 500 already exists"
+
+- name: "VLAN 3: Create VLAN with same name, but different site"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ vid: 500
+ site: Test Site
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
+ state: present
+ register: test_three
+
+- name: "VLAN 3: ASSERT - Create VLAN with same name, but different site"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['vlan']['name'] == "Test VLAN 500"
+ - test_three['vlan']['vid'] == 500
+ - test_three['vlan']['site'] == 1
+ - test_three['vlan']['group'] == 1
+ - test_three['msg'] == "vlan Test VLAN 500 created"
+
+- name: "VLAN 4: ASSERT - Update"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ vid: 500
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
+ status: Reserved
+ vlan_role: Network of care
+ description: Updated description
+ site: Test Site
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_four
+
+- name: "VLAN 4: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['status'] == "reserved"
+ - test_four['diff']['after']['role'] == 1
+ - test_four['diff']['after']['description'] == "Updated description"
+ - test_four['diff']['after']['tags'][0] == 4
+ - test_four['vlan']['name'] == "Test VLAN 500"
+ - test_four['vlan']['tenant'] == 1
+ - test_four['vlan']['site'] == 1
+ - test_four['vlan']['group'] == 1
+ - test_four['vlan']['status'] == "reserved"
+ - test_four['vlan']['role'] == 1
+ - test_four['vlan']['description'] == "Updated description"
+ - test_four['vlan']['tags'][0] == 4
+ - test_four['msg'] == "vlan Test VLAN 500 updated"
+
+- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ vid: 500
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group
+ status: Reserved
+ vlan_role: Network of care
+ description: Updated description
+ site: Test Site
+ tags:
+ - Schnozzberry
+ state: present
+ register: idempotent_vlan_group
+
+- name: "VLAN: ASSERT - IDEMPOTENT WITH VLAN_GROUP"
+ ansible.builtin.assert:
+ that:
+ - idempotent_vlan_group is not changed
+
+- name: "VLAN: Create VLAN with same name, but different vlan_group"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ vid: 500
+ site: Test Site
+ tenant: Test Tenant
+ vlan_group: Test VLAN Group 2
+ state: present
+ register: new_vlan_group
+
+- name: "VLAN: ASSERT - Create VLAN with same name, but different vlan_group"
+ ansible.builtin.assert:
+ that:
+ - new_vlan_group is changed
+ - new_vlan_group['diff']['before']['state'] == "absent"
+ - new_vlan_group['diff']['after']['state'] == "present"
+ - new_vlan_group['vlan']['name'] == "Test VLAN 500"
+ - new_vlan_group['vlan']['vid'] == 500
+ - new_vlan_group['vlan']['site'] == 1
+ - new_vlan_group['vlan']['group'] == 2
+ - new_vlan_group['msg'] == "vlan Test VLAN 500 created"
+
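+# Several VLANs named "Test VLAN 500" exist at this point (with and without a site/VLAN group),
+# so deleting by name alone returns more than one result and the module is expected to fail.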
+- name: "VLAN 5: ASSERT - Delete more than one result"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ state: absent
+ ignore_errors: true
+ register: test_five
+
+- name: "VLAN 5: ASSERT - Delete more than one result"
+ ansible.builtin.assert:
+ that:
+ - test_five is failed
+ - test_five['msg'] == "More than one result returned for Test VLAN 500"
+
+- name: "VLAN 6: ASSERT - Delete"
+ netbox.netbox.netbox_vlan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VLAN 500
+ site: Test Site
+ vlan_group: Test VLAN Group
+ state: absent
+ register: test_six
+
+- name: "VLAN 6: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['vlan']['name'] == "Test VLAN 500"
+ - test_six['vlan']['tenant'] == 1
+ - test_six['vlan']['site'] == 1
+ - test_six['vlan']['group'] == 1
+ - test_six['vlan']['status'] == "reserved"
+ - test_six['vlan']['role'] == 1
+ - test_six['vlan']['description'] == "Updated description"
+ - test_six['vlan']['tags'][0] == 4
+ - test_six['msg'] == "vlan Test VLAN 500 deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan_group.yml
new file mode 100644
index 000000000..9dd4d1789
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vlan_group.yml
@@ -0,0 +1,314 @@
+---
+##
+##
+### NETBOX_VLAN_GROUP
+##
+##
+- name: "VLAN_GROUP 1: Necessary info creation"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['state'] == "absent"
+ - results['diff']['after']['state'] == "present"
+ - results['vlan_group']['name'] == "VLAN Group One"
+ - results['vlan_group']['slug'] == "vlan-group-one"
+ - results['vlan_group']['scope_type'] == "dcim.site"
+ - results['vlan_group']['scope_id'] == 1
+ - results['msg'] == "vlan_group VLAN Group One created"
+
+- name: "VLAN_GROUP 2: Create duplicate"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not results['changed']
+ - results['vlan_group']['name'] == "VLAN Group One"
+ - results['vlan_group']['slug'] == "vlan-group-one"
+ - results['vlan_group']['scope_type'] == "dcim.site"
+ - results['vlan_group']['scope_id'] == 1
+ - results['msg'] == "vlan_group VLAN Group One already exists"
+
+- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site2
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 3: ASSERT - Create with same name, different site"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['vlan_group']['name'] == "VLAN Group One"
+ - results['vlan_group']['slug'] == "vlan-group-one"
+ - results['vlan_group']['scope_type'] == "dcim.site"
+ - results['vlan_group']['scope_id'] == 2
+ - results['msg'] == "vlan_group VLAN Group One created"
+
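+# Two "VLAN Group One" groups now exist (scoped to Test Site and Test Site2), so a lookup
+# without any scope matches both and the task below is expected to fail.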
+- name: "VLAN_GROUP 4: ASSERT - Create vlan group, no site"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group One
+ state: present
+ ignore_errors: true
+ register: results
+
+- name: "VLAN_GROUP 4: ASSERT - Create with same name, different site"
+ ansible.builtin.assert:
+ that:
+ - results is failed
+ - results['msg'] == "More than one result returned for VLAN Group One"
+
+- name: "VLAN_GROUP 5: ASSERT - Delete"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site2
+ state: absent
+ register: results
+
+- name: "VLAN_GROUP 5: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['state'] == "present"
+ - results['diff']['after']['state'] == "absent"
+ - results['vlan_group']['name'] == "VLAN Group One"
+ - results['vlan_group']['slug'] == "vlan-group-one"
+ - results['vlan_group']['scope_type'] == "dcim.site"
+ - results['vlan_group']['scope_id'] == 2
+ - results['msg'] == "vlan_group VLAN Group One deleted"
+
+- name: "VLAN_GROUP 6: ASSERT - Delete non existing"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group One
+ scope_type: dcim.site
+ scope: Test Site2
+ state: absent
+ register: results
+
+- name: "VLAN_GROUP 6: ASSERT - Delete non existing`"
+ ansible.builtin.assert:
+ that:
+ - not results['changed']
+ - results['vlan_group'] == None
+ - results['msg'] == "vlan_group VLAN Group One already absent"
+
+- name: "VLAN_GROUP 7: Necessary info creation - scope_type: dcim.location"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group Location
+ scope_type: dcim.location
+ scope: Test Rack Group
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 7: ASSERT - Necessary info creation - scope_type: dcim.location"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['state'] == "absent"
+ - results['diff']['after']['state'] == "present"
+ - results['vlan_group']['name'] == "VLAN Group Location"
+ - results['vlan_group']['slug'] == "vlan-group-location"
+ - results['vlan_group']['scope_type'] == "dcim.location"
+ - results['vlan_group']['scope_id'] == 1
+ - results['msg'] == "vlan_group VLAN Group Location created"
+
+- name: "VLAN_GROUP 8: Necessary info creation - scope_type: dcim.rack"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group Rack
+ scope_type: dcim.rack
+ scope: Test Rack
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 8: ASSERT - Necessary info creation - scope_type: dcim.rack"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['state'] == "absent"
+ - results['diff']['after']['state'] == "present"
+ - results['vlan_group']['name'] == "VLAN Group Rack"
+ - results['vlan_group']['slug'] == "vlan-group-rack"
+ - results['vlan_group']['scope_type'] == "dcim.rack"
+ - results['vlan_group']['scope_id'] == 2
+ - results['msg'] == "vlan_group VLAN Group Rack created"
+
+- name: "VLAN_GROUP 9: Necessary info creation - scope_type: dcim.region"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group Region
+ scope_type: dcim.region
+ scope: Test Region
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 9: ASSERT - Necessary info creation - scope_type: dcim.region"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['state'] == "absent"
+ - results['diff']['after']['state'] == "present"
+ - results['vlan_group']['name'] == "VLAN Group Region"
+ - results['vlan_group']['slug'] == "vlan-group-region"
+ - results['vlan_group']['scope_type'] == "dcim.region"
+ - results['vlan_group']['scope_id'] == 1
+ - results['msg'] == "vlan_group VLAN Group Region created"
+
+- name: "VLAN_GROUP 10: Necessary info creation - scope_type: dcim.sitegroup"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group Site Group
+ scope_type: dcim.sitegroup
+ scope: Test Site Group
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 10: ASSERT - Necessary info creation - scope_type: dcim.sitegroup"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['state'] == "absent"
+ - results['diff']['after']['state'] == "present"
+ - results['vlan_group']['name'] == "VLAN Group Site Group"
+ - results['vlan_group']['slug'] == "vlan-group-site-group"
+ - results['vlan_group']['scope_type'] == "dcim.sitegroup"
+ - results['vlan_group']['scope_id'] == 4
+ - results['msg'] == "vlan_group VLAN Group Site Group created"
+
+# Commented out due to invalid content type being reported back by API
+# - name: "VLAN_GROUP 11: Necessary info creation - scope_type: virtualization.cluster"
+# netbox.netbox.netbox_vlan_group:
+# netbox_url: http://localhost:32768
+# netbox_token: 0123456789abcdef0123456789abcdef01234567
+# data:
+# name: "VLAN Group Cluster"
+# scope_type: "virtualization.cluster"
+# scope: Test Cluster
+# state: present
+# register: results
+
+# - name: "VLAN_GROUP 11: ASSERT - Necessary info creation - scope_type: virtualization.cluster"
+# assert:
+# that:
+# - results is changed
+# - results['diff']['before']['state'] == "absent"
+# - results['diff']['after']['state'] == "present"
+# - results['vlan_group']['name'] == "VLAN Group Cluster"
+# - results['vlan_group']['slug'] == "vlan-group-cluster"
+# - results['vlan_group']['scope_type'] == "virtualization.cluster"
+# - results['vlan_group']['scope_id'] == 1
+# - results['msg'] == "vlan_group VLAN Group Cluster created"
+
+# - name: "VLAN_GROUP 12: Necessary info creation - scope_type: virtualization.clustergroup"
+# netbox.netbox.netbox_vlan_group:
+# netbox_url: http://localhost:32768
+# netbox_token: 0123456789abcdef0123456789abcdef01234567
+# data:
+# name: "VLAN Group Cluster Group"
+# scope_type: "virtualization.clustergroup"
+# scope: Test Cluster Group
+# state: present
+# register: results
+
+# - name: "VLAN_GROUP 12: ASSERT - Necessary info creation - scope_type: virtualization.clustergroup"
+# assert:
+# that:
+# - results is changed
+# - results['diff']['before']['state'] == "absent"
+# - results['diff']['after']['state'] == "present"
+# - results['vlan_group']['name'] == "VLAN Group Cluster Group"
+# - results['vlan_group']['slug'] == "vlan-group-cluster-group"
+# - results['vlan_group']['scope_type'] == "virtualization.clustergroup"
+# - results['vlan_group']['scope_id'] == 1
+# - results['msg'] == "vlan_group VLAN Group Cluster Group created"
+
+- name: "VLAN_GROUP 12: Update Description - scope_type: dcim.location"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group Location
+ scope_type: dcim.location
+ scope: Test Rack Group
+ description: Ansible updated description
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 12: ASSERT - Update Description - scope_type: dcim.location"
+ ansible.builtin.assert:
+ that:
+ - results is changed
+ - results['diff']['before']['description'] == ""
+ - results['diff']['after']['description'] == "Ansible updated description"
+ - results['vlan_group']['name'] == "VLAN Group Location"
+ - results['vlan_group']['slug'] == "vlan-group-location"
+ - results['vlan_group']['scope_type'] == "dcim.location"
+ - results['vlan_group']['scope_id'] == 1
+ - results['vlan_group']['description'] == "Ansible updated description"
+ - results['msg'] == "vlan_group VLAN Group Location updated"
+
+- name: "VLAN_GROUP 12: Update Description (IDEM) - scope_type: dcim.location"
+ netbox.netbox.netbox_vlan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: VLAN Group Location
+ scope_type: dcim.location
+ scope: Test Rack Group
+ description: Ansible updated description
+ state: present
+ register: results
+
+- name: "VLAN_GROUP 12: ASSERT - Update Description (IDEM) - scope_type: dcim.location"
+ ansible.builtin.assert:
+ that:
+ - results is not changed
+ - results['vlan_group']['name'] == "VLAN Group Location"
+ - results['vlan_group']['slug'] == "vlan-group-location"
+ - results['vlan_group']['scope_type'] == "dcim.location"
+ - results['vlan_group']['scope_id'] == 1
+ - results['vlan_group']['description'] == "Ansible updated description"
+ - results['msg'] == "vlan_group VLAN Group Location already exists"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vm_interface.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vm_interface.yml
new file mode 100644
index 000000000..6ca36e1e5
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vm_interface.yml
@@ -0,0 +1,159 @@
+---
+##
+##
+### NETBOX_VM_INTERFACE
+##
+##
+- name: "NETBOX_VM_INTERFACE 1: Necessary info creation"
+ netbox.netbox.netbox_vm_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: Eth10
+ state: present
+ register: test_one
+
+- name: "NETBOX_VM_INTERFACE 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['interface']['name'] == "Eth10"
+ - test_one['interface']['virtual_machine'] == 1
+ - test_one['msg'] == "interface Eth10 created"
+
+- name: "NETBOX_VM_INTERFACE 2: Create duplicate"
+ netbox.netbox.netbox_vm_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: Eth10
+ state: present
+ register: test_two
+
+- name: "NETBOX_VM_INTERFACE 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['interface']['name'] == "Eth10"
+ - test_two['interface']['virtual_machine'] == 1
+ - test_two['msg'] == "interface Eth10 already exists"
+
+- name: "NETBOX_VM_INTERFACE 3: Updated"
+ netbox.netbox.netbox_vm_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: Eth10
+ enabled: false
+ mtu: 9000
+ mac_address: "00:00:00:AA:AA:01"
+ description: Updated test100-vm
+ mode: Tagged
+ # untagged_vlan:
+ # name: Wireless
+ # site: Test Site
+ # tagged_vlans:
+ # - name: Data
+ # site: Test Site
+ # - name: VoIP
+ # site: Test Site
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_three
+
+- name: "NETBOX_VM_INTERFACE 4: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['enabled'] == false
+ - test_three['diff']['after']['mtu'] == 9000
+ - test_three['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
+ - test_three['diff']['after']['description'] == "Updated test100-vm"
+ - test_three['diff']['after']['mode'] == "tagged"
+ # - test_three['diff']['after']['untagged_vlan'] == 1
+ # - test_three['diff']['after']['tagged_vlans'] == [2, 3]
+ - test_three['diff']['after']['tags'][0] == 4
+ - test_three['interface']['name'] == "Eth10"
+ - test_three['interface']['virtual_machine'] == 1
+ - test_three['interface']['enabled'] == false
+ - test_three['interface']['mtu'] == 9000
+ - test_three['interface']['mac_address'] == "00:00:00:AA:AA:01"
+ - test_three['interface']['description'] == "Updated test100-vm"
+ - test_three['interface']['mode'] == "tagged"
+ # - test_three['interface']['untagged_vlan'] == 1
+ # - test_three['interface']['tagged_vlans'] == [2, 3]
+ - test_three['interface']['tags'][0] == 4
+ - test_three['msg'] == "interface Eth10 updated"
+
+- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
+ netbox.netbox.netbox_vm_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Eth10
+ virtual_machine: test100-vm
+ state: absent
+ register: test_four
+
+- name: "NETBOX_VM_INTERFACE 4: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['interface']['name'] == "Eth10"
+ - test_four['interface']['virtual_machine'] == 1
+ - test_four['msg'] == "interface Eth10 deleted"
+
+- name: "NETBOX_VM_INTERFACE 5: Attempt to update interface with same name on other VMs"
+ netbox.netbox.netbox_vm_interface:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ virtual_machine: test100-vm
+ name: Eth0
+ enabled: false
+ mtu: 9000
+ mac_address: "00:00:00:AA:AA:01"
+ description: Updated test100-vm Eth0 intf
+ mode: Tagged
+ # untagged_vlan:
+ # name: Wireless
+ # site: Test Site
+ # tagged_vlans:
+ # - name: Data
+ # site: Test Site
+ # - name: VoIP
+ # site: Test Site
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_five
+
+- name: "NETBOX_VM_INTERFACE 5: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['enabled'] == false
+ - test_five['diff']['after']['mtu'] == 9000
+ - test_five['diff']['after']['mac_address'] == "00:00:00:AA:AA:01"
+ - test_five['diff']['after']['description'] == "Updated test100-vm Eth0 intf"
+ - test_five['diff']['after']['mode'] == "tagged"
+ # - test_five['diff']['after']['untagged_vlan'] == 1
+ # - test_five['diff']['after']['tagged_vlans'] == [2, 3]
+ - test_five['diff']['after']['tags'][0] == 4
+ - test_five['interface']['name'] == "Eth0"
+ - test_five['interface']['virtual_machine'] == 1
+ - test_five['interface']['enabled'] == false
+ - test_five['interface']['mtu'] == 9000
+ - test_five['interface']['mac_address'] == "00:00:00:AA:AA:01"
+ - test_five['interface']['description'] == "Updated test100-vm Eth0 intf"
+ - test_five['interface']['mode'] == "tagged"
+ # - test_five['interface']['untagged_vlan'] == 1
+ # - test_five['interface']['tagged_vlans'] == [2, 3]
+ - test_five['interface']['tags'][0] == 4
+ - test_five['msg'] == "interface Eth0 updated"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vrf.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vrf.yml
new file mode 100644
index 000000000..1da7ae382
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_vrf.yml
@@ -0,0 +1,137 @@
+---
+##
+##
+### NETBOX_VRF
+##
+##
+- name: "VRF 1: Necessary info creation"
+ netbox.netbox.netbox_vrf:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VRF One
+ state: present
+ register: test_one
+
+- name: "VRF 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['vrf']['name'] == "Test VRF One"
+ - test_one['msg'] == "vrf Test VRF One created"
+
+- name: "VRF 2: Create duplicate"
+ netbox.netbox.netbox_vrf:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VRF One
+ state: present
+ register: test_two
+
+- name: "VRF 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['vrf']['name'] == "Test VRF One"
+ - test_two['msg'] == "vrf Test VRF One already exists"
+
+- name: "VRF 3: Create VRF with same name, but different tenant"
+ netbox.netbox.netbox_vrf:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VRF One
+ tenant: Test Tenant
+ state: present
+ register: test_three
+
+- name: "VRF 3: ASSERT - Create VRF with same name, but different site"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['before']['state'] == "absent"
+ - test_three['diff']['after']['state'] == "present"
+ - test_three['vrf']['name'] == "Test VRF One"
+ - test_three['vrf']['tenant'] == 1
+ - test_three['msg'] == "vrf Test VRF One created"
+
+- name: "VRF 4: ASSERT - Update"
+ netbox.netbox.netbox_vrf:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VRF One
+ rd: "65001:1"
+ enforce_unique: false
+ tenant: Test Tenant
+ description: Updated description
+ import_targets:
+ - 4000:4000
+ - 5000:5000
+ export_targets:
+ - 5000:5000
+ tags:
+ - Schnozzberry
+ state: present
+ register: test_four
+
+- name: "VRF 4: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['rd'] == "65001:1"
+ - test_four['diff']['after']['enforce_unique'] == false
+ - test_four['diff']['after']['description'] == "Updated description"
+ - test_four['diff']['after']['tags'][0] == 4
+ - test_four['diff']['after']['import_targets'] | length == 2
+ - test_four['diff']['after']['export_targets'] | length == 1
+ - test_four['vrf']['name'] == "Test VRF One"
+ - test_four['vrf']['tenant'] == 1
+ - test_four['vrf']['rd'] == "65001:1"
+ - test_four['vrf']['enforce_unique'] == false
+ - test_four['vrf']['description'] == "Updated description"
+ - test_four['vrf']['tags'][0] == 4
+ - test_four['vrf']['import_targets'] | length == 2
+ - test_four['vrf']['export_targets'] | length == 1
+ - test_four['msg'] == "vrf Test VRF One updated"
+
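+# Two VRFs named "Test VRF One" exist at this point (one with a tenant, one without),
+# so deleting by name alone is ambiguous and the task below is expected to fail.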
+- name: "VRF 5: ASSERT - Delete more than one result"
+ netbox.netbox.netbox_vrf:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VRF One
+ state: absent
+ ignore_errors: true
+ register: test_five
+
+- name: "VRF 5: ASSERT - Delete more than one result"
+ ansible.builtin.assert:
+ that:
+ - test_five is failed
+ - test_five['msg'] == "More than one result returned for Test VRF One"
+
+- name: "VRF 6: ASSERT - Delete"
+ netbox.netbox.netbox_vrf:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Test VRF One
+ tenant: Test Tenant
+ state: absent
+ register: test_six
+
+- name: "VRF 6: ASSERT - Delete"
+ ansible.builtin.assert:
+ that:
+ - test_six is changed
+ - test_six['vrf']['name'] == "Test VRF One"
+ - test_six['vrf']['tenant'] == 1
+ - test_six['vrf']['rd'] == "65001:1"
+ - test_six['vrf']['enforce_unique'] == false
+ - test_six['vrf']['description'] == "Updated description"
+ - test_six['vrf']['tags'][0] == 4
+ - test_six['msg'] == "vrf Test VRF One deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_webhook.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_webhook.yml
new file mode 100644
index 000000000..a3d5a3825
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_webhook.yml
@@ -0,0 +1,121 @@
+---
+##
+##
+### NETBOX_WEBHOOK
+##
+##
+- name: "WEBHOOK 1: Necessary info creation"
+ netbox.netbox.netbox_webhook:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ content_types:
+ - dcim.device
+ name: Example Webhook
+ type_create: true
+ payload_url: https://payload.url
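+      # The '!unsafe' tag below keeps Ansible from templating '{{ data }}', so the literal
+      # Jinja expression is stored in NetBox as the webhook body template.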
+ body_template: !unsafe >-
+ {{ data }}
+ state: present
+ register: test_one
+
+- name: "WEBHOOK 1: ASSERT - Necessary info creation"
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['webhook']['name'] == "Example Webhook"
+ - test_one['webhook']['type_create'] == True
+ - test_one['webhook']['payload_url'] == "https://payload.url"
+ - test_one['webhook']['content_types'] == ["dcim.device"]
+ - test_one['msg'] == "webhook Example Webhook created"
+
+- name: "WEBHOOK 2: Create duplicate"
+ netbox.netbox.netbox_webhook:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ content_types:
+ - dcim.device
+ name: Example Webhook
+ type_create: true
+ payload_url: https://payload.url
+ body_template: !unsafe >-
+ {{ data }}
+ state: present
+ register: test_two
+
+- name: "WEBHOOK 2: ASSERT - Create duplicate"
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['webhook']['name'] == "Example Webhook"
+ - test_two['msg'] == "webhook Example Webhook already exists"
+
+- name: "WEBHOOK 3: Update data and add on delete"
+ netbox.netbox.netbox_webhook:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ content_types:
+ - dcim.device
+ type_create: true
+ type_delete: true
+ name: Example Webhook
+ payload_url: https://payload.url
+ body_template: !unsafe >-
+ {{ data }}
+ state: present
+ register: test_three
+
+- name: "WEBHOOK 3: ASSERT - Updated"
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['webhook']['name'] == "Example Webhook"
+ - test_three['msg'] == "webhook Example Webhook updated"
+
+- name: "WEBHOOK 4: Change content type"
+ netbox.netbox.netbox_webhook:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ content_types:
+ - virtualization.virtualmachine
+ name: Example Webhook
+ payload_url: https://payload.url
+ body_template: !unsafe >-
+ {{ data }}
+ state: present
+ register: test_four
+
+- name: "WEBHOOK 4: ASSERT - Change content type"
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['after']['content_types'] == ["virtualization.virtualmachine"]
+ - test_four['webhook']['name'] == "Example Webhook"
+ - test_four['msg'] == "webhook Example Webhook updated"
+
+- name: "WEBHOOK 5: Delete"
+ netbox.netbox.netbox_webhook:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ content_types:
+ - virtualization.virtualmachine
+ name: Example Webhook
+ payload_url: https://payload.url
+ body_template: !unsafe >-
+ {{ data }}
+ state: absent
+ register: test_five
+
+- name: "WEBHOOK 5: ASSERT - Deleted"
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['after']['state'] == "absent"
+ - test_five['webhook']['name'] == "Example Webhook"
+ - test_five['msg'] == "webhook Example Webhook deleted"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan.yml
new file mode 100644
index 000000000..e70fd3e59
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan.yml
@@ -0,0 +1,103 @@
+---
+##
+##
+### NETBOX_WIRELESS_LAN
+##
+##
+- name: 1 - Test wireless LAN creation
+ netbox.netbox.netbox_wireless_lan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ ssid: Wireless LAN One
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['wireless_lan']['ssid'] == "Wireless LAN One"
+ - test_one['msg'] == "wireless_lan Wireless LAN One created"
+
+- name: 2 - Test duplicate wireless LAN
+ netbox.netbox.netbox_wireless_lan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ ssid: Wireless LAN One
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['wireless_lan']['ssid'] == "Wireless LAN One"
+ - test_two['msg'] == "wireless_lan Wireless LAN One already exists"
+
+- name: 3 - Test update
+ netbox.netbox.netbox_wireless_lan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ ssid: Wireless LAN One
+ description: New Description
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['description'] == "New Description"
+ - test_three['wireless_lan']['ssid'] == "Wireless LAN One"
+ - test_three['wireless_lan']['description'] == "New Description"
+ - test_three['msg'] == "wireless_lan Wireless LAN One updated"
+
+- name: 4 - Test delete
+ netbox.netbox.netbox_wireless_lan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ ssid: Wireless LAN One
+ state: absent
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "wireless_lan Wireless LAN One deleted"
+
+- name: 5 - Create wireless LAN with all parameters
+ netbox.netbox.netbox_wireless_lan:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ ssid: Wireless Network One
+ description: Cool Wireless Network
+ auth_type: wpa-enterprise
+ auth_cipher: aes
+ auth_psk: psk123456
+ tags:
+ - tagA
+ - tagB
+ - tagC
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['wireless_lan']['ssid'] == "Wireless Network One"
+ - test_five['wireless_lan']['description'] == "Cool Wireless Network"
+ - test_five['wireless_lan']['auth_type'] == "wpa-enterprise"
+ - test_five['wireless_lan']['auth_cipher'] == "aes"
+ - test_five['wireless_lan']['auth_psk'] == "psk123456"
+ # - test_five['wireless_lan']['tags'] | length == 3
+ - test_five['msg'] == "wireless_lan Wireless Network One created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan_group.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan_group.yml
new file mode 100644
index 000000000..57daaf62c
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_lan_group.yml
@@ -0,0 +1,97 @@
+---
+##
+##
+### NETBOX_WIRELESS_LAN_GROUP
+##
+##
+- name: 1 - Test wireless LAN group creation
+ netbox.netbox.netbox_wireless_lan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Wireless LAN Group One
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['wireless_lan_group']['name'] == "Wireless LAN Group One"
+ - test_one['msg'] == "wireless_lan_group Wireless LAN Group One created"
+
+- name: 2 - Test duplicate wireless LAN group
+ netbox.netbox.netbox_wireless_lan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Wireless LAN Group One
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['wireless_lan_group']['name'] == "Wireless LAN Group One"
+ - test_two['msg'] == "wireless_lan_group Wireless LAN Group One already exists"
+
+- name: 3 - Test update
+ netbox.netbox.netbox_wireless_lan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Wireless LAN Group One
+ description: New Description
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['description'] == "New Description"
+ - test_three['wireless_lan_group']['name'] == "Wireless LAN Group One"
+ - test_three['wireless_lan_group']['description'] == "New Description"
+ - test_three['msg'] == "wireless_lan_group Wireless LAN Group One updated"
+
+- name: 4 - Test delete
+ netbox.netbox.netbox_wireless_lan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Wireless LAN Group One
+ state: absent
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "wireless_lan_group Wireless LAN Group One deleted"
+
+- name: 5 - Create wireless LAN group with all parameters
+ netbox.netbox.netbox_wireless_lan_group:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ name: Wireless LAN Group One
+ description: Cool Wireless LAN Group
+ tags:
+ - tagA
+ - tagB
+ - tagC
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['wireless_lan_group']['name'] == "Wireless LAN Group One"
+ - test_five['wireless_lan_group']['description'] == "Cool Wireless LAN Group"
+ - test_five['wireless_lan_group']['tags'] | length == 3
+ - test_five['msg'] == "wireless_lan_group Wireless LAN Group One created"
diff --git a/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_link.yml b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_link.yml
new file mode 100644
index 000000000..b026db81e
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/integration/targets/v4.0/tasks/netbox_wireless_link.yml
@@ -0,0 +1,130 @@
+---
+##
+##
+### NETBOX_WIRELESS_LINK
+##
+##
+- name: 1 - Test wireless link creation
+ netbox.netbox.netbox_wireless_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ interface_a:
+ device: Test Nexus One
+ name: wlink1
+ interface_b:
+ device: test100
+ name: wlink1
+ register: test_one
+
+- name: 1 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_one is changed
+ - test_one['diff']['before']['state'] == "absent"
+ - test_one['diff']['after']['state'] == "present"
+ - test_one['wireless_link']['interface_a'] == 6
+ - test_one['wireless_link']['interface_b'] == 5
+ - test_one['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 created"
+
+- name: 2 - Test duplicate wireless link
+ netbox.netbox.netbox_wireless_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ interface_a:
+ device: Test Nexus One
+ name: wlink1
+ interface_b:
+ device: test100
+ name: wlink1
+ register: test_two
+
+- name: 2 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - not test_two['changed']
+ - test_two['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 already exists"
+
+- name: 3 - Test update
+ netbox.netbox.netbox_wireless_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ interface_a:
+ device: Test Nexus One
+ name: wlink1
+ interface_b:
+ device: test100
+ name: wlink1
+ status: planned
+ register: test_three
+
+- name: 3 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_three is changed
+ - test_three['diff']['after']['status'] == "planned"
+ - test_three['wireless_link']['status'] == "planned"
+ - test_three['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 updated"
+
+- name: 4 - Test delete
+ netbox.netbox.netbox_wireless_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ interface_a:
+ device: Test Nexus One
+ name: wlink1
+ interface_b:
+ device: test100
+ name: wlink1
+ state: absent
+ register: test_four
+
+- name: 4 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_four is changed
+ - test_four['diff']['before']['state'] == "present"
+ - test_four['diff']['after']['state'] == "absent"
+ - test_four['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 deleted"
+
+- name: 5 - Create wireless link with all parameters
+ netbox.netbox.netbox_wireless_link:
+ netbox_url: http://localhost:32768
+ netbox_token: "0123456789abcdef0123456789abcdef01234567"
+ data:
+ interface_a:
+ device: Test Nexus One
+ name: wlink1
+ interface_b:
+ device: test100
+ name: wlink1
+ ssid: Wireless Network One
+ description: Cool Wireless Network
+ auth_type: wpa-enterprise
+ auth_cipher: aes
+ auth_psk: psk123456
+ tags:
+ - tagA
+ - tagB
+ - tagC
+ state: present
+ register: test_five
+
+- name: 5 - ASSERT
+ ansible.builtin.assert:
+ that:
+ - test_five is changed
+ - test_five['diff']['before']['state'] == "absent"
+ - test_five['diff']['after']['state'] == "present"
+ - test_five['wireless_link']['ssid'] == "Wireless Network One"
+ - test_five['wireless_link']['description'] == "Cool Wireless Network"
+ - test_five['wireless_link']['interface_a'] == 6
+ - test_five['wireless_link']['interface_b'] == 5
+ - test_five['wireless_link']['auth_type'] == "wpa-enterprise"
+ - test_five['wireless_link']['auth_cipher'] == "aes"
+ - test_five['wireless_link']['auth_psk'] == "psk123456"
+ # - test_five['wireless_link']['tags'] | length == 3
+ - test_five['msg'] == "wireless_link Test Nexus One wlink1 <> test100 wlink1 created"
diff --git a/ansible_collections/netbox/netbox/tests/netbox-docker/v3.5/docker-compose.override.yml b/ansible_collections/netbox/netbox/tests/netbox-docker/v3.5/docker-compose.override.yml
index ed3c533aa..d9831f94c 100644
--- a/ansible_collections/netbox/netbox/tests/netbox-docker/v3.5/docker-compose.override.yml
+++ b/ansible_collections/netbox/netbox/tests/netbox-docker/v3.5/docker-compose.override.yml
@@ -1,5 +1,5 @@
---
-version: '3.4'
+version: "3.4"
services:
netbox-worker: &netbox-override
image: netboxcommunity/netbox:v3.5
@@ -7,7 +7,7 @@ services:
start_period: 300s
timeout: 3s
interval: 15s
- test: "ps -aux | grep -v grep | grep -q rqworker || exit 1"
+ test: ps -aux | grep -v grep | grep -q rqworker || exit 1
netbox:
<<: *netbox-override
ports:
@@ -16,10 +16,10 @@ services:
start_period: 300s
timeout: 3s
interval: 15s
- test: "curl -f http://localhost:8080/api/ || exit 1"
+ test: curl -f http://localhost:8080/api/ || exit 1
environment:
SKIP_SUPERUSER: "false"
SUPERUSER_API_TOKEN: "0123456789abcdef0123456789abcdef01234567"
- SUPERUSER_EMAIL: "admin@localhost.com"
- SUPERUSER_NAME: "admin"
- SUPERUSER_PASSWORD: "admin"
+ SUPERUSER_EMAIL: admin@localhost.com
+ SUPERUSER_NAME: admin
+ SUPERUSER_PASSWORD: admin
diff --git a/ansible_collections/netbox/netbox/tests/netbox-docker/v3.6/docker-compose.override.yml b/ansible_collections/netbox/netbox/tests/netbox-docker/v3.6/docker-compose.override.yml
index bb526050c..6b8d1f5cf 100644
--- a/ansible_collections/netbox/netbox/tests/netbox-docker/v3.6/docker-compose.override.yml
+++ b/ansible_collections/netbox/netbox/tests/netbox-docker/v3.6/docker-compose.override.yml
@@ -1,5 +1,5 @@
---
-version: '3.4'
+version: "3.4"
services:
netbox-worker: &netbox-override
image: netboxcommunity/netbox:v3.6
@@ -7,7 +7,7 @@ services:
start_period: 300s
timeout: 3s
interval: 15s
- test: "ps -aux | grep -v grep | grep -q rqworker || exit 1"
+ test: ps -aux | grep -v grep | grep -q rqworker || exit 1
netbox:
<<: *netbox-override
ports:
@@ -16,10 +16,10 @@ services:
start_period: 300s
timeout: 3s
interval: 15s
- test: "curl -f http://localhost:8080/api/ || exit 1"
+ test: curl -f http://localhost:8080/api/ || exit 1
environment:
SKIP_SUPERUSER: "false"
SUPERUSER_API_TOKEN: "0123456789abcdef0123456789abcdef01234567"
- SUPERUSER_EMAIL: "admin@localhost.com"
- SUPERUSER_NAME: "admin"
- SUPERUSER_PASSWORD: "admin"
+ SUPERUSER_EMAIL: admin@localhost.com
+ SUPERUSER_NAME: admin
+ SUPERUSER_PASSWORD: admin
diff --git a/ansible_collections/netbox/netbox/tests/netbox-docker/v3.7/docker-compose.override.yml b/ansible_collections/netbox/netbox/tests/netbox-docker/v3.7/docker-compose.override.yml
index 47e7e80c0..916d5ea73 100644
--- a/ansible_collections/netbox/netbox/tests/netbox-docker/v3.7/docker-compose.override.yml
+++ b/ansible_collections/netbox/netbox/tests/netbox-docker/v3.7/docker-compose.override.yml
@@ -1,5 +1,5 @@
---
-version: '3.4'
+version: "3.4"
services:
netbox-worker: &netbox-override
image: netboxcommunity/netbox:v3.7
@@ -7,7 +7,7 @@ services:
start_period: 300s
timeout: 3s
interval: 15s
- test: "ps -aux | grep -v grep | grep -q rqworker || exit 1"
+ test: ps -aux | grep -v grep | grep -q rqworker || exit 1
netbox:
<<: *netbox-override
ports:
@@ -16,10 +16,10 @@ services:
start_period: 300s
timeout: 3s
interval: 15s
- test: "curl -f http://localhost:8080/api/ || exit 1"
+ test: curl -f http://localhost:8080/api/ || exit 1
environment:
SKIP_SUPERUSER: "false"
SUPERUSER_API_TOKEN: "0123456789abcdef0123456789abcdef01234567"
- SUPERUSER_EMAIL: "admin@localhost.com"
- SUPERUSER_NAME: "admin"
- SUPERUSER_PASSWORD: "admin"
+ SUPERUSER_EMAIL: admin@localhost.com
+ SUPERUSER_NAME: admin
+ SUPERUSER_PASSWORD: admin
diff --git a/ansible_collections/netbox/netbox/tests/netbox-docker/v4.0/docker-compose.override.yml b/ansible_collections/netbox/netbox/tests/netbox-docker/v4.0/docker-compose.override.yml
new file mode 100644
index 000000000..ef74aafa9
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tests/netbox-docker/v4.0/docker-compose.override.yml
@@ -0,0 +1,25 @@
+---
+version: "3.4"
+services:
+ netbox-worker: &netbox-override
+ image: netboxcommunity/netbox:v4.0
+ healthcheck:
+ start_period: 300s
+ timeout: 3s
+ interval: 15s
+ test: ps -aux | grep -v grep | grep -q rqworker || exit 1
+ netbox:
+ <<: *netbox-override
+ ports:
+ - 32768:8080
+ healthcheck:
+ start_period: 300s
+ timeout: 3s
+ interval: 15s
+ test: curl -f http://localhost:8080/api/ || exit 1
+ environment:
+ SKIP_SUPERUSER: "false"
+ SUPERUSER_API_TOKEN: "0123456789abcdef0123456789abcdef01234567"
+ SUPERUSER_EMAIL: admin@localhost.com
+ SUPERUSER_NAME: admin
+ SUPERUSER_PASSWORD: admin
diff --git a/ansible_collections/netbox/netbox/tests/sanity/ignore-2.11.txt b/ansible_collections/netbox/netbox/tests/sanity/ignore-2.11.txt
deleted file mode 100644
index 6a8f5ac59..000000000
--- a/ansible_collections/netbox/netbox/tests/sanity/ignore-2.11.txt
+++ /dev/null
@@ -1 +0,0 @@
-plugins/inventory/nb_inventory.py pylint!skip
diff --git a/ansible_collections/netbox/netbox/tox-ansible.ini b/ansible_collections/netbox/netbox/tox-ansible.ini
new file mode 100644
index 000000000..4b2f004f6
--- /dev/null
+++ b/ansible_collections/netbox/netbox/tox-ansible.ini
@@ -0,0 +1,11 @@
+[ansible]
+skip =
+ py3.7
+ py3.8
+ py3.9
+ 2.9
+ 2.10
+ 2.11
+ 2.12
+ 2.13
+ 2.14
\ No newline at end of file
diff --git a/ansible_collections/purestorage/flasharray/.github/workflows/main.yml b/ansible_collections/purestorage/flasharray/.github/workflows/main.yml
index 27d5532d2..1e8544e3b 100644
--- a/ansible_collections/purestorage/flasharray/.github/workflows/main.yml
+++ b/ansible_collections/purestorage/flasharray/.github/workflows/main.yml
@@ -15,6 +15,7 @@ jobs:
ansible:
- stable-2.15
- stable-2.16
+ - stable-2.17
- devel
python-version:
- 3.9
@@ -24,6 +25,8 @@ jobs:
- python-version: 3.9
ansible: stable-2.16
- python-version: 3.9
+ ansible: stable-2.17
+ - python-version: 3.9
ansible: devel
steps:
- name: Check out code
diff --git a/ansible_collections/purestorage/flasharray/CHANGELOG.rst b/ansible_collections/purestorage/flasharray/CHANGELOG.rst
index 111a76e53..44fa6a613 100644
--- a/ansible_collections/purestorage/flasharray/CHANGELOG.rst
+++ b/ansible_collections/purestorage/flasharray/CHANGELOG.rst
@@ -5,6 +5,16 @@ Purestorage.Flasharray Release Notes
.. contents:: Topics
+v1.28.1
+=======
+
+Bugfixes
+--------
+
+- purefa_network - Fix issue with clearing network interface addresses
+- purefa_network - Resolve issue when setting a network port on a new array
+- purefa_policy - Enhanced idempotency for snapshot policy rules
+
v1.28.0
=======
diff --git a/ansible_collections/purestorage/flasharray/FILES.json b/ansible_collections/purestorage/flasharray/FILES.json
index 81e70779b..cdd19cf58 100644
--- a/ansible_collections/purestorage/flasharray/FILES.json
+++ b/ansible_collections/purestorage/flasharray/FILES.json
@@ -750,6 +750,13 @@
"format": 1
},
{
+ "name": "changelogs/fragments/567_newarray_network.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "133ca1f5bdf8e67168153906cfc60fbac837265a42e934b410c624e1c954c0a8",
+ "format": 1
+ },
+ {
"name": "changelogs/fragments/531_ra_rest.yaml",
"ftype": "file",
"chksum_type": "sha256",
@@ -946,6 +953,13 @@
"format": 1
},
{
+ "name": "changelogs/fragments/565_fix_clear_interface.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f82f45bc0c45406a1f31e022272f5402feea9acb1fbc12558acd850b756619c6",
+ "format": 1
+ },
+ {
"name": "changelogs/fragments/560_snapshot_epoch.yaml",
"ftype": "file",
"chksum_type": "sha256",
@@ -1219,6 +1233,13 @@
"format": 1
},
{
+ "name": "changelogs/fragments/569_enhanced_snap_policy.yaml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72668007fa3552289903f4c439f82eba3cbbeaf4098a16728ef92bd0e3f70e0d",
+ "format": 1
+ },
+ {
"name": "changelogs/fragments/136_add_vol_get_send_info.yaml",
"ftype": "file",
"chksum_type": "sha256",
@@ -1565,14 +1586,14 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0b587f1c2ab470c01ea25d07a6102c7bd45c511ce57596be9c20f38bb3b5e456",
+ "chksum_sha256": "dbb3a56908b638220035f4e7cd04174075a0558bf455d0562da15d2ba8247eff",
"format": 1
},
{
"name": "changelogs/.plugin-cache.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2eb4d8b274c69bde5af5d3e8dfb793a902429d99ca679286f7cce43712013dad",
+ "chksum_sha256": "1c4ab55e3f9d085516eb971584c311fa6065bcc4503fe7530422cf4f491bdb88",
"format": 1
},
{
@@ -1824,7 +1845,7 @@
"name": "plugins/modules/purefa_policy.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "eb24acd1b8171f2a4f746db11574f9973956393ca450507029e08bd0a4de9f52",
+ "chksum_sha256": "75f1461aa996b7af83b6ab595aca7f42954d1a8d3af0ee87f00756ca7bf0a08e",
"format": 1
},
{
@@ -1915,7 +1936,7 @@
"name": "plugins/modules/purefa_network.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e9a2651b81bb8edd1fa026e1a1d9f2496478f207405178b47f5fab8c2e27ae9",
+ "chksum_sha256": "ec4e1720c6c824a076bb08442a3164c37589dce5088921ad8eaea8bbec156f94",
"format": 1
},
{
@@ -2118,7 +2139,7 @@
"name": "plugins/modules/purefa_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "71476398cc0db0ada0a924a210ea483bcad581eb94dd778562f845f510fcfb01",
+ "chksum_sha256": "901490359cac7ad45d4ed9cba3249b91807c66dedb5243c800b9be6fd736d967",
"format": 1
},
{
@@ -2265,7 +2286,7 @@
"name": ".github/workflows/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de55a03dadff2ec71a72efd44dcbcb3c2950d1a00aea1ffd6223c6ce9a9e815a",
+ "chksum_sha256": "dba5acd44f50630ab9db379b8474245603d41a7eb50388aab39bb5d1f9fca914",
"format": 1
},
{
@@ -2286,7 +2307,7 @@
"name": "README.md",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f7f6650c570a9b0da9f733fa208da1166b49bddf08658d59ef9905648f80060f",
+ "chksum_sha256": "0bc220295e26ec0084734261891239b1a7fedbb8c54cb35f95b944f80881f9a3",
"format": 1
},
{
@@ -2370,7 +2391,7 @@
"name": "CHANGELOG.rst",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ef83ad3fb215623f94ceb80b0dab6978894dd59b90203d2999e9026fa9627b76",
+ "chksum_sha256": "dda2052fb4a4af4461e1c56807cf896aae1754f6f762c5641b6ddccfbb49fbff",
"format": 1
}
],
diff --git a/ansible_collections/purestorage/flasharray/MANIFEST.json b/ansible_collections/purestorage/flasharray/MANIFEST.json
index 5c39c4c14..91c975a81 100644
--- a/ansible_collections/purestorage/flasharray/MANIFEST.json
+++ b/ansible_collections/purestorage/flasharray/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "purestorage",
"name": "flasharray",
- "version": "1.28.0",
+ "version": "1.28.1",
"authors": [
"Pure Storage Ansible Team <pure-ansible-team@purestorage.com>"
],
@@ -29,7 +29,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab3ade4ad530c2f9141631d1ac2c2b431cec2651753ec9e7dc3a845eda21dd82",
+ "chksum_sha256": "9898c292949e2e5cc9ba99514c95c7b8706ada3f4eb938acbd7061de21ca4d00",
"format": 1
},
"format": 1
diff --git a/ansible_collections/purestorage/flasharray/README.md b/ansible_collections/purestorage/flasharray/README.md
index 92ae085d8..ea5923bf4 100644
--- a/ansible_collections/purestorage/flasharray/README.md
+++ b/ansible_collections/purestorage/flasharray/README.md
@@ -6,14 +6,11 @@
# Pure Storage FlashArray Collection
-The Pure Storage FlashArray collection consists of the latest versions of the FlashArray modules and also includes support for Cloud Block Store
-
-## Supported Platforms
+## Description
-- Pure Storage FlashArray with Purity 6.1.0 or later
-- Certain modules and functionality require higher versions of Purity. Modules will inform you if your Purity version is not high enough to use a module.
+The Pure Storage FlashArray collection consists of the latest versions of the FlashArray modules and also includes support for Cloud Block Store
-## Prerequisites
+## Requirements
- Ansible 2.15 or later
- Pure Storage FlashArray system running Purity 6.1.0 or later
@@ -28,11 +25,76 @@ The Pure Storage FlashArray collection consists of the latest versions of the Fl
- pycountry
- packaging
-## Idempotency
+## Installation
+
+Before using this collection, you need to install it with the Ansible Galaxy command-line tool:
+
+```
+ansible-galaxy collection install purestorage.flasharray
+```
+
+You can also include it in a `requirements.yml` file and install it with `ansible-galaxy collection install -r requirements.yml`, using the format:
+
+```
+collections:
+ - name: purestorage.flasharray
+```
+
+Note that if you install the collection from Ansible Galaxy, it will not be upgraded automatically when you upgrade the Ansible package.
+
+To upgrade the collection to the latest available version, run the following command:
+
+```
+ansible-galaxy collection install purestorage.flasharray --upgrade
+```
+
+You can also install a specific version of the collection, for example, if you need to downgrade when something is broken in the latest version (please report an issue in this repository). Use the following syntax to install version 1.0.0:
+
+```
+ansible-galaxy collection install purestorage.flasharray:==1.0.0
+```
+
+See [using Ansible collections](https://docs.ansible.com/ansible/devel/user_guide/collections_using.html) for more details.
+
+## Use Cases
+
+This collection can be used to perform any task that you would normally perform using the command-line or GUI on a FlashArray. This also includes using the collection in larger playbooks to perform actions with storage-related components, such as:
+* Infrastructure Drift Control
+* Database Cloning
+* Disaster Recovery
+
+## Contributing
+
+There are many ways in which you can participate in the project, for example:
+
+* Submit bugs and feature requests, and help us verify them as they are checked in
+* Review source code changes
+* Review the documentation and make pull requests for anything from typos to new content
+* If you are interested in fixing issues and contributing directly to the code base, please see the details below:
+ 1. Fork this project into your account if you are a first-time contributor.
+ 2. Create a branch based on the latest `master` branch and commit your changes on this branch.
+ 3. You may merge the Pull Request once you have the sign-off of two other developers; if you do not have permission to do that, you may request the second reviewer to merge it for you.
+
+## Support
+
+Only the latest release of this collection is supported.
+
+For support please raise a GitHub Issue on this repository.
+
+If you are a Pure Storage customer, you may log a support call with the Pure Storage Support team ([support\@purestorage.com](mailto:support@purestorage.com?subject=FlashArray-Ansible-Collection))
+
+If you have a Red Hat Ansible support contract, as this is a Certified collection, you may log a support call with Red Hat directly.
+
+## Release Notes
+
+Release notes for this collection can be found [here](https://github.com/Pure-Storage-Ansible/FlashArray-Collection/releases)
+
+## Related Information
+### Idempotency
All modules are idempotent with the exception of modules that change or set passwords. Due to security requirements, existing passwords cannot be validated against and therefore will always be modified, even if there is no change.
-## Available Modules
+### Available Modules
- purefa_ad - manage FlashArray Active Directory accounts
- purefa_admin - Configure Pure Storage FlashArray Global Admin settings
@@ -95,18 +157,10 @@ All modules are idempotent with the exception of modules that change or set pass
- purefa_volume - manage volumes on the FlashArray
- purefa_volume_tags - manage volume tags on the FlashArray
-## Instructions
-
-Install the Pure Storage FlashArray collection on your Ansible management host.
-
-- Using ansible-galaxy (Ansible 2.9 or later):
-```
-ansible-galaxy collection install purestorage.flasharray -p ~/.ansible/collections
-```
-
-## License
+## License Information
[BSD-2-Clause](https://directory.fsf.org/wiki?title=License:FreeBSD)
+
[GPL-3.0-or-later](https://www.gnu.org/licenses/gpl-3.0.en.html)
## Author
diff --git a/ansible_collections/purestorage/flasharray/changelogs/.plugin-cache.yaml b/ansible_collections/purestorage/flasharray/changelogs/.plugin-cache.yaml
index f99c9ee8a..7a5c7b9aa 100644
--- a/ansible_collections/purestorage/flasharray/changelogs/.plugin-cache.yaml
+++ b/ansible_collections/purestorage/flasharray/changelogs/.plugin-cache.yaml
@@ -322,4 +322,4 @@ plugins:
strategy: {}
test: {}
vars: {}
-version: 1.28.0
+version: 1.28.1
diff --git a/ansible_collections/purestorage/flasharray/changelogs/changelog.yaml b/ansible_collections/purestorage/flasharray/changelogs/changelog.yaml
index c73a6b48b..07129c043 100644
--- a/ansible_collections/purestorage/flasharray/changelogs/changelog.yaml
+++ b/ansible_collections/purestorage/flasharray/changelogs/changelog.yaml
@@ -609,6 +609,17 @@ releases:
- 561_remote_snap_info.yaml
- 562_host_vol_fix.yaml
release_date: '2024-05-01'
+ 1.28.1:
+ changes:
+ bugfixes:
+ - purefa_network - Fix issue with clearing network interface addresses
+ - purefa_network - Resolve issue when setting a network port on a new array
+ - purefa_policy - Enhanced idempotency for snapshot policy rules
+ fragments:
+ - 565_fix_clear_interface.yaml
+ - 567_newarray_network.yaml
+ - 569_enhanced_snap_policy.yaml
+ release_date: '2024-05-31'
1.4.0:
changes:
bugfixes:
diff --git a/ansible_collections/purestorage/flasharray/changelogs/fragments/565_fix_clear_interface.yaml b/ansible_collections/purestorage/flasharray/changelogs/fragments/565_fix_clear_interface.yaml
new file mode 100644
index 000000000..8830deddf
--- /dev/null
+++ b/ansible_collections/purestorage/flasharray/changelogs/fragments/565_fix_clear_interface.yaml
@@ -0,0 +1,2 @@
+bugfixes:
+ - purefa_network - Fix issue with clearing network interface addresses
diff --git a/ansible_collections/purestorage/flasharray/changelogs/fragments/567_newarray_network.yaml b/ansible_collections/purestorage/flasharray/changelogs/fragments/567_newarray_network.yaml
new file mode 100644
index 000000000..2591d79ac
--- /dev/null
+++ b/ansible_collections/purestorage/flasharray/changelogs/fragments/567_newarray_network.yaml
@@ -0,0 +1,2 @@
+bugfixes:
+ - purefa_network - Resolve issue when setting a network port on a new array
diff --git a/ansible_collections/purestorage/flasharray/changelogs/fragments/569_enhanced_snap_policy.yaml b/ansible_collections/purestorage/flasharray/changelogs/fragments/569_enhanced_snap_policy.yaml
new file mode 100644
index 000000000..bac87c8d2
--- /dev/null
+++ b/ansible_collections/purestorage/flasharray/changelogs/fragments/569_enhanced_snap_policy.yaml
@@ -0,0 +1,2 @@
+bugfixes:
+ - purefa_policy - Enhanced idempotency for snapshot policy rules
diff --git a/ansible_collections/purestorage/flasharray/plugins/modules/purefa_info.py b/ansible_collections/purestorage/flasharray/plugins/modules/purefa_info.py
index cc2c92fdc..ca6e73dfe 100644
--- a/ansible_collections/purestorage/flasharray/plugins/modules/purefa_info.py
+++ b/ansible_collections/purestorage/flasharray/plugins/modules/purefa_info.py
@@ -1800,7 +1800,7 @@ def generate_del_pgroups_dict(module, array):
return pgroups_info
-def generate_pgroups_dict(module, array, performance):
+def generate_pgroups_dict(module, array):
pgroups_info = {}
api_version = array._list_available_rest_versions()
pgroups = array.list_pgroups()
diff --git a/ansible_collections/purestorage/flasharray/plugins/modules/purefa_network.py b/ansible_collections/purestorage/flasharray/plugins/modules/purefa_network.py
index c296707d0..97a510504 100644
--- a/ansible_collections/purestorage/flasharray/plugins/modules/purefa_network.py
+++ b/ansible_collections/purestorage/flasharray/plugins/modules/purefa_network.py
@@ -319,6 +319,8 @@ def update_interface(module, array, interface):
"services": sorted(interface["services"]),
"slaves": sorted(interface["slaves"]),
}
+ if not current_state["address"]:
+ current_state["address"] = "0.0.0.0"
array6 = get_array(module)
subinterfaces = sorted(current_state["slaves"])
if module.params["subinterfaces"]:
@@ -339,19 +341,19 @@ def update_interface(module, array, interface):
enabled = current_state["enabled"]
if not current_state["gateway"]:
try:
- if valid_ipv4(interface["address"]):
+ if valid_ipv4(current_state["address"]):
current_state["gateway"] = None
- elif valid_ipv6(interface["address"]):
+ elif valid_ipv6(current_state["address"]):
current_state["gateway"] = None
except AttributeError:
current_state["gateway"] = None
if not module.params["servicelist"]:
- services = sorted(interface["services"])
+ services = current_state["services"]
else:
services = sorted(module.params["servicelist"])
if not module.params["address"]:
- address = interface["address"]
- netmask = interface["netmask"]
+ address = current_state["address"]
+ netmask = current_state["netmask"]
else:
if module.params["gateway"] and module.params["gateway"] not in [
"0.0.0.0",
@@ -359,7 +361,7 @@ def update_interface(module, array, interface):
]:
if module.params["gateway"] not in IPNetwork(module.params["address"]):
module.fail_json(msg="Gateway and subnet are not compatible.")
- if not module.params["gateway"] and interface["gateway"] not in [
+ if not module.params["gateway"] and current_state["gateway"] not in [
None,
IPNetwork(module.params["address"]),
]:
@@ -368,7 +370,7 @@ def update_interface(module, array, interface):
if address in ["0.0.0.0", "::"]:
address = None
if not module.params["mtu"]:
- mtu = interface["mtu"]
+ mtu = current_state["mtu"]
else:
if not 1280 <= module.params["mtu"] <= 9216:
module.fail_json(
@@ -379,6 +381,8 @@ def update_interface(module, array, interface):
else:
mtu = module.params["mtu"]
if module.params["address"]:
+ if not address:
+ address = "0.0.0.0"
if valid_ipv4(address):
netmask = str(IPNetwork(module.params["address"]).netmask)
else:
@@ -386,9 +390,9 @@ def update_interface(module, array, interface):
if netmask in ["0.0.0.0", "0"]:
netmask = None
else:
- netmask = interface["netmask"]
+ netmask = current_state["netmask"]
if not module.params["gateway"]:
- gateway = interface["gateway"]
+ gateway = current_state["gateway"]
elif module.params["gateway"] in ["0.0.0.0", "::"]:
gateway = None
elif valid_ipv4(address):
@@ -427,7 +431,7 @@ def update_interface(module, array, interface):
changed = True
if (
module.params["servicelist"]
- and sorted(module.params["servicelist"]) != interface["services"]
+ and sorted(module.params["servicelist"]) != current_state["services"]
):
api_version = array._list_available_rest_versions()
if FC_ENABLE_API in api_version:
@@ -450,7 +454,8 @@ def update_interface(module, array, interface):
"Servicelist not updated as pypureclient module is required"
)
if (
- "management" in interface["services"] or "app" in interface["services"]
+ "management" in current_state["services"]
+ or "app" in current_state["services"]
) and address in ["0.0.0.0/0", "::/0"]:
module.fail_json(
msg="Removing IP address from a management or app port is not supported"
diff --git a/ansible_collections/purestorage/flasharray/plugins/modules/purefa_policy.py b/ansible_collections/purestorage/flasharray/plugins/modules/purefa_policy.py
index 7247d376f..636a77c50 100644
--- a/ansible_collections/purestorage/flasharray/plugins/modules/purefa_policy.py
+++ b/ansible_collections/purestorage/flasharray/plugins/modules/purefa_policy.py
@@ -1416,64 +1416,67 @@ def update_policy(module, array, api_version, all_squash):
).items
)
if rules:
- rule_name = ""
for rule in range(0, len(rules)):
- if rules[rule].client_name == module.params["snap_client_name"]:
- rule_name = rules[rule].name
- break
- if not rule_name:
- if module.params["snap_keep_for"] < module.params["snap_every"]:
+ if (
+ rules[rule].client_name == module.params["snap_client_name"]
+ and int(rules[rule].every / 60000)
+ == module.params["snap_every"]
+ and int(rules[rule].keep_for / 60000)
+ == module.params["snap_keep_for"]
+ ):
+ module.exit_json(changed=False)
+ if module.params["snap_keep_for"] < module.params["snap_every"]:
+ module.fail_json(
+ msg="Retention period (snap_keep_for) cannot be less than snapshot interval (snap_every)."
+ )
+ if module.params["snap_at"]:
+ if not module.params["snap_every"] % 1440 == 0:
module.fail_json(
- msg="Retention period (snap_keep_for) cannot be less than snapshot interval (snap_every)."
+ msg="snap_at time can only be set if snap_every is multiple of 1440"
+ )
+ if suffix_enabled:
+ rules = flasharray.PolicyrulesnapshotpostRules(
+ at=convert_to_millisecs(module.params["snap_at"]),
+ client_name=module.params["snap_client_name"],
+ every=module.params["snap_every"] * 60000,
+ keep_for=module.params["snap_keep_for"] * 60000,
+ suffix=module.params["snap_suffix"],
)
- if module.params["snap_at"]:
- if not module.params["snap_every"] % 1440 == 0:
- module.fail_json(
- msg="snap_at time can only be set if snap_every is multiple of 1440"
- )
- if suffix_enabled:
- rules = flasharray.PolicyrulesnapshotpostRules(
- at=convert_to_millisecs(module.params["snap_at"]),
- client_name=module.params["snap_client_name"],
- every=module.params["snap_every"] * 60000,
- keep_for=module.params["snap_keep_for"] * 60000,
- suffix=module.params["snap_suffix"],
- )
- else:
- rules = flasharray.PolicyrulesnapshotpostRules(
- at=convert_to_millisecs(module.params["snap_at"]),
- client_name=module.params["snap_client_name"],
- every=module.params["snap_every"] * 60000,
- keep_for=module.params["snap_keep_for"] * 60000,
- )
else:
- if suffix_enabled:
- rules = flasharray.PolicyrulesnapshotpostRules(
- client_name=module.params["snap_client_name"],
- every=module.params["snap_every"] * 60000,
- keep_for=module.params["snap_keep_for"] * 60000,
- suffix=module.params["snap_suffix"],
- )
- else:
- rules = flasharray.PolicyrulesnapshotpostRules(
- client_name=module.params["snap_client_name"],
- every=module.params["snap_every"] * 60000,
- keep_for=module.params["snap_keep_for"] * 60000,
- )
- rule = flasharray.PolicyRuleSnapshotPost(rules=[rules])
- changed_rule = True
- if not module.check_mode:
- rule_created = array.post_policies_snapshot_rules(
- policy_names=[module.params["name"]], rules=rule
- )
- if rule_created.status_code != 200:
- err_no = len(rule_created.errors) - 1
- module.fail_json(
- msg="Failed to create new rule for Snapshot policy {0}. Error: {1}".format(
- module.params["name"],
- rule_created.errors[err_no].message,
- )
+ rules = flasharray.PolicyrulesnapshotpostRules(
+ at=convert_to_millisecs(module.params["snap_at"]),
+ client_name=module.params["snap_client_name"],
+ every=module.params["snap_every"] * 60000,
+ keep_for=module.params["snap_keep_for"] * 60000,
+ )
+ else:
+ if suffix_enabled:
+ rules = flasharray.PolicyrulesnapshotpostRules(
+ client_name=module.params["snap_client_name"],
+ every=module.params["snap_every"] * 60000,
+ keep_for=module.params["snap_keep_for"] * 60000,
+ suffix=module.params["snap_suffix"],
+ )
+ else:
+ rules = flasharray.PolicyrulesnapshotpostRules(
+ client_name=module.params["snap_client_name"],
+ every=module.params["snap_every"] * 60000,
+ keep_for=module.params["snap_keep_for"] * 60000,
+ )
+ rule = flasharray.PolicyRuleSnapshotPost(rules=[rules])
+ changed_rule = True
+ if not module.check_mode:
+ rule_created = array.post_policies_snapshot_rules(
+ policy_names=[module.params["name"]], rules=rule
+ )
+ if rule_created.status_code != 200:
+ err_no = len(rule_created.errors) - 1
+ module.fail_json(
+ msg="Failed to create new rule for Snapshot policy {0}. Error: {1}".format(
+ module.params["name"],
+ rule_created.errors[err_no].message,
)
+ )
else:
if module.params["snap_keep_for"] < module.params["snap_every"]:
module.fail_json(
diff --git a/ansible_collections/vultr/cloud/.github/workflows/cleanup.yml b/ansible_collections/vultr/cloud/.github/workflows/cleanup.yml
index bb244725e..dc5263255 100644
--- a/ansible_collections/vultr/cloud/.github/workflows/cleanup.yml
+++ b/ansible_collections/vultr/cloud/.github/workflows/cleanup.yml
@@ -20,7 +20,7 @@ jobs:
fail-fast: false
matrix:
ansible:
- - stable-2.15
+ - stable-2.16
python:
- "3.10"
steps:
diff --git a/ansible_collections/vultr/cloud/.github/workflows/integration.yml b/ansible_collections/vultr/cloud/.github/workflows/integration.yml
index a79039300..4f7a25707 100644
--- a/ansible_collections/vultr/cloud/.github/workflows/integration.yml
+++ b/ansible_collections/vultr/cloud/.github/workflows/integration.yml
@@ -24,7 +24,7 @@ jobs:
fail-fast: false
matrix:
ansible:
- - stable-2.15
+ - stable-2.16
python:
- "3.10"
steps:
@@ -83,6 +83,6 @@ jobs:
--requirements
--group-by command
--group-by version
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
with:
fail_ci_if_error: false
diff --git a/ansible_collections/vultr/cloud/.github/workflows/sanity.yml b/ansible_collections/vultr/cloud/.github/workflows/sanity.yml
index ab2a3f14c..122f908fa 100644
--- a/ansible_collections/vultr/cloud/.github/workflows/sanity.yml
+++ b/ansible_collections/vultr/cloud/.github/workflows/sanity.yml
@@ -19,11 +19,12 @@ jobs:
run:
working-directory: ansible_collections/vultr/cloud
strategy:
+ # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix
matrix:
ansible:
- - stable-2.13
- stable-2.14
- stable-2.15
+ - stable-2.16
- devel
steps:
- name: Check out code
diff --git a/ansible_collections/vultr/cloud/.github/workflows/unit.yml b/ansible_collections/vultr/cloud/.github/workflows/unit.yml
index 216a67e5f..035d9b62c 100644
--- a/ansible_collections/vultr/cloud/.github/workflows/unit.yml
+++ b/ansible_collections/vultr/cloud/.github/workflows/unit.yml
@@ -23,7 +23,7 @@ jobs:
fail-fast: false
matrix:
ansible:
- - stable-2.15
+ - stable-2.16
python:
- "3.10"
steps:
@@ -65,6 +65,6 @@ jobs:
--requirements
--group-by command
--group-by version
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
with:
fail_ci_if_error: false
diff --git a/ansible_collections/vultr/cloud/CHANGELOG.rst b/ansible_collections/vultr/cloud/CHANGELOG.rst
index 867080cc0..27549f46e 100644
--- a/ansible_collections/vultr/cloud/CHANGELOG.rst
+++ b/ansible_collections/vultr/cloud/CHANGELOG.rst
@@ -5,6 +5,14 @@ Vultr Collection Release Notes
.. contents:: Topics
+v1.13.0
+=======
+
+Minor Changes
+-------------
+
+- instance, bare_metal - Implemented a new option ``skip_wait`` (https://github.com/vultr/ansible-collection-vultr/issues/119).
+
v1.12.1
=======
diff --git a/ansible_collections/vultr/cloud/FILES.json b/ansible_collections/vultr/cloud/FILES.json
index efb60cea2..fca1c624c 100644
--- a/ansible_collections/vultr/cloud/FILES.json
+++ b/ansible_collections/vultr/cloud/FILES.json
@@ -8,24 +8,45 @@
"format": 1
},
{
- "name": "changelogs",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "README.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ac3f49348bbac1c542da5f4ed92d3f4c10661a1cab318822a4be0d408c188fbe",
"format": 1
},
{
- "name": "changelogs/fragments",
+ "name": "CHANGELOG.rst",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3bd1303b7e0f9df4185875ff42e8c7a39c51297ba2e0118b0d75b5c0b4310596",
+ "format": 1
+ },
+ {
+ "name": "CONTRIBUTING.md",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d61725d614410e2ee0a900fb0f6b6d742ad8fb689ae27c4d6a3a7f89e82fc791",
+ "format": 1
+ },
+ {
+ "name": ".gitignore",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "1467ad38919911ec5759af24376728faf2361c0335d52fb8bea4853e7b698b48",
+ "format": 1
+ },
+ {
+ "name": "changelogs",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "changelogs/fragments/.keep",
+ "name": "changelogs/config.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "0d1c790424791b634a9c7704aa0e9ec56e3ff34a602f269d68a356d87c777b3a",
"format": 1
},
{
@@ -39,14 +60,35 @@
"name": "changelogs/changelog.yaml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4c3acebdf5e2a94572a3152197fa488806a5cc61b797220092efc9c7a10085f2",
+ "chksum_sha256": "8ff62c8a7dacff74daaa67818bd5af0ce5b688d2417116b399b2dd5c06375a36",
"format": 1
},
{
- "name": "changelogs/config.yaml",
+ "name": "changelogs/fragments",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "changelogs/fragments/.keep",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d1c790424791b634a9c7704aa0e9ec56e3ff34a602f269d68a356d87c777b3a",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "format": 1
+ },
+ {
+ "name": "codecov.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0271dcfe609d71afb466112f2d1c4c13943580fa97bb42b2baa08a1c37bb1c14",
+ "format": 1
+ },
+ {
+ "name": "COPYING",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c61f12da7cdad526bdcbed47a4c0a603e60dbbfdaf8b66933cd088e9132c303f",
"format": 1
},
{
@@ -67,35 +109,35 @@
"name": ".github/workflows/unit.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5b6f66b465bab31feebc85b6add960def2e50f82d5bcafa408873d2be5f2e722",
+ "chksum_sha256": "a0e8937cfd18b6fb26568fd47bf3b29380224c028d98b932ff20c9145612eb2b",
"format": 1
},
{
- "name": ".github/workflows/cleanup.yml",
+ "name": ".github/workflows/publish.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "300af2b95f5604bb00257206a589e731d4b36ceab75685d32975a0cddc6dae8d",
+ "chksum_sha256": "25ae9a6854a6ae27d4d9e41fcfde14d07d4272e4364deb5649abfb0db42ddb4d",
"format": 1
},
{
- "name": ".github/workflows/integration.yml",
+ "name": ".github/workflows/sanity.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b197d1be7422ff61a7aab5bad0d4dffc7c741990abab6132abe65db1a1f38586",
+ "chksum_sha256": "0eaa9d1363950caaaeca7a6af8fe9390298cc27c319ea73b3a9a97cf2c8982e2",
"format": 1
},
{
- "name": ".github/workflows/sanity.yml",
+ "name": ".github/workflows/cleanup.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5be1c7e95ed12ccc882570d1504b2273cf11104b0fa1167791cf1da12c97edda",
+ "chksum_sha256": "e696bee79563f077db39c4ee789c612c48f82bedc4d2f6053660ddd4c9b5511c",
"format": 1
},
{
- "name": ".github/workflows/publish.yml",
+ "name": ".github/workflows/integration.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "25ae9a6854a6ae27d4d9e41fcfde14d07d4272e4364deb5649abfb0db42ddb4d",
+ "chksum_sha256": "52e18428692e0c5eaa701734e91abc709f1881c3aa9ba28bdbe769056d827626",
"format": 1
},
{
@@ -106,31 +148,24 @@
"format": 1
},
{
- "name": "CHANGELOG.rst",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6efbfe17a2c629dd958e13979053d0533e3d9c58aeecd9387e2e9ef4a465df26",
- "format": 1
- },
- {
- "name": "COPYING",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c61f12da7cdad526bdcbed47a4c0a603e60dbbfdaf8b66933cd088e9132c303f",
+ "name": "meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "CONTRIBUTING.md",
+ "name": "meta/runtime.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d61725d614410e2ee0a900fb0f6b6d742ad8fb689ae27c4d6a3a7f89e82fc791",
+ "chksum_sha256": "5e1e422bd1c479643c7cd120a6935927d999101c1bbcb3345d0ee3a8166c19d0",
"format": 1
},
{
- "name": ".gitignore",
+ "name": ".ansible-lint",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1467ad38919911ec5759af24376728faf2361c0335d52fb8bea4853e7b698b48",
+ "chksum_sha256": "8e858e68d7c1937d65dfcad8b39b28b8d7e41aac65eff967772fba0c3a882a15",
"format": 1
},
{
@@ -141,255 +176,255 @@
"format": 1
},
{
- "name": "plugins/modules",
+ "name": "plugins/module_utils",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/dns_domain.py",
+ "name": "plugins/module_utils/common_instance.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de781d3439aba96749c83b24b29c15e1f62ac807c8cc793e08ea374ece4c3c68",
+ "chksum_sha256": "3db5675896b5b2a842f34edcaa022e0562ff0e0fc03c976afd602c2de5792527",
"format": 1
},
{
- "name": "plugins/modules/account_info.py",
+ "name": "plugins/module_utils/vultr_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "234df324cb37004dff1e9d5847998f97304c9228e67815f05b5087ab5709e911",
+ "chksum_sha256": "918154c2d0c27fe85bb15eae12b8bdc1415fefcd83e672adab5fd5a3eff442cc",
"format": 1
},
{
- "name": "plugins/modules/instance.py",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "eea21385ceae95edf0115ad3364bf161232ee50119b5b2ef5480fae6d9cef30b",
+ "name": "plugins/modules",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "plugins/modules/firewall_group.py",
+ "name": "plugins/modules/vpc_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7e5482639e8b600dff912fc03f7b0baa24a3cfa70f807eeeeb6c1ebfcf354f94",
+ "chksum_sha256": "903efd1d0f822de7aaaf659d968e13e9104d6af4c3667a782efd8bf056662829",
"format": 1
},
{
- "name": "plugins/modules/object_storage.py",
+ "name": "plugins/modules/firewall_rule.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e69e499783341fb8ec78c21d2d9976d96185c140e18f22864da3993c9774e1bc",
+ "chksum_sha256": "930069d8a2f087f4e7cb1b7b853e42fded5dfa8fbb9c532836132438049125fa",
"format": 1
},
{
- "name": "plugins/modules/firewall_rule_info.py",
+ "name": "plugins/modules/__init__.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "267c9f763168ce7b6c938e874b4048d6492add6766406d3a6cbc3d227ed14da3",
+ "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"format": 1
},
{
- "name": "plugins/modules/snapshot_info.py",
+ "name": "plugins/modules/dns_record.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a60ecc36574d60168903ee239185d42e636c8d26356bb84d6c666d8ab7368e50",
+ "chksum_sha256": "4b29de118b42237350431b7bef3dbf92ff98bfe989183756aea92efb0b73589d",
"format": 1
},
{
- "name": "plugins/modules/instance_info.py",
+ "name": "plugins/modules/firewall_group_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fc787595fdeecaf68aa253d7eff5ce08ab0b2b937c8cc180ae7d1e59c7139adc",
+ "chksum_sha256": "2e4dce46bd2ddbbd7ded11f11f8d4b57f15f603595f69d142e1c10bc8532e936",
"format": 1
},
{
- "name": "plugins/modules/user_info.py",
+ "name": "plugins/modules/vpc2_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ae4e7899a1107f0d9a2dbfac410209528d687ceef6c9f587b3d78cdb974d8e7f",
+ "chksum_sha256": "4d4a3f916a93826f20ed75d12557941dc2da27144bf882f9a68bf378f40bb88b",
"format": 1
},
{
- "name": "plugins/modules/vpc2.py",
+ "name": "plugins/modules/snapshot.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6c77428c298b1cde682cdf39e707d4dae26bc5efd9b056850a85d40b8226b8bd",
+ "chksum_sha256": "361a3cdac8de6b7ff2ba98859389d9d53e6a45b5f003bd069d9ea78a2759bcfc",
"format": 1
},
{
- "name": "plugins/modules/bare_metal.py",
+ "name": "plugins/modules/startup_script.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c16bbdcc107023e4ac71c5cc352d9eee650df7b26a30c2b7f950a63ae557d528",
+ "chksum_sha256": "14be407f3eb256735311fde075a3c983abb9b6932925d412e5782cec26c3e23f",
"format": 1
},
{
- "name": "plugins/modules/region_info.py",
+ "name": "plugins/modules/user.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ee6e31f3192bbe6e0db69cd7efef79a3a3fee40763071aa5c674f5c6951d7bdb",
+ "chksum_sha256": "38d3651f9b624fe512dae40c4ef24e4ccb0eae4ea9c394d260b851ff57dc8365",
"format": 1
},
{
- "name": "plugins/modules/plan_info.py",
+ "name": "plugins/modules/reserved_ip.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3648af6b8fd69f3bb66faf265767fd39a7afd5bebd5f88964db42dc6d0d8d392",
+ "chksum_sha256": "98c4609e5b2183915afef8d49fe3f2f62407f5faeaa1d92da7ff8fd8eaa8375e",
"format": 1
},
{
- "name": "plugins/modules/vpc_info.py",
+ "name": "plugins/modules/account_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "903efd1d0f822de7aaaf659d968e13e9104d6af4c3667a782efd8bf056662829",
+ "chksum_sha256": "234df324cb37004dff1e9d5847998f97304c9228e67815f05b5087ab5709e911",
"format": 1
},
{
- "name": "plugins/modules/block_storage.py",
+ "name": "plugins/modules/firewall_rule_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2a98391d3d84671552b48b0fad61811f753420f09b5b94626e511b3c74cf93f",
+ "chksum_sha256": "267c9f763168ce7b6c938e874b4048d6492add6766406d3a6cbc3d227ed14da3",
"format": 1
},
{
- "name": "plugins/modules/os_info.py",
+ "name": "plugins/modules/ssh_key.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0143ad73c6b10692c6577730523b0159782bb6a678d3946dc920b3b758565155",
+ "chksum_sha256": "5e1a8e741f196a6b080077b88aa14ec6595878108978ef230224592b161ca44b",
"format": 1
},
{
- "name": "plugins/modules/startup_script.py",
+ "name": "plugins/modules/vpc2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "14be407f3eb256735311fde075a3c983abb9b6932925d412e5782cec26c3e23f",
+ "chksum_sha256": "6c77428c298b1cde682cdf39e707d4dae26bc5efd9b056850a85d40b8226b8bd",
"format": 1
},
{
- "name": "plugins/modules/snapshot.py",
+ "name": "plugins/modules/object_storage.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "361a3cdac8de6b7ff2ba98859389d9d53e6a45b5f003bd069d9ea78a2759bcfc",
+ "chksum_sha256": "e69e499783341fb8ec78c21d2d9976d96185c140e18f22864da3993c9774e1bc",
"format": 1
},
{
- "name": "plugins/modules/startup_script_info.py",
+ "name": "plugins/modules/plan_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "23134fcb25c56ab61a08afe195100cd4f2eb5ef14b1a8ba6bfc7b82cdea57859",
+ "chksum_sha256": "3648af6b8fd69f3bb66faf265767fd39a7afd5bebd5f88964db42dc6d0d8d392",
"format": 1
},
{
- "name": "plugins/modules/ssh_key_info.py",
+ "name": "plugins/modules/plan_metal_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5cedb298db9d7b8b3888eed9a734309a5ef0b0cae1052211cc0a571a9c2ada22",
+ "chksum_sha256": "93bda0d2911f927951c30540a54a8da71a61a20c7f9cafc7368097d335c47aa5",
"format": 1
},
{
- "name": "plugins/modules/dns_domain_info.py",
+ "name": "plugins/modules/block_storage_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f76e554deed6163146c291612e807058309f4553b8d5ff9798a0fabe27717307",
+ "chksum_sha256": "d2677e90317fc892df48133c755f1c9cf8e1281d610f43937abc54e79f78ee81",
"format": 1
},
{
- "name": "plugins/modules/user.py",
+ "name": "plugins/modules/firewall_group.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "38d3651f9b624fe512dae40c4ef24e4ccb0eae4ea9c394d260b851ff57dc8365",
+ "chksum_sha256": "7e5482639e8b600dff912fc03f7b0baa24a3cfa70f807eeeeb6c1ebfcf354f94",
"format": 1
},
{
- "name": "plugins/modules/plan_metal_info.py",
+ "name": "plugins/modules/region_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "93bda0d2911f927951c30540a54a8da71a61a20c7f9cafc7368097d335c47aa5",
+ "chksum_sha256": "ee6e31f3192bbe6e0db69cd7efef79a3a3fee40763071aa5c674f5c6951d7bdb",
"format": 1
},
{
- "name": "plugins/modules/__init__.py",
+ "name": "plugins/modules/instance_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ "chksum_sha256": "fc787595fdeecaf68aa253d7eff5ce08ab0b2b937c8cc180ae7d1e59c7139adc",
"format": 1
},
{
- "name": "plugins/modules/firewall_group_info.py",
+ "name": "plugins/modules/vpc.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2e4dce46bd2ddbbd7ded11f11f8d4b57f15f603595f69d142e1c10bc8532e936",
+ "chksum_sha256": "9ea848792c5ac44d5d25f366dbab63916f6797c977238ede08a2b2e08f74933b",
"format": 1
},
{
- "name": "plugins/modules/ssh_key.py",
+ "name": "plugins/modules/startup_script_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e1a8e741f196a6b080077b88aa14ec6595878108978ef230224592b161ca44b",
+ "chksum_sha256": "23134fcb25c56ab61a08afe195100cd4f2eb5ef14b1a8ba6bfc7b82cdea57859",
"format": 1
},
{
- "name": "plugins/modules/firewall_rule.py",
+ "name": "plugins/modules/ssh_key_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "930069d8a2f087f4e7cb1b7b853e42fded5dfa8fbb9c532836132438049125fa",
+ "chksum_sha256": "5cedb298db9d7b8b3888eed9a734309a5ef0b0cae1052211cc0a571a9c2ada22",
"format": 1
},
{
- "name": "plugins/modules/dns_record.py",
+ "name": "plugins/modules/block_storage.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4b29de118b42237350431b7bef3dbf92ff98bfe989183756aea92efb0b73589d",
+ "chksum_sha256": "d2a98391d3d84671552b48b0fad61811f753420f09b5b94626e511b3c74cf93f",
"format": 1
},
{
- "name": "plugins/modules/reserved_ip.py",
+ "name": "plugins/modules/snapshot_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "98c4609e5b2183915afef8d49fe3f2f62407f5faeaa1d92da7ff8fd8eaa8375e",
+ "chksum_sha256": "a60ecc36574d60168903ee239185d42e636c8d26356bb84d6c666d8ab7368e50",
"format": 1
},
{
- "name": "plugins/modules/vpc2_info.py",
+ "name": "plugins/modules/bare_metal.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "4d4a3f916a93826f20ed75d12557941dc2da27144bf882f9a68bf378f40bb88b",
+ "chksum_sha256": "69a3fe885f9b66579510b9baaffc7990216530cd59cd1ede6640babccde8122d",
"format": 1
},
{
- "name": "plugins/modules/vpc.py",
+ "name": "plugins/modules/dns_domain.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "9ea848792c5ac44d5d25f366dbab63916f6797c977238ede08a2b2e08f74933b",
+ "chksum_sha256": "de781d3439aba96749c83b24b29c15e1f62ac807c8cc793e08ea374ece4c3c68",
"format": 1
},
{
- "name": "plugins/modules/block_storage_info.py",
+ "name": "plugins/modules/instance.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2677e90317fc892df48133c755f1c9cf8e1281d610f43937abc54e79f78ee81",
+ "chksum_sha256": "c471e4e21637a2b9bfcd0c9a8af795d32ee90b8dae5f1ae17f7f6cd03be63fd7",
"format": 1
},
{
- "name": "plugins/module_utils",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "plugins/modules/os_info.py",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0143ad73c6b10692c6577730523b0159782bb6a678d3946dc920b3b758565155",
"format": 1
},
{
- "name": "plugins/module_utils/common_instance.py",
+ "name": "plugins/modules/dns_domain_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3db5675896b5b2a842f34edcaa022e0562ff0e0fc03c976afd602c2de5792527",
+ "chksum_sha256": "f76e554deed6163146c291612e807058309f4553b8d5ff9798a0fabe27717307",
"format": 1
},
{
- "name": "plugins/module_utils/vultr_v2.py",
+ "name": "plugins/modules/user_info.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "adc7cacc8f196b524b1b56fadd81ca90e4901edd5c3f186297752eaaec8fffc6",
+ "chksum_sha256": "ae4e7899a1107f0d9a2dbfac410209528d687ceef6c9f587b3d78cdb974d8e7f",
"format": 1
},
{
@@ -424,7 +459,14 @@
"name": "plugins/doc_fragments/vultr_v2.py",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64f7eb39cbe633e26394afeb6d125d440046ec37f1cf82d4a4ea072092954afa",
+ "chksum_sha256": "8baec1786c8d19a4115d18e249a85591958732b33461b546cf1fcba1747643da",
+ "format": 1
+ },
+ {
+ "name": ".yamllint",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "5845e18e9f23155f423207df9abac970aed687c638620bc2c9ee06706191054b",
"format": 1
},
{
@@ -456,1249 +498,1144 @@
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group",
+ "name": "tests/integration/targets/account_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/aliases",
+ "name": "tests/integration/targets/account_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "a9f61a9487700a83508395d74ce06374baecfcaf4306b30f008d8b726c626a69",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/tasks",
+ "name": "tests/integration/targets/account_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/tasks/tests.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "6f163960098273136e9b6baf9c853e6178c8c34329d584e0b33e41250cd19483",
- "format": 1
- },
- {
- "name": "tests/integration/targets/firewall_group/tasks/main.yml",
+ "name": "tests/integration/targets/account_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b3a529e7fe7a88bd74d28e0d1a59444ff919861c898ce936304aa3cf905226",
+ "chksum_sha256": "2b49a94c64afb9d4d95f3cacf9ea2667390dd014585eb65ff4d15e026e346e2e",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/meta",
+ "name": "tests/integration/targets/vpc_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/meta/main.yml",
+ "name": "tests/integration/targets/vpc_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/defaults",
+ "name": "tests/integration/targets/vpc_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group/defaults/main.yml",
+ "name": "tests/integration/targets/vpc_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ce7cb163662ec14873afee9b83c34874bd0eaf496890d7b7b35d488bf094461b",
+ "chksum_sha256": "c91ecbfd0f3d0c77fc3e66dcfd9ade5b549f078908c75cd50b91356ef2dfab9d",
"format": 1
},
{
- "name": "tests/integration/targets/account_info",
+ "name": "tests/integration/targets/vpc_info/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "72e3197710fa41991bd5beaeb212d73e3833ee807ae8d30c9d83fcf775180101",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/vpc_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/account_info/aliases",
+ "name": "tests/integration/targets/vpc_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a9f61a9487700a83508395d74ce06374baecfcaf4306b30f008d8b726c626a69",
+ "chksum_sha256": "6d0f6b5b87f1f2f2d734750fa026610f0ecccbca7d6c7ccedf1d394daa2c0787",
"format": 1
},
{
- "name": "tests/integration/targets/account_info/tasks",
+ "name": "tests/integration/targets/vpc_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/account_info/tasks/main.yml",
+ "name": "tests/integration/targets/vpc_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2b49a94c64afb9d4d95f3cacf9ea2667390dd014585eb65ff4d15e026e346e2e",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup",
+ "name": "tests/integration/targets/reserved_ip",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/aliases",
+ "name": "tests/integration/targets/reserved_ip/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks",
+ "name": "tests/integration/targets/reserved_ip/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_firewall_group.yml",
+ "name": "tests/integration/targets/reserved_ip/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c0c8fb99ae5f40480ba7b0da24f35632388c3865108d7d5148e87dace9adc816",
+ "chksum_sha256": "ac3fc3665287a74b94fdbfc1a34f8fc64069c1c08d9f7121f9e6282ffefa978e",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_bare_metal.yml",
+ "name": "tests/integration/targets/reserved_ip/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3b74c0f2781eec35a6e75b14be81c287367dbf7a42afa0c276db0f0a56393b28",
+ "chksum_sha256": "ca173941bf646f76a6f8336f10b09a629e352bb355a51f62fb5c1440c55bb204",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_vpc.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8cb8e6f9ea326d780e4565a67a967911fdf9ca09a039b904b600f4674955eb59",
+ "name": "tests/integration/targets/reserved_ip/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_reserved_ip.yml",
+ "name": "tests/integration/targets/reserved_ip/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7f55ef9910de1c95c3f152822b02b1dcc77bc347c559f4d0ea9850ed67ed0466",
+ "chksum_sha256": "41ce444b326fc7a174514d6b0b478290cb0cb35ff665dbf8cc1c96426f1bcf53",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_block_storage.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "f1b0b7013c9ed7e03d27d7f213415654e0e57b204526029921416e1722dd8c82",
+ "name": "tests/integration/targets/reserved_ip/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_user.yml",
+ "name": "tests/integration/targets/reserved_ip/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d045ec23fee0ba28312d3a62b2c177e40e2991bafd1363f4862585f2cbdcfb5b",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_object_storage.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "55f43e34c09e93cb44ff0c38d845f833ca4c3736a666851ac9ce685a25df9ddf",
+ "name": "tests/integration/targets/region_info",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_dns_domain.yml",
+ "name": "tests/integration/targets/region_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0f70151df2fe2eb1b434fccf8e2d8eba7e13db7cb110520c1e69784499bdc2a8",
+ "chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_network.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "1c6f111d899836a496e790770077aa92db40ceca90633a5d5b72200a388dce2e",
+ "name": "tests/integration/targets/region_info/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_vpc2.yml",
+ "name": "tests/integration/targets/region_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e004778b3bb4abcfbc4037b7c94d14df1fe5f8b72611547c88b226294a00c28c",
+ "chksum_sha256": "1e9238ef3b628e9be3f45f1907eb44d53a34f973b548064abff1f36d525a8bab",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_snapshot.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c4b80c0623be8fc9b3715e96c370274a9544a722c081f2cd176964e403eb70e3",
+ "name": "tests/integration/targets/instance_info",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_ssh_key.yml",
+ "name": "tests/integration/targets/instance_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "26d0dc382638376f115608d46bba80b2dd36ec96d9d290115c931051532d3f76",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_instance.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "ac852429779c61e3db9dcb59dbf066ee2c5e169c54665da6a679c4b60397d2fd",
+ "name": "tests/integration/targets/instance_info/tasks",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_startup_script.yml",
+ "name": "tests/integration/targets/instance_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d3f428f0cdeb1bf14cac364c19580a61a845672a552d293c96b23e8f860764f7",
+ "chksum_sha256": "a2c21f33974001d7c039a003393e9f0a546723be4349c50851c639aba0f77117",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/tasks/cleanup_all.yml",
+ "name": "tests/integration/targets/instance_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1b38a4d3f009a2ea2f95e575e4164012bae9e741221b5cb947f49910752980ed",
+ "chksum_sha256": "158f45cabb1bb2f67511f1885c51b65872642bbcaa2915b8605d1f763ce1f832",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/meta",
+ "name": "tests/integration/targets/instance_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/meta/main.yml",
+ "name": "tests/integration/targets/instance_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "3a42de11dbbb07c09b93bfbb2d99d641f66dacb9aa96752234cb55db517f9e89",
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/defaults",
+ "name": "tests/integration/targets/instance_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/cleanup/defaults/main.yml",
+ "name": "tests/integration/targets/instance_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b9f3ec8b4e4bf39dc1301f7187db3d08097bedf77f3364c79f2044ffd5db9d12",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/user",
+ "name": "tests/integration/targets/startup_script",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user/aliases",
+ "name": "tests/integration/targets/startup_script/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/user/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/user/tasks/tests.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "c759330d083de4c1388c2a495db6e58cb2e8de6ff80d3c54ee1982e020d4cb1a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/user/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5caa8e411c0d84429e587e397e283e5c989c4ac2ef39965c255fbb5d18e1b3fa",
- "format": 1
- },
- {
- "name": "tests/integration/targets/user/meta",
+ "name": "tests/integration/targets/startup_script/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user/meta/main.yml",
+ "name": "tests/integration/targets/startup_script/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
- "format": 1
- },
- {
- "name": "tests/integration/targets/user/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "c1e7a5d54828ea8746f35c153e693af2519b51ef74ef01a016cd7ef51b578f21",
"format": 1
},
{
- "name": "tests/integration/targets/user/defaults/main.yml",
+ "name": "tests/integration/targets/startup_script/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a554c895ee456e6b7a52b47944b6dde91718e705feae1b17240f11e709d65836",
+ "chksum_sha256": "d5738ca7eeca0a524a80197d73b1d677913903103d5bea0663c8fdc66eb6c09c",
"format": 1
},
{
- "name": "tests/integration/targets/os_info",
+ "name": "tests/integration/targets/startup_script/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/os_info/aliases",
+ "name": "tests/integration/targets/startup_script/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
+ "chksum_sha256": "8825f1598fe036be8c0f7259ba05ad408acb9a9bf106da582a1e0f58040e1c40",
"format": 1
},
{
- "name": "tests/integration/targets/os_info/tasks",
+ "name": "tests/integration/targets/startup_script/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/os_info/tasks/main.yml",
+ "name": "tests/integration/targets/startup_script/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab3741f185714f47703a62373e1c60d4555a7cb0604b018ab681cab0bd63568a",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info",
+ "name": "tests/integration/targets/user",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/aliases",
+ "name": "tests/integration/targets/user/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/tasks",
+ "name": "tests/integration/targets/user/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/tasks/tests.yml",
+ "name": "tests/integration/targets/user/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a2c21f33974001d7c039a003393e9f0a546723be4349c50851c639aba0f77117",
+ "chksum_sha256": "c759330d083de4c1388c2a495db6e58cb2e8de6ff80d3c54ee1982e020d4cb1a",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/tasks/main.yml",
+ "name": "tests/integration/targets/user/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "158f45cabb1bb2f67511f1885c51b65872642bbcaa2915b8605d1f763ce1f832",
+ "chksum_sha256": "5caa8e411c0d84429e587e397e283e5c989c4ac2ef39965c255fbb5d18e1b3fa",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/meta",
+ "name": "tests/integration/targets/user/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/meta/main.yml",
+ "name": "tests/integration/targets/user/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "a554c895ee456e6b7a52b47944b6dde91718e705feae1b17240f11e709d65836",
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/defaults",
+ "name": "tests/integration/targets/user/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance_info/defaults/main.yml",
+ "name": "tests/integration/targets/user/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "3a42de11dbbb07c09b93bfbb2d99d641f66dacb9aa96752234cb55db517f9e89",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage",
+ "name": "tests/integration/targets/snapshot",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/aliases",
+ "name": "tests/integration/targets/snapshot/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/tasks",
+ "name": "tests/integration/targets/snapshot/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/tasks/tests.yml",
+ "name": "tests/integration/targets/snapshot/tasks/present.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6e9e3942703ae1ca3cfa4a8a3017fc54c0f627edc41fbcf143ea71e4c6782b72",
+ "chksum_sha256": "30c0334aed12e3f1d1d319de71c91e8faa7e90c0ba23ea8af12e7f66eb1dfb9f",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/tasks/main.yml",
+ "name": "tests/integration/targets/snapshot/tasks/failures.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "52660389f63f9f8b0b0faa24bdbbf8f28ce264f8fceb31a44e4bd0aca32a904d",
+ "chksum_sha256": "bb2a0533a657d8ccdc7d750a4e6607648123016a127b9a8337c92bcfd4faf7dc",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/tasks/tests_attach_to_server.yml",
+ "name": "tests/integration/targets/snapshot/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "de45063f8ce9cd2b9a7554cdcf7f1c9596f50cd1376ef69f3bd042481b321c9a",
+ "chksum_sha256": "0284c806f32966951ab87baa051d3be2fe9a8839e37c55c760c695730db110cd",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/tasks/failures.yml",
+ "name": "tests/integration/targets/snapshot/tasks/absent.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "70a2828cfc6a42aea15ca265c29c3b7c0489427f1e009ebc3e8668ce5dd505f1",
- "format": 1
- },
- {
- "name": "tests/integration/targets/block_storage/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "63b358651e57593af74d74bf7ab7e7812a77443ba1628f321830199245f55760",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage/defaults/main.yml",
+ "name": "tests/integration/targets/snapshot/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0015e74d8ad929993ebf0840e53120cf208ae9302ddbcb4c03bef85428cd8052",
+ "chksum_sha256": "7ce50b2181ffe6e2121e1692900fdb107636f90e048c195327fcc13737816a31",
"format": 1
},
{
- "name": "tests/integration/targets/user_info",
+ "name": "tests/integration/targets/snapshot/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user_info/aliases",
+ "name": "tests/integration/targets/snapshot/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "6a784801be127bde96b005e775b8981f116875928902348ec6c9384de1b5097b",
"format": 1
},
{
- "name": "tests/integration/targets/user_info/tasks",
+ "name": "tests/integration/targets/snapshot/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user_info/tasks/tests.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "e48cbcad848aa1212a02ed6db988f09267d32b6195eed356882eed9f2e4dd39a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/user_info/tasks/main.yml",
+ "name": "tests/integration/targets/snapshot/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5caa8e411c0d84429e587e397e283e5c989c4ac2ef39965c255fbb5d18e1b3fa",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/user_info/meta",
+ "name": "tests/integration/targets/common",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user_info/meta/main.yml",
+ "name": "tests/integration/targets/common/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa",
"format": 1
},
{
- "name": "tests/integration/targets/user_info/defaults",
+ "name": "tests/integration/targets/common/default",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/user_info/defaults/main.yml",
+ "name": "tests/integration/targets/common/default/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e9a72a5ae88e71690565abf81fe1845976e98e4b60c6025f0679921da414ceac",
+ "chksum_sha256": "11f87c1aa19551822d0b8585fcbb27151354e93cca5298e419581a349a23896d",
"format": 1
},
{
- "name": "tests/integration/targets/region_info",
+ "name": "tests/integration/targets/plan_metal_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/region_info/aliases",
+ "name": "tests/integration/targets/plan_metal_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
"format": 1
},
{
- "name": "tests/integration/targets/region_info/tasks",
+ "name": "tests/integration/targets/plan_metal_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/region_info/tasks/main.yml",
+ "name": "tests/integration/targets/plan_metal_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "1e9238ef3b628e9be3f45f1907eb44d53a34f973b548064abff1f36d525a8bab",
+ "chksum_sha256": "30fa2cd68fb86755e1ba3ba2ef676feea82c54901bfaa77897364e2877d2d8af",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2",
+ "name": "tests/integration/targets/firewall_rule_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/aliases",
+ "name": "tests/integration/targets/firewall_rule_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/tasks",
+ "name": "tests/integration/targets/firewall_rule_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/tasks/tests.yml",
+ "name": "tests/integration/targets/firewall_rule_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d4840fa2b094d50ef00423d5572b8f373d3a1009f1a8f347f1c3c253ef6bd070",
+ "chksum_sha256": "f2b150accd2fc9a13c9cb8d4923d0727daa884bad4e78c31b39ca451eb532f92",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/tasks/main.yml",
+ "name": "tests/integration/targets/firewall_rule_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab1cd92c34ca65a13c41096db34aeb9ecae00bb54d5e1cde618e7ca290a3ee8b",
+ "chksum_sha256": "e3b3a529e7fe7a88bd74d28e0d1a59444ff919861c898ce936304aa3cf905226",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/meta",
+ "name": "tests/integration/targets/firewall_rule_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/meta/main.yml",
+ "name": "tests/integration/targets/firewall_rule_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "10b22387a8147bd98ea199fa7e8866837cb3742dd173a2bb2f1c4bfefb326741",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/defaults",
+ "name": "tests/integration/targets/firewall_rule_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2/defaults/main.yml",
+ "name": "tests/integration/targets/firewall_rule_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0854a3e8751cc27484b26a8042c2aa753d83bc970654451cfbb129194b05e82f",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script",
+ "name": "tests/integration/targets/dns_domain",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/aliases",
+ "name": "tests/integration/targets/dns_domain/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/tasks",
+ "name": "tests/integration/targets/dns_domain/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/tasks/tests.yml",
+ "name": "tests/integration/targets/dns_domain/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c1e7a5d54828ea8746f35c153e693af2519b51ef74ef01a016cd7ef51b578f21",
+ "chksum_sha256": "40df39c4e807905c239b84032700f2f8afef295404a81d33f456bb8d7d2de2db",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/tasks/main.yml",
+ "name": "tests/integration/targets/dns_domain/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5738ca7eeca0a524a80197d73b1d677913903103d5bea0663c8fdc66eb6c09c",
+ "chksum_sha256": "2461155c079932287a72670c97c3251a2821e8ab580d9ca305fa456b02cdff13",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/meta",
+ "name": "tests/integration/targets/dns_domain/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/meta/main.yml",
+ "name": "tests/integration/targets/dns_domain/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "897005cc5012d5148cb547b674f0cdd1883702dd5bf7e06ce57e0df4937eee31",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/defaults",
+ "name": "tests/integration/targets/dns_domain/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script/defaults/main.yml",
+ "name": "tests/integration/targets/dns_domain/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8825f1598fe036be8c0f7259ba05ad408acb9a9bf106da582a1e0f58040e1c40",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip",
+ "name": "tests/integration/targets/ssh_key_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/aliases",
+ "name": "tests/integration/targets/ssh_key_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/tasks",
+ "name": "tests/integration/targets/ssh_key_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/tasks/tests.yml",
+ "name": "tests/integration/targets/ssh_key_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac3fc3665287a74b94fdbfc1a34f8fc64069c1c08d9f7121f9e6282ffefa978e",
+ "chksum_sha256": "8a08daf014910331e76b5386c45206e2bb3448692f153dd0e28afba215660fc2",
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/tasks/main.yml",
+ "name": "tests/integration/targets/ssh_key_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ca173941bf646f76a6f8336f10b09a629e352bb355a51f62fb5c1440c55bb204",
+ "chksum_sha256": "44ed0d376e3a684d03aca3056db7bd74b63451f5334639ae37970b4b468e900d",
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/meta",
+ "name": "tests/integration/targets/ssh_key_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/meta/main.yml",
+ "name": "tests/integration/targets/ssh_key_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "f09f3081e9a29164ed5d17114a52e97f2b8190d31c8c99cd23601d9f53c50e01",
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/defaults",
+ "name": "tests/integration/targets/ssh_key_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/reserved_ip/defaults/main.yml",
+ "name": "tests/integration/targets/ssh_key_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "41ce444b326fc7a174514d6b0b478290cb0cb35ff665dbf8cc1c96426f1bcf53",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/plan_info",
+ "name": "tests/integration/targets/dns_record",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/plan_info/aliases",
+ "name": "tests/integration/targets/dns_record/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/plan_info/tasks",
+ "name": "tests/integration/targets/dns_record/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/plan_info/tasks/main.yml",
+ "name": "tests/integration/targets/dns_record/tasks/remove_record.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "501533396ed8009dc79e310c95d764591704b6cd32091ee08eebd005f8a4b87b",
- "format": 1
- },
- {
- "name": "tests/integration/targets/ssh_key_info",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "8ad3e5a5d87d7dd4872f0a2cb01d7ae045494079415e07deab5f5e66a64985df",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key_info/aliases",
+ "name": "tests/integration/targets/dns_record/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/ssh_key_info/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "b17ff013393d7b1d288a2d6fc4f2c4befddd5684e41544862fc11f4db00310c5",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key_info/tasks/tests.yml",
+ "name": "tests/integration/targets/dns_record/tasks/record.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8a08daf014910331e76b5386c45206e2bb3448692f153dd0e28afba215660fc2",
+ "chksum_sha256": "ac2ce46179ea10858a1a055df4fa27652e122ca7f0cc19b7494a3028b6894af9",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key_info/tasks/main.yml",
+ "name": "tests/integration/targets/dns_record/tasks/test_fail_multiple.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ed0d376e3a684d03aca3056db7bd74b63451f5334639ae37970b4b468e900d",
- "format": 1
- },
- {
- "name": "tests/integration/targets/ssh_key_info/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "efc91209a8b8d5f616816c27ee3542c437695ad7902df49d110786a75381de9a",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key_info/meta/main.yml",
+ "name": "tests/integration/targets/dns_record/tasks/create_record.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "d4337dc679ec9e0caacc52b9ad92f5102f2ec94b67996ff87f0689a2af2795b2",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key_info/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/dns_record/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "2461155c079932287a72670c97c3251a2821e8ab580d9ca305fa456b02cdff13",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key_info/defaults/main.yml",
+ "name": "tests/integration/targets/dns_record/tasks/update_record.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f09f3081e9a29164ed5d17114a52e97f2b8190d31c8c99cd23601d9f53c50e01",
+ "chksum_sha256": "c32ca3ea4f65cebd89f11efd6f8abdaf29c1e8b5fb3ab92536ed37a46f04296e",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info",
+ "name": "tests/integration/targets/dns_record/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/aliases",
+ "name": "tests/integration/targets/dns_record/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "2ad76431a193d7ad5a432520302e3a36b8d5bea46f16d60a034f094b061c0378",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/tasks",
+ "name": "tests/integration/targets/dns_record/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/tasks/tests.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "dae200ffc061db7868ab082d3d5d6c1cc17c81d365d81a95eda5e6854bb08da3",
- "format": 1
- },
- {
- "name": "tests/integration/targets/firewall_group_info/tasks/main.yml",
+ "name": "tests/integration/targets/dns_record/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b3a529e7fe7a88bd74d28e0d1a59444ff919861c898ce936304aa3cf905226",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/meta",
+ "name": "tests/integration/targets/cleanup",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/meta/main.yml",
+ "name": "tests/integration/targets/cleanup/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/defaults",
+ "name": "tests/integration/targets/cleanup/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_group_info/defaults/main.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_all.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10b22387a8147bd98ea199fa7e8866837cb3742dd173a2bb2f1c4bfefb326741",
+ "chksum_sha256": "1b38a4d3f009a2ea2f95e575e4164012bae9e741221b5cb947f49910752980ed",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_block_storage.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "f1b0b7013c9ed7e03d27d7f213415654e0e57b204526029921416e1722dd8c82",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/aliases",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_vpc.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "a0cee899af7af74b19c19083340bab785ca51733eb6a5fd095eb97af8143cbff",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_bare_metal.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "3b74c0f2781eec35a6e75b14be81c287367dbf7a42afa0c276db0f0a56393b28",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/tasks/tests.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_network.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a175aee81120e75678aa6477456460b03200a4ef79db94e0828fe737bfa65afa",
+ "chksum_sha256": "1c6f111d899836a496e790770077aa92db40ceca90633a5d5b72200a388dce2e",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/tasks/main.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_vpc2.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "00c1b19510489064de57f159c3cfae2ed964ad5064e770cff0849b5243cc417e",
+ "chksum_sha256": "e004778b3bb4abcfbc4037b7c94d14df1fe5f8b72611547c88b226294a00c28c",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/meta",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_startup_script.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "d3f428f0cdeb1bf14cac364c19580a61a845672a552d293c96b23e8f860764f7",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/meta/main.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_ssh_key.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "26d0dc382638376f115608d46bba80b2dd36ec96d9d290115c931051532d3f76",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_instance.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "ac852429779c61e3db9dcb59dbf066ee2c5e169c54665da6a679c4b60397d2fd",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot_info/defaults/main.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_firewall_group.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f7167834ffb188e37b48bf50ed7112fa6b3854c473f9a27d925f0ff059aa2686",
+ "chksum_sha256": "c0c8fb99ae5f40480ba7b0da24f35632388c3865108d7d5148e87dace9adc816",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_object_storage.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "55f43e34c09e93cb44ff0c38d845f833ca4c3736a666851ac9ce685a25df9ddf",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/aliases",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_user.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "d045ec23fee0ba28312d3a62b2c177e40e2991bafd1363f4862585f2cbdcfb5b",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_dns_domain.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0f70151df2fe2eb1b434fccf8e2d8eba7e13db7cb110520c1e69784499bdc2a8",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/tasks/tests.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_reserved_ip.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "40df39c4e807905c239b84032700f2f8afef295404a81d33f456bb8d7d2de2db",
+ "chksum_sha256": "7f55ef9910de1c95c3f152822b02b1dcc77bc347c559f4d0ea9850ed67ed0466",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/tasks/main.yml",
+ "name": "tests/integration/targets/cleanup/tasks/cleanup_snapshot.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2461155c079932287a72670c97c3251a2821e8ab580d9ca305fa456b02cdff13",
+ "chksum_sha256": "c4b80c0623be8fc9b3715e96c370274a9544a722c081f2cd176964e403eb70e3",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/meta",
+ "name": "tests/integration/targets/cleanup/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/meta/main.yml",
+ "name": "tests/integration/targets/cleanup/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "b9f3ec8b4e4bf39dc1301f7187db3d08097bedf77f3364c79f2044ffd5db9d12",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/defaults",
+ "name": "tests/integration/targets/cleanup/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain/defaults/main.yml",
+ "name": "tests/integration/targets/cleanup/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "897005cc5012d5148cb547b674f0cdd1883702dd5bf7e06ce57e0df4937eee31",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key",
+ "name": "tests/integration/targets/startup_script_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/aliases",
+ "name": "tests/integration/targets/startup_script_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/tasks",
+ "name": "tests/integration/targets/startup_script_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/tasks/tests.yml",
+ "name": "tests/integration/targets/startup_script_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8352dbed6ddb1ceecbc7f46734bfe3db7d6dda7049f6ca13601847227e524fb0",
+ "chksum_sha256": "5668e06fe3cf14e4ffdbf68decff145135365a9e4b7cd889096308ad4516565e",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/tasks/main.yml",
+ "name": "tests/integration/targets/startup_script_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "44ed0d376e3a684d03aca3056db7bd74b63451f5334639ae37970b4b468e900d",
+ "chksum_sha256": "d5738ca7eeca0a524a80197d73b1d677913903103d5bea0663c8fdc66eb6c09c",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/meta",
+ "name": "tests/integration/targets/startup_script_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/meta/main.yml",
+ "name": "tests/integration/targets/startup_script_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "81ba8e5671cf9d77b15558eb21489b807a60b071c19dc5a16cbfd9ba6ccfe835",
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/defaults",
+ "name": "tests/integration/targets/startup_script_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/ssh_key/defaults/main.yml",
+ "name": "tests/integration/targets/startup_script_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2db62c7b2e06217cc6a92ca02e997ef2a276460c45d37953bba64b9f2ec3078e",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info",
+ "name": "tests/integration/targets/snapshot_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/aliases",
+ "name": "tests/integration/targets/snapshot_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/tasks",
+ "name": "tests/integration/targets/snapshot_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/tasks/tests.yml",
+ "name": "tests/integration/targets/snapshot_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b3b6826f2bf93e2a16351af89d4b9f8f853e1306da6d750c43161157ba723e3a",
+ "chksum_sha256": "a175aee81120e75678aa6477456460b03200a4ef79db94e0828fe737bfa65afa",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/tasks/main.yml",
+ "name": "tests/integration/targets/snapshot_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ab1cd92c34ca65a13c41096db34aeb9ecae00bb54d5e1cde618e7ca290a3ee8b",
+ "chksum_sha256": "00c1b19510489064de57f159c3cfae2ed964ad5064e770cff0849b5243cc417e",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/meta",
+ "name": "tests/integration/targets/snapshot_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/meta/main.yml",
+ "name": "tests/integration/targets/snapshot_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "f7167834ffb188e37b48bf50ed7112fa6b3854c473f9a27d925f0ff059aa2686",
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/defaults",
+ "name": "tests/integration/targets/snapshot_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc2_info/defaults/main.yml",
+ "name": "tests/integration/targets/snapshot_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c222144728517c257e2605781d462d26d3e5aafcb631644dd48e51f3447562b1",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/dns_record",
+ "name": "tests/integration/targets/ssh_key",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/aliases",
+ "name": "tests/integration/targets/ssh_key/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/tasks",
+ "name": "tests/integration/targets/ssh_key/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/tasks/tests.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "b17ff013393d7b1d288a2d6fc4f2c4befddd5684e41544862fc11f4db00310c5",
- "format": 1
- },
- {
- "name": "tests/integration/targets/dns_record/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "2461155c079932287a72670c97c3251a2821e8ab580d9ca305fa456b02cdff13",
- "format": 1
- },
- {
- "name": "tests/integration/targets/dns_record/tasks/create_record.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "d4337dc679ec9e0caacc52b9ad92f5102f2ec94b67996ff87f0689a2af2795b2",
- "format": 1
- },
- {
- "name": "tests/integration/targets/dns_record/tasks/remove_record.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "8ad3e5a5d87d7dd4872f0a2cb01d7ae045494079415e07deab5f5e66a64985df",
- "format": 1
- },
- {
- "name": "tests/integration/targets/dns_record/tasks/test_fail_multiple.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "efc91209a8b8d5f616816c27ee3542c437695ad7902df49d110786a75381de9a",
- "format": 1
- },
- {
- "name": "tests/integration/targets/dns_record/tasks/record.yml",
+ "name": "tests/integration/targets/ssh_key/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "ac2ce46179ea10858a1a055df4fa27652e122ca7f0cc19b7494a3028b6894af9",
+ "chksum_sha256": "8352dbed6ddb1ceecbc7f46734bfe3db7d6dda7049f6ca13601847227e524fb0",
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/tasks/update_record.yml",
+ "name": "tests/integration/targets/ssh_key/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c32ca3ea4f65cebd89f11efd6f8abdaf29c1e8b5fb3ab92536ed37a46f04296e",
+ "chksum_sha256": "44ed0d376e3a684d03aca3056db7bd74b63451f5334639ae37970b4b468e900d",
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/meta",
+ "name": "tests/integration/targets/ssh_key/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/meta/main.yml",
+ "name": "tests/integration/targets/ssh_key/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "2db62c7b2e06217cc6a92ca02e997ef2a276460c45d37953bba64b9f2ec3078e",
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/defaults",
+ "name": "tests/integration/targets/ssh_key/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_record/defaults/main.yml",
+ "name": "tests/integration/targets/ssh_key/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2ad76431a193d7ad5a432520302e3a36b8d5bea46f16d60a034f094b061c0378",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
@@ -1730,136 +1667,115 @@
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/tasks/main.yml",
+ "name": "tests/integration/targets/firewall_rule/tasks/rule_present.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "754db3bbf45a9b97110c0523b140919d4540ebf1ab2d0191616f77955dde7ece",
+ "chksum_sha256": "a210bb759c2823ab8feadb105f9a286b1cb21d3f6bcb2837857d3e9de02758ad",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/tasks/rule_absent.yml",
+ "name": "tests/integration/targets/firewall_rule/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "37f1c494e66dd75bd0ad6faab6de98e4be343d282d2f5c9ae3f1210ac7fe720e",
+ "chksum_sha256": "754db3bbf45a9b97110c0523b140919d4540ebf1ab2d0191616f77955dde7ece",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/tasks/rule_present.yml",
+ "name": "tests/integration/targets/firewall_rule/tasks/rule_absent.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "a210bb759c2823ab8feadb105f9a286b1cb21d3f6bcb2837857d3e9de02758ad",
+ "chksum_sha256": "37f1c494e66dd75bd0ad6faab6de98e4be343d282d2f5c9ae3f1210ac7fe720e",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/meta",
+ "name": "tests/integration/targets/firewall_rule/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/meta/main.yml",
+ "name": "tests/integration/targets/firewall_rule/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "04a36ef586390a4ccd668b3b583ddca434c9a34e971f55334d4a06e6a4c2972d",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/defaults",
+ "name": "tests/integration/targets/firewall_rule/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule/defaults/main.yml",
+ "name": "tests/integration/targets/firewall_rule/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "04a36ef586390a4ccd668b3b583ddca434c9a34e971f55334d4a06e6a4c2972d",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot",
+ "name": "tests/integration/targets/firewall_group",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/aliases",
+ "name": "tests/integration/targets/firewall_group/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/tasks",
+ "name": "tests/integration/targets/firewall_group/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/tasks/tests.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0284c806f32966951ab87baa051d3be2fe9a8839e37c55c760c695730db110cd",
- "format": 1
- },
- {
- "name": "tests/integration/targets/snapshot/tasks/main.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "7ce50b2181ffe6e2121e1692900fdb107636f90e048c195327fcc13737816a31",
- "format": 1
- },
- {
- "name": "tests/integration/targets/snapshot/tasks/absent.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "63b358651e57593af74d74bf7ab7e7812a77443ba1628f321830199245f55760",
- "format": 1
- },
- {
- "name": "tests/integration/targets/snapshot/tasks/failures.yml",
+ "name": "tests/integration/targets/firewall_group/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "bb2a0533a657d8ccdc7d750a4e6607648123016a127b9a8337c92bcfd4faf7dc",
+ "chksum_sha256": "6f163960098273136e9b6baf9c853e6178c8c34329d584e0b33e41250cd19483",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/tasks/present.yml",
+ "name": "tests/integration/targets/firewall_group/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30c0334aed12e3f1d1d319de71c91e8faa7e90c0ba23ea8af12e7f66eb1dfb9f",
+ "chksum_sha256": "e3b3a529e7fe7a88bd74d28e0d1a59444ff919861c898ce936304aa3cf905226",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/meta",
+ "name": "tests/integration/targets/firewall_group/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/meta/main.yml",
+ "name": "tests/integration/targets/firewall_group/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "ce7cb163662ec14873afee9b83c34874bd0eaf496890d7b7b35d488bf094461b",
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/defaults",
+ "name": "tests/integration/targets/firewall_group/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/snapshot/defaults/main.yml",
+ "name": "tests/integration/targets/firewall_group/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6a784801be127bde96b005e775b8981f116875928902348ec6c9384de1b5097b",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
@@ -1884,17 +1800,24 @@
"format": 1
},
{
- "name": "tests/integration/targets/bare_metal/tasks/tests.yml",
+ "name": "tests/integration/targets/bare_metal/tasks/present.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "58cb4e3cf4e146c1eb86b2f10253646a22689af81dec9278dc93352fc6272a80",
+ "chksum_sha256": "d2dd0ff9d8f5d37c43e8c8375cbda630bfdfab96dbe851fefdf38a843b9758df",
"format": 1
},
{
- "name": "tests/integration/targets/bare_metal/tasks/main.yml",
+ "name": "tests/integration/targets/bare_metal/tasks/failures.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d30996ac75b7862f1073cacc39004f74e750fae54df70f525a802e68a4066ea6",
+ "chksum_sha256": "142e80f17df5581c5482614f83a12bb3d36cd32c6d23a40ba54257bd14c198e5",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/bare_metal/tasks/tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "58cb4e3cf4e146c1eb86b2f10253646a22689af81dec9278dc93352fc6272a80",
"format": 1
},
{
@@ -1905,17 +1828,24 @@
"format": 1
},
{
- "name": "tests/integration/targets/bare_metal/tasks/failures.yml",
+ "name": "tests/integration/targets/bare_metal/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "142e80f17df5581c5482614f83a12bb3d36cd32c6d23a40ba54257bd14c198e5",
+ "chksum_sha256": "d30996ac75b7862f1073cacc39004f74e750fae54df70f525a802e68a4066ea6",
"format": 1
},
{
- "name": "tests/integration/targets/bare_metal/tasks/present.yml",
+ "name": "tests/integration/targets/bare_metal/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/bare_metal/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d2dd0ff9d8f5d37c43e8c8375cbda630bfdfab96dbe851fefdf38a843b9758df",
+ "chksum_sha256": "14136ead44466e15ba46bd96cc8e1e838ba69f7201ab333e18fd6cb3f2a77071",
"format": 1
},
{
@@ -1933,171 +1863,178 @@
"format": 1
},
{
- "name": "tests/integration/targets/bare_metal/defaults",
+ "name": "tests/integration/targets/block_storage_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/bare_metal/defaults/main.yml",
+ "name": "tests/integration/targets/block_storage_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "14136ead44466e15ba46bd96cc8e1e838ba69f7201ab333e18fd6cb3f2a77071",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/plan_metal_info",
+ "name": "tests/integration/targets/block_storage_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/plan_metal_info/aliases",
+ "name": "tests/integration/targets/block_storage_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
+ "chksum_sha256": "50adb7b185f2cdb64f1763c859b4e5e2bd32579ba9120e7ca92c6d82fdfeaee4",
"format": 1
},
{
- "name": "tests/integration/targets/plan_metal_info/tasks",
+ "name": "tests/integration/targets/block_storage_info/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "74b565de2124e4a53853a1dc2d79d3e765bd00578f25260427c52b082a0aebba",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/block_storage_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/plan_metal_info/tasks/main.yml",
+ "name": "tests/integration/targets/block_storage_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "30fa2cd68fb86755e1ba3ba2ef676feea82c54901bfaa77897364e2877d2d8af",
+ "chksum_sha256": "7690ab5d3be0f969367e46923242bed255c52807b27c7ca97c492002e61e4056",
"format": 1
},
{
- "name": "tests/integration/targets/vpc",
+ "name": "tests/integration/targets/vpc2_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc/aliases",
+ "name": "tests/integration/targets/vpc2_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/vpc/tasks",
+ "name": "tests/integration/targets/vpc2_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc/tasks/tests.yml",
+ "name": "tests/integration/targets/vpc2_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7069e3b5efc6ccdad26b5ef833d9dc2c361802f879cbfc1255ba7087d7659a1b",
+ "chksum_sha256": "b3b6826f2bf93e2a16351af89d4b9f8f853e1306da6d750c43161157ba723e3a",
"format": 1
},
{
- "name": "tests/integration/targets/vpc/tasks/main.yml",
+ "name": "tests/integration/targets/vpc2_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72e3197710fa41991bd5beaeb212d73e3833ee807ae8d30c9d83fcf775180101",
+ "chksum_sha256": "ab1cd92c34ca65a13c41096db34aeb9ecae00bb54d5e1cde618e7ca290a3ee8b",
"format": 1
},
{
- "name": "tests/integration/targets/vpc/meta",
+ "name": "tests/integration/targets/vpc2_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc/meta/main.yml",
+ "name": "tests/integration/targets/vpc2_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "c222144728517c257e2605781d462d26d3e5aafcb631644dd48e51f3447562b1",
"format": 1
},
{
- "name": "tests/integration/targets/vpc/defaults",
+ "name": "tests/integration/targets/vpc2_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc/defaults/main.yml",
+ "name": "tests/integration/targets/vpc2_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0d8a5b2015a66fe090ea2ae5d0e5e654ccbcef2676f34978022ef05b2bb1ca92",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info",
+ "name": "tests/integration/targets/dns_domain_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/aliases",
+ "name": "tests/integration/targets/dns_domain_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/tasks",
+ "name": "tests/integration/targets/dns_domain_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/tasks/tests.yml",
+ "name": "tests/integration/targets/dns_domain_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c91ecbfd0f3d0c77fc3e66dcfd9ade5b549f078908c75cd50b91356ef2dfab9d",
+ "chksum_sha256": "f83fab1071ce3750d26dd1c8daaee56a1997a16454b672c21f98aa2b8d9713fe",
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/tasks/main.yml",
+ "name": "tests/integration/targets/dns_domain_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "72e3197710fa41991bd5beaeb212d73e3833ee807ae8d30c9d83fcf775180101",
+ "chksum_sha256": "87dc91ac04c12df5f20f7535d800a42a2e1336b2310b9371c2a59af44f078ce8",
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/meta",
+ "name": "tests/integration/targets/dns_domain_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/meta/main.yml",
+ "name": "tests/integration/targets/dns_domain_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "51c5fca4c8d9360a6379827ab054a14a88137dbf75709539bf2deada8421a92f",
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/defaults",
+ "name": "tests/integration/targets/dns_domain_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/vpc_info/defaults/main.yml",
+ "name": "tests/integration/targets/dns_domain_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "6d0f6b5b87f1f2f2d734750fa026610f0ecccbca7d6c7ccedf1d394daa2c0787",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
@@ -2129,143 +2066,122 @@
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info",
+ "name": "tests/integration/targets/vpc",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/aliases",
+ "name": "tests/integration/targets/vpc/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/tasks",
+ "name": "tests/integration/targets/vpc/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/tasks/tests.yml",
+ "name": "tests/integration/targets/vpc/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f83fab1071ce3750d26dd1c8daaee56a1997a16454b672c21f98aa2b8d9713fe",
+ "chksum_sha256": "7069e3b5efc6ccdad26b5ef833d9dc2c361802f879cbfc1255ba7087d7659a1b",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/tasks/main.yml",
+ "name": "tests/integration/targets/vpc/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "87dc91ac04c12df5f20f7535d800a42a2e1336b2310b9371c2a59af44f078ce8",
+ "chksum_sha256": "72e3197710fa41991bd5beaeb212d73e3833ee807ae8d30c9d83fcf775180101",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/meta",
+ "name": "tests/integration/targets/vpc/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/meta/main.yml",
+ "name": "tests/integration/targets/vpc/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "0d8a5b2015a66fe090ea2ae5d0e5e654ccbcef2676f34978022ef05b2bb1ca92",
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/defaults",
+ "name": "tests/integration/targets/vpc/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/dns_domain_info/defaults/main.yml",
+ "name": "tests/integration/targets/vpc/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "51c5fca4c8d9360a6379827ab054a14a88137dbf75709539bf2deada8421a92f",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage_info",
+ "name": "tests/integration/targets/instance",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/block_storage_info/aliases",
+ "name": "tests/integration/targets/instance/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage_info/tasks",
+ "name": "tests/integration/targets/instance/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/block_storage_info/tasks/tests.yml",
+ "name": "tests/integration/targets/instance/tasks/present.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "50adb7b185f2cdb64f1763c859b4e5e2bd32579ba9120e7ca92c6d82fdfeaee4",
+ "chksum_sha256": "975e38a525ca112c8f447899b09fe9e4b5ceebe602bf2c0f62225b64e8956bd4",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage_info/tasks/main.yml",
+ "name": "tests/integration/targets/instance/tasks/failures.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "74b565de2124e4a53853a1dc2d79d3e765bd00578f25260427c52b082a0aebba",
- "format": 1
- },
- {
- "name": "tests/integration/targets/block_storage_info/defaults",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "5bec818fae85199334944642e17ef30a7a6b3c48e8e3705e8d7f3366993b2e0b",
"format": 1
},
{
- "name": "tests/integration/targets/block_storage_info/defaults/main.yml",
+ "name": "tests/integration/targets/instance/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7690ab5d3be0f969367e46923242bed255c52807b27c7ca97c492002e61e4056",
- "format": 1
- },
- {
- "name": "tests/integration/targets/instance",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
+ "chksum_sha256": "9aa491749f479f00b46a0c71913ec85f1cbc2f9a6d8d96116d453cd770303870",
"format": 1
},
{
- "name": "tests/integration/targets/instance/aliases",
+ "name": "tests/integration/targets/instance/tasks/absent.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "fe9b6632f3389a66c575a47d6dbd98e05dcf4a9621217aa5b99b6925435b5617",
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks",
- "ftype": "dir",
- "chksum_type": null,
- "chksum_sha256": null,
- "format": 1
- },
- {
- "name": "tests/integration/targets/instance/tasks/tests.yml",
+ "name": "tests/integration/targets/instance/tasks/stop-start.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "025d35e438e475b5cae504dcf1dcb8a425de2a5cbff0502018c99ad6780b2960",
+ "chksum_sha256": "217770633862454ea3c34efe13e29e29a8fb5cd297807c8dd49065f3beec39c3",
"format": 1
},
{
@@ -2276,339 +2192,423 @@
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks/absent.yml",
+ "name": "tests/integration/targets/instance/tasks/reinstall.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "2d33951d4d498963a2311fea64fdf4a1a50b39ea9712ed72b4955393fc1a90c4",
+ "chksum_sha256": "0cc5b6784d2d8030444d57d871f80b2f1a453132fccdeb002142eb027576ebbe",
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks/reinstall.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "0cc5b6784d2d8030444d57d871f80b2f1a453132fccdeb002142eb027576ebbe",
+ "name": "tests/integration/targets/instance/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks/stop-start.yml",
+ "name": "tests/integration/targets/instance/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "217770633862454ea3c34efe13e29e29a8fb5cd297807c8dd49065f3beec39c3",
+ "chksum_sha256": "ec8ff9cad3fd2518d0990d3712bdb754d40df60f76c681a36689d08e2c02cbbe",
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks/failures.yml",
- "ftype": "file",
- "chksum_type": "sha256",
- "chksum_sha256": "5bec818fae85199334944642e17ef30a7a6b3c48e8e3705e8d7f3366993b2e0b",
+ "name": "tests/integration/targets/instance/meta",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance/tasks/present.yml",
+ "name": "tests/integration/targets/instance/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "975e38a525ca112c8f447899b09fe9e4b5ceebe602bf2c0f62225b64e8956bd4",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/instance/meta",
+ "name": "tests/integration/targets/plan_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance/meta/main.yml",
+ "name": "tests/integration/targets/plan_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
"format": 1
},
{
- "name": "tests/integration/targets/instance/defaults",
+ "name": "tests/integration/targets/plan_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/instance/defaults/main.yml",
+ "name": "tests/integration/targets/plan_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "559e5734fd1fe6c2402dd1d30c786b17182982d0e0492f86502963ee7bf3c737",
+ "chksum_sha256": "501533396ed8009dc79e310c95d764591704b6cd32091ee08eebd005f8a4b87b",
"format": 1
},
{
- "name": "tests/integration/targets/object_storage",
+ "name": "tests/integration/targets/user_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/aliases",
+ "name": "tests/integration/targets/user_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/tasks",
+ "name": "tests/integration/targets/user_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/tasks/tests.yml",
+ "name": "tests/integration/targets/user_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b59ee60ffbb614b7c66dfd76a0f6be02b21cd99b2c261ec25ab689fa8e7e68cd",
+ "chksum_sha256": "e48cbcad848aa1212a02ed6db988f09267d32b6195eed356882eed9f2e4dd39a",
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/tasks/main.yml",
+ "name": "tests/integration/targets/user_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "c41f15bff7732e799b9337d7c29b746f96809b63e3472d1c6fa36bd0074eab0b",
+ "chksum_sha256": "5caa8e411c0d84429e587e397e283e5c989c4ac2ef39965c255fbb5d18e1b3fa",
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/tasks/failures.yml",
+ "name": "tests/integration/targets/user_info/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/user_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7418967590921122fa856bbd82a918191bdca8f71db2a8d7870beeb6d68bcacf",
+ "chksum_sha256": "e9a72a5ae88e71690565abf81fe1845976e98e4b60c6025f0679921da414ceac",
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/defaults",
+ "name": "tests/integration/targets/user_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/object_storage/defaults/main.yml",
+ "name": "tests/integration/targets/user_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "499f573764e0b2e21e3165103f219e007b3c74ce084ba5a5af88a8faf3c36ff8",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info",
+ "name": "tests/integration/targets/vpc2",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/aliases",
+ "name": "tests/integration/targets/vpc2/aliases",
"ftype": "file",
"chksum_type": "sha256",
"chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/tasks",
+ "name": "tests/integration/targets/vpc2/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/tasks/tests.yml",
+ "name": "tests/integration/targets/vpc2/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "f2b150accd2fc9a13c9cb8d4923d0727daa884bad4e78c31b39ca451eb532f92",
+ "chksum_sha256": "d4840fa2b094d50ef00423d5572b8f373d3a1009f1a8f347f1c3c253ef6bd070",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/tasks/main.yml",
+ "name": "tests/integration/targets/vpc2/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e3b3a529e7fe7a88bd74d28e0d1a59444ff919861c898ce936304aa3cf905226",
+ "chksum_sha256": "ab1cd92c34ca65a13c41096db34aeb9ecae00bb54d5e1cde618e7ca290a3ee8b",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/meta",
+ "name": "tests/integration/targets/vpc2/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/meta/main.yml",
+ "name": "tests/integration/targets/vpc2/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "0854a3e8751cc27484b26a8042c2aa753d83bc970654451cfbb129194b05e82f",
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/defaults",
+ "name": "tests/integration/targets/vpc2/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/firewall_rule_info/defaults/main.yml",
+ "name": "tests/integration/targets/vpc2/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "10b22387a8147bd98ea199fa7e8866837cb3742dd173a2bb2f1c4bfefb326741",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info",
+ "name": "tests/integration/targets/os_info",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/aliases",
+ "name": "tests/integration/targets/os_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "chksum_sha256": "dc2ce6b1319ce6a5d14015a0ce0e61945a5fcb9f4b1cc1e3f2705ba7a5d4b466",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/tasks",
+ "name": "tests/integration/targets/os_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/tasks/tests.yml",
+ "name": "tests/integration/targets/os_info/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5668e06fe3cf14e4ffdbf68decff145135365a9e4b7cd889096308ad4516565e",
+ "chksum_sha256": "ab3741f185714f47703a62373e1c60d4555a7cb0604b018ab681cab0bd63568a",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/tasks/main.yml",
+ "name": "tests/integration/targets/firewall_group_info",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/firewall_group_info/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "d5738ca7eeca0a524a80197d73b1d677913903103d5bea0663c8fdc66eb6c09c",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/meta",
+ "name": "tests/integration/targets/firewall_group_info/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/meta/main.yml",
+ "name": "tests/integration/targets/firewall_group_info/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
+ "chksum_sha256": "dae200ffc061db7868ab082d3d5d6c1cc17c81d365d81a95eda5e6854bb08da3",
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/defaults",
+ "name": "tests/integration/targets/firewall_group_info/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "e3b3a529e7fe7a88bd74d28e0d1a59444ff919861c898ce936304aa3cf905226",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/firewall_group_info/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/startup_script_info/defaults/main.yml",
+ "name": "tests/integration/targets/firewall_group_info/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "81ba8e5671cf9d77b15558eb21489b807a60b071c19dc5a16cbfd9ba6ccfe835",
+ "chksum_sha256": "10b22387a8147bd98ea199fa7e8866837cb3742dd173a2bb2f1c4bfefb326741",
"format": 1
},
{
- "name": "tests/integration/targets/common",
+ "name": "tests/integration/targets/firewall_group_info/meta",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/common/aliases",
+ "name": "tests/integration/targets/firewall_group_info/meta/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "e084a3683ef795d1cdbf5e9b253f2ca1f783ae0d0d6e47e419acbbc4fc80bbfa",
+ "chksum_sha256": "fad8168157b59ea211367da3adec3c025885b88e940a43fbdd8c5b9a8f06a791",
"format": 1
},
{
- "name": "tests/integration/targets/common/default",
+ "name": "tests/integration/targets/object_storage",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/integration/targets/common/default/main.yml",
+ "name": "tests/integration/targets/object_storage/aliases",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "11f87c1aa19551822d0b8585fcbb27151354e93cca5298e419581a349a23896d",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
"format": 1
},
{
- "name": "tests/unit",
+ "name": "tests/integration/targets/object_storage/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/plugins",
+ "name": "tests/integration/targets/object_storage/tasks/failures.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "7418967590921122fa856bbd82a918191bdca8f71db2a8d7870beeb6d68bcacf",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/object_storage/tasks/tests.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "b59ee60ffbb614b7c66dfd76a0f6be02b21cd99b2c261ec25ab689fa8e7e68cd",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/object_storage/tasks/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "c41f15bff7732e799b9337d7c29b746f96809b63e3472d1c6fa36bd0074eab0b",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/object_storage/defaults",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/plugins/inventory",
+ "name": "tests/integration/targets/object_storage/defaults/main.yml",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "499f573764e0b2e21e3165103f219e007b3c74ce084ba5a5af88a8faf3c36ff8",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/block_storage",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/plugins/inventory/fixtures",
+ "name": "tests/integration/targets/block_storage/aliases",
+ "ftype": "file",
+ "chksum_type": "sha256",
+ "chksum_sha256": "0e6908b2c939974fe0c6636ebb5d908284145edfb8ae78ebf2ca6b73d15a398a",
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/block_storage/tasks",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "tests/unit/plugins/inventory/fixtures/vultr_inventory_page1.json",
+ "name": "tests/integration/targets/block_storage/tasks/failures.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "004b664e8ddebb2714c97f7ef2c95be8d7b2e770089eb49b563fb321e4a79823",
+ "chksum_sha256": "70a2828cfc6a42aea15ca265c29c3b7c0489427f1e009ebc3e8668ce5dd505f1",
"format": 1
},
{
- "name": "tests/unit/plugins/inventory/fixtures/empty_vultr_inventory.json",
+ "name": "tests/integration/targets/block_storage/tasks/tests.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "64191612bc4b0099f55554c92444f8f71ac651b38875433e4a2a351ebb86fc1a",
+ "chksum_sha256": "6e9e3942703ae1ca3cfa4a8a3017fc54c0f627edc41fbcf143ea71e4c6782b72",
"format": 1
},
{
- "name": "tests/unit/plugins/inventory/fixtures/vultr_inventory_page2.json",
+ "name": "tests/integration/targets/block_storage/tasks/tests_attach_to_server.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "b1151ecdeb91d94258df245931619c94dfdbee87308e8f126cef86932ea54436",
+ "chksum_sha256": "de45063f8ce9cd2b9a7554cdcf7f1c9596f50cd1376ef69f3bd042481b321c9a",
"format": 1
},
{
- "name": "tests/unit/plugins/inventory/fixtures/unauthorized_vultr_inventory.json",
+ "name": "tests/integration/targets/block_storage/tasks/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "505876bcb387732db0065e9ff092c3060c8436dbe15e7ec98846899cf81603f4",
+ "chksum_sha256": "52660389f63f9f8b0b0faa24bdbbf8f28ce264f8fceb31a44e4bd0aca32a904d",
"format": 1
},
{
- "name": "tests/unit/plugins/inventory/fixtures/vultr_inventory.json",
+ "name": "tests/integration/targets/block_storage/defaults",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/integration/targets/block_storage/defaults/main.yml",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "7966a533c5dbbe1df5eddac1ec56df52ec808b6a39303a51467dfcb74c329cc0",
+ "chksum_sha256": "0015e74d8ad929993ebf0840e53120cf208ae9302ddbcb4c03bef85428cd8052",
+ "format": 1
+ },
+ {
+ "name": "tests/unit",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
+ "format": 1
+ },
+ {
+ "name": "tests/unit/plugins/inventory",
+ "ftype": "dir",
+ "chksum_type": null,
+ "chksum_sha256": null,
"format": 1
},
{
@@ -2619,45 +2619,45 @@
"format": 1
},
{
- "name": "meta",
+ "name": "tests/unit/plugins/inventory/fixtures",
"ftype": "dir",
"chksum_type": null,
"chksum_sha256": null,
"format": 1
},
{
- "name": "meta/runtime.yml",
+ "name": "tests/unit/plugins/inventory/fixtures/vultr_inventory_page2.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5e1e422bd1c479643c7cd120a6935927d999101c1bbcb3345d0ee3a8166c19d0",
+ "chksum_sha256": "b1151ecdeb91d94258df245931619c94dfdbee87308e8f126cef86932ea54436",
"format": 1
},
{
- "name": ".ansible-lint",
+ "name": "tests/unit/plugins/inventory/fixtures/empty_vultr_inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "8e858e68d7c1937d65dfcad8b39b28b8d7e41aac65eff967772fba0c3a882a15",
+ "chksum_sha256": "64191612bc4b0099f55554c92444f8f71ac651b38875433e4a2a351ebb86fc1a",
"format": 1
},
{
- "name": "codecov.yml",
+ "name": "tests/unit/plugins/inventory/fixtures/unauthorized_vultr_inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "0271dcfe609d71afb466112f2d1c4c13943580fa97bb42b2baa08a1c37bb1c14",
+ "chksum_sha256": "505876bcb387732db0065e9ff092c3060c8436dbe15e7ec98846899cf81603f4",
"format": 1
},
{
- "name": "README.md",
+ "name": "tests/unit/plugins/inventory/fixtures/vultr_inventory_page1.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "93a4ac3fa1028eaa4e6e3695e3d6b8e1c5068fb507a3bddd20de12366c563b7e",
+ "chksum_sha256": "004b664e8ddebb2714c97f7ef2c95be8d7b2e770089eb49b563fb321e4a79823",
"format": 1
},
{
- "name": ".yamllint",
+ "name": "tests/unit/plugins/inventory/fixtures/vultr_inventory.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "5845e18e9f23155f423207df9abac970aed687c638620bc2c9ee06706191054b",
+ "chksum_sha256": "7966a533c5dbbe1df5eddac1ec56df52ec808b6a39303a51467dfcb74c329cc0",
"format": 1
}
],
diff --git a/ansible_collections/vultr/cloud/MANIFEST.json b/ansible_collections/vultr/cloud/MANIFEST.json
index d7fc23121..47c0bbeaa 100644
--- a/ansible_collections/vultr/cloud/MANIFEST.json
+++ b/ansible_collections/vultr/cloud/MANIFEST.json
@@ -2,7 +2,7 @@
"collection_info": {
"namespace": "vultr",
"name": "cloud",
- "version": "1.12.1",
+ "version": "1.13.0",
"authors": [
"Ren\u00e9 Moser (@resmo)",
"Yanis Guenane (@Spredzy)",
@@ -26,7 +26,7 @@
"name": "FILES.json",
"ftype": "file",
"chksum_type": "sha256",
- "chksum_sha256": "efeafb855683a649e3e0bbd0fea795ea3d8e0581d11b240365caaa92b0fc55cb",
+ "chksum_sha256": "0786028c6e1a1c60ec9ccd7891fad47f3785c31b79dadc1edd6ab2efdba3d403",
"format": 1
},
"format": 1
diff --git a/ansible_collections/vultr/cloud/README.md b/ansible_collections/vultr/cloud/README.md
index 0af5a70ad..b4cd1c8b6 100644
--- a/ansible_collections/vultr/cloud/README.md
+++ b/ansible_collections/vultr/cloud/README.md
@@ -13,7 +13,7 @@ This repository contains the `vultr.cloud` Ansible Collection . The collection w
## Ansible Version Compatibility
-Tested with Ansible Core versions >= 2.11.
+Tested with Ansible Core versions >= 2.14.
## Release Notes
diff --git a/ansible_collections/vultr/cloud/changelogs/changelog.yaml b/ansible_collections/vultr/cloud/changelogs/changelog.yaml
index 01ecfe6a9..da95e7379 100644
--- a/ansible_collections/vultr/cloud/changelogs/changelog.yaml
+++ b/ansible_collections/vultr/cloud/changelogs/changelog.yaml
@@ -157,6 +157,13 @@ releases:
- fix-keyerror-wait-for-state.yml
- instance_info_fix-alias-for-label.yml
release_date: '2024-01-17'
+ 1.13.0:
+ changes:
+ minor_changes:
+ - instance, bare_metal - Implemented a new option ``skip_wait`` (https://github.com/vultr/ansible-collection-vultr/issues/119).
+ fragments:
+ - instance-skip_wait.yml
+ release_date: '2024-05-31'
1.2.0:
changes:
minor_changes:
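
The 1.13.0 entry above is generated from the listed changelog fragment. The fragment file itself is not part of this diff; based on the generated entry, an antsibull-changelog fragment for it would look roughly like this (a reconstruction, file name taken from the release notes):

    # changelogs/fragments/instance-skip_wait.yml
    minor_changes:
      - instance, bare_metal - Implemented a new option ``skip_wait`` (https://github.com/vultr/ansible-collection-vultr/issues/119).
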
diff --git a/ansible_collections/vultr/cloud/plugins/doc_fragments/vultr_v2.py b/ansible_collections/vultr/cloud/plugins/doc_fragments/vultr_v2.py
index 692103e8c..76eaa8c74 100644
--- a/ansible_collections/vultr/cloud/plugins/doc_fragments/vultr_v2.py
+++ b/ansible_collections/vultr/cloud/plugins/doc_fragments/vultr_v2.py
@@ -25,7 +25,10 @@ options:
default: 180
api_retries:
description:
- - Amount of retries in case of the Vultr API retuns an HTTP 503 code.
+ - Number of retries in case the Vultr API returns an HTTP error code, such as
+ - 429 Too Many Requests
+ - 500 Internal Server Error
+ - 504 Gateway Time-out
- Fallback environment variable C(VULTR_API_RETRIES).
type: int
default: 5
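
The api_retries option documented above is shared by every module in the collection through this doc fragment. A minimal task raising the retry budget might look like the following sketch (the module name and register variable are illustrative; only api_retries comes from the diff above):

    - name: Gather instance information with a larger retry budget
      vultr.cloud.instance_info:
        api_retries: 10
      register: instances
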
diff --git a/ansible_collections/vultr/cloud/plugins/module_utils/vultr_v2.py b/ansible_collections/vultr/cloud/plugins/module_utils/vultr_v2.py
index 602e89605..db8eded0c 100644
--- a/ansible_collections/vultr/cloud/plugins/module_utils/vultr_v2.py
+++ b/ansible_collections/vultr/cloud/plugins/module_utils/vultr_v2.py
@@ -171,6 +171,11 @@ class AnsibleVultr:
# Vultr has a rate limit on requests per second, try to be polite
# Use exponential backoff plus a little bit of randomness
backoff(retry=retry, retry_max_delay=retry_max_delay)
+ else:
+ self.module.fail_json(
+ msg='Failure while calling the Vultr API v2 with %s for "%s" with %s retries' % (method, path, retry + 1),
+ fetch_url_info=info,
+ )
# Success with content
if info["status"] in (200, 201, 202):
@@ -270,7 +275,10 @@ class AnsibleVultr:
resources = self.api_query(path=path, query_params=query_params)
return resources[result_key] if resources else []
- def wait_for_state(self, resource, key, states, cmp="=", retries=60):
+ def wait_for_state(self, resource, key, states, cmp="=", retries=60, skip_wait=False):
+ if skip_wait:
+ return resource
+
resource_id = resource[self.resource_key_id]
for retry in range(0, retries):
resource = self.query_by_id(resource_id=resource_id, skip_transform=False)
diff --git a/ansible_collections/vultr/cloud/plugins/modules/bare_metal.py b/ansible_collections/vultr/cloud/plugins/modules/bare_metal.py
index d2ffe09ef..f700fb937 100644
--- a/ansible_collections/vultr/cloud/plugins/modules/bare_metal.py
+++ b/ansible_collections/vultr/cloud/plugins/modules/bare_metal.py
@@ -101,6 +101,12 @@ options:
- A list of VPCs (VPC 2.0) identified by their description to be assigned to the bare metal machine.
type: list
elements: str
+ skip_wait:
+ description:
+ - Whether to skip the wait for the instance to be completely ready for access.
+ type: bool
+ default: false
+ version_added: "1.13.0"
state:
description:
- State of the bare metal machine.
@@ -376,6 +382,7 @@ def main():
user_data=dict(type="str"),
ssh_keys=dict(type="list", elements="str", no_log=False),
region=dict(type="str", required=True),
+ skip_wait=dict(type="bool", default=False),
state=dict(
choices=[
"present",
diff --git a/ansible_collections/vultr/cloud/plugins/modules/instance.py b/ansible_collections/vultr/cloud/plugins/modules/instance.py
index 7eca359b4..c1165124a 100644
--- a/ansible_collections/vultr/cloud/plugins/modules/instance.py
+++ b/ansible_collections/vultr/cloud/plugins/modules/instance.py
@@ -120,6 +120,12 @@ options:
type: list
elements: str
version_added: "1.5.0"
+ skip_wait:
+ description:
+ - Whether to skip the wait for the instance to be completely ready for access.
+ type: bool
+ default: false
+ version_added: "1.13.0"
state:
description:
- State of the instance.
@@ -439,19 +445,49 @@ class AnsibleVultrInstance(AnsibleVultrCommonInstance):
path="%s/%s/%s" % (self.resource_path, resource[self.resource_key_id], action),
method="POST",
)
- if wait_for_state:
- resource = self.wait_for_state(resource=resource, key="power_status", states=[power_status])
+ resource = self.wait_for_state(
+ resource=resource,
+ key="power_status",
+ states=[power_status],
+ skip_wait=not wait_for_state,
+ )
return resource
def create_or_update(self):
resource = super(AnsibleVultrInstance, self).create_or_update()
if resource:
- resource = self.wait_for_state(resource=resource, key="server_status", states=["none", "locked"], cmp="!=")
+ if not self.module.check_mode and self.module.params.get("state") == "present":
+ resource = self.wait_for_state(
+ resource=resource,
+ key="server_status",
+ states=["none", "locked"],
+ cmp="!=",
+ skip_wait=self.module.params.get("skip_wait", False),
+ )
# Handle power status
- resource = self.handle_power_status(resource=resource, state="stopped", action="halt", power_status="stopped")
- resource = self.handle_power_status(resource=resource, state="started", action="start", power_status="running")
- resource = self.handle_power_status(resource=resource, state="restarted", action="reboot", power_status="running", force=True)
+ resource = self.handle_power_status(
+ resource=resource,
+ state="stopped",
+ action="halt",
+ power_status="stopped",
+ wait_for_state=not self.module.params.get("skip_wait", False),
+ )
+ resource = self.handle_power_status(
+ resource=resource,
+ state="started",
+ action="start",
+ power_status="running",
+ wait_for_state=not self.module.params.get("skip_wait", False),
+ )
+ resource = self.handle_power_status(
+ resource=resource,
+ state="restarted",
+ action="reboot",
+ power_status="running",
+ force=True,
+ wait_for_state=not self.module.params.get("skip_wait", False),
+ )
resource = self.handle_power_status(
resource=resource,
state="reinstalled",
@@ -505,6 +541,7 @@ def main():
ssh_keys=dict(type="list", elements="str", no_log=False),
region=dict(type="str", required=True),
user_scheme=dict(type="str", choices=["root", "limited"]),
+ skip_wait=dict(type="bool", default=False),
state=dict(
choices=[
"present",
diff --git a/ansible_collections/vultr/cloud/tests/integration/targets/cleanup/tasks/cleanup_vpc.yml b/ansible_collections/vultr/cloud/tests/integration/targets/cleanup/tasks/cleanup_vpc.yml
index f0c8e2b5c..272f69805 100644
--- a/ansible_collections/vultr/cloud/tests/integration/targets/cleanup/tasks/cleanup_vpc.yml
+++ b/ansible_collections/vultr/cloud/tests/integration/targets/cleanup/tasks/cleanup_vpc.yml
@@ -27,7 +27,7 @@
loop_control:
label: "{{ item.description }}"
pause: 2
- retries: 5
+ retries: 20
delay: 3
register: result
until: result is not failed
diff --git a/ansible_collections/vultr/cloud/tests/integration/targets/instance/defaults/main.yml b/ansible_collections/vultr/cloud/tests/integration/targets/instance/defaults/main.yml
index dd280e9cf..d28013284 100644
--- a/ansible_collections/vultr/cloud/tests/integration/targets/instance/defaults/main.yml
+++ b/ansible_collections/vultr/cloud/tests/integration/targets/instance/defaults/main.yml
@@ -61,24 +61,23 @@ vultr_instances:
- "{{ vultr_resource_prefix }}_instance_vpc_1"
- "{{ vultr_resource_prefix }}_instance_vpc_3"
- #TODO: Disabled, this app does not exist anymore, find a new app
- # - label: "{{ vultr_resource_prefix }}_app1"
- # plan: vc2-1c-1gb
- # plan_update: vc2-1c-2gb
- # region: ams
- # app: Docker on Ubuntu 20.04 x64
- # backups: false
- # backups_update: false
- # ddos_protection: false
- # ddos_protection_update: false
- # enable_ipv6: false
- # enable_ipv6_update: false
- # vpcs:
- # - "{{ vultr_resource_prefix }}_instance_vpc_1"
- # - "{{ vultr_resource_prefix }}_instance_vpc_2"
- # vpcs_update:
- # - "{{ vultr_resource_prefix }}_instance_vpc_1"
- # - "{{ vultr_resource_prefix }}_instance_vpc_3"
+ - label: "{{ vultr_resource_prefix }}_app1"
+ plan: vc2-4c-8gb
+ plan_update: vc2-6c-16gb
+ region: ams
+ app: GitLab on Ubuntu 22.04 x64
+ backups: false
+ backups_update: false
+ ddos_protection: false
+ ddos_protection_update: false
+ enable_ipv6: false
+ enable_ipv6_update: false
+ vpcs:
+ - "{{ vultr_resource_prefix }}_instance_vpc_1"
+ - "{{ vultr_resource_prefix }}_instance_vpc_2"
+ vpcs_update:
+ - "{{ vultr_resource_prefix }}_instance_vpc_1"
+ - "{{ vultr_resource_prefix }}_instance_vpc_3"
- label: "{{ vultr_resource_prefix }}_img1"
plan: vc2-1c-1gb
diff --git a/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/absent.yml b/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/absent.yml
index 828f7f97e..86fe360e5 100644
--- a/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/absent.yml
+++ b/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/absent.yml
@@ -20,6 +20,7 @@
label: "{{ instance.label }}"
region: "{{ instance.region }}"
state: absent
+ api_retries: 20
register: result
- name: verify test absent instance
ansible.builtin.assert:
diff --git a/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/tests.yml b/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/tests.yml
index e35c426de..877ca3db1 100644
--- a/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/tests.yml
+++ b/ansible_collections/vultr/cloud/tests/integration/targets/instance/tasks/tests.yml
@@ -64,7 +64,7 @@
region: "{{ item.region }}"
state: absent
with_items: "{{ vutr_instance_vpcs }}"
- retries: 5
+ retries: 20
delay: 3
register: result
until: result is not failed